commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13 values | lang stringclasses 23 values |
|---|---|---|---|---|---|---|---|---|
4f3854eaf8d6e4b0ad9a77e871a946916ab3fec6 | Migrate listings.syndication, FeedType.content_type should not be unique. | wtrevino/django-listings,wtrevino/django-listings | listings/syndication/migrations/0002_auto__del_unique_feedtype_content_type.py | listings/syndication/migrations/0002_auto__del_unique_feedtype_content_type.py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
# South schema migration: drops the unique constraint on
# FeedType.content_type so several feed types may share one MIME type.
class Migration(SchemaMigration):
# Forward step: remove the unique index from the content_type column.
def forwards(self, orm):
# Removing unique constraint on 'FeedType', fields ['content_type']
db.delete_unique('syndication_feedtype', ['content_type'])
# Backward step: re-create the unique index (reverses forwards()).
def backwards(self, orm):
# Adding unique constraint on 'FeedType', fields ['content_type']
db.create_unique('syndication_feedtype', ['content_type'])
# Frozen ORM snapshot used by South when applying this migration;
# generated automatically, do not edit by hand.
models = {
'sites.site': {
'Meta': {'ordering': "('domain',)", 'object_name': 'Site', 'db_table': "'django_site'"},
'domain': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'syndication.feed': {
'Meta': {'object_name': 'Feed'},
'feed_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['syndication.FeedType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'site': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sites.Site']", 'symmetrical': 'False'})
},
'syndication.feedtype': {
'Meta': {'object_name': 'FeedType'},
'content_type': ('django.db.models.fields.CharField', [], {'default': "'application/xml'", 'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'template': ('django.db.models.fields.files.FileField', [], {'max_length': '100'})
}
}
complete_apps = ['syndication'] | mit | Python | |
7b1fb0eb7f063c00c89b57ceca64a01881a7d4d9 | add const_thrust helper | nesl/crazyflie_ros,whoenig/crazyflie_ros,marktsai0316/crazyflie_ros,robotpilot/crazyflie_ros,marktsai0316/crazyflie_ros,whoenig/crazyflie_ros,nesl/crazyflie_ros,robotpilot/crazyflie_ros | crazyflie_demo/scripts/const_thrust.py | crazyflie_demo/scripts/const_thrust.py | #!/usr/bin/env python
import rospy
from geometry_msgs.msg import Twist


if __name__ == '__main__':
    # Publish a constant-thrust setpoint to the Crazyflie at 50 Hz
    # until the ROS node is shut down.
    rospy.init_node('crazyflie_demo_const_thrust', anonymous=True)
    publisher = rospy.Publisher('cmd_vel', Twist)
    rate = rospy.Rate(50)

    command = Twist()
    command.linear.z = 12000  # constant thrust value (raw units)

    while not rospy.is_shutdown():
        publisher.publish(command)
        rate.sleep()
| mit | Python | |
50331d662d67d9f625f9f9198988522b38b2d1f0 | add task for midnight search index update, https://www.pivotaltracker.com/story/show/13730025 | ofer43211/unisubs,pculture/unisubs,ReachingOut/unisubs,pculture/unisubs,ofer43211/unisubs,wevoice/wesub,ujdhesa/unisubs,ofer43211/unisubs,wevoice/wesub,pculture/unisubs,eloquence/unisubs,norayr/unisubs,ReachingOut/unisubs,norayr/unisubs,eloquence/unisubs,pculture/unisubs,ujdhesa/unisubs,ofer43211/unisubs,wevoice/wesub,eloquence/unisubs,ujdhesa/unisubs,ReachingOut/unisubs,norayr/unisubs,norayr/unisubs,ReachingOut/unisubs,wevoice/wesub,eloquence/unisubs,ujdhesa/unisubs | apps/search/tasks.py | apps/search/tasks.py | from utils.celery_search_index import LogEntry
from utils.celery_utils import task
from celery.schedules import crontab
from celery.decorators import periodic_task
from django.core.management import call_command
@periodic_task(run_every=crontab(minute=0, hour=0))
def update_search_index():
call_command('update_index', verbosity=2) | agpl-3.0 | Python | |
3c2438049da743b53cb7a536ddc2db1a05302a33 | Add grab.tools.logs to configure logging module | giserh/grab,SpaceAppsXploration/grab,subeax/grab,codevlabs/grab,DDShadoww/grab,istinspring/grab,liorvh/grab,SpaceAppsXploration/grab,kevinlondon/grab,maurobaraldi/grab,lorien/grab,subeax/grab,subeax/grab,shaunstanislaus/grab,lorien/grab,alihalabyah/grab,pombredanne/grab-1,raybuhr/grab,shaunstanislaus/grab,alihalabyah/grab,giserh/grab,istinspring/grab,pombredanne/grab-1,DDShadoww/grab,maurobaraldi/grab,huiyi1990/grab,liorvh/grab,kevinlondon/grab,codevlabs/grab,raybuhr/grab,huiyi1990/grab | grab/tools/logs.py | grab/tools/logs.py | import logging
def default_logging(grab_log='/tmp/grab.log'):
    """Configure logging for applications that use Grab.

    Root logging is set up at DEBUG level so all messages are shown,
    while messages from the ``grab`` logger (network activity) are kept
    out of the main output and redirected into a file instead.

    :param grab_log: path of the file that receives grab network logs;
        truncated on each call.
    """
    logging.basicConfig(level=logging.DEBUG)

    grab_logger = logging.getLogger('grab')
    # Keep grab messages from bubbling up to the root handlers.
    grab_logger.propagate = False
    # Truncate the log file and attach it as the grab logger's sink.
    grab_logger.addHandler(logging.FileHandler(grab_log, 'w'))
| mit | Python | |
4662b430087404dbf011cf9ad97ee1e3188bfb9d | create wrapper for c_curve | lcdb/lcdb-workflows,lcdb/lcdb-workflows,lcdb/lcdb-workflows | wrappers/preseq/observed_complexity/wrapper.py | wrappers/preseq/observed_complexity/wrapper.py | __author__ = "Behram Radmanesh"
__copyright__ = "Copyright 2016, Behram Radmanesh"
__email__ = "behram.radmanesh@nih.gov"
__license__ = "MIT"

# Snakemake wrapper around ``preseq c_curve`` (observed library complexity).
# import snakemake's ability to execute shell commands
from snakemake.shell import shell

# ``c_curve`` takes a single sorted BED/BAM file as its positional argument
# and writes the complexity table to the file named via ``-o``; passing the
# output file as a second positional argument is not valid preseq usage.
shell("preseq c_curve {snakemake.params} -o {snakemake.output[0]} {snakemake.input[0]}")
| mit | Python | |
ec1b3be5545d5ae530d3dc7dd8d90e6fe4730926 | add unittest for heap | hubo1016/vlcp,hubo1016/vlcp,hubo1016/vlcp,hubo1016/vlcp | tests/testIndexedHeap.py | tests/testIndexedHeap.py | '''
Created on 2017/9/29
:author: hubo
'''
import unittest
from random import randrange, sample
from vlcp.utils.indexedheap import IndexedHeap
# Randomized unit test for vlcp.utils.indexedheap.IndexedHeap,
# exercising push, top, pop and remove.
class Test(unittest.TestCase):
def testRandomSort(self):
# Build random (priority, key) pairs; routing them through a dict
# keyed on the second element de-duplicates keys, so each key
# appears at most once in the heap.
data = [(randrange(0,10000), randrange(0,10000)) for _ in range(0,1000)]
data = list((v,k)
for k, v in dict((d[1], d[0]) for d in data).items())
heap = IndexedHeap()
# Push each pair under its key (d[1]), prioritised by the full pair.
for d in data:
heap.push(d[1], d)
# Remove min item
minv = heap.top()
self.assertEqual(minv, min(data)[1])
heap.remove(minv)
data.remove(min(data))
# Remove last
# NOTE(review): peeks at the heap's internal list to pick the last
# stored entry -- relies on IndexedHeap.heap's internal layout.
last = heap.heap[-1][0]
heap.remove(last[1])
data.remove(last)
self.assertEqual(len(heap), len(data))
# Random remove
remove_sample = sample(data, 100)
data = [d for d in data if d not in remove_sample]
for d in remove_sample:
heap.remove(d[1])
# Draining the heap must yield the remaining keys in priority order.
result = []
while heap:
result.append(heap.pop())
self.assertListEqual(result, [d[1] for d in sorted(data)])
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testRandomSort']
unittest.main() | apache-2.0 | Python | |
5db58544133c66c5cbb4122c99a95a0ca6ddfa26 | Create RomeOculus.py | MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab | home/Alessandruino/RomeOculus.py | home/Alessandruino/RomeOculus.py | i01 = Runtime.createAndStart("i01","InMoov")
i01.startHead("/dev/tty.usbmodem1411")
#i01.startLeftArm("COM5")
#leftHand = i01.startLeftHand("COM5")
#i01.leftArm.bicep.setMinMax(5,80)
#i01.leftArm.bicep.moveTo(30)
oculus = Runtime.start("oculus","OculusDIY")
oculus.arduino.connect("/dev/tty.usbmodem14541")
def onOculusData(data):
print data.yaw
print data.pitch
i01.head.neck.moveTo(int(data.pitch))
i01.head.rothead.moveTo(int(data.yaw))
oculus.addOculusDataListener(python)
| apache-2.0 | Python | |
c5fba0cc8acb482a0bc1c49ae5187ebc1232dba3 | Add tests for the different input variations. | asfaltboy/directions.py,jwass/directions.py,samtux/directions.py | tests/test_directions.py | tests/test_directions.py | import unittest
from shapely.geometry import LineString, Point
from directions.base import _parse_points
class DirectionsTest(unittest.TestCase):
    """Tests for ``_parse_points``, which normalises origin/destination/
    waypoint arguments into a flat list of coordinate tuples."""

    def setUp(self):
        # Shared fixture: four coordinates, both as raw tuples and as
        # a shapely LineString built from them.
        self.p = [(1, 2), (3, 4), (5, 6), (7, 8)]
        self.line = LineString(self.p)

    def test_origin_dest(self):
        # Origin and destination tuples -> two-element list.
        parsed = _parse_points(self.p[0], self.p[-1])
        self.assertEqual([self.p[0], self.p[-1]], parsed)

    def test_origin_dest_waypoints(self):
        # Waypoints are spliced between origin and destination.
        parsed = _parse_points(self.p[0], self.p[-1], self.p[1:-1])
        self.assertEqual(self.p, parsed)

    def test_line(self):
        # A LineString alone expands to its full coordinate list.
        self.assertEqual(self.p, _parse_points(self.line))

    def test_points(self):
        # Shapely Points are converted back to plain tuples.
        start = Point(self.line.coords[0])
        end = Point(self.line.coords[-1])
        self.assertEqual([self.p[0], self.p[-1]],
                         _parse_points(start, end))

    def test_points_array(self):
        # A single list of Points is also accepted as the origin argument.
        start = Point(self.p[0])
        end = Point(self.p[-1])
        self.assertEqual([self.p[0], self.p[-1]],
                         _parse_points([start, end]))

    def test_mixed_types(self):
        # Strings (addresses), Points and tuples can be freely mixed.
        expected = ['blah'] + list(self.p[1:])
        parsed = _parse_points('blah', Point(self.p[-1]), self.p[1:-1])
        self.assertEqual(expected, parsed)

    def test_no_dest_waypoints(self):
        # Waypoints without a destination must be rejected.
        with self.assertRaises(ValueError):
            _parse_points('origin', waypoints=['p1'])

    def test_bad_input(self):
        # Coordinate tuples must have exactly two values.
        with self.assertRaises(ValueError):
            _parse_points(self.p[0], (1.0, 2.0, 3.0))
| bsd-3-clause | Python | |
635682c9d206cd9ae6ea184f9361937b0a272b90 | Add monadic utilities MonadicDict and MonadicDictCursor. | genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2 | wqflask/utility/monads.py | wqflask/utility/monads.py | """Monadic utilities
This module is a collection of monadic utilities for use in
GeneNetwork. It includes:
* MonadicDict - monadic version of the built-in dictionary
* MonadicDictCursor - monadic version of MySQLdb.cursors.DictCursor
that returns a MonadicDict instead of the built-in dictionary
"""
from collections import UserDict
from functools import partial
from MySQLdb.cursors import DictCursor
from pymonad.maybe import Just, Nothing
class MonadicDict(UserDict):
"""
Monadic version of the built-in dictionary.
Keys in this dictionary can be any python object, but values must
be monadic values.
"""
def __init__(self, d, convert=True):
"""
Initialize monadic dictionary.
If convert is False, values in dictionary d must be
monadic. If convert is True, values in dictionary d are
converted to monadic values.
"""
# NOTE(review): with convert=True the backing dict stores monadic
# values (Just/Nothing), but __setitem__ below stores the *bare*
# value (bind unwraps it) and __getitem__ wraps whatever is stored
# in Just -- so items set here come back double-wrapped
# (Just(Just(v))). Confirm which internal representation is
# intended and make the two paths consistent.
if convert:
super().__init__({key:(Nothing if value is None else Just(value))
for key, value in d.items()})
else:
super().__init__(d)
def __getitem__(self, key):
"""
Get key from dictionary.
If key exists in the dictionary, return a Just value. Else,
return Nothing.
"""
# EAFP: a missing key maps to Nothing instead of raising.
try:
return Just(self.data[key])
except KeyError:
return Nothing
def __setitem__(self, key, value):
"""
Set key in dictionary.
value must be a monadic value---either Nothing or a Just
value. If value is a Just value, set it in the dictionary. If
value is Nothing, do nothing.
"""
# bind unwraps a Just and feeds the bare value to dict.__setitem__;
# for Nothing, bind is a no-op, leaving the key untouched.
value.bind(partial(super().__setitem__, key))
def __delitem__(self, key):
"""
Delete key from dictionary.
If key exists in the dictionary, delete it. Else, do nothing.
"""
# Deleting a missing key is deliberately silent.
try:
super().__delitem__(key)
except KeyError:
pass
class MonadicDictCursor(DictCursor):
    """
    Monadic version of MySQLdb.cursors.DictCursor.

    Behaves exactly like DictCursor except that rows are returned as
    MonadicDict objects (NULL columns become Nothing) instead of the
    built-in dictionary.
    """
    def fetchone(self):
        """Return the next row as a MonadicDict, or None when exhausted."""
        # Per DB-API, DictCursor.fetchone returns None when no more rows
        # are available; wrapping None in MonadicDict would raise, so
        # pass it through unchanged.
        row = super().fetchone()
        return None if row is None else MonadicDict(row)

    def fetchmany(self, size=None):
        """Return up to *size* rows, each converted to a MonadicDict."""
        return [MonadicDict(row) for row in super().fetchmany(size=size)]

    def fetchall(self):
        """Return all remaining rows, each converted to a MonadicDict."""
        return [MonadicDict(row) for row in super().fetchall()]
| agpl-3.0 | Python | |
d21e0721b614423e07e81809fb60dd936494bfff | Add validators test | johnwlockwood/txnats | tests/test_validators.py | tests/test_validators.py | import attr
from twisted.trial import unittest
import txnats
from txnats.validators import is_instance_of_nats_protocol
# Minimal attrs class used as the validation target: its single
# ``protocol`` attribute must be either None or a NatsProtocol,
# enforced by wrapping the validator under test in optional().
@attr.s
class Foo(object):
protocol = attr.ib(default=None,
validator=attr.validators.optional(
is_instance_of_nats_protocol
)
)
class IsNatsProtocolTest(unittest.TestCase):
    """Validation tests for the Foo.protocol attrs attribute."""

    # Show complete diffs when an assertion fails.
    maxDiff = None

    def test_foo_valid(self):
        """A NatsProtocol instance is accepted."""
        Foo(protocol=txnats.io.NatsProtocol())

    def test_foo_none(self):
        """Leaving protocol unset (None) is accepted."""
        Foo()

    def test_foo_invalid(self):
        """Any value that is not a NatsProtocol raises ValueError."""
        with self.assertRaises(ValueError):
            Foo(protocol=4)
| mit | Python | |
45f26b56d177798efca4825f063372b505df6a76 | Add gameman test | strata8/savman | tests/gameman_test.py | tests/gameman_test.py | import pytest
from savman import gameman
# pytest fixture: temporary game directory #1.
@pytest.fixture
def dir1(tmpdir):
return tmpdir.mkdir('dir1')
# pytest fixture: temporary game directory #2.
@pytest.fixture
def dir2(tmpdir):
return tmpdir.mkdir('dir2')
# pytest fixture: a two-document YAML custom-games file whose
# directory fields point at the two temporary directories above.
@pytest.fixture
def customfile(tmpdir, dir1, dir2):
file = tmpdir.join('custom.txt')
# The block below is a single YAML string; its {} placeholders are
# filled with the fixture directory paths via str.format.
custom = '''
---
name: My Game
directory: {}
include:
- folder1/* # Include all files from folder
exclude:
- '*.png'
---
name: My Game 2
directory: {}
'''.format(str(dir1), str(dir2))
file.write(custom)
return file
def test_load_custom(customfile, dir1, dir2):
gman = gameman.GameMan('DUMMY')
gman.load_custom(str(customfile))
assert 'MyGame' in gman.games
assert 'MyGame2' in gman.games
game1 = gman.games['MyGame']
game2 = gman.games['MyGame2']
assert game1.name == 'My Game'
assert game2.name == 'My Game 2'
assert game1.locations[0].path == str(dir1)
assert game2.locations[0].path == str(dir2)
assert 'folder1/*' in game1.locations[0].include
assert '*.png' in game1.locations[0].exclude | mit | Python | |
1e3781bc3527f72053fdc4aad4f4887c567c457c | Add unicode test. | paul-xxx/micropython,ericsnowcurrently/micropython,MrSurly/micropython-esp32,praemdonck/micropython,Vogtinator/micropython,slzatz/micropython,warner83/micropython,Vogtinator/micropython,Peetz0r/micropython-esp32,cloudformdesign/micropython,ernesto-g/micropython,methoxid/micropystat,pramasoul/micropython,jlillest/micropython,ahotam/micropython,blmorris/micropython,dhylands/micropython,danicampora/micropython,PappaPeppar/micropython,galenhz/micropython,bvernoux/micropython,KISSMonX/micropython,cwyark/micropython,jlillest/micropython,vitiral/micropython,martinribelotta/micropython,heisewangluo/micropython,utopiaprince/micropython,mianos/micropython,redbear/micropython,ChuckM/micropython,vitiral/micropython,SHA2017-badge/micropython-esp32,MrSurly/micropython,dhylands/micropython,mgyenik/micropython,TDAbboud/micropython,redbear/micropython,pozetroninc/micropython,xhat/micropython,blmorris/micropython,suda/micropython,AriZuu/micropython,paul-xxx/micropython,torwag/micropython,cwyark/micropython,dxxb/micropython,ChuckM/micropython,tobbad/micropython,ryannathans/micropython,tdautc19841202/micropython,feilongfl/micropython,heisewangluo/micropython,dhylands/micropython,SungEun-Steve-Kim/test-mp,neilh10/micropython,lowRISC/micropython,swegener/micropython,mpalomer/micropython,ericsnowcurrently/micropython,cnoviello/micropython,xyb/micropython,firstval/micropython,pozetroninc/micropython,alex-robbins/micropython,jimkmc/micropython,warner83/micropython,tdautc19841202/micropython,Timmenem/micropython,EcmaXp/micropython,oopy/micropython,deshipu/micropython,cloudformdesign/micropython,alex-march/micropython,praemdonck/micropython,MrSurly/micropython-esp32,turbinenreiter/micropython,tobbad/micropython,MrSurly/micropython,utopiaprince/micropython,dmazzella/micropython,pfalcon/micropython,HenrikSolver/micropython,dinau/micropython,kerneltask/micropython,vitiral/micropython,suda/micropython,ericsnowcurrently/micropython,hos
aka/micropython,hiway/micropython,adamkh/micropython,micropython/micropython-esp32,ceramos/micropython,firstval/micropython,noahwilliamsson/micropython,blazewicz/micropython,xuxiaoxin/micropython,jmarcelino/pycom-micropython,kostyll/micropython,pozetroninc/micropython,rubencabrera/micropython,toolmacher/micropython,jlillest/micropython,alex-march/micropython,jlillest/micropython,noahchense/micropython,aethaniel/micropython,tobbad/micropython,aethaniel/micropython,dxxb/micropython,ericsnowcurrently/micropython,swegener/micropython,mhoffma/micropython,Peetz0r/micropython-esp32,martinribelotta/micropython,turbinenreiter/micropython,hiway/micropython,slzatz/micropython,ganshun666/micropython,adafruit/micropython,jimkmc/micropython,cnoviello/micropython,kostyll/micropython,trezor/micropython,heisewangluo/micropython,pfalcon/micropython,ruffy91/micropython,utopiaprince/micropython,lbattraw/micropython,matthewelse/micropython,aethaniel/micropython,xuxiaoxin/micropython,infinnovation/micropython,Vogtinator/micropython,adamkh/micropython,galenhz/micropython,swegener/micropython,adamkh/micropython,galenhz/micropython,tuc-osg/micropython,mhoffma/micropython,dxxb/micropython,Vogtinator/micropython,henriknelson/micropython,MrSurly/micropython-esp32,danicampora/micropython,pfalcon/micropython,lowRISC/micropython,omtinez/micropython,feilongfl/micropython,TDAbboud/micropython,noahchense/micropython,SungEun-Steve-Kim/test-mp,micropython/micropython-esp32,mgyenik/micropython,MrSurly/micropython,stonegithubs/micropython,vitiral/micropython,bvernoux/micropython,utopiaprince/micropython,chrisdearman/micropython,suda/micropython,tdautc19841202/micropython,ruffy91/micropython,neilh10/micropython,emfcamp/micropython,PappaPeppar/micropython,dinau/micropython,pozetroninc/micropython,MrSurly/micropython-esp32,martinribelotta/micropython,SHA2017-badge/micropython-esp32,adafruit/micropython,EcmaXp/micropython,feilongfl/micropython,vriera/micropython,Peetz0r/micropython-esp32,tobbad/micropython,
danicampora/micropython,utopiaprince/micropython,aethaniel/micropython,mhoffma/micropython,mgyenik/micropython,hiway/micropython,cnoviello/micropython,pfalcon/micropython,Timmenem/micropython,hosaka/micropython,kostyll/micropython,adafruit/circuitpython,galenhz/micropython,henriknelson/micropython,misterdanb/micropython,ryannathans/micropython,chrisdearman/micropython,lbattraw/micropython,PappaPeppar/micropython,pramasoul/micropython,neilh10/micropython,Peetz0r/micropython-esp32,deshipu/micropython,omtinez/micropython,noahwilliamsson/micropython,ryannathans/micropython,stonegithubs/micropython,selste/micropython,paul-xxx/micropython,cnoviello/micropython,kerneltask/micropython,skybird6672/micropython,ChuckM/micropython,lowRISC/micropython,chrisdearman/micropython,noahchense/micropython,lbattraw/micropython,drrk/micropython,kostyll/micropython,orionrobots/micropython,puuu/micropython,infinnovation/micropython,swegener/micropython,torwag/micropython,noahchense/micropython,rubencabrera/micropython,deshipu/micropython,warner83/micropython,torwag/micropython,SungEun-Steve-Kim/test-mp,TDAbboud/micropython,pozetroninc/micropython,jmarcelino/pycom-micropython,noahwilliamsson/micropython,mgyenik/micropython,slzatz/micropython,mianos/micropython,jimkmc/micropython,SungEun-Steve-Kim/test-mp,KISSMonX/micropython,ceramos/micropython,drrk/micropython,turbinenreiter/micropython,tuc-osg/micropython,hiway/micropython,selste/micropython,ericsnowcurrently/micropython,stonegithubs/micropython,alex-robbins/micropython,pfalcon/micropython,orionrobots/micropython,pramasoul/micropython,adafruit/circuitpython,oopy/micropython,Timmenem/micropython,infinnovation/micropython,omtinez/micropython,redbear/micropython,alex-robbins/micropython,EcmaXp/micropython,Timmenem/micropython,turbinenreiter/micropython,praemdonck/micropython,bvernoux/micropython,hosaka/micropython,adafruit/micropython,toolmacher/micropython,ernesto-g/micropython,blazewicz/micropython,skybird6672/micropython,cwyark/micropytho
n,AriZuu/micropython,tralamazza/micropython,dmazzella/micropython,dxxb/micropython,xyb/micropython,drrk/micropython,PappaPeppar/micropython,xhat/micropython,heisewangluo/micropython,neilh10/micropython,tuc-osg/micropython,trezor/micropython,vriera/micropython,firstval/micropython,jmarcelino/pycom-micropython,mgyenik/micropython,blmorris/micropython,lowRISC/micropython,bvernoux/micropython,tralamazza/micropython,matthewelse/micropython,ruffy91/micropython,warner83/micropython,martinribelotta/micropython,ryannathans/micropython,trezor/micropython,ganshun666/micropython,xyb/micropython,alex-march/micropython,ernesto-g/micropython,neilh10/micropython,tralamazza/micropython,xhat/micropython,adafruit/circuitpython,vitiral/micropython,lbattraw/micropython,henriknelson/micropython,oopy/micropython,tralamazza/micropython,heisewangluo/micropython,vriera/micropython,galenhz/micropython,stonegithubs/micropython,chrisdearman/micropython,jlillest/micropython,rubencabrera/micropython,matthewelse/micropython,infinnovation/micropython,ceramos/micropython,noahwilliamsson/micropython,matthewelse/micropython,drrk/micropython,hosaka/micropython,MrSurly/micropython-esp32,SHA2017-badge/micropython-esp32,paul-xxx/micropython,ChuckM/micropython,pramasoul/micropython,xuxiaoxin/micropython,toolmacher/micropython,TDAbboud/micropython,danicampora/micropython,ahotam/micropython,oopy/micropython,adafruit/circuitpython,AriZuu/micropython,tuc-osg/micropython,torwag/micropython,TDAbboud/micropython,mpalomer/micropython,adamkh/micropython,toolmacher/micropython,orionrobots/micropython,paul-xxx/micropython,ernesto-g/micropython,selste/micropython,mianos/micropython,misterdanb/micropython,xuxiaoxin/micropython,HenrikSolver/micropython,cloudformdesign/micropython,warner83/micropython,dinau/micropython,selste/micropython,dmazzella/micropython,praemdonck/micropython,selste/micropython,misterdanb/micropython,lowRISC/micropython,cloudformdesign/micropython,stonegithubs/micropython,Peetz0r/micropython-esp32,
slzatz/micropython,adamkh/micropython,puuu/micropython,praemdonck/micropython,orionrobots/micropython,kerneltask/micropython,ernesto-g/micropython,feilongfl/micropython,misterdanb/micropython,trezor/micropython,methoxid/micropystat,deshipu/micropython,Timmenem/micropython,noahchense/micropython,skybird6672/micropython,dmazzella/micropython,emfcamp/micropython,redbear/micropython,MrSurly/micropython,feilongfl/micropython,methoxid/micropystat,ruffy91/micropython,toolmacher/micropython,mianos/micropython,ryannathans/micropython,drrk/micropython,xyb/micropython,blazewicz/micropython,xyb/micropython,mpalomer/micropython,EcmaXp/micropython,mhoffma/micropython,mpalomer/micropython,micropython/micropython-esp32,KISSMonX/micropython,HenrikSolver/micropython,deshipu/micropython,mhoffma/micropython,vriera/micropython,hiway/micropython,KISSMonX/micropython,alex-robbins/micropython,AriZuu/micropython,blmorris/micropython,jmarcelino/pycom-micropython,pramasoul/micropython,ChuckM/micropython,vriera/micropython,adafruit/micropython,ganshun666/micropython,AriZuu/micropython,torwag/micropython,infinnovation/micropython,rubencabrera/micropython,henriknelson/micropython,rubencabrera/micropython,methoxid/micropystat,hosaka/micropython,alex-robbins/micropython,jmarcelino/pycom-micropython,puuu/micropython,matthewelse/micropython,danicampora/micropython,xhat/micropython,jimkmc/micropython,cwyark/micropython,KISSMonX/micropython,turbinenreiter/micropython,aethaniel/micropython,dinau/micropython,kostyll/micropython,orionrobots/micropython,cwyark/micropython,suda/micropython,omtinez/micropython,supergis/micropython,SHA2017-badge/micropython-esp32,skybird6672/micropython,firstval/micropython,kerneltask/micropython,bvernoux/micropython,dhylands/micropython,ganshun666/micropython,chrisdearman/micropython,ahotam/micropython,xuxiaoxin/micropython,micropython/micropython-esp32,adafruit/micropython,EcmaXp/micropython,ahotam/micropython,blmorris/micropython,cloudformdesign/micropython,martinribelott
a/micropython,omtinez/micropython,trezor/micropython,tobbad/micropython,emfcamp/micropython,jimkmc/micropython,SungEun-Steve-Kim/test-mp,MrSurly/micropython,HenrikSolver/micropython,ceramos/micropython,skybird6672/micropython,suda/micropython,ahotam/micropython,oopy/micropython,tdautc19841202/micropython,dinau/micropython,henriknelson/micropython,emfcamp/micropython,mpalomer/micropython,mianos/micropython,lbattraw/micropython,alex-march/micropython,puuu/micropython,noahwilliamsson/micropython,ruffy91/micropython,PappaPeppar/micropython,micropython/micropython-esp32,ganshun666/micropython,blazewicz/micropython,puuu/micropython,slzatz/micropython,cnoviello/micropython,blazewicz/micropython,kerneltask/micropython,firstval/micropython,redbear/micropython,Vogtinator/micropython,xhat/micropython,tdautc19841202/micropython,misterdanb/micropython,emfcamp/micropython,alex-march/micropython,adafruit/circuitpython,HenrikSolver/micropython,matthewelse/micropython,tuc-osg/micropython,swegener/micropython,supergis/micropython,SHA2017-badge/micropython-esp32,supergis/micropython,methoxid/micropystat,supergis/micropython,dhylands/micropython,supergis/micropython,adafruit/circuitpython,ceramos/micropython,dxxb/micropython | tests/unicode/unicode.py | tests/unicode/unicode.py | # Test a UTF-8 encoded literal
s = "asdf©qwer"
for i in range(len(s)):
print("s[%d]: %s %X"%(i, s[i], ord(s[i])))
# Test all three forms of Unicode escape, and
# all blocks of UTF-8 byte patterns
s = "a\xA9\xFF\u0123\u0800\uFFEE\U0001F44C"
for i in range(-len(s), len(s)):
print("s[%d]: %s %X"%(i, s[i], ord(s[i])))
print("s[:%d]: %d chars, '%s'"%(i, len(s[:i]), s[:i]))
for j in range(i, len(s)):
print("s[%d:%d]: %d chars, '%s'"%(i, j, len(s[i:j]), s[i:j]))
print("s[%d:]: %d chars, '%s'"%(i, len(s[i:]), s[i:]))
# Test UTF-8 encode and decode
enc = s.encode()
print(enc, enc.decode() == s)
| mit | Python | |
40154d7de207df9689ac220cc8966735cb3ed5af | Test asyncio in python 3.6 | theia-log/theia,theia-log/theia | tests/test_asyncio.py | tests/test_asyncio.py | import asyncio
async def routine0(s, n):
    # Emit a "CRT:" trace line for step *n* of coroutine *s*.
    print('CRT:', s, ':', n)
async def routine(id, n):
    """Count down from *n*, tracing each step.

    Prints a TEST line for the current value, then (unless n is 0)
    recurses with n - 1 before emitting the matching CRT line via
    routine0, so the CRT lines appear in ascending order.
    """
    print('TEST[%s] %d' % (id, n))
    if n:
        await routine(id, n - 1)
        await routine0(id, n - 1)
# Demo driver: run two countdown coroutines concurrently on one loop.
# NOTE(review): get_event_loop()/run_until_complete is the pre-3.8
# idiom; asyncio.run() is preferred on modern Python -- confirm the
# Python version this demo targets.
loop = asyncio.get_event_loop()
tasks = [
asyncio.ensure_future(routine('a',5)),
asyncio.ensure_future(routine('b',8))]
print('muf')
# Block until both tasks have finished.
loop.run_until_complete(asyncio.wait(tasks))
print('puf')
loop.close()
| apache-2.0 | Python | |
c63651a5fba9dd67b345bfb95adef5d6206f5da3 | Add file lock | huangjunwen/tagcache | tagcache/lock.py | tagcache/lock.py | # -*- encoding: utf-8 -*-
import os
import fcntl
class FileLock(object):
    """
    Advisory file lock based on ``flock(2)``.

    The lock file is created on first acquire if it does not exist.
    Closing the descriptor (``release``) drops the lock.
    """

    def __init__(self, path):
        # path: location of the lock file; fd: open descriptor while
        # the lock is held, None otherwise.
        self.path = path
        self.fd = None

    def acquire(self, write=False, block=True):
        """
        Acquire the lock; return the open fd, or None on failure.

        :param write: take an exclusive (LOCK_EX) lock instead of a
            shared (LOCK_SH) one.
        :param block: wait for the lock; when False, fail immediately
            (LOCK_NB) if a conflicting lock is held elsewhere.
        """
        if self.fd is not None:
            self.release()

        try:
            # Open or create the lock file; writers need O_RDWR.
            open_flags = os.O_RDWR if write else os.O_RDONLY
            open_flags |= os.O_CREAT
            self.fd = os.open(self.path, open_flags)

            # Try to lock the file.
            lock_flags = fcntl.LOCK_EX if write else fcntl.LOCK_SH
            if not block:
                lock_flags |= fcntl.LOCK_NB

            fcntl.flock(self.fd, lock_flags)

            return self.fd
        except (OSError, IOError):
            # Only swallow open()/flock() failures; the previous bare
            # ``except`` also caught KeyboardInterrupt/SystemExit.
            if self.fd is not None:
                os.close(self.fd)
                self.fd = None
            return None

    def release(self):
        """Drop the lock; closing the fd releases the flock."""
        if self.fd is None:
            return
        os.close(self.fd)
        self.fd = None
| mit | Python | |
3625646c34fed4c5081e73c175e257ee426a4c37 | Fix reproduce_state | ct-23/home-assistant,rohitranjan1991/home-assistant,philipbl/home-assistant,kennedyshead/home-assistant,GenericStudent/home-assistant,ct-23/home-assistant,fbradyirl/home-assistant,jamespcole/home-assistant,dmeulen/home-assistant,alanbowman/home-assistant,hexxter/home-assistant,deisi/home-assistant,molobrakos/home-assistant,w1ll1am23/home-assistant,adrienbrault/home-assistant,mahendra-r/home-assistant,MartinHjelmare/home-assistant,home-assistant/home-assistant,adrienbrault/home-assistant,Theb-1/home-assistant,Smart-Torvy/torvy-home-assistant,rohitranjan1991/home-assistant,mahendra-r/home-assistant,instantchow/home-assistant,toddeye/home-assistant,jabesq/home-assistant,robjohnson189/home-assistant,ewandor/home-assistant,JshWright/home-assistant,miniconfig/home-assistant,partofthething/home-assistant,nnic/home-assistant,auduny/home-assistant,luxus/home-assistant,srcLurker/home-assistant,nevercast/home-assistant,Duoxilian/home-assistant,jawilson/home-assistant,keerts/home-assistant,hmronline/home-assistant,mezz64/home-assistant,stefan-jonasson/home-assistant,florianholzapfel/home-assistant,aoakeson/home-assistant,deisi/home-assistant,alexmogavero/home-assistant,alexmogavero/home-assistant,bdfoster/blumate,kyvinh/home-assistant,florianholzapfel/home-assistant,mezz64/home-assistant,auduny/home-assistant,Zac-HD/home-assistant,jaharkes/home-assistant,balloob/home-assistant,ct-23/home-assistant,philipbl/home-assistant,keerts/home-assistant,jamespcole/home-assistant,happyleavesaoc/home-assistant,jaharkes/home-assistant,justyns/home-assistant,tchellomello/home-assistant,xifle/home-assistant,JshWright/home-assistant,tboyce1/home-assistant,alexmogavero/home-assistant,bdfoster/blumate,tboyce021/home-assistant,pottzer/home-assistant,nnic/home-assistant,keerts/home-assistant,mikaelboman/home-assistant,alanbowman/home-assistant,eagleamon/home-assistant,nkgilley/home-assistant,coteyr/home-assistant,ma314smith/home-assis
tant,aronsky/home-assistant,happyleavesaoc/home-assistant,varunr047/homefile,instantchow/home-assistant,mahendra-r/home-assistant,partofthething/home-assistant,morphis/home-assistant,morphis/home-assistant,ma314smith/home-assistant,jawilson/home-assistant,leoc/home-assistant,miniconfig/home-assistant,LinuxChristian/home-assistant,titilambert/home-assistant,qedi-r/home-assistant,Duoxilian/home-assistant,betrisey/home-assistant,molobrakos/home-assistant,instantchow/home-assistant,stefan-jonasson/home-assistant,robjohnson189/home-assistant,miniconfig/home-assistant,oandrew/home-assistant,eagleamon/home-assistant,LinuxChristian/home-assistant,mikaelboman/home-assistant,happyleavesaoc/home-assistant,leppa/home-assistant,Zac-HD/home-assistant,dorant/home-assistant,titilambert/home-assistant,balloob/home-assistant,persandstrom/home-assistant,emilhetty/home-assistant,kyvinh/home-assistant,hmronline/home-assistant,sfam/home-assistant,DavidLP/home-assistant,JshWright/home-assistant,deisi/home-assistant,tomduijf/home-assistant,nkgilley/home-assistant,LinuxChristian/home-assistant,Smart-Torvy/torvy-home-assistant,morphis/home-assistant,nugget/home-assistant,nevercast/home-assistant,tinloaf/home-assistant,robbiet480/home-assistant,open-homeautomation/home-assistant,pschmitt/home-assistant,persandstrom/home-assistant,open-homeautomation/home-assistant,joopert/home-assistant,caiuspb/home-assistant,ma314smith/home-assistant,kyvinh/home-assistant,florianholzapfel/home-assistant,jamespcole/home-assistant,mikaelboman/home-assistant,tchellomello/home-assistant,xifle/home-assistant,turbokongen/home-assistant,hexxter/home-assistant,Danielhiversen/home-assistant,leoc/home-assistant,betrisey/home-assistant,sffjunkie/home-assistant,MartinHjelmare/home-assistant,open-homeautomation/home-assistant,sffjunkie/home-assistant,aequitas/home-assistant,HydrelioxGitHub/home-assistant,robjohnson189/home-assistant,lukas-hetzenecker/home-assistant,nugget/home-assistant,Julian/home-assistant,hmronline/ho
me-assistant,sander76/home-assistant,hexxter/home-assistant,deisi/home-assistant,tinloaf/home-assistant,PetePriority/home-assistant,MungoRae/home-assistant,Theb-1/home-assistant,ma314smith/home-assistant,devdelay/home-assistant,shaftoe/home-assistant,stefan-jonasson/home-assistant,badele/home-assistant,jabesq/home-assistant,xifle/home-assistant,justyns/home-assistant,mKeRix/home-assistant,betrisey/home-assistant,emilhetty/home-assistant,tboyce1/home-assistant,nugget/home-assistant,nnic/home-assistant,soldag/home-assistant,sfam/home-assistant,FreekingDean/home-assistant,toddeye/home-assistant,bdfoster/blumate,HydrelioxGitHub/home-assistant,FreekingDean/home-assistant,Julian/home-assistant,Zyell/home-assistant,jaharkes/home-assistant,mKeRix/home-assistant,leoc/home-assistant,PetePriority/home-assistant,Teagan42/home-assistant,Zyell/home-assistant,betrisey/home-assistant,srcLurker/home-assistant,Julian/home-assistant,rohitranjan1991/home-assistant,dorant/home-assistant,sdague/home-assistant,ct-23/home-assistant,devdelay/home-assistant,JshWright/home-assistant,aoakeson/home-assistant,HydrelioxGitHub/home-assistant,pschmitt/home-assistant,deisi/home-assistant,Theb-1/home-assistant,dmeulen/home-assistant,Smart-Torvy/torvy-home-assistant,nevercast/home-assistant,alexmogavero/home-assistant,luxus/home-assistant,luxus/home-assistant,Duoxilian/home-assistant,Zyell/home-assistant,tboyce1/home-assistant,xifle/home-assistant,mKeRix/home-assistant,MartinHjelmare/home-assistant,varunr047/homefile,LinuxChristian/home-assistant,philipbl/home-assistant,oandrew/home-assistant,varunr047/homefile,keerts/home-assistant,balloob/home-assistant,hmronline/home-assistant,LinuxChristian/home-assistant,sdague/home-assistant,w1ll1am23/home-assistant,MungoRae/home-assistant,Zac-HD/home-assistant,GenericStudent/home-assistant,robbiet480/home-assistant,mikaelboman/home-assistant,Teagan42/home-assistant,leoc/home-assistant,stefan-jonasson/home-assistant,happyleavesaoc/home-assistant,miniconfig/home-
assistant,sander76/home-assistant,srcLurker/home-assistant,mKeRix/home-assistant,caiuspb/home-assistant,badele/home-assistant,aequitas/home-assistant,emilhetty/home-assistant,aoakeson/home-assistant,emilhetty/home-assistant,aronsky/home-assistant,oandrew/home-assistant,fbradyirl/home-assistant,tboyce021/home-assistant,florianholzapfel/home-assistant,shaftoe/home-assistant,dmeulen/home-assistant,badele/home-assistant,DavidLP/home-assistant,Smart-Torvy/torvy-home-assistant,ct-23/home-assistant,tinloaf/home-assistant,sffjunkie/home-assistant,postlund/home-assistant,mikaelboman/home-assistant,kennedyshead/home-assistant,DavidLP/home-assistant,Cinntax/home-assistant,jnewland/home-assistant,justyns/home-assistant,oandrew/home-assistant,morphis/home-assistant,jnewland/home-assistant,molobrakos/home-assistant,varunr047/homefile,eagleamon/home-assistant,robjohnson189/home-assistant,ewandor/home-assistant,jnewland/home-assistant,postlund/home-assistant,soldag/home-assistant,jaharkes/home-assistant,Danielhiversen/home-assistant,qedi-r/home-assistant,varunr047/homefile,tomduijf/home-assistant,MungoRae/home-assistant,aequitas/home-assistant,MungoRae/home-assistant,coteyr/home-assistant,open-homeautomation/home-assistant,srcLurker/home-assistant,dmeulen/home-assistant,bdfoster/blumate,pottzer/home-assistant,tomduijf/home-assistant,sffjunkie/home-assistant,persandstrom/home-assistant,hexxter/home-assistant,shaftoe/home-assistant,bdfoster/blumate,alanbowman/home-assistant,auduny/home-assistant,fbradyirl/home-assistant,Cinntax/home-assistant,joopert/home-assistant,MungoRae/home-assistant,tboyce1/home-assistant,philipbl/home-assistant,leppa/home-assistant,eagleamon/home-assistant,caiuspb/home-assistant,home-assistant/home-assistant,ewandor/home-assistant,kyvinh/home-assistant,Duoxilian/home-assistant,sfam/home-assistant,emilhetty/home-assistant,dorant/home-assistant,pottzer/home-assistant,lukas-hetzenecker/home-assistant,hmronline/home-assistant,sffjunkie/home-assistant,shaftoe/home-
assistant,devdelay/home-assistant,jabesq/home-assistant,turbokongen/home-assistant,Julian/home-assistant,PetePriority/home-assistant,coteyr/home-assistant,Zac-HD/home-assistant,devdelay/home-assistant | homeassistant/helpers/state.py | homeassistant/helpers/state.py | """
homeassistant.helpers.state
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Helpers that help with state related things.
"""
import logging
from homeassistant.core import State
import homeassistant.util.dt as dt_util
from homeassistant.const import (
STATE_ON, STATE_OFF, SERVICE_TURN_ON, SERVICE_TURN_OFF,
SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PAUSE,
STATE_PLAYING, STATE_PAUSED, ATTR_ENTITY_ID)
_LOGGER = logging.getLogger(__name__)
# pylint: disable=too-few-public-methods, attribute-defined-outside-init
class TrackStates(object):
    """
    Records the time when the with-block is entered. Will add all states
    that have changed since the start time to the return list when with-block
    is exited.
    """
    def __init__(self, hass):
        # hass: Home Assistant core object whose state machine is observed.
        self.hass = hass
        # List handed out by __enter__; filled in on __exit__.
        self.states = []
    def __enter__(self):
        # Remember when tracking started (UTC) so changes can be diffed later.
        self.now = dt_util.utcnow()
        return self.states
    def __exit__(self, exc_type, exc_value, traceback):
        # Extend (rather than rebind) so callers holding the list returned
        # from __enter__ see the changed states after the block ends.
        self.states.extend(get_changed_since(self.hass.states.all(), self.now))
def get_changed_since(states, utc_point_in_time):
    """Return every state whose last update is at or after the given time."""
    # Sub-second precision is stripped so the comparison granularity
    # matches the stored last_updated timestamps.
    cutoff = dt_util.strip_microseconds(utc_point_in_time)
    changed = []
    for state in states:
        if state.last_updated >= cutoff:
            changed.append(state)
    return changed
def reproduce_state(hass, states, blocking=False):
    """ Takes in a state and will try to have the entity reproduce it. """
    # Accept a single State object as well as an iterable of them.
    if isinstance(states, State):
        states = [states]
    for state in states:
        current_state = hass.states.get(state.entity_id)
        if current_state is None:
            _LOGGER.warning('reproduce_state: Unable to find entity %s',
                            state.entity_id)
            continue
        # Map the desired state string onto the service that produces it;
        # media players get play/pause, everything else on/off.
        if state.domain == 'media_player' and state.state == STATE_PAUSED:
            service = SERVICE_MEDIA_PAUSE
        elif state.domain == 'media_player' and state.state == STATE_PLAYING:
            service = SERVICE_MEDIA_PLAY
        elif state.state == STATE_ON:
            service = SERVICE_TURN_ON
        elif state.state == STATE_OFF:
            service = SERVICE_TURN_OFF
        else:
            # No known service for this state value; skip with a warning.
            _LOGGER.warning("reproduce_state: Unable to reproduce state %s",
                            state)
            continue
        # Re-apply the original attributes as service data.
        service_data = dict(state.attributes)
        service_data[ATTR_ENTITY_ID] = state.entity_id
        hass.services.call(state.domain, service, service_data, blocking)
| """
homeassistant.helpers.state
~~~~~~~~~~~~~~~~~~~~~~~~~~~
Helpers that help with state related things.
"""
import logging
from homeassistant.core import State
import homeassistant.util.dt as dt_util
from homeassistant.const import (
STATE_ON, STATE_OFF, SERVICE_TURN_ON, SERVICE_TURN_OFF,
SERVICE_MEDIA_PLAY, SERVICE_MEDIA_PAUSE,
STATE_PLAYING, STATE_PAUSED, ATTR_ENTITY_ID)
_LOGGER = logging.getLogger(__name__)
# pylint: disable=too-few-public-methods, attribute-defined-outside-init
class TrackStates(object):
    """
    Records the time when the with-block is entered. Will add all states
    that have changed since the start time to the return list when with-block
    is exited.
    """
    def __init__(self, hass):
        # hass: Home Assistant core object whose state machine is observed.
        self.hass = hass
        # List handed out by __enter__; filled in on __exit__.
        self.states = []
    def __enter__(self):
        # Remember when tracking started (UTC) so changes can be diffed later.
        self.now = dt_util.utcnow()
        return self.states
    def __exit__(self, exc_type, exc_value, traceback):
        # Extend (rather than rebind) so callers holding the list returned
        # from __enter__ see the changed states after the block ends.
        self.states.extend(get_changed_since(self.hass.states.all(), self.now))
def get_changed_since(states, utc_point_in_time):
    """Return the states updated at or after *utc_point_in_time*."""
    # Microseconds are dropped so the comparison granularity matches the
    # stored last_updated timestamps.
    threshold = dt_util.strip_microseconds(utc_point_in_time)
    return [s for s in states if s.last_updated >= threshold]
def reproduce_state(hass, states, blocking=False):
    """Take one State (or a list of them) and call the service that puts
    each entity back into that state.

    Entities that no longer exist, or states with no matching service,
    are skipped with a warning.
    """
    if isinstance(states, State):
        states = [states]
    for state in states:
        current_state = hass.states.get(state.entity_id)
        if current_state is None:
            _LOGGER.warning('reproduce_state: Unable to find entity %s',
                            state.entity_id)
            continue
        # BUG FIX: compare the state *string* (state.state), not the State
        # object itself -- `state == STATE_PAUSED` was always False, so
        # media players fell through to the generic on/off branches.
        if state.domain == 'media_player' and state.state == STATE_PAUSED:
            service = SERVICE_MEDIA_PAUSE
        elif state.domain == 'media_player' and state.state == STATE_PLAYING:
            service = SERVICE_MEDIA_PLAY
        elif state.state == STATE_ON:
            service = SERVICE_TURN_ON
        elif state.state == STATE_OFF:
            service = SERVICE_TURN_OFF
        else:
            _LOGGER.warning("reproduce_state: Unable to reproduce state %s",
                            state)
            continue
        # Re-apply the original attributes as service data.
        service_data = dict(state.attributes)
        service_data[ATTR_ENTITY_ID] = state.entity_id
        hass.services.call(state.domain, service, service_data, blocking)
| apache-2.0 | Python |
dfe833e811ed7e2a3860555ef75fb9c64c76cc28 | Create test_fitting.py | ProjectPyRhO/PyRhO,ProjectPyRhO/PyRhO | tests/test_fitting.py | tests/test_fitting.py | import numpy as np
from pyrho import Parameters, fitModels
from pyrho.datasets import loadChR2
def test_fit_3_state_model():
    """Regression test: fitting the 3-state model to the bundled ChR2
    dataset must reproduce the known-good parameter values asserted below
    (captured from the run whose dump is kept in the comment)."""
    init_params = Parameters()
    init_params.add_many(
        # Name Value Vary Min Max Expr
        ('g0', 1e5, True, 0.001, 1e6, None),
        ('phi_m',1e18, True, 1e15, 1e19, None),
        ('k_a', 5, True, 0.001, 1000, None),
        ('k_r', 0.1, True, 0.001, 1000, None),
        ('p', 0.8, True, 0.1, 5, None),
        ('q', 0.25, True, 0.1, 5, None),
        ('Gd', 0.1, True, 0.0001, 1, None),
        ('Gr0', 0.0002, True, 0.0001, 0.1, None),
        ('E', 0, True, -1000, 1000, None),
        ('v0', 43, True, -1e15, 1e15, None),
        ('v1', 17.1, True, -1e15, 1e15, None))
    data = loadChR2()
    # postFitOpt/relaxFact tune the post-fit optimisation stage -- see
    # pyrho.fitModels for their exact semantics (not visible here).
    fit_params, mini_objs = fitModels(data, nStates=3, params=init_params, postFitOpt=True, relaxFact=2)
    values = fit_params[0].valuesdict()
    # print(values, flush=True)
    # OrderedDict([('g0', 28551.460430437444), ('phi_m', 7.45862659417406e+17), ('k_a', 6.622531560387775), ('k_r', 0.08504416795822778),
    # ('p', 0.8377064000308175), ('q', 0.27409731460831976), ('Gd', 0.05918146172064458), ('Gr0', 0.0002), ('E', 0), ('v0', 43), ('v1', 17.1)])
    assert np.isclose(values["g0"], 28551.460430437444)
    assert np.isclose(values["phi_m"], 7.45862659417406e+17)
    assert np.isclose(values["k_a"], 6.622531560387775)
    assert np.isclose(values["k_r"], 0.08504416795822778)
    assert np.isclose(values["p"], 0.8377064000308175)
    assert np.isclose(values["q"], 0.27409731460831976)
    assert np.isclose(values["Gd"], 0.05918146172064458)
    assert np.isclose(values["Gr0"], 0.0002)
    assert np.isclose(values["E"], 0)
    assert np.isclose(values["v0"], 43)
    assert np.isclose(values["v1"], 17.1)
| bsd-3-clause | Python | |
1fe84191c0f67af445e0b140efe67e90ae1e4c6f | Use set instead of ordered dict. | Sportamore/blues,Sportamore/blues,5monkeys/blues,Sportamore/blues,5monkeys/blues,5monkeys/blues | blues/slack.py | blues/slack.py | """
Slack Blueprint
===============
**Fabric environment:**
.. code-block:: yaml
settings:
slack:
# Single config:
endpoint: https://hooks.slack.com/... # (Required)
channels: # (Required)
- "#deploy"
username: deploybot
icon_emoji: ":rocket"
# Multiple configs:
# - endpoint: https://hooks.slack.com/... # (Required)
# channels: # (Required)
# - "#deploy"
# username: deploybot
# icon_emoji: ":rocket"
# - ...
"""
from fabric.utils import warn
from refabric.contrib import blueprints
import urllib2
import json
blueprint = blueprints.get(__name__)
def notify(msg, quiet=False):
    """Send *msg* through every configured Slack config."""
    configs = blueprint.get('')
    # A single mapping means one config; normalize to a list of configs.
    if isinstance(configs, dict):
        configs = [configs]
    for slack_config in configs:
        notify_with_config(msg, slack_config, quiet)
def notify_with_config(msg, config, quiet):
    """Send *msg* to every channel of a single slack config dict.

    Returns False (after warning) when the config has no channels or no
    endpoint; otherwise posts to each unique channel via send_request().
    """
    # Copy so that appending the optional single 'channel' below does not
    # mutate the caller's config['channels'] in place on repeated calls.
    channels = list(config.get('channels', []))
    channel = config.get('channel', None)
    # A single 'channel' entry is folded into the channel list.
    if channel:
        channels.append(channel)
    if not channels:
        warn('Empty slack channel list, skipping notification')
        return False
    username = config.get('username', 'deploybot')
    icon_emoji = config.get('icon_emoji', ':rocket:')
    endpoint = config.get('endpoint')
    if not endpoint:
        warn('No slack API endpoint found, skipping notification')
        return False
    # De-duplicate while preserving the configured order, so notifications
    # are sent deterministically (set() iteration order is arbitrary).
    seen = set()
    for channel in channels:
        if channel in seen:
            continue
        seen.add(channel)
        send_request(endpoint=endpoint, channel=channel, username=username,
                     msg=msg, icon_emoji=icon_emoji, quiet=quiet)
def send_request(endpoint, channel, username, msg, icon_emoji, quiet=False):
    """POST one message payload to the Slack webhook *endpoint*.

    HTTP errors are swallowed (with a warning) when quiet is True,
    otherwise re-raised to the caller.
    """
    data = json.dumps({
        "channel": channel,
        "username": username,
        "text": msg,
        "icon_emoji": icon_emoji,
    })
    req = urllib2.Request(endpoint, data, {'Content-Type': 'application/json'})
    try:
        # Response body is not needed; close the connection immediately.
        urllib2.urlopen(req).close()
    except urllib2.HTTPError as e:
        if quiet:
            warn(e)
        else:
            raise
| """
Slack Blueprint
===============
**Fabric environment:**
.. code-block:: yaml
settings:
slack:
# Single config:
endpoint: https://hooks.slack.com/... # (Required)
channels: # (Required)
- "#deploy"
username: deploybot
icon_emoji: ":rocket"
# Multiple configs:
# - endpoint: https://hooks.slack.com/... # (Required)
# channels: # (Required)
# - "#deploy"
# username: deploybot
# icon_emoji: ":rocket"
# - ...
"""
from fabric.utils import warn
from refabric.contrib import blueprints
from collections import OrderedDict
import urllib2
import json
blueprint = blueprints.get(__name__)
def notify(msg, quiet=False):
    """Deliver *msg* via each Slack config found in the blueprint settings."""
    settings = blueprint.get('')
    # Normalize: a bare dict is treated as a one-element list of configs.
    if isinstance(settings, dict):
        settings = [settings]
    for cfg in settings:
        notify_with_config(msg, cfg, quiet)
def notify_with_config(msg, config, quiet):
    """Send *msg* to every channel of a single slack config dict.

    Returns False (after warning) when the config has no channels or no
    endpoint; otherwise posts to each unique channel via send_request().
    """
    # Copy so that appending the optional single 'channel' below does not
    # mutate the caller's config['channels'] in place on repeated calls.
    channels = list(config.get('channels', []))
    channel = config.get('channel', None)
    # If channel is specified, add it to channels before de-duplicating.
    if channel:
        channels.append(channel)
    # OrderedDict.fromkeys de-duplicates while preserving order; the old
    # extra `set(channels)` in the send loop below was redundant and made
    # delivery order arbitrary.
    channels = list(OrderedDict.fromkeys(channels))
    if not channels:
        warn('Empty slack channel list, skipping notification')
        return False
    username = config.get('username', 'deploybot')
    icon_emoji = config.get('icon_emoji', ':rocket:')
    endpoint = config.get('endpoint')
    if not endpoint:
        warn('No slack API endpoint found, skipping notification')
        return False
    for channel in channels:
        send_request(endpoint=endpoint, channel=channel, username=username,
                     msg=msg, icon_emoji=icon_emoji, quiet=quiet)
def send_request(endpoint, channel, username, msg, icon_emoji, quiet=False):
    """POST one message payload to the Slack webhook *endpoint*.

    HTTP errors are swallowed (with a warning) when quiet is True,
    otherwise re-raised to the caller.
    """
    data = json.dumps({
        "channel": channel,
        "username": username,
        "text": msg,
        "icon_emoji": icon_emoji,
    })
    req = urllib2.Request(endpoint, data, {'Content-Type': 'application/json'})
    try:
        # Response body is not needed; close the connection immediately.
        urllib2.urlopen(req).close()
    except urllib2.HTTPError as e:
        if quiet:
            warn(e)
        else:
            raise
| mit | Python |
7963e426e2d1f58105d8712c0379114d93d32b07 | Add example with sklearn pipeline | lmcinnes/umap,lmcinnes/umap | examples/plot_feature_extraction_classification.py | examples/plot_feature_extraction_classification.py | """
UMAP as a Feature Extraction Technique for Classification
---------------------------------------------------------
The following script shows how UMAP can be used as a feature extraction
technique to improve the accuracy on a classification task. It also shows
how UMAP can be integrated in standard scikit-learn pipelines.
The first step is to create a dataset for a classification task, which is
performed with the function ``sklearn.datasets.make_classification``. The
dataset is then split into a training set and a test set using the
``sklearn.model_selection.train_test_split`` function.
Second, a linear SVM is fitted on the training set. To choose the best
hyperparameters automatically, a gridsearch is performed on the training set.
The performance of the model is then evaluated on the test set with the
accuracy metric.
Third, the previous step is repeated with a slight modification: UMAP is
used as a feature extraction technique. This small change results in a
substantial improvement compared to the model where raw data is used.
"""
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split, GridSearchCV
from sklearn.pipeline import Pipeline
from sklearn.svm import LinearSVC
from umap import UMAP
# Make a toy dataset
X, y = make_classification(n_samples=1000, n_features=300, n_informative=250,
                           n_redundant=0, n_repeated=0, n_classes=2,
                           random_state=1212)
# Split the dataset into a training set and a test set
X_train, X_test, y_train, y_test = train_test_split(
    X, y, test_size=0.2, random_state=42)
# Classification with a linear SVM
svc = LinearSVC(dual=False, random_state=123)
# Grid-search the SVM regularisation strength over 10**-3 .. 10**3.
params_grid = {"C": [10**k for k in range(-3, 4)]}
clf = GridSearchCV(svc, params_grid)
clf.fit(X_train, y_train)
print("Accuracy on the test set with raw data: {:.3f}".format(
    clf.score(X_test, y_test)))
# Transformation with UMAP followed by classification with a linear SVM
umap = UMAP(random_state=456)
pipeline = Pipeline([("umap", umap),
                     ("svc", svc)])
# The "umap__"/"svc__" prefixes route each grid entry to the matching
# pipeline step, so UMAP's hyperparameters are tuned jointly with C.
params_grid_pipeline = {"umap__n_neighbors": [5, 20],
                        "umap__n_components": [15, 25, 50],
                        "svc__C": [10**k for k in range(-3, 4)]}
clf_pipeline = GridSearchCV(pipeline, params_grid_pipeline)
clf_pipeline.fit(X_train, y_train)
print("Accuracy on the test set with UMAP transformation: {:.3f}".format(
    clf_pipeline.score(X_test, y_test)))
| bsd-3-clause | Python | |
dbaf32db0f9a5c00731e2682dd171e00914d29f0 | implement AQOUT mean drawdown standard error calculation | Timothy-W-Hilton/TimPyUtils | timutils/std_error.py | timutils/std_error.py | """module to implement standard error calculation, with optional
effective sample size adjustment of Wilks (1995)
REFERENCES
Wilks, D. (1995), Statistical Methods in the Atmospheric Sciences: An
Introduction, Academic Press, New York.
"""
import numpy as np
import pandas as pd
class MeanStdError(object):
    """class to implement standard error calculation on a numpy array
    """
    def __init__(self, arr):
        """Class constructor.
        arr (array-like): array of numeric values to calculate
        standard error of mean
        """
        self.arr = arr
        # Results populated by calc(); all None until calc() has run.
        self.std_err = None
        self.neff = None
        self.std_err_neff = None
    def calc(self, dim=0):
        """calculate the standard error of the mean
        Calculate the standard error of the mean along axis *dim* of
        self.arr.  The plain standard error is placed in self.std_err,
        the "effective sample size" of Wilks (1995) in self.neff, and the
        standard error computed with that effective sample size (which
        accounts for lag-1 autocorrelation in the data) in
        self.std_err_neff.
        ARGS:
        dim (int): the dimension of self.arr along which the mean, standard
        error should be calculated
        REFERENCES:
        Efron, B. & Tibshirani, R., 1991. Statistical Data Analysis
        in the Computer Age. Science, 253, 5018, p 390- 395.
        Wilks, D., 1995 Statistical Methods in the Atmospheric
        Sciences: An Introduction. Academic Press, New York
        """
        self.neff = calc_neff(self.arr, dim=dim)
        self.std_err = calc_std_err(self.arr, dim=dim)
        self.std_err_neff = calc_std_err(self.arr, dim=dim, n=self.neff)
def calc_std_err(arr, dim=0, n=None):
    """calculate standard error of the mean

    Calculates standard error of the mean for an array along a specified
    axis, using the definition given by Efron and Tibshirani (1991)
    equation 1: sqrt(sum((x - x_bar)**2) / (n * (n - 1))).

    ARGS:
    arr (array-like): numpy array of numeric values
    dim (int): the dimension of arr along which the standard error
       should be calculated. Default is 0.
    n (array-like): value of n to be used for each element of arr along
       axis dim. Default is arr.shape[dim], i.e. every element is
       treated as an independent data point. Pass e.g. the effective
       sample size from calc_neff to account for autocorrelation.

    RETURNS:
    A numpy array containing standard error of the mean along axis dim
    of arr.

    REFERENCES
    Efron, B. & Tibshirani, R., 1991. Statistical Data Analysis
    in the Computer Age. Science, 253, 5018, p 390- 395.
    """
    arr = np.asarray(arr)
    if n is None:
        n = arr.shape[dim]
    # keepdims=True keeps the mean broadcastable against arr for *any*
    # axis; the previous .squeeze() version only broadcast correctly when
    # the reduced axis happened to align with numpy's trailing-axis rules.
    x_bar = np.mean(arr, axis=dim, keepdims=True)
    # BUG FIX: the deviations must be squared.  Summing (arr - x_bar)
    # directly is ~0 by construction, so the old code returned nonsense
    # (often NaN from the sqrt of a negative rounding residue).
    se_x = np.sqrt(np.sum((arr - x_bar) ** 2, dim) / (n * (n - 1)))
    return se_x
def calc_neff(arr, dim=0):
    """calculate effective sample size accounting for lag-1 autocorrelation
    Effective sample size is calculated according to Wilks (1995):
    neff = n * (1 - rho) / (1 + rho), where rho is the lag-1
    autocorrelation of arr along axis *dim*.
    ARGS:
    arr (array-like): numpy array of numeric values
    dim (int): the dimension of arr along which the effective sample
    size should be calculated. Default is 0.
    RETURNS:
    effective sample size accounting for lag-1 autocorrelation, with
    axis *dim* reduced away
    REFERENCES:
    Wilks, D., 1995 Statistical Methods in the Atmospheric
    Sciences: An Introduction. Academic Press, New York
    """
    rho = calc_autocorr(arr, dim, lag=1)
    n = arr.shape[dim]
    # Positive autocorrelation shrinks the effective sample size.
    neff = n * ((1 - rho) / (1 + rho))
    return neff
def calc_autocorr(arr, dim=0, lag=1):
    """calculate lag-*lag* autocorrelation along an array dimension
    ARGS:
    arr (array-like): numpy array of numeric values
    dim (int): the dimension of arr along which the autocorrelation
    should be calculated. Default is 0.
    lag (int): lag to use for autocorrelation calculation. Default is 1.
    RETURNS:
    numpy array of autocorrelation coefficients with axis *dim*
    reduced away
    """
    # pandas does the actual autocorrelation; _autocorr_helper adapts it
    # to the plain-function interface np.apply_along_axis expects.
    rho = np.apply_along_axis(_autocorr_helper, axis=dim, arr=arr, lag=lag)
    return rho
def _autocorr_helper(arr1d, lag):
"""helper function for calc_autocorr. Gets around the synactic
problem of calling a method (autocorr) on the result of
np.apply_along_axis.
ARGS:
arr (array-like): one-dimensional numpy array of numeric values
lag (int): lag to use for autocorrelation calculation.
RETURNS:
A numpy array containing the lag-(lag) autocorrelation of arr1d
"""
return pd.Series(arr1d).autocorr(lag=lag)
| mit | Python | |
a6bbcdd9a28b4ad3ebc5319ab849bd9116b2f0c6 | Create 7kyu_how_many_points.py | Orange9000/Codewars,Orange9000/Codewars | Solutions/7kyu/7kyu_how_many_points.py | Solutions/7kyu/7kyu_how_many_points.py | def get_los_angeles_points(results):
return sum(int(j.split(':')[0]) for i,j in results if __import__('re').fullmatch('Los\sAngeles\s[a-zA-Z]+$', i))
| mit | Python | |
7efa9d9e9c98fc15233cb9fea81ae13520c5e52d | Add example print queue manager | CON-In-A-Box/CIAB-SignIn,CON-In-A-Box/CIAB-SignIn,CON-In-A-Box/ConComSignIn,CON-In-A-Box/ConComSignIn,CON-In-A-Box/CIAB-SignIn,CON-In-A-Box/CIAB-SignIn,CON-In-A-Box/ConComSignIn,CON-In-A-Box/ConComSignIn | tools/queuemanager.py | tools/queuemanager.py | #!/usr/bin/env python3
"""
Manage the printing queue for local printers
"""
import argparse
import getpass
import time
import json
import requests
CLIENT = "ciab"
def connect(server, account, password):
    """Log in to the CIAB server; return an (access, refresh) token pair."""
    url = server + "/api/token"
    param = {'grant_type': 'password', 'username': account,
             'password': password, 'client_id': CLIENT}
    session = requests.post(url, data=param)
    # Decode the JSON body once; calling .json() per field re-parsed the
    # whole response for no benefit.
    tokens = session.json()
    return (tokens['access_token'], tokens['refresh_token'])
def renew(server, token):
    """Exchange a refresh token for a new (access, refresh) token pair."""
    url = server + "/api/token"
    param = {'grant_type': 'refresh_token',
             'refresh_token': token,
             'client_id': CLIENT}
    session = requests.post(url, data=param)
    # Decode the JSON body once; calling .json() per field re-parsed the
    # whole response for no benefit.
    tokens = session.json()
    return (tokens['access_token'], tokens['refresh_token'])
def get_queue(server, access):
    """Fetch the current badge print queue from the server as parsed JSON."""
    url = server + "/api/registration/ticket/printqueue"
    auth_headers = {'Authorization': 'Bearer ' + access}
    response = requests.get(url, headers=auth_headers, data={})
    return response.json()
def claim_and_print(ticket, access):
    ''' Claim a ticket and print it. '''
    # The include parameter asks the API to embed related records
    # (presumably so the badge can be rendered without extra requests).
    url = ticket['claim']['href'] + '?include=ticketType,member,event'
    print(url)
    # PUT on the ticket's claim link -- presumably marks the ticket as
    # taken by this print client; verify against the CIAB API.
    data = requests.put(url,
                        headers={'Authorization': 'Bearer ' + access},
                        data={})
    output = data.json()
    ####################
    ## PRINTING WORK HERE
    ####################
    #
    print("TODO: DO THE WORK HERE")
    print(json.dumps(output, indent=4, sort_keys=True))
    #
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        description='This manages the print queue for badges in CIAB')
    # NOTE(review): "privilages" typo in the help string below (runtime
    # text, deliberately left untouched here).
    parser.add_argument('--admin', action="store", dest="email",
                        help='User with admin privilages to the print queue.')
    parser.add_argument('--password', action="store", dest="password",
                        help='Password for the admin_user account.')
    parser.add_argument('--server', action="store", dest="server",
                        help='Server for CIAB in <server>:<port> format.')
    ARGS = parser.parse_args()
    # Prompt interactively for anything not supplied on the command line.
    if ARGS.server is None:
        ARGS.server = str(input("CIAB Server: "))
    if 'http' not in ARGS.server:
        ARGS.server = 'https://' + ARGS.server
    if ARGS.email is None:
        ARGS.email= str(input("Administrator email: "))
    if ARGS.password is None:
        ARGS.password = getpass.getpass("Administrator password: ")
    ACCESS, REFRESH = connect(ARGS.server, ARGS.email, ARGS.password)
    print ("<-- starting up -->")
    # Poll forever: renew the token when the server reports it invalid,
    # print the first queued ticket when there is one, otherwise back off.
    while True:
        queue = get_queue(ARGS.server, ACCESS)
        if 'error' in queue:
            print ("<-- token expired renewing -->")
            if queue['error'] == 'invalid_token':
                ACCESS, REFRESH = renew(ARGS.server, REFRESH)
        if 'data' in queue and len(queue['data']) > 0:
            claim_and_print(queue['data'][0], ACCESS)
            time.sleep(0.1)
        else:
            time.sleep(1)
| apache-2.0 | Python | |
824623c9f836c1591d89f7292fc1f406a1af189a | add a stub test for job manipulation | sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint | test/jobstest.py | test/jobstest.py | #!/usr/bin/python2.4
#
# Copyright (c) 2004-2005 rpath, Inc.
#
import testsuite
testsuite.setup()
import rephelp
class ReleaseTest(rephelp.RepositoryHelper):
    """Stub test exercising image-job creation/listing for a release."""
    def testBasicAttributes(self):
        client = self.getMintClient("testuser", "testpass")
        projectId = client.newProject("Foo", "foo", "rpath.org")
        release = client.newRelease(projectId, "Test Release")
        # Starting an image job should make it appear in the job listing
        # for the release.  (The returned job object itself is unused.)
        job = client.startImageJob(release.getId())
        jobs = list(client.iterJobs(releaseId = release.getId()))
        assert(jobs[0].getReleaseId() == release.getId())
if __name__ == "__main__":
testsuite.main()
| apache-2.0 | Python | |
af205246543fbb874ebf20b530fac04a3ba9808c | Add some notes to graph script | jay-tyler/data-structures,jonathanstallings/data-structures | graph.py | graph.py | from __future__ import unicode_literals
class Graph(object):
    """A simple directed graph: each node maps to a set of successor nodes."""
    def __init__(self):
        # node -> set of nodes reachable via an outgoing edge
        self.graph = {}
    def __repr__(self):  # Consider how we want to repr this.
        return repr(self.graph)
    def __len__(self):
        return len(self.graph)
    def __iter__(self):
        return iter(self.graph)
    def __getitem__(self, index):
        return self.graph[index]
    def __setitem__(self, index, value):
        self.graph[index] = value
    def __delitem__(self, index):  # Add cleanup
        del self.graph[index]
    def add_node(self, n):
        """Add a new node to the graph; raise KeyError if already present.

        BUG FIX: the membership check was inverted (`if not
        self.has_node(n)`), which raised for every *new* node and
        silently reset existing nodes to an empty edge set.
        """
        if self.has_node(n):
            raise KeyError('Node already in graph.')
        self[n] = set()
    def add_edge(self, n1, n2):
        """Add a new edge connecting n1 to n2, creating missing nodes."""
        if not self.has_node(n2):
            self.add_node(n2)
        try:
            self[n1].add(n2)
        except KeyError:
            # n1 was not yet in the graph; create it and retry.
            self.add_node(n1)
            self[n1].add(n2)
    def del_node(self, n):
        """Delete a node and every edge pointing at it."""
        del self[n]
        for edgeset in self.graph.values():  # Move cleanup to __delitem__
            edgeset.discard(n)
    def del_edge(self, n1, n2):
        """Delete the edge connecting two nodes from graph."""
        self[n1].remove(n2)
    def has_node(self, n):
        """Check if a given node is in the graph."""
        return n in self
    def nodes(self):
        """Return a list of all nodes in the graph."""
        return [node for node in self]
    def iter_edges(self):
        """Yield every edge as a (source, destination) tuple."""
        for node in self:
            for edge in self[node]:
                yield (node, edge)
    def edges(self):
        """Return a list of all (source, destination) edges."""
        return list(self.iter_edges())
    def iter_neighbors(self, n):
        """Yield the nodes that have an outgoing edge *to* n."""
        for node in self:
            if n in self[node]:
                yield node
    def neighbors(self, n):
        """Return the set of successors of n (nodes n points to)."""
        return self[n]
    def adjacent(self, n1, n2):
        """Check if there is an edge connecting 'n1' and 'n2'."""
        return n2 in self[n1] or n1 in self[n2]
# helper start conditions for testing
def helper():
    """Build a small three-node sample graph for interactive testing.

    Assumes add_node() accepts brand-new nodes and raises only for
    duplicates -- verify, since the membership check in add_node looks
    inverted as written.
    """
    g = Graph()
    g.add_node(5)
    g.add_node(10)
    g.add_node(20)
    g.add_edge(10, 5)
    g.add_edge(10, 20)
    g.add_edge(5, 10)
    return g
| mit | Python | |
5d554573031f2f7b60d963c587aa650a025f6c45 | Create tutorial3.py | tiggerntatie/ggame-tutorials | tutorial3.py | tutorial3.py | """
tutorial3.py
by E. Dennison
"""
from ggame import App, RectangleAsset, ImageAsset, SoundAsset, Sprite, Sound
from ggame import LineStyle, Color
SCREEN_WIDTH = 640
SCREEN_HEIGHT = 480
green = Color(0x00ff00, 1)
black = Color(0, 1)
noline = LineStyle(0, black)
bg_asset = RectangleAsset(SCREEN_WIDTH, SCREEN_HEIGHT, noline, green)
bg = Sprite(bg_asset, (0,0))
# Sounds
pew1_asset = SoundAsset("sounds/pew1.mp3")
pew1 = Sound(pew1_asset)
pop_asset = SoundAsset("sounds/reappear.mp3")
pop = Sound(pop_asset)
# A ball! This is already in the ggame-tutorials repository
ball_asset = ImageAsset("images/orb-150545_640.png")
ball = Sprite(ball_asset, (0, 0))
# Original image is too big. Scale it to 1/10 its original size
ball.scale = 0.1
ball.y = 200
# custom attributes
ball.dir = 1
ball.go = True
# (duplicate "Sounds" section removed -- pew1 and pop are already created
# above with identical assets; re-instantiating them here was redundant)
def reverse(b):
    """Flip the travel direction of sprite *b* and play the pop sound."""
    b.dir *= -1
    pop.play()
# Set up function for handling screen refresh
def step():
    """Per-frame update: move the ball and bounce it off the side walls."""
    if ball.go:
        ball.x += ball.dir
        # Past either edge: undo the move, then flip direction.
        if ball.x + ball.width > SCREEN_WIDTH or ball.x < 0:
            ball.x -= ball.dir
            reverse(ball)
# Handle the space key
def spaceKey(event):
    """Toggle the ball's motion on/off."""
    ball.go = not ball.go
# Handle the "reverse" key
def reverseKey(event):
    """Reverse the ball's direction when the 'r' key is pressed."""
    reverse(ball)
# Handle the mouse click
def mouseClick(event):
    """Move the ball to the clicked position and play the pew sound."""
    ball.x = event.x
    ball.y = event.y
    pew1.play()
myapp = App(SCREEN_WIDTH, SCREEN_HEIGHT)
# Set up event handlers for the app
myapp.listenKeyEvent('keydown', 'space', spaceKey)
myapp.listenKeyEvent('keydown', 'r', reverseKey)
myapp.listenMouseEvent('click', mouseClick)
myapp.run(step)
| mit | Python | |
bcaa60ce73134e80e11e7df709e7ba7dbc07d349 | Add tests for systemd module | justin8/portinus,justin8/portinus | tests/test_systemd.py | tests/test_systemd.py | #!/usr/bin/env python3
import unittest
from unittest import mock
from unittest.mock import MagicMock, patch
from portinus import systemd
class testSystemd(unittest.TestCase):
    """Unit tests for portinus.systemd.Unit with subprocess/os mocked out."""
    def setUp(self):
        # Stub subprocess so no real systemctl commands are executed.
        systemd.subprocess.check_output = MagicMock(return_value=True)
        self.unit = systemd.Unit('foo')
    def testBasicCalls(self):
        # Each call should run without error against the mocked subprocess.
        self.unit.reload()
        self.unit.restart()
        self.unit.stop()
        self.unit.enable()
        self.unit.disable()
    def testRemove(self):
        # Service file exists: unit is stopped, disabled, its file removed
        # and systemd reloaded.
        with patch('os.path.exists', MagicMock(return_value=True)) as fake_path_exists, \
                patch.object(systemd.Unit, 'stop') as fake_stop, \
                patch.object(systemd.Unit, 'disable') as fake_disable, \
                patch('os.remove') as fake_os_remove, \
                patch.object(systemd.Unit, 'reload') as fake_reload:
            self.unit.remove()
            fake_path_exists.assert_called_with(self.unit.service_file_path)
            self.assertTrue(fake_stop.called)
            self.assertTrue(fake_disable.called)
            fake_os_remove.assert_called_with(self.unit.service_file_path)
            self.assertTrue(fake_reload.called)
        # Service file absent: stop/disable are skipped, but os.remove and
        # reload are still expected to happen.
        with patch('os.path.exists', MagicMock(return_value=False)) as fake_path_exists, \
                patch.object(systemd.Unit, 'stop') as fake_stop, \
                patch.object(systemd.Unit, 'disable') as fake_disable, \
                patch('os.remove') as fake_os_remove, \
                patch.object(systemd.Unit, 'reload') as fake_reload:
            self.unit.remove()
            fake_path_exists.assert_called_with(self.unit.service_file_path)
            self.assertFalse(fake_stop.called)
            self.assertFalse(fake_disable.called)
            fake_os_remove.assert_called_with(self.unit.service_file_path)
            self.assertTrue(fake_reload.called)
    def testCreateServiceFile(self):
        with patch('builtins.open', mock.mock_open(), create=True) as fake_open:
            self.unit.create_service_file()
            # The unit file must be opened for writing exactly once.
            fake_open.assert_called_once_with(self.unit.service_file_path, 'w')
    @patch.object(systemd.Unit, 'set_content')
    @patch.object(systemd.Unit, 'create_service_file')
    def testEnsure(self, fake_create_service_file, fake_set_content):
        # NOTE(review): test_content is never used below -- presumably it
        # was meant to be passed as the 'content' argument; verify.
        test_content = 'qweasdzxc'
        self.unit.ensure()
        self.assertFalse(fake_set_content.called)
        self.assertTrue(fake_create_service_file.called)
        self.unit.ensure(content='qwe')
        self.assertTrue(fake_set_content.called)
        self.assertTrue(fake_create_service_file.called)
| mit | Python | |
0c3107739671398de1a206cfbb7673c25c543e60 | Update driver value in Seat model. | SRJ9/django-driver27,SRJ9/django-driver27,SRJ9/django-driver27 | driver27/migrations/0009_populate_driver_in_seats.py | driver27/migrations/0009_populate_driver_in_seats.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
def populate_driver_in_seats(apps, schema_editor):
    """Copy each Seat's contender.driver onto the new Seat.driver field."""
    # Use the historical model state so the migration keeps working no
    # matter how the live Seat class evolves later.
    Seat = apps.get_model("driver27", "Seat")
    for seat in Seat.objects.all():
        driver = seat.contender.driver
        seat.driver = driver
        seat.save()
class Migration(migrations.Migration):
    """Data migration backfilling Seat.driver from Seat.contender.driver."""
    dependencies = [
        ('driver27', '0008_auto_20170529_2220'),
    ]
    # RunPython is given no reverse function, so this migration is
    # forward-only (cannot be unapplied automatically).
    operations = [
        migrations.RunPython(
            populate_driver_in_seats,
        ),
] | mit | Python | |
ba239762d34db9e13708d4e0828e2f8adba4e8bc | add sensapex accuracy test script | campagnola/acq4,acq4/acq4,pbmanis/acq4,acq4/acq4,campagnola/acq4,campagnola/acq4,acq4/acq4,pbmanis/acq4,campagnola/acq4,pbmanis/acq4,pbmanis/acq4,acq4/acq4 | acq4/drivers/sensapex/accuracy_test.py | acq4/drivers/sensapex/accuracy_test.py |
from __future__ import print_function
import os, sys, time
import numpy as np
import acq4.pyqtgraph as pg
from acq4.drivers.sensapex import SensapexDevice, UMP, UMPError
ump = UMP.get_ump()
devids = ump.list_devices()
devs = {i:SensapexDevice(i) for i in devids}
print("SDK version:", ump.sdk_version())
print("Found device IDs:", devids)
dev = devs[int(sys.argv[1])]
app = pg.mkQApp()
win = pg.GraphicsLayoutWidget()
win.show()
plots = [
win.addPlot(labels={'left': ('x position', 'm'), 'bottom': ('time', 's')}),
win.addPlot(labels={'left': ('y position', 'm'), 'bottom': ('time', 's')}),
win.addPlot(labels={'left': ('z position', 'm'), 'bottom': ('time', 's')}),
]
plots[1].setYLink(plots[0])
plots[2].setYLink(plots[0])
plots[1].setXLink(plots[0])
plots[2].setXLink(plots[0])
win.nextRow()
errplots = [
win.addPlot(labels={'left': ('x error', 'm'), 'bottom': ('time', 's')}),
win.addPlot(labels={'left': ('y error', 'm'), 'bottom': ('time', 's')}),
win.addPlot(labels={'left': ('z error', 'm'), 'bottom': ('time', 's')}),
]
errplots[1].setYLink(errplots[0])
errplots[2].setYLink(errplots[0])
errplots[0].setXLink(plots[0])
errplots[1].setXLink(plots[0])
errplots[2].setXLink(plots[0])
start = time.time()
pos = [[], [], []]
tgt = [[], [], []]
err = [[], [], []]
times = []
def update(update_error=False):
    """Sample the manipulator position and refresh the live plots.

    Relies on module globals: dev, start, times, pos, tgt, err, p1,
    target, plots, errplots and app.  When update_error is True the
    position-vs-target error is recorded and plotted; otherwise NaN is
    appended so the error traces stay aligned with `times`.
    """
    p = dev.get_pos()
    now = time.time() - start
    times.append(now)
    for i in range(3):
        # Positions are plotted relative to the start position p1, scaled
        # by 1e-9 (device units presumably nm -> m; confirm).
        pos[i].append((p[i] - p1[i]) * 1e-9)
        tgt[i].append((target[i] - p1[i]) * 1e-9)
        plots[i].plot(times, tgt[i], pen='r', clear=True)
        plots[i].plot(times, pos[i])
        if update_error:
            err[i].append(pos[i][-1] - tgt[i][-1])
            errplots[i].plot(times, err[i], clear=True, connect='finite')
        else:
            err[i].append(np.nan)
    app.processEvents()
p1 = dev.get_pos()
diffs = []
errs = []
targets = []
positions = []
moves = []
# Perform 10 random relative moves (up to 1e6 device units per axis) and
# record the difference between the commanded target and the position
# actually reached.
for i in range(10):
    d = (np.random.random(size=3) * 1e6).astype(int)
    #d[0] = 0
    #d[1] *= 0.01
    #d[2] *= 0.01
    moves.append(d)
    target = p1 + d
    targets.append(target)
    dev.goto_pos(target, speed=1000, linear=True)
    while dev.is_busy():
        update()
    # Keep sampling briefly after the move to observe settling.
    # NOTE(review): this inner loop variable shadows the outer `i`;
    # harmless since `i` is not used afterwards, but worth renaming.
    for i in range(10):
        update(update_error=True)
        time.sleep(0.05)
    p2 = dev.get_pos(timeout=200)
    positions.append(p2)
    # Same 1e-9 scaling as the plots.
    diff = (p2 - target) * 1e-9
    diffs.append(diff)
    errs.append(np.linalg.norm(diff))
    print(diff, errs[-1])
# Return to the starting position and summarise the positioning error.
dev.goto_pos(p1, 1000)
print("mean:", np.mean(errs), " max:", np.max(errs))
# plt = pg.plot(labels={'left': ('error', 'm'), 'bottom': 'trial'})
# plt.plot([abs(e[0]) for e in diffs], pen=None, symbol='o', symbolBrush='r')
# plt.plot([abs(e[1]) for e in diffs], pen=None, symbol='o', symbolBrush='g')
# plt.plot([abs(e[2]) for e in diffs], pen=None, symbol='o', symbolBrush='b')
| mit | Python | |
3b4c811f0b45f5739ce7c0d64f31eb2c2c9a7f4b | add battery | EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonAlgorithms/HackerRank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonAlgorithms/HackerRank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,zeyuanxy/hacker-rank | ai/machine-learning/battery/battery.py | ai/machine-learning/battery/battery.py | from numpy import *
def loadData():
    """Read training pairs from 'trainingdata.txt' (Python 2 code).

    Each CSV line is split into features (all but the last column) and
    the target (last column); only rows whose first value is < 4 are
    kept.  Returns (xArr, yArr).
    NOTE(review): the file handle from open() is never closed.
    """
    xArr, yArr = [], []
    for i in open('trainingdata.txt'):
        # Python 2: map() returns a list here, so the slicing below works.
        line = map(float, i.split(','))
        if line[0] < 4:
            xArr.append(line[:-1])
            yArr.append(line[-1])
    return xArr, yArr
def lineReg(xArr, yArr):
    """Ordinary least squares via the normal equations.

    Returns the weight matrix w = (X'X)^-1 X'y, or None (after printing a
    message) when X'X is singular and cannot be inverted.
    """
    x_mat = mat(xArr)
    y_mat = mat(yArr).T
    xtx = x_mat.T * x_mat
    if linalg.det(xtx) == 0.0:
        print("this matrix is singular, cannot do inverse")
        return
    return xtx.I * (x_mat.T * y_mat)
def main():
    """Read one value from stdin and predict with the fitted line.

    Presumably models the HackerRank "battery" task: inputs >= 4 map to
    the constant 8.00, smaller inputs use the linear fit -- confirm.
    (Python 2 code: raw_input and print statements.)
    NOTE(review): `print 8.00` outputs "8.0"; if two decimals are
    required this should be formatted like the branch above.
    """
    xArr, yArr = loadData()
    w = lineReg(xArr, yArr)
    x = float(raw_input())
    if x < 4:
        xxArr = [x]
        yHat = (mat(xxArr) * w)[0][0]
        print '%.2f' % yHat
    else:
        print 8.00
if __name__ == "__main__":
main()
| mit | Python | |
023e587e28e148be3a21d4cb34a702a68ef02a0b | test script to list root dirs of image files | yellcorp/floppy-recovery | testdir.py | testdir.py | #!/usr/local/bin/python
import sys
import disklib.mediageom
import disklib.validity
import msfat.dir
import msfat.volume
def main():
    """For each image path given on the command line, print the FAT volume
    info and the root-directory entries; collect per-file failures and
    report them at the end (Python 2 script)."""
    prog_errs = [ ]
    for path in sys.argv[1:]:
        print path
        try:
            # Validity metadata carries the image size ("domain") used to
            # derive the disk geometry below.
            validity = disklib.validity.read_validity_for_file(path)
            with open(path, "rb") as stream:
                geometry = disklib.mediageom.DiskGeometry.from_image_size(validity.domain)
                volume = msfat.volume.FATVolume(stream, geometry)
                # Dump every field of the volume-info namedtuple; integers
                # are shown as zero-padded hex, everything else via repr.
                for k, v in volume.get_info()._asdict().iteritems():
                    if isinstance(v, (int, long)):
                        sv = "0x{0:08X}".format(v)
                    else:
                        sv = repr(v)
                    print "{0:24} {1}".format(k, sv)
                for entry in msfat.dir.read_dir(volume._open_root_dir()):
                    print str(entry)
        except Exception as e:
            # Keep going on failure; errors are summarized after the loop.
            prog_errs.append((path, e))
        print ""
    if prog_errs:
        print "Program errors ({0}):".format(len(prog_errs))
        for path, e in prog_errs:
            print u"{0}: {1!s}".format(path, e)
if __name__ == '__main__':
main()
| mit | Python | |
87b2d15f1953d75c1a55259370f36f5ea4d3fea9 | Create testing.py | joshgrib/course-scheduler,joshgrib/course-scheduler | testing.py | testing.py | '''
Maybe an easier format?
http://web.stevens.edu/scheduler/cor 2015F/sched_plus_crsemtg.txt
Got there by going back to the "core" part of the url then going to that text file (plus course meeting?)
'''
# Meeting times keyed by "<dept> <number><section>"; each value is
# [days, start, end] with zero-padded 24h HHMM strings.
# NOTE(review): several keys ('BT 353A' .. 'BT 353D') appear twice; a
# Python dict literal keeps only the LAST duplicate, so the first meeting
# time of each pair is silently lost.
courses = {\
    'BT 353A' : ["M","1300","1350"] , \
    'BT 353A' : ["W","1100","1240"] , \
    'BT 353B' : ["M","1500","1640"] , \
    'BT 353B' : ["W","0900","0950"] , \
    'BT 353C' : ["T","1500","1640"] , \
    'BT 353C' : ["R","1100","1150"] , \
    'BT 353D' : ["T","1500","1640"] , \
    'BT 353D' : ["R","1100","1150"] , \
    'BT 353E' : ["M","1815","2045"] , \
    'CS 115A' : ["MWF","1200","1250"] , \
    'CS 115B' : ["MRF","1300","1350"] , \
    'CS 115LA' : ["R","0900","1040"] , \
    'CS 115LB' : ["R","1100","1240"] , \
    'CS 115LC' : ["R","1500","1640"] , \
    'CS 115LD' : ["R","1500","1640"] , \
    'CS 115LE' : ["F","1000","1140"] , \
    'CS 115LF' : ["F","1600","1740"] , \
    'CS 135A' : ["MWF","1000","1050"] , \
    'CS 135LA' : ["F","1100","1240"] , \
    'CS 135LB' : ["F","1300","1440"] , \
    'CS 146A' : ["TWF","0900","0950"] , \
    'CS 146B' : ["MTR","1400","1450"] , \
    'D 110A' : ["T","1700","1805"] , \
    'HHS 468EV' : ["M","1815","2045"] }

# Collect the distinct course numbers; characters 4-6 of each key hold the
# three-digit number (e.g. "353" from "BT 353A").
uniqueCourses = []
for x in courses:
    #print x + " String length:" + str(len(x))
    #print "Course number: " + x[4:9]
    #print ""
    y = x[4:7]
    if (uniqueCourses.count(y) == 0):
        uniqueCourses.append(y)
print uniqueCourses
'''
String indexes 4-6 have the course numbers
432 possibilities here, low enough that blunt-force should work fine
Start with classes where there's only one section; if there's a conflict there, stop and say the schedule isn't possible
Then go to the next ones with 2 sections, then 3, etc
If Any of those conflict with something with 1 section, remove it as a possibility and continue
5 lists, one for every day of the week.
There isn't a conflict as long as end1<start2 or end2<start1
isAllowed():
|----------------| interval 1
|----------| end2<start1 True
|-------| end1<start2 True
|------------------| end2 !< start1 False
|---------------| end1 !< start2 False
|-------------------------| end2 !< start1 False
& end1 !< start2 False
|--------| end2 !< start1
& end1 !< start2 False
'''
#simple comparison for two classes in the format above - only compares for one day right now
#simple comparison for two classes in the format above - only compares for one day right now
def isAllowed(classList1, classList2):
    """Report whether two meetings ([days, start, end] with HHMM strings)
    do NOT overlap in time, and return that boolean.

    Zero-padded HHMM strings compare correctly as plain strings, so no
    time parsing is needed.  Note: only times are compared, not days.

    Fix: the original printed the verdict but always returned None,
    despite the "is..." name; returning the boolean is backward-compatible
    since existing callers ignore the return value.
    """
    # No overlap iff one meeting ends before the other starts.
    allowed = (classList2[2] < classList1[1]) or (classList1[2] < classList2[1])
    if allowed:
        # Single-argument print() works in Python 2 and 3 alike.
        print('No conflict!')
    else:
        print('Conflict!')
    return allowed
isAllowed(courses['CS 115A'], courses['BT 353D']) #no conflict
isAllowed(courses['CS 115A'], courses['BT 353A']) #conflict
| mit | Python | |
338f8d95df785b49eb0c00209535bfde675b6ce9 | Create release_jobs_from_hold.py | IGB-UIUC/Biocluster,IGB-UIUC/Biocluster,IGB-UIUC/Biocluster,IGB-UIUC/Biocluster | release_jobs_from_hold.py | release_jobs_from_hold.py | #!/usr/bin/env perl
# Release a range of held PBS array sub-jobs by invoking
# "releasehold -a" once per array index of the given job.
use strict;
use Getopt::Long;

# Usage message printed when required arguments are missing.
my $usage = "\t\t --usage $0 job_id=<PBS JOB ID> min=<Start of array> max=<End of array>\n";
# Base scheduler command; the per-index job id is appended below.
my $command = "releasehold -a";
my $job_id;
my $min;
my $max;
# Output record separator: every print automatically ends with a newline.
$\= "\n";

# Require at least one command-line argument.
if (! scalar(@ARGV) ) {
    die $usage . scalar @ARGV;
}

GetOptions ("job_id=i" => \$job_id,
            "min=i" => \$min,
            "max=i" => \$max) ;

# All three options are mandatory.
# NOTE(review): a legitimate value of 0 (e.g. min=0) is also rejected by
# this truthiness test -- confirm whether index 0 can occur.
if(! $job_id || ! $min || ! $max){
    die $usage;
}

print "job=$job_id min=$min max=$max\n";

# Release each sub-job "<job_id>[<i>]"; brackets are escaped for the shell.
# NOTE(review): the loop is exclusive of max ($i < $max), so the last
# index is never released -- confirm this is intended.
for(my $i = $min; $i < $max; $i++){
    my $call = "$command \"$job_id" . '\[' . $i . '\]"';
    print $call;
    system($call);
}
| apache-2.0 | Python | |
fb03cd60646e56a789e61471f5bb6772f7035d6e | add test for io | osoken/kisell | tests/test_io.py | tests/test_io.py | # -*- coding: utf-8 -*-
import os
import re
import unittest
from kisell.core import Origin, Pipe
from kisell import io
_license_file_path = os.path.join(
os.path.dirname(os.path.dirname(__file__)), 'LICENSE'
)
_license_file_content = None
with open(_license_file_path, 'r') as f:
_license_file_content = f.read()
class _AddLineNumber(Pipe):
    """Pipe stage that prefixes each upstream item with its 0-based index."""

    def __init__(self):
        super(_AddLineNumber, self).__init__()

    def _initialize(self):
        # enumerate() replaces the manual counter of the earlier version.
        for index, item in enumerate(self.upstream):
            yield str(index) + item
class ReadStreamTester(unittest.TestCase):
    """Tests for kisell.io.ReadStream wrapping an already-open file object."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test__init__(self):
        # Wrap the repository LICENSE file and iterate it in 100-char chunks.
        fin = open(_license_file_path, 'r')
        test = io.ReadStream(fin, 100)
        l = list(test)
        # The first chunk must be exactly the requested chunk size.
        self.assertEqual(len(l[0]), 100)
class FileReadStreamTester(unittest.TestCase):
    """Tests for kisell.io.FileReadStream opened by path."""

    def setUp(self):
        pass

    def tearDown(self):
        pass

    def test__init__(self):
        test = io.FileReadStream(_license_file_path)
        # Defaults exposed by the stream: the path, utf-8 encoding, read mode.
        self.assertEqual(test.name, _license_file_path)
        self.assertEqual(test.encoding, 'utf-8')
        self.assertEqual(test.mode, 'r')
        content = test.read()
        # Reading through the stream must reproduce the file verbatim.
        self.assertEqual(content, _license_file_content)
class WriteStreamTester(unittest.TestCase):
    """Tests for kisell.io.WriteStream writing through a pipeline."""

    # Scratch directory created per test run and removed afterwards.
    tmp_dir_path = os.path.join(os.path.dirname(__file__), 'tmp')

    def setUp(self):
        if not os.path.exists(WriteStreamTester.tmp_dir_path):
            os.makedirs(WriteStreamTester.tmp_dir_path)

    def tearDown(self):
        # removedirs only succeeds once the directory is empty again.
        os.removedirs(WriteStreamTester.tmp_dir_path)

    def test__init__(self):
        tmp_name = os.path.join(WriteStreamTester.tmp_dir_path, 'init')
        tmp_file = open(tmp_name, 'w')
        test = io.WriteStream(tmp_file)
        # Stream the whitespace-split LICENSE tokens through the pipeline.
        orig = Origin(re.split('\s', _license_file_content))
        (orig + test).run()
        tmp_file.close()
        # Each written line (newline stripped) must match the source tokens.
        with open(tmp_name, 'r') as tmp_fin:
            self.assertListEqual([x.rstrip('\n') for x in tmp_fin],
                                 re.split('\s', _license_file_content))
        os.remove(tmp_name)
class FileWriteStreamTester(unittest.TestCase):
    # TODO: no tests implemented yet for FileWriteStream.
    pass
| mit | Python | |
f1e47cc7854b0cb98f384ec866571aeaab96edd7 | Add forgotten migration | hasanalom/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,edofic/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core,hyperNURb/ggrc-core,NejcZupec/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,jmakov/ggrc-core,vladan-m/ggrc-core,edofic/ggrc-core,uskudnik/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,hyperNURb/ggrc-core,selahssea/ggrc-core,jmakov/ggrc-core,VinnieJohns/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,VinnieJohns/ggrc-core,vladan-m/ggrc-core,j0gurt/ggrc-core,uskudnik/ggrc-core,hasanalom/ggrc-core,uskudnik/ggrc-core,uskudnik/ggrc-core,uskudnik/ggrc-core,prasannav7/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,hyperNURb/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,vladan-m/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core | src/ggrc/migrations/versions/20130805234925_4752027f1c40_create_directive_met.py | src/ggrc/migrations/versions/20130805234925_4752027f1c40_create_directive_met.py | """Create Directive.meta_kind
Revision ID: 4752027f1c40
Revises: 3a5ff1d71b9f
Create Date: 2013-08-05 23:49:25.621647
"""
# revision identifiers, used by Alembic.
revision = '4752027f1c40'
down_revision = '3a5ff1d71b9f'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import select, table, column, and_
def upgrade():
    """Add directives.meta_kind as the subclass discriminator, populate it
    from the legacy `kind` values, drop control mappings to systems, and
    rewrite polymorphic type names to the new subclass names."""
    # Add `directives.meta_kind` to be the subclass discriminator
    op.add_column('directives', sa.Column('meta_kind', sa.String(length=250), nullable=True))

    # Lightweight table construct for issuing UPDATEs (no ORM model needed).
    directives_table = table('directives',
        column('id', sa.Integer),
        column('kind', sa.String),
        column('meta_kind', sa.String)
        )

    # Map each new meta_kind to the legacy `kind` values it subsumes.
    meta_kind_mappings = {
        "Contract": ("Contract",),
        "Policy": (
            "Company Policy",
            "Org Group Policy", "Data Asset Policy", "Product Policy",
            "Contract-Related Policy", "Company Controls Policy"
            ),
        "Regulation": ("Regulation",),
        }

    for meta_kind, kinds in meta_kind_mappings.items():
        op.execute(
            directives_table.update()\
                .where(directives_table.c.kind.in_(kinds))\
                .values(meta_kind=meta_kind))

    # We removed 'Controllable' from System/Process, so drop any
    # existing relationships there
    object_controls_table = table('object_controls',
        column('id', sa.Integer),
        column('controllable_type', sa.String),
        column('controllable_id', sa.Integer)
        )

    op.execute(
        object_controls_table.delete()\
            .where(object_controls_table.c.controllable_type.in_(
                ['System', 'Process', 'SystemOrProcess'])))

    # Now, alter mappings to System/Directive to use new class name
    systems_table = table('systems',
        column('id', sa.Integer),
        column('is_biz_process', sa.Boolean)
        )
    directives_table = table('directives',
        column('id', sa.Integer),
        column('meta_kind', sa.Boolean)
        )

    # Subqueries selecting the ids belonging to each new concrete type.
    process_ids = select([systems_table.c.id])\
        .where(systems_table.c.is_biz_process == True)
    system_ids = select([systems_table.c.id])\
        .where(systems_table.c.is_biz_process != True)
    contract_ids = select([directives_table.c.id])\
        .where(directives_table.c.meta_kind == 'Contract')
    policy_ids = select([directives_table.c.id])\
        .where(directives_table.c.meta_kind == 'Policy')
    regulation_ids = select([directives_table.c.id])\
        .where(directives_table.c.meta_kind == 'Regulation')

    # Legacy type names that may appear in the polymorphic *_type columns.
    system_types = ["System", "Process"]
    directive_types = ["Directive", "Contract", "Policy", "Regulation"]

    # (new type name, ids owning it, legacy names to replace)
    type_ids_old_types = [
        ("System", system_ids, system_types),
        ("Process", process_ids, system_types),
        ("Contract", contract_ids, directive_types),
        ("Policy", policy_ids, directive_types),
        ("Regulation", regulation_ids, directive_types),
        ]

    # Every (table, column-prefix) pair holding a polymorphic reference.
    polymorphic_links = [
        ('object_objectives', 'objectiveable'),
        ('object_controls', 'controllable'),
        ('object_sections', 'sectionable'),
        ('object_people', 'personable'),
        ('object_documents', 'documentable'),
        ('relationships', 'source'),
        ('relationships', 'destination')
        ]

    # Rewrite "<prefix>_type" to the new concrete class name wherever the
    # row points at one of the affected ids under a legacy type name.
    for table_name, prefix in polymorphic_links:
        t = table(table_name,
            column('id', sa.Integer),
            column('{0}_type'.format(prefix), sa.String),
            column('{0}_id'.format(prefix), sa.Integer)
            )
        for type, ids, old_types in type_ids_old_types:
            op.execute(t.update()\
                .values(**{ "{0}_type".format(prefix) : type })\
                .where(
                    and_(
                        t.c.get("{0}_type".format(prefix)).in_(old_types),
                        t.c.get("{0}_id".format(prefix)).in_(ids))))
def downgrade():
    # Only the discriminator column is removed; the row rewrites performed
    # in upgrade() (kind->meta_kind mapping, deleted object_controls rows,
    # renamed polymorphic type values) are NOT reverted.
    op.drop_column('directives', 'meta_kind')
| apache-2.0 | Python | |
3c885004d579dacbe7a193576b21ee965a9d8e11 | add test file for images | coblo/isccbench | iscc_bench/elastic_search/generate_image_hashes.py | iscc_bench/elastic_search/generate_image_hashes.py | # -*- coding: utf-8 -*-
import time
from PIL import Image
import os
import dhash
from elasticsearch import Elasticsearch
from elasticsearch import helpers
from iscc_bench import DATA_DIR
from iscclib.image import ImageID
es = Elasticsearch()
IMAGE_DIR = os.path.join(DATA_DIR, 'images\src')
mapping_image = '''
{
"mappings": {
"default": {
"dynamic": "strict",
"properties": {
"name": {
"type": "keyword",
"index": "true"
},
"type": {
"type": "keyword",
"index": "true"
},
"dHash": {
"type": "keyword",
"index": "true"
},
"wHash": {
"type": "keyword",
"index": "true"
}
}
}
}
}'''
def from_image_dhash(image):
    """Compute a 64-bit difference-hash based ImageID for the given file."""
    opened = Image.open(image)
    hash_value = dhash.dhash_int(opened, 5)
    return ImageID(ident=hash_value, bits=64)
def init_index():
    # Drop any stale index first so each run starts from a clean mapping.
    if es.indices.exists(index='iscc_images'):
        es.indices.delete(index='iscc_images')
    es.indices.create(index='iscc_images', body=mapping_image)
def action_generator():
    """Yield one Elasticsearch bulk-index action per image in IMAGE_DIR."""
    for image in os.listdir(IMAGE_DIR):
        img_file = os.path.join(IMAGE_DIR, image)
        # wHash: ImageID's own (wavelet-style) hash; dHash: difference hash.
        wiid = ImageID.from_image(img_file)
        diid = from_image_dhash(img_file)
        # Filename convention appears to be "<name>_<variant>.<ext>": the
        # part before the first '_' groups variants of the same picture and
        # the extension is stored as "type".  Assumes exactly one '.' in the
        # filename -- TODO confirm against the source image set.
        query = {
            "_index": "iscc_images",
            "_type": "default",
            "_source": {"name": image.split('.')[0].split('_')[0], "type": image.split('.')[1],
                        "wHash": "{}".format(wiid), "dHash": "{}".format(diid)}
        }
        yield query
def generate_ids():
    """Bulk-index all images and return (success, failed) counts.

    Fix: the original tallied outcomes into `success`/`failed` but then
    discarded both, making the counting dead code.  Returning the tally
    lets callers verify indexing worked; existing callers that ignore the
    return value are unaffected.
    """
    success = 0
    failed = 0
    for ok, item in helpers.streaming_bulk(es, action_generator(), chunk_size=5000):
        if ok:
            success += 1
        else:
            failed += 1
    return success, failed
def check_values(type1, type2):
    """Verify documents sharing one `type1` value also agree on `type2`.

    Aggregates the distinct values of field `type1` (up to 1000 buckets);
    for every value occurring in more than one document it scans those
    documents and checks they all carry an identical `type2` value.
    Returns True when no mismatch was found.
    """
    passed = True
    # Terms aggregation: distinct values of `type1` with their doc counts.
    group_by = '{"size": 0, "aggs": {"group_by_state": {"terms": {"field": "%s", "size": 1000}}}}' % type1
    res = es.search('iscc_images', body=group_by)
    buckets = res['aggregations']['group_by_state']['buckets']
    for bucket in buckets:
        if bucket['doc_count'] > 1:
            get_by_key = {"query": {"terms": {type1: [bucket['key']]}}}
            value = None
            # First document seen sets the reference; the rest must match.
            for entry in helpers.scan(es, index='iscc_images', query=get_by_key):
                if value:
                    if not value == entry['_source'][type2]:
                        passed = False
                        print('Fail with %s %s' % (type1, bucket['key']))
                else:
                    value = entry['_source'][type2]
    return passed
def evaluate():
    """Run the four name/hash consistency checks and report overall success."""
    # All four checks run unconditionally (no short-circuiting), exactly as
    # in the original sequential assignments.
    results = [
        check_values('name', 'wHash'),
        check_values('name', 'dHash'),
        check_values('wHash', 'name'),
        check_values('dHash', 'name'),
    ]
    if all(results):
        print('All tests passed.')
if __name__ == '__main__':
    init_index()
    generate_ids()
    # Give Elasticsearch time to refresh the bulk-indexed data before the
    # consistency queries run.
    time.sleep(10)
    evaluate()
| bsd-2-clause | Python | |
64fca89a9bb3bc0cd7725f4ad2ef0924c5c97859 | remove very large target coverage | shengqh/ngsperl,shengqh/ngsperl,shengqh/ngsperl,shengqh/ngsperl | lib/GATK4/fixCollectHsMetrics.py | lib/GATK4/fixCollectHsMetrics.py | import argparse
import sys
import logging
import os
import random
DEBUG=False
NotDEBUG=not DEBUG
parser = argparse.ArgumentParser(description="fixCollectHsMetrics",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-i', '--input', action='store', nargs='?', help='Input CollectHsMetrics file', required=NotDEBUG)
parser.add_argument('-o', '--output', action='store', nargs='?', help="Output file", required=NotDEBUG)
args = parser.parse_args()
if DEBUG:
args.input = "/scratch/cqs/PCA_scRNAseq/Exoseq/20220214_7538_CH/bwa_g4_refine_target_coverage/result/WD82458_NL_hs_metrics.txt"
args.output = "/scratch/cqs/PCA_scRNAseq/Exoseq/20220214_7538_CH/bwa_g4_refine_target_coverage/result/WD82458_NL_hs_metrics.txt.fixed"
logger = logging.getLogger('fixCollectHsMetrics')
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)-8s - %(message)s')
logger.info("reading " + args.input + " ...")
with open(args.output, "wt") as fout:
with open(args.input, "rt") as fin:
for line in fin:
if "PCT_TARGET_BASES_100000X" not in line:
fout.write(line)
else:
break
parts = line.split('\t')
PCT_TARGET_BASES_500X = parts.index('PCT_TARGET_BASES_500X')
AT_DROPOUT = parts.index('AT_DROPOUT')
del parts[PCT_TARGET_BASES_500X:AT_DROPOUT]
fout.write("\t".join(parts))
line = fin.readline()
parts = line.split('\t')
del parts[PCT_TARGET_BASES_500X:AT_DROPOUT]
fout.write("\t".join(parts))
for line in fin:
fout.write(line)
logger.info("done.")
| apache-2.0 | Python | |
ee928a52805ea8179277487e667947746985a2db | Create __init__.py | darwinex/DarwinexLabs | tools/dwx_zeromq_connector/v2.0.1/EXAMPLES/TEMPLATE/STRATEGIES/__init__.py | tools/dwx_zeromq_connector/v2.0.1/EXAMPLES/TEMPLATE/STRATEGIES/__init__.py | bsd-3-clause | Python | ||
8fe57fbbc5764d3e13c3513afcdb2c49d04b117e | Add a migration for php5-fpm pools to php7 | YunoHost/yunohost,YunoHost/yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost | src/yunohost/data_migrations/0003_php5_to_php7_pools.py | src/yunohost/data_migrations/0003_php5_to_php7_pools.py | import os
import glob
from shutil import copy2
from moulinette.utils.log import getActionLogger
from yunohost.tools import Migration
from yunohost.service import _run_service_command
logger = getActionLogger('yunohost.migration')
PHP5_POOLS = "/etc/php5/fpm/pool.d"
PHP7_POOLS = "/etc/php/7.0/fpm/pool.d"
PHP5_SOCKETS_PREFIX = "/var/run/php5-fpm"
PHP7_SOCKETS_PREFIX = "/run/php/php7.0-fpm"
MIGRATION_COMMENT = "; YunoHost note : this file was automatically moved from {}".format(PHP5_POOLS)
class MyMigration(Migration):
    "Migrate php5-fpm 'pool' conf files to php7 stuff"

    def migrate(self):
        # Basenames of every php5 pool file, skipping the default
        # "www.conf" (probably best left untouched).
        pool_names = [
            os.path.basename(path)
            for path in glob.glob("{}/*.conf".format(PHP5_POOLS))
        ]
        pool_names = [name for name in pool_names if name != "www.conf"]

        for name in pool_names:
            source = "{}/{}".format(PHP5_POOLS, name)
            target = "{}/{}".format(PHP7_POOLS, name)
            # Copy the pool file into the php7 configuration directory.
            copy2(source, target)
            # Rewrite any php5 socket paths to their php7 location.
            rewrite = "sed -i -e 's@{}@{}@g' {}".format(PHP5_SOCKETS_PREFIX, PHP7_SOCKETS_PREFIX, target)
            os.system(rewrite)
            # Tag the file as auto-migrated, for human traceability and so
            # backward() can recognize which files it owns.
            tag = "sed -i '1i {}' {}".format(MIGRATION_COMMENT, target)
            os.system(tag)

        # Pick up the new pool definitions.
        _run_service_command("restart", "php-fpm")

    def backward(self):
        # Remove only the php7 pool files carrying the migration marker on
        # their first line, i.e. the ones migrate() created.
        for path in glob.glob("{}/*.conf".format(PHP7_POOLS)):
            if open(path).readline().strip() == MIGRATION_COMMENT:
                os.remove(path)

        _run_service_command("restart", "php-fpm")
| agpl-3.0 | Python | |
935c77777d9d15269d2579f001c3abd97f8635e7 | add - module for communicating with redis. | rfaulkner/Flickipedia,rfaulkner/Flickipedia,rfaulkner/Flickipedia,rfaulkner/Flickipedia,rfaulkner/Flickipedia | flickipedia/redisio.py | flickipedia/redisio.py | """
Module for handling redis IO
"""
import redis
from flickipedia.config import log
__author__ = 'Ryan Faulkner'
__date__ = "2014-04-01"
class DataIORedis(object):
    """ Class implementing data IO for Redis. """

    # Connection defaults, overridable via constructor kwargs.
    DEFAULT_HOST = 'localhost'
    DEFAULT_PORT = 6379
    DEFAULT_DB = 0

    def __init__(self, **kwargs):
        """Accept optional 'host', 'port' and 'db' keyword overrides."""
        super(DataIORedis, self).__init__(**kwargs)
        self.conn = None
        # Fix: dict.has_key() is deprecated and removed in Python 3;
        # dict.get() with a default expresses the same logic directly.
        self.host = kwargs.get('host', self.DEFAULT_HOST)
        self.port = kwargs.get('port', self.DEFAULT_PORT)
        self.db = kwargs.get('db', self.DEFAULT_DB)

    def connect(self, **kwargs):
        """Open (or replace) the Redis connection using current settings.

        Extra keyword arguments are currently ignored.
        """
        self.conn = redis.Redis(host=self.host, port=self.port, db=self.db)

    def write(self, **kwargs):
        """SET kwargs['key'] to kwargs['value']; returns False on a missing
        parameter or when no connection has been opened."""
        if self.conn:
            try:
                return self.conn.set(kwargs['key'], kwargs['value'])
            except KeyError as e:
                log.error('Missing param -> {0}'.format(e.message))
                return False
        else:
            log.error('No redis connection.')
            return False

    def read(self, **kwargs):
        """GET kwargs['key']; returns False on a missing parameter or when
        no connection has been opened."""
        if self.conn:
            try:
                return self.conn.get(kwargs['key'])
            except KeyError as e:
                log.error('Missing param -> {0}'.format(e.message))
                return False
        else:
            log.error('No redis connection.')
            return False

    def _del(self, **kwargs):
        """DELETE kwargs['key']; returns False on a missing parameter or
        when no connection has been opened."""
        if self.conn:
            try:
                return self.conn.delete(kwargs['key'])
            except KeyError as e:
                log.error('Missing param -> {0}'.format(e.message))
                return False
        else:
            log.error('No redis connection.')
            return False
| bsd-2-clause | Python | |
637165eef82d40abc240b1dc40edddabecbb6af3 | Create new package. (#6503) | mfherbst/spack,krafczyk/spack,krafczyk/spack,mfherbst/spack,tmerrick1/spack,krafczyk/spack,matthiasdiener/spack,EmreAtes/spack,LLNL/spack,matthiasdiener/spack,mfherbst/spack,EmreAtes/spack,EmreAtes/spack,EmreAtes/spack,mfherbst/spack,iulian787/spack,tmerrick1/spack,tmerrick1/spack,krafczyk/spack,tmerrick1/spack,iulian787/spack,matthiasdiener/spack,matthiasdiener/spack,krafczyk/spack,LLNL/spack,mfherbst/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,EmreAtes/spack,LLNL/spack,LLNL/spack,matthiasdiener/spack,tmerrick1/spack | var/spack/repos/builtin/packages/r-biocstyle/package.py | var/spack/repos/builtin/packages/r-biocstyle/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RBiocstyle(RPackage):
    """Provides standard formatting styles for Bioconductor PDF and HTML
    documents. Package vignettes illustrate use and functionality."""

    homepage = "https://www.bioconductor.org/packages/BiocStyle/"
    url = "https://git.bioconductor.org/packages/BiocStyle"

    # Pin the exact Bioconductor git commit for reproducible builds.
    version('2.4.1', git='https://git.bioconductor.org/packages/BiocStyle', commit='ef10764b68ac23a3a7a8ec3b6a6436187309c138')

    # R packages required both to build and to run BiocStyle.
    depends_on('r-bookdown', type=('build', 'run'))
    depends_on('r-knitr@1.12:', type=('build', 'run'))
    depends_on('r-rmarkdown@1.2:', type=('build', 'run'))
    depends_on('r-yaml', type=('build', 'run'))
    # BiocStyle 2.4.1 requires an R 3.4.x interpreter.
    depends_on('r@3.4.0:3.4.9', when='@2.4.1')
| lgpl-2.1 | Python | |
b577b1b54cf8ba2f1b9184bda270e0bcd9613ef5 | Create wordcount-mapper.py | hardikvasa/hadoop-mapreduce-examples-python | wordcount-mapper.py | wordcount-mapper.py | #!/usr/bin/env python
import sys
# Classic Hadoop-streaming word-count mapper: emit one "<word>\t1" pair
# per whitespace-separated token; the reducer sums the 1s per word.
for line in sys.stdin: # Input is read from STDIN and the output of this file is written into STDOUT
    line = line.strip() # remove leading and trailing whitespace
    words = line.split() # split the line into words
    for word in words:
        print '%s\t%s' % (word, 1) #Print all words (key) individually with the value 1
| mit | Python | |
79a0302ed3cebc0b93775824f6bfa5ce17bdb371 | Create j34all_possible.py | jovian34/j34all_possible | j34all_possible.py | j34all_possible.py | import itertools
class ListOps():
    """Find every way to place '+' or '-' between the digits 1..length,
    optionally merging adjacent digits into two-digit numbers (12, 23, ...),
    so the signed sum equals `total`.

    Each digit position i contributes one of: 0 (digit consumed by a merge
    with its successor), +/-i (digit used alone), or +/-(10*(i-1)+i)
    (digit merged with its predecessor).  The last digit has no 0 option
    since nothing follows it.
    """

    def __init__(self, length=9, total=100):
        self.length = length
        self.total = total
        # Candidate signed values per digit position (see class docstring).
        choices = [(0, 1, -1)]
        for i in range(2, self.length):
            choices.append((0, i, -i, (10 * (i - 1)) + i, -((10 * (i - 1)) + i)))
        # Final position: no 0 placeholder.
        choices.append((self.length, -(self.length),
                        (10 * (self.length - 1)) + self.length,
                        -((10 * (self.length - 1)) + self.length)))
        self.poss_values = tuple(choices)
        self.valid_list = []
        self.final_list = []

    def _make_full_list(self):
        # Cartesian product of all per-position choices.
        self.full_list = list(itertools.product(*self.poss_values))

    def _is_valid(self, num_list):
        """A combination is well-formed iff every two-digit term is preceded
        by a 0 placeholder, and every 0 placeholder is followed by a
        two-digit term (scanned pairwise from the right)."""
        for i in range(1, len(num_list)):
            if abs(num_list[-(i)]) > 10 and abs(num_list[-(i) - 1]) > 0:
                return False
            elif abs(num_list[-(i)]) < 10 and num_list[-(i) - 1] == 0:
                return False
        return True

    def _make_valid_list(self):
        # Comprehension replaces the original index-based append loop.
        self.valid_list = [combo for combo in self.full_list if self._is_valid(combo)]

    def _equals_total(self, num_list):
        # sum() replaces the manual accumulation loop.
        return sum(num_list) == self.total

    def _make_final_list(self):
        self.final_list = [combo for combo in self.valid_list if self._equals_total(combo)]

    def _print_final_list(self):
        # Render each solution as "+ a  - b  ..."; zeros are merge
        # placeholders and produce no output (the original printed an
        # empty string for them, a no-op).
        for combo in self.final_list:
            for term in combo:
                if term > 0:
                    print('+', term, ' ', end = '')
                elif term < 0:
                    print('-', abs(term), ' ', end = '')
            print('')

    def run(self):
        """Build all combinations, filter them, and print the solutions."""
        self._make_full_list()
        self._make_valid_list()
        self._make_final_list()
        self._print_final_list()
def main():
    # Default parameters: digits 1..9, target total 100.
    list_obj = ListOps()
    list_obj.run()

if __name__ == '__main__':
    main()
| apache-2.0 | Python | |
be1b50a9780bc8d2114b8660687fd72bb9472949 | Implement new Lin similarity based query expansion in query_processing/wordnet_expansion.py | amkahn/question-answering,amkahn/question-answering | src/query_processing/wordnet_expansion.py | src/query_processing/wordnet_expansion.py | from nltk.corpus import wordnet as wn
from nltk.corpus import wordnet_ic
semcor_ic = wordnet_ic.ic('ic-semcor.dat')
from nltk.corpus import lin_thesaurus as thes
import heapq
# specifies number of top-scoring synonyms to use
NUMBER_SYNONYMS = 3
def expand_query(query):
    """Expand a list of query terms with their top Lin-thesaurus synonyms.

    Returns a dict mapping term -> weight: original query terms get
    weight 1, added synonyms get their thesaurus similarity score.
    """
    # add weight of 1 for each original term
    weighted_terms = {term: 1 for term in query}
    # should do some stopword removal here
    for word in query:
        # scored_synonyms() yields (POS, [(synonym, score), ...]) pairs;
        # flatten the per-POS lists into one candidate pool.
        syns = thes.scored_synonyms(word)
        all_syns = []
        for element in syns:
            all_syns.extend(element[1])
        # Fix: the original wrote "if (thes) > NUMBER_SYNONYMS", comparing
        # the imported MODULE to an int, and its else-branch fell back to
        # the unflattened (POS, list) pairs, which have the wrong shape
        # for the weighting loop below.
        if len(all_syns) > NUMBER_SYNONYMS:
            # get n-best synonyms according to Lin similarity
            top = heapq.nlargest(NUMBER_SYNONYMS, all_syns, key=lambda k: k[1])
        else:
            top = all_syns
        # add top synonyms to weighted term dict
        for element in top:
            weighted_terms[element[0]] = element[1]
    return weighted_terms
# functions below are old - probably won't want to use them
def get_synset_similarity(t1, t2):
    """
    takes a pair of query terms and returns their synset similarity: the
    overlap between the glosses of all the synsets of both terms
    """
    overlap = (get_synset_glosses(t1).intersection(get_synset_glosses(t2)))
    # Debug output left in by the author (Python 2 print statement).
    print overlap
    return len(overlap)
def get_synset_glosses(t):
    """Return the set of first lemma names across all WordNet synsets of t
    (used as a stand-in for real glosses -- see the note below)."""
    # not sure how to get glosses.. seems like a good approximation
    return set(synset.lemma_names[0] for synset in wn.synsets(t))
## debug code
get_synset_similarity("dog", "domestic_dog")
| mit | Python | |
1b810ec3fb2bdd241d831a3167d9ed8051fa29ca | Add to repo. | kraftur/mapIt | mapIt.py | mapIt.py | #!/usr/bin/python3
# mapIt.py - Launches a map in the browser using an address from the
# command line or clipboard.

import webbrowser
import sys

if len(sys.argv) > 1:
    # Get address from command line.
    address = ' '.join(sys.argv[1:])
else:
    # Get address from clipboard.  Fix: the original called
    # pyperclip.paste() without ever importing pyperclip, so running the
    # script with no arguments raised NameError.  The import lives here
    # because it is only needed on this path.
    import pyperclip
    address = pyperclip.paste()

webbrowser.open('https://www.google.com/maps/place/' + address)
| mit | Python | |
3559faeceff06aee82409ca22158223aff696b07 | Create MajorityElement_004.py | cc13ny/algo,Chasego/codi,Chasego/cod,Chasego/cod,cc13ny/algo,cc13ny/Allin,cc13ny/Allin,cc13ny/Allin,Chasego/codi,cc13ny/Allin,cc13ny/algo,Chasego/cod,Chasego/codirit,cc13ny/algo,cc13ny/algo,Chasego/codirit,Chasego/codi,Chasego/codirit,Chasego/cod,Chasego/codirit,Chasego/cod,Chasego/codirit,Chasego/codi,Chasego/codi,cc13ny/Allin | leetcode/169-Majority-Element/MajorityElement_004.py | leetcode/169-Majority-Element/MajorityElement_004.py | class Solution(object):
def majorityElement(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
num, cnt = nums[0], 1
for i in xrange(1, len(nums)):
if nums[i] == num:
cnt += 1
elif cnt == 0:
num = nums[i]
cnt = 1
else:
cnt -= 1
return num
| mit | Python | |
ba0d2ed9373df05eae280f8664214decddbd755c | add basic drawing tests for svg | tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable | kiva/tests/test_svg_drawing.py | kiva/tests/test_svg_drawing.py | import contextlib
import StringIO
import unittest
from xml.etree import ElementTree
from kiva.tests.drawing_tester import DrawingTester
from kiva.svg import GraphicsContext
class TestSVGDrawing(DrawingTester, unittest.TestCase):
def create_graphics_context(self, width, height):
return GraphicsContext((width, height))
@contextlib.contextmanager
def draw_and_check(self):
yield
filename = "{0}.svg".format(self.filename)
self.gc.save(filename)
tree = ElementTree.parse(filename)
elements = [element for element in tree.iter()]
if not len(elements) in [4, 7]:
self.fail('The expected number of elements was not found')
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | Python | |
911baa4f700c34b2c2c3de8239a0fee60c12f1e9 | Create db.py | haibo-yu/awesome-python-webapp,haibo-yu/awesome-python-webapp,haibo-yu/awesome-python-webapp | www/transwarp/db.py | www/transwarp/db.py | unlicense | Python | ||
c51d6fd5d12fd22391e792f8bb792a48b5bcda04 | Create yt-search-filter.py | mthomas128/Search-Index | yt-search-filter.py | yt-search-filter.py | """This program is designed to facilitate rapidly
finding a video and its link on YouTube. Instructions for use:
Install elementtree and gdata 2.0 APIs.
Run program through command prompt of choice and enter a query
to be searched on YouTube. Specific queries work best.
Then enter a second query to filter the results through.
This is because YouTube delivers results with the query not
necessarily in the title. One word queries on this part work best.
If an output.txt file is in the same directory as the program,
the results satisfying the parameters will be added to the file."""
import elementtree, httplib, urllib, gdata.youtube, gdata.youtube.service
SortedYTFeed = {}
result = {}
# Functions take YouTube query and process them into a dictionary
# ----------------------------------------------------------------
def UpdateSortedFeed(entry):
    """Record one feed entry in the global results dict: title -> SWF URL."""
    SortedYTFeed[entry.media.title.text] = entry.GetSwfUrl()
def LoopThroughEntries(feed):
    """Add every entry of a YouTube result feed to the global dict."""
    for feed_entry in feed.entry:
        UpdateSortedFeed(feed_entry)
def YouTubeSearch(search_terms):
    """Query YouTube for up to 50 relevance-ordered videos matching
    search_terms and store them in the global results dict."""
    service = gdata.youtube.service.YouTubeService()
    video_query = gdata.youtube.service.YouTubeVideoQuery()
    video_query.vq = search_terms
    video_query.orderby = 'relevance'
    video_query.max_results = 50
    result_feed = service.YouTubeQuery(video_query)
    LoopThroughEntries(result_feed)
# ----------------------------------------------------------------
# Basic search function accepting dictionaries
def search(Websites, query):
    """Copy into the global `result` dict every entry whose key contains
    `query`, case-insensitively.

    Fix/idiom: the original scanned every index position by hand and
    re-lowercased the query at each position (O(len(key) * len(query)) per
    key); the `in` operator expresses the same containment test directly,
    with the query lowercased once.
    """
    needle = query.lower()
    for name in Websites:
        if needle in name.lower():
            result.update({name: Websites[name]})
# Run the two-stage search: broad YouTube query, then local refinement.
YouTubeSearch(raw_input("Enter query for YouTube search:\n> "))
# Drop YouTube's "devicesupport" entry that pollutes results.
# NOTE(review): this compares the TITLE key against a URL; presumably that
# entry's title is its URL -- verify against actual feed data.
SortedYTFeed = {key: value for key, value in SortedYTFeed.items() if key != "https://youtube.com/devicesupport"}
search(SortedYTFeed, raw_input("Enter query for search refinement:\n> "))

# Writes to file to make large results more manageable
with open("output.txt", "r+") as output:
    # OVERWRITES OUTPUT FILE ON EACH RUN
    output.truncate()
    for i in result:
        output.write(i + " - " + result[i] + "\n\n")
| mit | Python | |
fd1759b05c35d45bb6bf289f5267415e8c2a447e | Add missing superclass | sassoftware/rbuild,sassoftware/rbuild | rbuild/internal/rbuilder/rbuildercommand.py | rbuild/internal/rbuilder/rbuildercommand.py | #
# Copyright (c) 2008 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
import sys
import urllib
import urlparse
from rbuild.facade import rbuilderfacade
from rbuild.pluginapi import command
class RbuilderCommand(command.BaseCommand):
    """
    Base class for rBuilder commands.
    Handles backwards-compatibility for some command-line options.
    """
    # Commands derived from this class do not require a config file.
    requireConfig = False
    def processLocalConfigOptions(self, rbuildConfig, argSet):
        """
        Tweak the serverUrl option so URLs that used to work with the
        old client continue to work with rBuild.

        Two legacy forms are normalized: a trailing ``xmlrpc-private``
        path component is stripped, and embedded ``user:password@``
        credentials are moved into the ``user`` config entry.
        """
        uri = rbuildConfig.serverUrl
        if not uri:
            return
        scheme, netloc, path, query, fragment = urlparse.urlsplit(uri)
        pathParts = path.split('/')
        if 'xmlrpc-private' in pathParts:
            # Remove xmlrpc-private; rbuild expects it not to be there
            # but the old client needed it.
            del pathParts[pathParts.index('xmlrpc-private'):]
            path = '/'.join(pathParts)
        # splituser returns (None, netloc) when no credentials are embedded.
        userPart, hostPart = urllib.splituser(netloc)
        if userPart is not None:
            user, password = urllib.splitpasswd(userPart)
            rbuildConfig['user'] = (user, password)
        # Re-form URI sans user part
        uri = urlparse.urlunsplit((scheme, hostPart, path, query, fragment))
        rbuildConfig['serverUrl'] = uri
| apache-2.0 | Python | |
5e005fab08da740a027dcc23ba1b53abc3efaec4 | add missing indices | ergo/ziggurat_foundations,ergo/ziggurat_foundations | ziggurat_foundations/migrations/versions/613e7c11dead_create_indices_on_resource_owners.py | ziggurat_foundations/migrations/versions/613e7c11dead_create_indices_on_resource_owners.py | """create indices on resource owners
Revision ID: 613e7c11dead
Revises: b5e6dd3449dd
Create Date: 2018-02-15 11:51:29.659352
"""
from __future__ import unicode_literals
# revision identifiers, used by Alembic.
revision = '613e7c11dead'
down_revision = 'b5e6dd3449dd'
from alembic import op
def upgrade():
    # Create indices for the resource-owner foreign keys; op.f() applies
    # the configured naming convention to the generated index names.
    op.create_index(op.f('ix_resources_owner_group_id'), 'resources', ['owner_group_id'])
    op.create_index(op.f('ix_resources_owner_user_id'), 'resources', ['owner_user_id'])
def downgrade():
    # Intentionally a no-op: the indices are not dropped on downgrade.
    pass
| bsd-3-clause | Python | |
88361e72624243b4e7fa45122e44a6843f21e2c6 | Add spider for Marriott hotels group | iandees/all-the-places,iandees/all-the-places,iandees/all-the-places | locations/spiders/marriott.py | locations/spiders/marriott.py | import json
import re
import scrapy
from scrapy.selector import Selector
from locations.items import GeojsonPointItem
class MarriottHotels(scrapy.Spider):
    """Scrapy spider that emits GeojsonPointItem records for Marriott-group
    hotels, from two sources: the Marriott sitemap (per-hotel pages) and the
    Ritz-Carlton map-pins JSON endpoint."""
    name = "marriott"
    allowed_domains = ["marriott.com", "ritzcarlton.com"]
    download_delay = 0.2
    def start_requests(self):
        # Each seed URL gets its own parse callback.
        start_urls = [
            ('http://www.ritzcarlton.com/en/hotels/map/_jcr_content/par_content/locationmap.PINS.html', self.parse_ritz),
            ('https://www.marriott.com/sitemap.us.hws.1.xml', self.parse)
        ]
        for url, callback in start_urls:
            yield scrapy.Request(url=url, callback=callback)
    def parse_hotel(self, response):
        """Extract one hotel from a Marriott property page via its
        schema.org itemprop markup."""
        if "invalidProperty=true" in response.url:
            return
        street_address = response.xpath('//span[@itemprop="streetAddress"]/text()').extract_first()
        city = response.xpath('//span[@itemprop="addressLocality"]/text()').extract_first()
        state = response.xpath('//span[@itemprop="addressRegion"]/text()').extract_first()
        name = response.xpath('//div[contains(@class, "m-hotel-info")]//span[@itemprop="name"]/text()').extract_first()
        if name:
            name = name.replace(u'\u2122', '') # remove tm symbol
        # Brand is taken from the second breadcrumb entry.
        brand = response.xpath('//ul[contains(@class,"tile-breadcrumbs")]/li[2]/a/span/text()').extract_first()
        if brand == "Design HotelsTM":
            brand = "Design Hotels"
        properties = {
            # ref: last path component of the property URL.
            'ref': re.search(r'.*/(.*)/$', response.url).groups()[0],
            'name': name,
            'addr_full': street_address,
            'city': city,
            'state': state,
            'postcode': response.xpath('//span[@itemprop="postalCode"]/text()').extract_first(),
            'country': response.xpath('//span[@itemprop="addressCountry"]/text()').extract_first(),
            'phone': (response.xpath('//span[@itemprop="telephone"]/text()').extract_first() or '').strip('| '),
            'lat': float(response.xpath('//span[@itemprop="latitude"]/text()').extract_first()),
            'lon': float(response.xpath('//span[@itemprop="longitude"]/text()').extract_first()),
            'website': response.url,
            'extras': {
                'brand': brand
            }
        }
        yield GeojsonPointItem(**properties)
    def parse_ritz(self, response):
        """Extract Ritz-Carlton hotels from the map-pins JSON embedded in the
        page's JavaScript."""
        # NOTE(review): 'Ritz-Carton Reserve' looks like a typo for
        # 'Ritz-Carlton Reserve', but it is emitted data — confirm before
        # changing downstream consumers.
        brands = {
            'upcoming': 'Ritz-Carlton',
            'reserve': 'Ritz-Carton Reserve',
        }
        data = re.search(r'trc.pageProperties.trcMap.mapData= (.*)', response.text).groups()[0]
        data = json.loads(data.strip(';\r\n '))
        for item in data["response"]["list"]["listItems"]["items"]:
            if item["venue"]["type"] in ('ritz', 'upcoming'):
                name = "The Ritz-Carlton " + item["venue"]["name"]
            else:
                name = item["venue"]["name"]
            # The phone number is the final <br/>-separated segment of the tip text.
            phone = re.split(r'<br./>', item["tip"]["text"])[-1]
            properties = {
                # ref: the last two path components of the tip link, joined.
                'ref': "-".join(re.search(r'.*/(.*)/(.*)$', item["tip"]["link"]["url"]).groups()),
                'name': name,
                'addr_full': item["venue"]["location"]["address"].strip(),
                'city': item["venue"]["location"]["city"].strip(),
                'state': item["venue"]["location"]["state"].strip(),
                'postcode': item["venue"]["location"]["postalCode"].strip('. '),
                'country': item["venue"]["location"]["country"].strip(),
                'phone': phone.strip(),
                'lat': float(item["venue"]["location"]["lat"]),
                'lon': float(item["venue"]["location"]["lng"]),
                'website': item["tip"]["link"]["url"],
                'extras': {
                    'brand': brands.get(item["venue"]["type"], 'Ritz-Carlton')
                }
            }
            yield GeojsonPointItem(**properties)
    def parse(self, response):
        """Walk the sitemap XML and schedule one parse_hotel request per
        <loc> URL."""
        xml = Selector(response)
        xml.remove_namespaces()
        urls = xml.xpath('//loc/text()').extract()
        for url in urls:
            yield scrapy.Request(response.urljoin(url), callback=self.parse_hotel)
| mit | Python | |
02900e9135b26de835692aacdb5f1f332f582fa9 | Create main_menu_ui.py | lakewik/storj-gui-client | main_menu_ui.py | main_menu_ui.py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'main_menu.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shims: on PyQt builds where these attributes are missing
# (AttributeError), fall back to an identity converter and a translate()
# call without the encoding argument.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainMenu(object):
    """Auto-generated (pyuic4) UI definition for the main menu window.
    Per the header above, hand edits are lost on regeneration."""
    def setupUi(self, MainMenu):
        # Build and position all widgets with fixed geometries.
        MainMenu.setObjectName(_fromUtf8("MainMenu"))
        MainMenu.resize(715, 257)
        self.label = QtGui.QLabel(MainMenu)
        self.label.setGeometry(QtCore.QRect(230, 10, 241, 41))
        self.label.setObjectName(_fromUtf8("label"))
        self.bucket_menager_bt = QtGui.QPushButton(MainMenu)
        self.bucket_menager_bt.setGeometry(QtCore.QRect(30, 70, 211, 31))
        self.bucket_menager_bt.setObjectName(_fromUtf8("bucket_menager_bt"))
        self.settings_bt = QtGui.QPushButton(MainMenu)
        self.settings_bt.setGeometry(QtCore.QRect(30, 110, 211, 31))
        self.settings_bt.setObjectName(_fromUtf8("settings_bt"))
        self.uploader_bt = QtGui.QPushButton(MainMenu)
        self.uploader_bt.setGeometry(QtCore.QRect(30, 150, 331, 41))
        self.uploader_bt.setObjectName(_fromUtf8("uploader_bt"))
        self.file_manager_bt = QtGui.QPushButton(MainMenu)
        self.file_manager_bt.setGeometry(QtCore.QRect(480, 70, 211, 71))
        self.file_manager_bt.setObjectName(_fromUtf8("file_manager_bt"))
        self.about_bt = QtGui.QPushButton(MainMenu)
        self.about_bt.setGeometry(QtCore.QRect(480, 210, 211, 31))
        self.about_bt.setObjectName(_fromUtf8("about_bt"))
        self.label_2 = QtGui.QLabel(MainMenu)
        self.label_2.setGeometry(QtCore.QRect(40, 210, 91, 31))
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.account_label = QtGui.QLabel(MainMenu)
        self.account_label.setGeometry(QtCore.QRect(150, 210, 311, 31))
        self.account_label.setObjectName(_fromUtf8("account_label"))
        self.create_bucket_bt = QtGui.QPushButton(MainMenu)
        self.create_bucket_bt.setGeometry(QtCore.QRect(260, 70, 211, 71))
        self.create_bucket_bt.setObjectName(_fromUtf8("create_bucket_bt"))
        self.downloader_bt = QtGui.QPushButton(MainMenu)
        self.downloader_bt.setGeometry(QtCore.QRect(370, 150, 321, 41))
        self.downloader_bt.setObjectName(_fromUtf8("downloader_bt"))
        self.retranslateUi(MainMenu)
        QtCore.QMetaObject.connectSlotsByName(MainMenu)
    def retranslateUi(self, MainMenu):
        # Set all user-visible strings (routed through Qt translation).
        MainMenu.setWindowTitle(_translate("MainMenu", "Main menu - Storj GUI Client", None))
        self.label.setText(_translate("MainMenu", "<html><head/><body><p><span style=\" font-size:16pt; font-weight:600;\">Storj GUI Client - Menu</span></p></body></html>", None))
        self.bucket_menager_bt.setText(_translate("MainMenu", "Bucekts manager", None))
        self.settings_bt.setText(_translate("MainMenu", "Settings", None))
        self.uploader_bt.setText(_translate("MainMenu", "File uploader [UNFINISHED]", None))
        self.file_manager_bt.setText(_translate("MainMenu", "File manager", None))
        self.about_bt.setText(_translate("MainMenu", "About", None))
        self.label_2.setText(_translate("MainMenu", "<html><head/><body><p><span style=\" font-size:16pt; font-weight:600;\">Account:</span></p></body></html>", None))
        self.account_label.setText(_translate("MainMenu", "<html><head/><body><p><span style=\" font-size:16pt; font-weight:600;\">abc@def.pl</span></p></body></html>", None))
        self.create_bucket_bt.setText(_translate("MainMenu", "Create bucket", None))
        self.downloader_bt.setText(_translate("MainMenu", "File downloader [UNFINISHED]", None))
| mit | Python | |
1b5a8307fd816c935f197993d1ead07cffc01892 | add simple consumer | fortime/pykafka,sontek/pykafka,appsoma/pykafka,vortec/pykafka,thedrow/samsa,benauthor/pykafka,jofusa/pykafka,appsoma/pykafka,sammerry/pykafka,aeroevan/pykafka,aeroevan/pykafka,fortime/pykafka,wikimedia/operations-debs-python-pykafka,jofusa/pykafka,thedrow/samsa,sontek/pykafka,tempbottle/pykafka,tempbottle/pykafka,benauthor/pykafka,sammerry/pykafka,thedrow/samsa,wikimedia/operations-debs-python-pykafka,vortec/pykafka,wikimedia/operations-debs-python-pykafka,benauthor/pykafka,yungchin/pykafka,yungchin/pykafka | samsa/rdsamsa/consumer.py | samsa/rdsamsa/consumer.py | from collections import namedtuple
from copy import copy
import logging
import rd_kafka
from samsa import abstract
logger = logging.getLogger(__name__)
# Lightweight record for a consumed message; field order matters to callers.
Message = namedtuple("Message", ["topic", "payload", "key", "offset"])
# TODO: this namedtuple is a placeholder until samsa.common.Message etc.
# are fleshed out.
class Consumer(abstract.Consumer):
    """Consumer for a single topic, reading a fixed set of partitions
    through a dedicated rd_kafka consumer handle."""

    def __init__(self, client, topic, partitions=None):
        """
        :param client: cluster client; used to resolve *topic* by name.
        :param topic: a topic object, or its name as a string.
        :param partitions: partitions to read (ids or partition objects);
                           defaults to all partitions of the topic.
        """
        if isinstance(topic, basestring):
            topic = client[topic]
        self._topic = topic
        self._partitions = partitions or self.topic.partitions # ie all

        config, topic_config = self._configure()
        rdk_consumer = rd_kafka.Consumer(config)
        self.rdk_topic = rdk_consumer.open_topic(self.topic.name, topic_config)
        self.rdk_queue = rdk_consumer.new_queue()
        for p in self.partitions:
            if not isinstance(p, int):
                p = p.id
            self.rdk_queue.add_toppar(self.rdk_topic, p, start_offset=0)
            # FIXME ^^ change python-librdkafka to provide default for offset
            # (which should probably be OFFSET_STORED)

        # Note that this ^^ uses a new rdk_consumer handle for every instance;
        # this avoids the confusion of not being allowed a second reader on
        # the same toppar (a restriction python-librdkafka would impose if
        # we'd use a common rdk_consumer).  The extra overhead should be
        # acceptable for most uses.

    def _configure(self):
        """Return (consumer_config, topic_config) derived from the cluster."""
        config = copy(self.topic.cluster.config)
        topic_config = {} # TODO where do we expose this?
        # TODO config.update( ...stuff like group.id ...)
        return config, topic_config

    @property
    def topic(self):
        return self._topic

    @property
    def partitions(self):
        return self._partitions # TODO check if Partitions or ints are expected

    def __iter__(self):
        raise NotImplementedError
        # TODO implement StopIteration in python-librdkafka

    def consume(self, timeout=1):
        """Return the next Message, or None if *timeout* seconds elapse."""
        msg = self.rdk_queue.consume(timeout_ms=1000 * timeout)
        if msg is None:
            return None
        # Bug fix: Message's declared field order is (topic, payload, key,
        # offset); the previous code passed key before payload, swapping
        # the two fields.
        return Message(self.topic.name,
                       msg.payload[:],
                       msg.key[:],
                       msg.offset)
        # XXX copy key/payload to native str in python-librdkafka instead?
| apache-2.0 | Python | |
46a652e13da604776d745d2f09e02e4f75dc3fd7 | test commit 1 | rooneygw/test | testfile.py | testfile.py | print("Hello")
| mit | Python | |
afaed1b9c6889312cc3e6fa992a03c500470e967 | add test for feature sequence | ginkgobioworks/edge,ginkgobioworks/edge,ginkgobioworks/edge,ginkgobioworks/edge | src/edge/tests/test_feature.py | src/edge/tests/test_feature.py | from django.test import TestCase
from edge.models import Fragment
class FeatureTests(TestCase):
    def setUp(self):
        # A 13bp fragment shared by all tests in this class.
        self.root_sequence = "agttcgaggctga"
        self.root = Fragment.create_with_sequence("Foo", self.root_sequence)
    def test_sequence_positive_strand(self):
        # Annotating bases 3-5 on strand +1 yields the forward subsequence.
        feature = self.root.annotate(3, 5, "A1", "gene", 1)
        self.assertEqual(feature.sequence, "ttc")
    def test_sequence_negative_strand(self):
        # Strand -1 yields the reverse complement of bases 3-5 ("ttc" -> "gaa").
        feature = self.root.annotate(3, 5, "A1", "gene", -1)
        self.assertEqual(feature.sequence, "gaa")
| mit | Python | |
bf34c1dbb37865e62e97e3463645c7df16a4ca08 | Add an interface for Markov Chains | iluxonchik/lyricist | markov_chain.py | markov_chain.py | from random import choice
class MarkovChain(object):
    """A Markov chain whose states are single words."""

    def __init__(self, text=None):
        """Create an empty chain, optionally seeding it from *text*."""
        self._states_map = {}
        if text is not None:
            self.add_text(text)

    def add_text(self, text, separator=" "):
        """Feed *text* into the chain: each word records the word that
        follows it.  Returns self for chaining."""
        words = text.split(separator)
        for current, successor in zip(words, words[1:]):
            self._states_map.setdefault(current, []).append(successor)
        return self

    def get_word(self, key):
        """Return a randomly chosen successor of *key*, or None if *key*
        has never been seen as a predecessor."""
        successors = self._states_map.get(key)
        if successors is None:
            return None
        return choice(successors)
| mit | Python | |
055c1b0d140e2c5659c2767fd123fc69d0f83859 | Create clean_up_d3s.py | bearing/dosenet-raspberrypi,cllamb0/dosenet-raspberrypi,bearing/dosenet-raspberrypi,yarocoder/dosenet-raspberrypi,yarocoder/dosenet-raspberrypi,cllamb0/dosenet-raspberrypi,tybtab/dosenet-raspberrypi,tybtab/dosenet-raspberrypi | clean_up_d3s.py | clean_up_d3s.py | from globalvalues import RPI
if RPI:
import RPi.GPIO as GPIO
GPIO.cleanup()
| mit | Python | |
fb10273ee2007846fa760d36ebb6806b35407fa3 | add script/json/ts-urllib2.py | Zex/Starter,Zex/Starter,Zex/Starter | script/json/ts-urllib2.py | script/json/ts-urllib2.py | #!/usr/bin/env python
#
# ts-urllib2.py
#
# Author: Zex <top_zlynch@yahoo.com>
#
import urllib2
import json
from os import path, mkdir
from basic import *
# Ensure the response-dump directory exists before any request is made.
if not path.isdir(RESPONSE_DIR):
    mkdir(RESPONSE_DIR)
def case():
    """Issue a GET request to URL (from basic) and dump the JSON response
    body to a file under RESPONSE_DIR named after the URL."""
    headers = {
        #'Content-Type' : 'application/json'
        #'Content-Type' : 'text/html',
    }
    data = {
    }
#    url = 'http://' + '127.0.0.1:5000'#/sos/login.html'
#    auth = urllib2.HTTPPasswordMgrWithDefaultRealm()
#    auth.add_password(None, url, USERNAME, PASSWORD)
#    auth_handler = urllib2.HTTPBasicAuthHandler(auth)
#
#    opener = urllib2.build_opener(auth_handler)
#    urllib2.install_opener(opener)
#    for k in headers.items():
#        opener.addheaders.append(k)
#    rsp = opener.open(url, json.dumps(data))
#    with open(RESPONSE_DIR+'/rsp_'+url.replace('/','')+'.json', 'w') as fd:
#        print rsp
#
#    opener.close()
    url = URL# + '/accesspoint'
    # Force GET despite the request carrying a body.
    req = urllib2.Request(url, json.dumps(data), headers)
    req.get_method = lambda:'GET'
    rsp = urllib2.urlopen(req)
    with open(RESPONSE_DIR+'/rsp_'+url.replace('/','.')+'.json', 'w') as fd:
        fd.write(rsp.read() + '\n')
try:
    case()
except Exception as e:
    # Best-effort script: report the failure rather than crash.
    print e
| mit | Python | |
65cc9fd3ada01790484469028875e580e8447c85 | Update migrations to current state (#65) | ArabellaTech/aa-stripe | aa_stripe/migrations/0021_auto_20190906_1623.py | aa_stripe/migrations/0021_auto_20190906_1623.py | # Generated by Django 2.1.11 on 2019-09-06 20:23
import django_extensions.db.fields.json
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration that re-declares several JSON/char fields
    (no schema change beyond the field definitions listed below)."""
    dependencies = [
        ('aa_stripe', '0020_stripecharge_statement_descriptor'),
    ]
    operations = [
        migrations.AlterField(
            model_name='stripecharge',
            name='stripe_refund_id',
            field=models.CharField(blank=True, db_index=True, max_length=255),
        ),
        migrations.AlterField(
            model_name='stripecharge',
            name='stripe_response',
            field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),
        ),
        migrations.AlterField(
            model_name='stripecoupon',
            name='metadata',
            field=django_extensions.db.fields.json.JSONField(blank=True, default=dict, help_text='Set of key/value pairs that you can attach to an object. It can be useful for storing additional information about the object in a structured format.'),
        ),
        migrations.AlterField(
            model_name='stripecoupon',
            name='stripe_response',
            field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),
        ),
        # NOTE(review): default=[] is a shared mutable default (other fields
        # use the callable ``dict``) — confirm this is intentional.
        migrations.AlterField(
            model_name='stripecustomer',
            name='sources',
            field=django_extensions.db.fields.json.JSONField(blank=True, default=[]),
        ),
        migrations.AlterField(
            model_name='stripecustomer',
            name='stripe_js_response',
            field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),
        ),
        migrations.AlterField(
            model_name='stripecustomer',
            name='stripe_response',
            field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),
        ),
        migrations.AlterField(
            model_name='stripesubscription',
            name='metadata',
            field=django_extensions.db.fields.json.JSONField(blank=True, default=dict, help_text='https://stripe.com/docs/api/python#create_subscription-metadata'),
        ),
        migrations.AlterField(
            model_name='stripesubscription',
            name='stripe_response',
            field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),
        ),
        migrations.AlterField(
            model_name='stripesubscriptionplan',
            name='metadata',
            field=django_extensions.db.fields.json.JSONField(blank=True, default=dict, help_text='A set of key/value pairs that you can attach to a plan object. It can be useful for storing additional information about the plan in a structured format.'),
        ),
        migrations.AlterField(
            model_name='stripesubscriptionplan',
            name='stripe_response',
            field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),
        ),
        migrations.AlterField(
            model_name='stripewebhook',
            name='raw_data',
            field=django_extensions.db.fields.json.JSONField(blank=True, default=dict),
        ),
    ]
| mit | Python | |
6f415dccb547cae95aeb9946e503cadd99f63bd6 | add backtest_files.py | joequant/sptrader,joequant/sptrader,joequant/sptrader,joequant/sptrader,joequant/sptrader | scripts/backtest_files.py | scripts/backtest_files.py | #!/usr/bin/python3
import json
import sys
from pprint import pprint
import requests
import re
import os
import shutil
# Usage: backtest_files.py <config-name> <data-file> [<data-file> ...]
# Loads data/<config-name>.json, copies each data file into the data dir,
# POSTs a backtest request per file, and writes the HTML response next to
# the script as <data-file-stem>.html.
location = os.path.dirname(os.path.realpath(__file__))
data_dir = os.path.join(location, "..", "data")
config_name = sys.argv[1]
items = sys.argv[2:]
config_file = os.path.join(data_dir, config_name + ".json")
print("looking for ", config_file)
with open(config_file) as data_file:
    data = json.load(data_file)
for i in items:
    base_name = os.path.basename(i)
    data_file = os.path.join(data_dir, base_name)
    if os.path.exists(data_file):
        print(i, " already exists in data dir", data_dir)
    else:
        print("copying ", i, " to ", data_dir)
        shutil.copy(i, data_file)
    # Point the backtest at this ticker file and run it.
    data['tickersource'] = base_name
    r = requests.post("http://localhost:5000/backtest",
                      data)
    k = base_name.rsplit(".", 1)
    outfile = k[0] + ".html"
    print("writing to ", outfile)
    with open(outfile, 'w') as outf:
        print(r.text, file=outf)
| bsd-2-clause | Python | |
e1b36f6d875ca358993af03ae0fb95d3def87f29 | Create weather_cnn3d.py | prl900/DeepWeather | weather_cnn3d.py | weather_cnn3d.py | import numpy as np
import os.path
import sys
from keras.models import Sequential, load_model
from keras.layers import Convolution2D, MaxPooling2D, Convolution3D, MaxPooling3D
#from keras.layers.convolutional import Conv2D, Conv3D
#from keras.layers.pooling import MaxPooling2D, MaxPooling3D
from keras.layers.core import Flatten, Dense, Lambda
from keras import backend as K
from keras.optimizers import SGD, Adagrad, Adadelta
from keras.callbacks import CSVLogger
# Airport codes whose rain observations are stored in data/rain.npy;
# the list order matches the column order of that array (see get_rains).
airports = ['EIDW', 'EGLL', 'LFPG', 'LFBO', 'EGPH', 'EHAM', 'EBBR', 'LEMD', 'LEBL', 'LPPT', 'LIRF',
            'LIMC', 'LSZH', 'EDDM', 'EDFH', 'EDDT', 'EKCH', 'ENGM', 'ESSA', 'EFHK', 'LOWW']
def get_rains(code):
    # Return the rain column for airport *code* from data/rain.npy as int32;
    # the column index is the airport's position in the ``airports`` list.
    arr = np.load("data/rain.npy")
    idx = airports.index(code)
    return arr[:, idx].astype(np.int32)
def get_era_full(param, level):
    # Load data/<param><level>_uint8.npy and rescale the uint8 values
    # into float32 in [0, 1).
    arr = np.load("data/{}{}_uint8.npy".format(param, level))
    return arr.astype(np.float32) / 256.
def train_model(airport):
    """Train (or resume training) a 3D-CNN rain classifier for *airport*;
    the model checkpoint is saved to model_3d_<airport>.h5."""
    # Import data: three geopotential fields at different pressure levels,
    # stacked as channels.
    params = ["z", "z", "z"]
    levels = [500, 850, 1000]
    in1_var = get_era_full(params[0], levels[0])
    in2_var = get_era_full(params[1], levels[1])
    in3_var = get_era_full(params[2], levels[2])
    x = np.concatenate((np.expand_dims(in1_var, axis=3), np.expand_dims(in2_var, axis=3), np.expand_dims(in3_var, axis=3)), axis=3)
    # Build sliding 8-step windows: each sample is (80, 120, 8, 3).
    X = np.zeros((13141, 80, 120, 8, 3))
    for i in range(13141):
        X[i,:,:,:,:] = np.rollaxis(x[i:i+8, :, :, :],0,3)
    # Labels are offset by 7 so each window predicts its final step;
    # one-hot encode the binary rain label.
    Y = get_rains(airport)[7:]
    b = np.zeros((Y.shape[0], 2))
    b[np.arange(Y.shape[0]), Y] = 1
    model = None
    # Resume from an existing checkpoint if present.
    if os.path.isfile('model_3d_{}.h5'.format(airport)):
        model = load_model('model_3d_{}.h5'.format(airport))
    else:
        model = Sequential()
        model.add(Convolution3D(128, (3, 3, 3), padding='same', activation='relu', name='block1_conv1', input_shape=(80,120,8,3)))
        model.add(MaxPooling3D((2, 2, 2), strides=(2, 2, 2)))
        model.add(Convolution3D(256, (3, 3, 3), padding='same', activation='relu', name='block2_conv1'))
        model.add(MaxPooling3D((2, 2, 2), strides=(2, 2, 2)))
        model.add(Flatten())
        model.add(Dense(2, activation = 'softmax', name='final_fully_connected'))
    adagrad = Adagrad(lr=0.0002)
    model.compile(loss='categorical_crossentropy', optimizer = adagrad, metrics=['accuracy'])
    csv_logger = CSVLogger('{}.log'.format(airport))
    model.fit(X, b, batch_size=20, epochs=100, verbose=1, validation_split=0.2, callbacks=[csv_logger])
    model.save('model_3d_{}.h5'.format(airport))
# Usage: python weather_cnn3d.py <airport code>
if __name__ == '__main__':
    airport = sys.argv[1]
    train_model(airport)
| apache-2.0 | Python | |
7e1058821f165e60d76eee1b07a7b411f3439408 | Create uber.py | jasuka/pyBot,jasuka/pyBot | modules/uber.py | modules/uber.py | def uber(self):
self.send_chan("Moi")
| mit | Python | |
b8795def87635aa8192f5f8cf64afe1a22ec30f1 | Add findCsetsIntersection.py script to find intersecting changesets in our knownBrokenRanges. | MozillaSecurity/funfuzz,nth10sd/funfuzz,MozillaSecurity/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz,nth10sd/funfuzz | autobisect-js/findCsetsIntersection.py | autobisect-js/findCsetsIntersection.py | #!/usr/bin/env python
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import sys
from optparse import OptionParser
from ignoreAndEarliestWorkingLists import knownBrokenRanges
path0 = os.path.dirname(os.path.abspath(__file__))
path1 = os.path.abspath(os.path.join(path0, os.pardir, 'util'))
sys.path.append(path1)
from subprocesses import captureStdout, normExpUserPath
def parseOptions():
    # Parse the command line and return the (validated) repository
    # directory given via -R/--repo; aborts via assert if it is missing
    # or not an existing directory.
    parser = OptionParser()
    parser.add_option('-R', '--repo', dest='rDir',
                      help='Sets the repository to analyze..')
    options, args = parser.parse_args()
    assert options.rDir is not None
    assert os.path.isdir(normExpUserPath(options.rDir))
    return options.rDir
def countCsets(revset, rdir):
    # Count the changesets matched by *revset* in the hg repo at *rdir*.
    # --template=1 prints one character per changeset, so the output
    # length equals the changeset count.
    listCmd = ['hg', 'log', '-r', revset, '--template=1']
    rangeIntersectionOnes = captureStdout(listCmd, currWorkingDir=rdir)
    assert rangeIntersectionOnes[1] == 0
    return len(rangeIntersectionOnes[0])
def main():
    # For every ordered pair of known-broken revsets, count and report
    # the changesets the two ranges have in common.
    repoDir = parseOptions()
    brokenRanges = knownBrokenRanges()
    cnt = 0
    for i in range(0, len(brokenRanges)):
        print 'Analyzing revset: ' + brokenRanges[i] + \
            ' which matches ' + str(countCsets(brokenRanges[i], repoDir)) + ' changesets'
        for j in range(i + 1, len(brokenRanges)):
            cnt += 1
            print 'Number ' + str(cnt) + ': Compared against revset: ' + brokenRanges[j]
            overlap = countCsets(brokenRanges[i] + ' and ' + brokenRanges[j], repoDir)
            if overlap > 0:
                print('Number of overlapping changesets: ' + str(overlap))
        cnt = 0
if __name__ == '__main__':
    main()
| mpl-2.0 | Python | |
e81f823a1542bf24caa081b299352e593e1a10c9 | Add a utility function | dseomn/cohydra | cohydra/util.py | cohydra/util.py | import os
def recursive_scandir(top_dir, dir_first=True):
    """Recursively scan a path.

    Args:
        top_dir: The path to scan.
        dir_first: If true, yield a directory before its contents.
            Otherwise, yield a directory's contents before the
            directory itself.

    Returns:
        A generator of (relative path, os.DirEntry) tuples for every
        entry below top_dir; top_dir itself is not included.
    """
    def walk(relpath, dir_entry):
        # dir_entry is None only for the top-level call, which must not
        # be yielded.
        if dir_first and dir_entry is not None:
            yield relpath, dir_entry
        for entry in os.scandir(os.path.join(top_dir, relpath)):
            child_relpath = os.path.join(relpath, entry.name)
            if entry.is_dir():
                for item in walk(child_relpath, entry):
                    yield item
            else:
                yield child_relpath, entry
        if not dir_first and dir_entry is not None:
            yield relpath, dir_entry
    return walk('', None)
| apache-2.0 | Python | |
e29de047d770de70f3745ae410b62d0ddad4b0b4 | Add one test case for IOTOS-358 | ostroproject/meta-iotqa,ostroproject/meta-iotqa,wanghongjuan/meta-iotqa-1,ostroproject/meta-iotqa,daweiwu/meta-iotqa-1,ostroproject/meta-iotqa,daweiwu/meta-iotqa-1,ostroproject/meta-iotqa,wanghongjuan/meta-iotqa-1,wanghongjuan/meta-iotqa-1,daweiwu/meta-iotqa-1,daweiwu/meta-iotqa-1,wanghongjuan/meta-iotqa-1,wanghongjuan/meta-iotqa-1,daweiwu/meta-iotqa-1 | lib/oeqa/runtime/misc/appFW.py | lib/oeqa/runtime/misc/appFW.py | from oeqa.oetest import oeRuntimeTest
class AppFwTest(oeRuntimeTest):
    """ App Framework testing """
    def test_sqlite_integration(self):
        """ test sqlite is integrated in image """
        # A zero exit status from rpm|grep means a sqlite package is installed.
        (status,output) = self.target.run("rpm -qa | grep sqlite")
        self.assertEqual(status, 0, output)
| mit | Python | |
6b4507793f6c536a604ba0e81143453ff9a9eab2 | Add a rough-and-ready proximity | westpark/robotics | piwars/controllers/proximity.py | piwars/controllers/proximity.py | # -*- coding: utf-8 -*-
import os, sys
import collections
import itertools
import queue
import statistics
import threading
from ..core import config, exc, logging, utils
log = logging.logger(__package__)
from . import remote
from ..sensors import ultrasonic
#
# This controller is trying to have the robot get as close as it can
# to a wall without swerving too far from side to side. Both sensors
# should be pointing forwards and we'll be detecting unwanted swerve
# by determining when they are offset from each other.
#
#
# How much difference does there have to be between one
# sensor and the other to constitute a swerve?
# NOTE(review): units assumed to be mm, matching the sensors'
# find_distance_mm() readings — confirm the tuning.
#
OFFSET_THRESHOLD = 10
# Stop when either sensor reads closer than this.
PROXIMITY_THRESHOLD = 30
class Controller(remote.Controller):
    """Wall-approach controller: drives forward while two forward-facing
    ultrasonic sensors watch the distance; stops near the wall and
    counter-steers when the two sensors disagree (a swerve)."""

    #: Number of recent readings kept per sensor for outlier rejection.
    N_SAMPLES = 10

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.started = False
        # position ("left"/"right") -> queue.Queue of raw readings
        self.sensor_queues = {}
        self.sensor_threads = []
        # position -> deque of the last N_SAMPLES readings
        self.distances = {}

    def dispatch(self, action, params):
        """Refuse every action except start/stop until started."""
        if not self.started and action not in ("start", "stop"):
            log.warn("Cannot dispatch %s : %s when not yet started", action, ", ".join(params))
        else:
            super().dispatch(action, params)

    def distance_sensor(self, sensor, queue):
        """Thread body: push distance readings onto *queue* until finished.

        The *queue* parameter shadows the stdlib ``queue`` module inside
        this method only; the module is not used here, so it is harmless.
        """
        sensor.steady_trigger()
        while not self.finished():
            queue.put(sensor.find_distance_mm())

    def handle_start(self):
        """Spin up one reader thread per sensor, then drive forward."""
        for position in ("left", "right"):
            sensor_name = "%s_ultrasonic" % position
            sensor_config = config[sensor_name]
            sensor = ultrasonic.Sensor(sensor_config.trigger_pin, sensor_config.echo_pin)
            # Bug fix: this local used to be named ``queue``, shadowing the
            # stdlib module and breaking queue.Queue() on the second loop
            # iteration.
            readings = self.sensor_queues[position] = queue.Queue()
            # Bug fix: was ``deque.deque`` — ``deque`` is not imported;
            # ``collections`` is (top of file).
            self.distances[position] = collections.deque([], self.N_SAMPLES)
            sensor_thread = threading.Thread(target=self.distance_sensor, args=(sensor, readings))
            self.sensor_threads.append(sensor_thread)
        for sensor_thread in self.sensor_threads:
            # Bug fix: was ``thread.start()`` with ``thread`` undefined.
            sensor_thread.start()
        self.started = True
        self.robot.forward()

    def handle_stop(self):
        self.robot.stop()
        self.started = False

    def read_distances(self):
        """Add any new distance readings from the sensors
        (at most one per sensor per call)."""
        for position in self.sensor_queues:
            try:
                distance = self.sensor_queues[position].get_nowait()
            except queue.Empty:
                continue
            else:
                self.distances[position].append(distance)

    def current_distance(self, distances):
        """Discard any outliers and return the most recent reading
        for this sensor.

        NOTE(review): raises IndexError until the first reading arrives —
        confirm callers only run after readings exist, or add a guard.
        """
        # Bug fix: the assignment operator was missing on this line.
        robust_distances = utils.without_outliers(distances)
        return robust_distances[-1]

    def effective_offset(self):
        """Return left-minus-right distance (positive when the left sensor
        sees farther)."""
        # Bug fixes: missing ``=`` and missing ``self.`` on both calls.
        left_distance = self.current_distance(self.distances["left"])
        right_distance = self.current_distance(self.distances["right"])
        return left_distance - right_distance

    def generate_commands(self):
        #
        # Handle remote commands first
        #
        super().generate_commands()
        #
        # Pick up the latest movements recorded from the lateral sensors
        #
        self.read_distances()
        #
        # If either distance is within the proximity threshold, stop.
        # Otherwise, if the difference between the two is greater than
        # the offset threshold, swerve the other way.
        #
        # Bug fix: current_distance is a method, so it must be called
        # through ``self``.
        left_distance = self.current_distance(self.distances["left"])
        right_distance = self.current_distance(self.distances["right"])
        if left_distance < PROXIMITY_THRESHOLD or right_distance < PROXIMITY_THRESHOLD:
            self.handle_stop()
        elif left_distance - right_distance > OFFSET_THRESHOLD:
            self.robot.turn("right")
        elif right_distance - left_distance > OFFSET_THRESHOLD:
            self.robot.turn("left")
| mit | Python | |
1051ec35f33c3e7a3946af3cf8a11a86dc9265a0 | Create utility module | thelonious/g2x,gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x,gizmo-cda/g2x,gizmo-cda/g2x | app_v2/utils.py | app_v2/utils.py | def map_range(x, in_min, in_max, out_min, out_max):
out_delta = out_max - out_min
in_delta = in_max - in_min
return (x - in_min) * out_delta / in_delta + out_min
| mit | Python | |
d3a3d4d5b380ea18b767515dfadfc0256d26e1eb | Add a snippet. | jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets | python/tkinter/python3/matplotlib_canvas_using_class_and_toolbar_and_keyboard_events.py | python/tkinter/python3/matplotlib_canvas_using_class_and_toolbar_and_keyboard_events.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Jérémie DECOCK (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# See: http://matplotlib.sourceforge.net/examples/user_interfaces/embedding_in_tk.html
# http://matplotlib.sourceforge.net/examples/user_interfaces/embedding_in_tk2.html
# http://stackoverflow.com/questions/18675266/how-to-make-matplotlibpyplot-resizeable-with-the-tkinter-window-in-python
import matplotlib
matplotlib.use('TkAgg')
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.backends.backend_tkagg import FigureCanvasTkAgg, NavigationToolbar2TkAgg
# Implement the default Matplotlib key bindings
# See: http://matplotlib.org/users/navigation_toolbar.html#navigation-keyboard-shortcuts
from matplotlib.backend_bases import key_press_handler
import tkinter as tk
class Gui:
    """Tk window embedding a Matplotlib figure with its navigation
    toolbar and the default Matplotlib key bindings."""
    def __init__(self, root):
        # Matplotlib ##################
        self.fig = plt.figure(figsize=(8.0, 8.0))
        # Make widgets ################
        self.root = root
        # Add a callback on WM_DELETE_WINDOW events
        self.root.protocol("WM_DELETE_WINDOW", self.quit)
        # MPL canvas
        self.canvas = FigureCanvasTkAgg(self.fig, master=self.root)
        self.canvas.get_tk_widget().pack(fill="both", expand=True)
        # MPL toolbar
        toolbar = NavigationToolbar2TkAgg(self.canvas, self.root)
        toolbar.update()
        self.canvas._tkcanvas.pack(side=tk.TOP, fill=tk.BOTH, expand=1)
        # Default Matplotlib key bindings
        # See: http://matplotlib.org/users/navigation_toolbar.html#navigation-keyboard-shortcuts
        def on_key_event(event):
            print('you pressed %s'%event.key)
            key_press_handler(event, self.canvas, toolbar)
        self.canvas.mpl_connect('key_press_event', on_key_event)
        # Draw the figure #############
        self.draw_figure()
    def draw_figure(self):
        """
        Draw the figure (a sine curve over [-10, 10)).
        """
        self.fig.clf()
        ax = self.fig.add_subplot(111)
        x = np.arange(-10, 10, 0.01)
        y = np.sin(x)
        ax.plot(x, y)
        self.fig.canvas.draw()
    def run(self):
        """
        Launch the main loop (Tk event loop).
        """
        self.root.mainloop()
    def quit(self):
        """
        Clean exit.
        """
        self.root.quit() # stops mainloop
        self.root.destroy() # this is necessary on Windows to prevent
                            # Fatal Python Error: PyEval_RestoreThread: NULL tstate
def main():
    """Build the GUI and run it until the window is closed."""
    root = tk.Tk()
    Gui(root).run()


if __name__ == "__main__":
    banner = 80 * "*"
    print()
    print(banner)
    print("See http://matplotlib.org/users/navigation_toolbar.html#navigation-keyboard-shortcuts for the list of navigation keyboard shortcuts")
    print(banner)
    print()
    main()
| mit | Python | |
c6d505ef6610ebb383e4f0a7a3d1b746f7fd5f75 | add group | conjurinc/api-python,conjurinc/api-python | conjur/group.py | conjur/group.py | #
# Copyright (C) 2014 Conjur Inc
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from conjur.role import Role
class Group(object):
    """A Conjur group, manipulated through its backing 'group' role."""

    def __init__(self, api, id):
        self.api = api
        self.id = id
        # All membership operations are delegated to this role.
        self.role = Role(api, 'group', id)

    def add_member(self, member, admin=False):
        """Grant this group's role to `member`, optionally with admin rights."""
        self.role.grant_to(member, admin)

    def remove_member(self, member):
        """Revoke this group's role from `member`."""
        self.role.revoke_from(member)
| apache-2.0 | Python | |
d14c0127a2489c03a50f49751b8b735202873e29 | Add RFID Reader example. | riklaunim/pyusb-keyboard-alike | black_rfid_reader.py | black_rfid_reader.py | from keyboard_alike import reader
class RFIDReader(reader.Reader):
    """
    This class supports common black RFID Readers for 125 kHz read only tokens
    http://www.dx.com/p/intelligent-id-card-usb-reader-174455
    """

    def extract_meaningful_data_from_chunk(self, raw_data):
        # The device emits a blank chunk after every good chunk, so keep
        # only the even-indexed chunks yielded by the parent implementation.
        chunks = super(RFIDReader, self).extract_meaningful_data_from_chunk(raw_data)
        for position, chunk in enumerate(chunks):
            if position % 2 == 0:
                yield chunk
if __name__ == "__main__":
    # NOTE(review): constructor arguments are presumably USB vendor id,
    # product id, and report sizing (84, 8) -- confirm against
    # keyboard_alike.reader.Reader's signature.
    # Renamed the local from `reader` to `rfid_reader` so it no longer
    # shadows the imported `reader` module.
    rfid_reader = RFIDReader(0x08ff, 0x0009, 84, 8)
    rfid_reader.initialize()
    print(rfid_reader.read().strip())
| mit | Python | |
a14b5aad9f6f15cfd9bee32c689e023a5dc94f19 | Add very basic visualization tool in python until I find a go lib which can do the same | jacksontj/dnms,jacksontj/dnms | visualize.py | visualize.py | '''
Create a visual representation of the various DAGs defined
'''
import requests
import networkx as nx
import matplotlib.pyplot as plt
if __name__ == '__main__':
    # Directed graph of every hop that appears in any route map.
    g = nx.DiGraph()
    # Label tables keyed the way networkx label-drawing helpers expect:
    # node name -> label, (src, dst) tuple -> label.
    labels = {
        'edges': {},
        'nodes': {},
    }
    nodes = {}
    # The local agent serves JSON mapping routeKey -> routeMap, where
    # routeMap['Path'] is the ordered list of hops (.iteritems() => Python 2).
    for routeKey, routeMap in requests.get('http://localhost:12345').json().iteritems():
        for i, node in enumerate(routeMap['Path']):
            g.add_node(node['Name'])
            labels['nodes'][node['Name']] = node['Name']
            # Link each hop to its predecessor to form the path edges.
            if i - 1 >= 0:
                g.add_edge(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])
                labels['edges'][(routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])] = (routeMap['Path'][i-1]['Name'], routeMap['Path'][i]['Name'])

    # Alternative layout/draw calls kept for reference:
    #pos = nx.spring_layout(g)
    #nx.draw(g, pos=pos)
    nx.draw_networkx(g, with_labels=True)

    # add labels
    #nx.draw_networkx_labels(g, pos, labels['nodes'])
    #nx.draw_networkx_edge_labels(g, pos, labels['edges'])

    # write out the graph
    plt.savefig('dag.png')
    plt.show()  # in case people have the required libraries to make it happen
| mit | Python | |
78c64d00df97edbec5f07213dc87ff30a7bb4ca9 | Create moveToObject.py | aaronfang/personal_scripts | af_scripts/misc/moveToObject.py | af_scripts/misc/moveToObject.py | # move to object
import pymel.core as pm
def move_to_object():
    """Translate the first-selected object onto the center of the second.

    Expects exactly two objects selected in Maya; with any other
    selection count the function silently does nothing.
    """
    selection = pm.ls(sl=1, fl=1)
    if len(selection) != 2:
        return
    # Second entry of the selection list is the reference object,
    # first entry is the one that gets moved.
    src, target = selection[1], selection[0]
    src_center = [pm.objectCenter(src, x=1),
                  pm.objectCenter(src, y=1),
                  pm.objectCenter(src, z=1)]
    target_center = [pm.objectCenter(target, x=1),
                     pm.objectCenter(target, y=1),
                     pm.objectCenter(target, z=1)]
    # Relative translation that moves target's center onto src's center.
    offset = tuple(s - t for s, t in zip(src_center, target_center))
    pm.xform(target, t=offset, r=1)


move_to_object()
| mit | Python | |
a84935319ceb492af854c4af933794c2f014c269 | Create http.py | AlanOndra/Waya | src/site/cms/http.py | src/site/cms/http.py | # -*- coding: utf-8 -*-
from os.path import abspath, join
import pickle
from datetime import *
from http.cookies import *
from cms.fs import *
class Status:
    """HTTP status lines (code + reason phrase) used for Response.status."""
    OK = '200 OK'
    Created = '201 Created'
    Moved = '301 Moved Permanently'
    Found = '302 Found'
    Redirect = '307 Temporary Redirect'
    PermRedirect = '308 Permanent Redirect'
    BadRequest = '400 Bad Request'
    Unauthorized = '401 Unauthorized'
    Forbidden = '403 Forbidden'
    NotFound = '404 Not Found'
    NotAllowed = '405 Method Not Allowed'
    NotAcceptable = '406 Not Acceptable'
    Teapot = '418 I\'m a teapot'
    ServerError = '500 Internal Server Error'
    Unavailable = '503 Service Unavailable'
class ContentType:
    """Common MIME type strings for the Content-Type response header."""
    BMP = 'image/x-windows-bmp'
    CSS = 'text/css'
    CSV = 'text/csv'
    GIF = 'image/gif'
    HTML = 'text/html'
    JS = 'application/javascript'
    JPEG = 'image/jpeg'
    JSON = 'application/json'
    MPEG = 'video/mpeg'
    MP3 = 'audio/mpeg'
    MP4 = 'video/mp4'
    OGG = 'audio/ogg'
    PDF = 'application/pdf'
    TXT = 'text/plain'
    PNG = 'image/png'
    QT = 'video/quicktime'
    RDF = 'application/rdf+xml'
    RSS = 'application/rss+xml'
    SOAP = 'application/soap+xml'
    SVG = 'image/svg+xml'
    VORB = 'audio/vorbis'
    Wave = 'audio/vnd.wave'
    WMV = 'video/x-ms-wmv'
    XHTML = 'application/xhtml+xml'
    XML = 'application/xml'
    ZIP = 'application/zip'
class Request:
    """Parsed HTTP request handed to page handlers.

    Note: `matches` (routing captures) is accepted but intentionally not
    stored, mirroring the original interface.
    """

    def __init__(self, env, cookies, path='/', root='/', matches=None, get=None, post=None):
        # Fixed: defaults were mutable literals ([], {}, {}) shared across
        # all instances; None defaults avoid that, and the `or {}`
        # normalization below keeps the observable behavior identical.
        self.env = env
        self.path = path or '/'
        self.root = root or '/'
        self.get = get or {}
        self.post = post or {}
        self.cookies = cookies
class Response:
    """HTTP response: status line, header dict, body text, optional cookies."""

    def __init__(self, status=Status.OK, headers=None, content='', cookies=None):
        # Fixed: `headers={}` was a shared mutable default; None + `or {}`
        # preserves behavior without sharing state between instances.
        self.status = status or Status.OK
        self.headers = headers or {}
        self.content = content or ''
        # The original if/else was equivalent to a direct assignment:
        # cookies is stored as-is, None meaning "no cookies set".
        self.cookies = cookies
class Session:
    """Process-wide session state persisted to disk via pickle.

    All members are class-level by design: `Session.key` must be assigned
    by the caller before load()/save() so the session file path can be
    derived from FS.sessdir.
    """

    key = ''
    filename = None  # resolved lazily from FS.sessdir + key
    data = {}

    def get(key, default=None):
        """Return the stored value for `key`, or `default` if absent."""
        if key in Session.data:
            return Session.data[key]
        return default

    def set(key, value):
        """Update `key`; a value of None deletes the key.

        Returns True when `key` already existed (and was updated or
        removed), False otherwise -- new keys cannot be created here.
        """
        if key not in Session.data:
            return False
        if value is None:
            data = dict(Session.data)
            del data[key]
            # Fixed: was `Sssion.data = data`, a NameError at runtime.
            Session.data = data
        else:
            Session.data[key] = value
        return True

    def load():
        """Load Session.data from the session file; True on success."""
        if Session.filename is None:
            Session.filename = abspath(join(FS.sessdir, Session.key + '.session'))
        try:
            # Fixed: pickle is a binary format; the file was opened in
            # text mode ('r') and never closed.
            with open(Session.filename, 'rb') as f:
                Session.data = pickle.load(f)
            return True
        except Exception:
            return False

    def save():
        """Write Session.data to the session file; True on success."""
        if Session.filename is None:
            Session.filename = abspath(join(FS.sessdir, Session.key + '.session'))
        try:
            # Fixed: was opened in text mode ('w'), which makes
            # pickle.dump fail (and always return False) on Python 3.
            with open(Session.filename, 'wb') as f:
                pickle.dump(Session.data, f)
            return True
        except Exception:
            return False
| bsd-3-clause | Python | |
7c1a1ee17b83a39d7dfb37b595090ccb7bc23532 | create default group for ODIN users (if necessary) | crs4/ProMort,crs4/ProMort,lucalianas/ProMort,lucalianas/ProMort,crs4/ProMort,lucalianas/ProMort | promort/odin/migrations/0001_initial.py | promort/odin/migrations/0001_initial.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-01 08:00
from __future__ import unicode_literals
from django.db import migrations
from promort import settings as pms
import logging
logger = logging.getLogger('promort')
def create_odin_group(apps, schema_editor):
    """Data migration step: ensure the default Odin members group exists.

    The group name is read from promort settings (DEFAULT_GROUPS) and
    created only if missing (get_or_create).
    """
    logger.info('Creating default group for Odin users (if needed)')
    Group = apps.get_model('auth', 'Group')
    group_name = pms.DEFAULT_GROUPS['odin_members']['name']
    group, created = Group.objects.get_or_create(name=group_name)
    logger.info('Group "%s" --- Created %s', group.name, created)
class Migration(migrations.Migration):
    # Must run after the worklist_manager tables exist.
    dependencies = [
        ('worklist_manager', '0002_auto_20170328_0652'),
    ]

    # Forward-only data migration (no reverse function supplied).
    operations = [
        migrations.RunPython(create_odin_group),
    ]
| mit | Python | |
62f1eec7e9c88a096855f3e4d513e2c34bca0b7c | remove old profile function | PnEcrins/GeoNature,PnEcrins/GeoNature,PnEcrins/GeoNature,PnEcrins/GeoNature | backend/geonature/migrations/versions/dde31e76ce45_remove_old_profile_function.py | backend/geonature/migrations/versions/dde31e76ce45_remove_old_profile_function.py | """remove old profile function
Revision ID: dde31e76ce45
Revises: 6f7d5549d49e
Create Date: 2022-01-06 10:23:55.290043
"""
import sqlalchemy as sa
from alembic import op
from geonature.utils.config import config
# revision identifiers, used by Alembic.
revision = 'dde31e76ce45'
down_revision = '6f7d5549d49e'
branch_labels = None
depends_on = None
def upgrade():
    # Remove the materialized view and the plpgsql helper that queried it
    # (see downgrade() for the definitions being dropped).
    op.execute("DROP MATERIALIZED VIEW gn_synthese.vm_min_max_for_taxons")
    op.execute("DROP FUNCTION gn_synthese.fct_calculate_min_max_for_taxon(int4)")
def downgrade():
    # Recreate the materialized view, binding the configured local SRID
    # into the bbox reprojection parameter.
    op.get_bind().execute(
        sa.sql.text(
        """
        CREATE MATERIALIZED VIEW gn_synthese.vm_min_max_for_taxons AS
        WITH
        s as (
        SELECT synt.cd_nom, t.cd_ref, the_geom_local, date_min, date_max, altitude_min, altitude_max
        FROM gn_synthese.synthese synt
        LEFT JOIN taxonomie.taxref t ON t.cd_nom = synt.cd_nom
        WHERE id_nomenclature_valid_status IN('1','2')
        )
        ,loc AS (
        SELECT cd_ref,
        count(*) AS nbobs,
        public.ST_Transform(public.ST_SetSRID(public.box2d(public.ST_extent(s.the_geom_local))::geometry, :local_srid), 4326) AS bbox4326
        FROM s
        GROUP BY cd_ref
        )
        ,dat AS (
        SELECT cd_ref,
        min(TO_CHAR(date_min, 'DDD')::int) AS daymin,
        max(TO_CHAR(date_max, 'DDD')::int) AS daymax
        FROM s
        GROUP BY cd_ref
        )
        ,alt AS (
        SELECT cd_ref,
        min(altitude_min) AS altitudemin,
        max(altitude_max) AS altitudemax
        FROM s
        GROUP BY cd_ref
        )
        SELECT loc.cd_ref, nbobs, daymin, daymax, altitudemin, altitudemax, bbox4326
        FROM loc
        LEFT JOIN alt ON alt.cd_ref = loc.cd_ref
        LEFT JOIN dat ON dat.cd_ref = loc.cd_ref
        ORDER BY loc.cd_ref;
        """),
        {"local_srid": config["LOCAL_SRID"]}
    )
    # Recreate the plpgsql helper that returns the view row for one taxon.
    op.execute(
        """
        CREATE FUNCTION gn_synthese.fct_calculate_min_max_for_taxon(mycdnom integer)
        RETURNS TABLE(cd_ref integer, nbobs bigint, daymin integer, daymax integer, altitudemin integer, altitudemax integer, bbox4326 geometry)
        LANGUAGE plpgsql
        AS $function$
        BEGIN
        --USAGE (getting all fields): SELECT * FROM gn_synthese.fct_calculate_min_max_for_taxon(351);
        --USAGE (getting one or more field) : SELECT cd_ref, bbox4326 FROM gn_synthese.fct_calculate_min_max_for_taxon(351)
        --See field names and types in TABLE declaration above
        --RETURN one row for the supplied cd_ref or cd_nom
        --This function can be use in a FROM clause, like a table or a view
        RETURN QUERY SELECT * FROM gn_synthese.vm_min_max_for_taxons WHERE cd_ref = taxonomie.find_cdref(mycdnom);
        END;
        $function$
        ;
        """
    )
| bsd-2-clause | Python | |
bf979d2c32d84c4011a7363489798056d3cc6a58 | add TestBEventsWithFile | TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl | tests/unit/Events/test_BEventsWithFile.py | tests/unit/Events/test_BEventsWithFile.py | from AlphaTwirl.Events import BEvents as Events
from AlphaTwirl.Events import Branch
import unittest
import ROOT
##____________________________________________________________________________||
inputPath = '/Users/sakuma/work/cms/c150130_RA1_data/c150130_01_PHYS14/20150331_SingleMu/TTJets/treeProducerSusyAlphaT/tree.root'
treeName = 'tree'
##____________________________________________________________________________||
# Regression test against a specific ROOT file on the original author's
# machine; skipped by default because that file is not generally available.
@unittest.skip("skip TestBEventsWithFile")
class TestBEventsWithFile(unittest.TestCase):

    def test_branch(self):
        """Branch proxies reflect the current tree entry after GetEntry."""
        inputFile = ROOT.TFile.Open(inputPath)
        tree = inputFile.Get(treeName)
        events = Events(tree)
        jet_pt = events.jet_pt
        met_pt = events.met_pt
        self.assertIsInstance(jet_pt, Branch)
        self.assertIsInstance(met_pt, Branch)

        # Before any GetEntry, branches are empty/zeroed.
        self.assertEqual(0, len(jet_pt))
        self.assertEqual(1, len(met_pt))
        self.assertEqual(0.0, met_pt[0])

        # After loading entry 0, the same Branch objects expose its content.
        tree.GetEntry(0)
        self.assertEqual(2, len(jet_pt))
        self.assertEqual(1, len(met_pt))
        self.assertEqual(124.55626678466797, jet_pt[0])
        self.assertEqual(86.90544128417969, jet_pt[1])
        self.assertAlmostEqual(43.783382415771484, met_pt[0])

        # Entry 1 has a different jet multiplicity.
        tree.GetEntry(1)
        self.assertEqual(3, len(jet_pt))
        self.assertEqual(1, len(met_pt))
        self.assertEqual(112.48554992675781, jet_pt[0])
        self.assertEqual(52.32780075073242, jet_pt[1])
        self.assertEqual(48.861289978027344, jet_pt[2])
        self.assertAlmostEqual(20.483951568603516, met_pt[0])
##____________________________________________________________________________||
| bsd-3-clause | Python | |
0782e8786272fcd6e3e1a41d31bea253865c468b | Add SolveTimer - print number of iterations and elapsed time to console while running ml.solve() - see docstring for usage | pastas/pastas | pastas/timer.py | pastas/timer.py | try:
from tqdm.auto import tqdm
except ModuleNotFoundError:
raise ModuleNotFoundError("SolveTimer requires 'tqdm' to be installed.")
class SolveTimer(tqdm):
"""Progress indicator for model optimization.
Usage
-----
Print timer and number of iterations in console while running
`ml.solve()`::
>>> with SolveTimer() as t:
ml.solve(callback=t.update)
This prints the following to the console, for example::
Optimization progress: 73it [00:01, 67.68it/s]
Note
----
If the logger is also printing messages to the console the timer will not
be updated quite as nicely.
"""
def __init__(self, *args, **kwargs):
if "total" not in kwargs:
kwargs['total'] = None
if "desc" not in kwargs:
kwargs["desc"] = "Optimization progress"
super(SolveTimer, self).__init__(*args, **kwargs)
def update(self, _, n=1):
displayed = super(SolveTimer, self).update(n)
return displayed
| mit | Python | |
a9a0afa66c94e08513cf4d19725b081cab39d4dc | Make new elk module script | hynok/BEES,cvdileo/graphite,xwa9860/FHR_MOOSE,arfc/moltres,jwpeterson/stork,cpgr/bilby,delcmo/multi_d_gray_rhea,pradchar/pradyawong,vityurkiv/Ox,Kane9806/GOPHER,jessecarterMOOSE/PRARIEDOG,arfc/moltres,gridley/moltres,gnilheb/Armadillo,tophmatthews/buck,yechuda/rimat,harrymccormack/lion,amitjn7042/bear,ragusa/badger_burgers,ragusa/SEM,zhaonat/Mantis,jinghaohe/eagle,hynok/BEES,pradchar/pradyawong,karpeev/xcontact,ragusa/SEM,rtenhove/grizzly,delcmo/swe_explicit,zbhui/Joojak,CARDCFEM/Roshan,delcmo/swe_explicit,lindsayad/moltres,Kane9806/GOPHER,whtblk/littlegrass,amburan/gecko,xuli9302/baymax,xy515258/Xia,delcmo/burger_explicit,lindsayad/moltres,aeslaughter/mamba,WilkAndy/ba_hun,harrymccormack/lion,friedmud/meta,Biyss/Groundhog,zhaonat/Mantis,tophmatthews/buck,lindsayad/dgosprey-fork,lindsayad/zapdos,cvdileo/graphite,ragusa/badger_burgers,markr622/stork,arfc/moltres,dinayuryev/panda,jinghaohe/eagle,friedmud/meta,hynok/BEES,lyfhq/stork,zbhui/Joojak,ragusa/ham,aeslaughter/gardensnake,lvpeng0525/ant,feasytech/cuckoo,hynok/BEES,shannon-lab/zapdos,ragusa/simple_advection_moose,delcmo/TigEr,shoheiogawa/LaplaceYoung,ragusa/simple_advection_moose,CARDCFEM/Roshan,jhaase1/zapdos,djsalo/djsalodeposit,ragusa/Eel_Euler,amitjn7042/bear,friedmud/stork,xikaij/poisson,delcmo/SEVan_bandEd_arMadillo,delcmo/swe_explicit,shortlab/ouroboros,gorindn/leopard,Kane9806/GOPHER,ragusa/testMBP,lvpeng0525/ant,gorindn/leopard,drewshepherd/caribou,soobyes/PupALup,anilkunwar/danphe,lw4992/Roshan,antuxie/coati,gridley/moltres,friedmud/meta,ragusa/SV,shortlab/ouroboros,cticenhour/EELS,EHKong/BLUCHE,delcmo/SEVan_bandEd_arMadillo,shannon-lab/zapdos,shortlab/hognose,nsc-galtenco/storkNsc,lw4992/Roshan,lyfhq/stork,anilkunwar/danphe,penghuicao/ouroboros,friedmud/dog,angushendrick/honeybadger,aladshaw3/dgosprey,friedmud/dog,xuli9302/baymax,cticenhour/EELS,ragusa/simple_advection_moose,harrymccormack/lion,soobyes/PupALup,xikaij/poisson,lyfhq
/stork,aladshaw3/dgosprey,aeslaughter/mamba,xwa9860/FHR_MOOSE,lindsayad/zapdos,CARDCFEM/Roshan,cticenhour/EELS,zzyfisherman/PlateHole,cvdileo/DefDiff,coughee/test,gnilheb/Armadillo,lindsayad/streamerPlasma,jwpeterson/stork,apc-llc/ewe,harrymccormack/lion,zhupai1991/Firefly,aeslaughter/mamba,ragusa/rhea_rad_hy,jamiesahote/sahote,lindsayad/streamerPlasma,ragusa/Eel_Euler,AHumphriesMicallef/TestKitteh,cvdileo/DefDiff,lindsayad/dgosprey-fork,rtenhove/grizzly,shortlab/hognose,xuli9302/baymax,adamLange/slug,jhaase1/zapdos,lw4992/Roshan,gorindn/leopard,ragusa/SEM,antuxie/coati,ragusa/rhea_rad_hy,penghuicao/ouroboros,WilkAndy/ba_hun,djsalo/djsalodeposit,jarons/dendragapus,gleicher27/Tardigrade,aeslaughter/hognose,ragusa/rhea_rad_hy,whiplash01/slug,adamLange/slug,zengyf1989/PF-SSA,ragusa/Eel_Euler,WilkAndy/ba_hun,shortlab/mamba,apc-llc/ewe,feasytech/cuckoo,vityurkiv/Ox,anilkunwar/danphe,cvdileo/DefDiff,ragusa/simple_advection_moose,shoheiogawa/LaplaceYoung,djsalo/djsalodeposit,arfc/moltres,antuxie/coati,jarons/dendragapus,soobyes/PupALup,bsorbom/arc,friedmud/stork,ragusa/testMBP,pradchar/pradyawong,rtenhove/grizzly,jamiesahote/sahote,delcmo/TigEr,cpgr/bilby,0dt12/loon,ragusa/ham,aeslaughter/stork,ragusa/SV,xy515258/Xia,cvdileo/DefDiff,coughee/test,BruceWyxv/chrysalis,whiplash01/slug,xikaij/poisson,lyfhq/stork,ragusa/SEM,angushendrick/honeybadger,delcmo/burger_explicit,Biyss/panda,jhaase1/zapdos,coughee/test,ragusa/Eel_Euler,yechuda/rimat,lindsayad/zapdos,delcmo/MySV,zengyf1989/PF-SSA,drewshepherd/caribou,ragusa/SV,aeslaughter/stork,SudiptaBiswas/Crow,delcmo/SEVan_bandEd_arMadillo,BruceWyxv/chrysalis,delcmo/mcci,cpgr/bilby,shortlab/mamba,CARDCFEM/Roshan,jessecarterMOOSE/PRARIEDOG,dinayuryev/panda,lw4992/Roshan,wtp1022/MOOSE,Biyss/panda,lindsayad/dgosprey-fork,lvpeng0525/ant,Ivor23/lele,SudiptaBiswas/Crow,zengyf1989/PF-SSA,ragusa/rhea_rad_hy,penghuicao/ouroboros,favinom/quasiquadratic,cvdileo/graphite,AHumphriesMicallef/TestKitteh,whiplash01/slug,djwalter/sturgeon,zhupai1991/F
irefly,adamLange/slug,delcmo/MySV,delcmo/mcci,andybond13/asiago,lindsayad/moltres,njl14/stork,angushendrick/honeybadger,delcmo/mcci,yechuda/rimat,shortlab/ouroboros,BruceWyxv/chrysalis,aeslaughter/stork,jamiesahote/sahote,vityurkiv/Ox,imastorakos/PFC,apc-llc/ewe,delcmo/burger_explicit,aeslaughter/hognose,njl14/stork,djwalter/sturgeon,Ivor23/lele,jessecarterMOOSE/PRARIEDOG,amitjn7042/bear,markr622/stork,vityurkiv/Ox,xy515258/Xia,njl14/stork,ragusa/MySV_Marco_laptop,zzyfisherman/PlateHole,aeslaughter/gardensnake,wtp1022/MOOSE,zhupai1991/Firefly,aeslaughter/gardensnake,favinom/quasiquadratic,chezhia/Croc,zhupai1991/Firefly,amburan/gecko,zhaonat/Mantis,feasytech/cuckoo,amburan/gecko,kribby/FeCr,djsalo/djsalodeposit,ragusa/badger_burgers,Biyss/panda,SudiptaBiswas/Crow,aeslaughter/hognose,delcmo/mcci,Kane9806/GOPHER,delcmo/multi_d_gray_rhea,ragusa/testMBP,gridley/moltres,friedmud/gardensnake,karpeev/xcontact,AHumphriesMicallef/TestKitteh,lindsayad/moltres,zhaonat/Mantis,soobyes/PupALup,bsorbom/arc,ragusa/SV,shortlab/ouroboros,drewshepherd/caribou,jinghaohe/eagle,gnilheb/Armadillo,ragusa/ham,Biyss/Groundhog,xwa9860/FHR_MOOSE,0dt12/loon,bsorbom/arc,zzyfisherman/PlateHole,AHumphriesMicallef/TestKitteh,favinom/quasiquadratic,SudiptaBiswas/Crow,kribby/FeCr,cvdileo/graphite,friedmud/stork,friedmud/gardensnake,friedmud/gardensnake,delcmo/SEVan_bandEd_arMadillo,EHKong/BLUCHE,ragusa/badger_burgers,vityurkiv/Ox,gleicher27/Tardigrade,zbhui/Joojak,apc-llc/ewe,lvpeng0525/ant,Ivor23/lele,delcmo/multi_d_gray_rhea,gridley/moltres,Biyss/panda,jarons/dendragapus,whtblk/littlegrass,friedmud/dog,andybond13/asiago,delcmo/TigEr,gledoux94/beaver,nsc-galtenco/storkNsc,chezhia/Croc,amitjn7042/bear,markr622/stork,karpeev/xcontact,aladshaw3/dgosprey,imastorakos/PFC,dinayuryev/panda,delcmo/TigEr,ragusa/ham,ragusa/MySV_Marco_laptop,kribby/FeCr,wtp1022/MOOSE,shortlab/mamba,WilkAndy/ba_hun,anilkunwar/danphe,vityurkiv/Ox,aeslaughter/mamba,gleicher27/Tardigrade,shannon-lab/zapdos,djwalter/sturgeon,whtblk
/littlegrass,andybond13/asiago,penghuicao/ouroboros,wtp1022/MOOSE,delcmo/MySV,lindsayad/streamerPlasma,nsc-galtenco/storkNsc,delcmo/MySV,shortlab/mamba,0dt12/loon,apc-llc/ewe,imastorakos/PFC,gleicher27/Tardigrade,lindsayad/dgosprey-fork,chezhia/Croc,jamiesahote/sahote,delcmo/burger_explicit,shoheiogawa/LaplaceYoung,gledoux94/beaver,bsorbom/arc,Biyss/Groundhog,lindsayad/streamerPlasma,tophmatthews/buck,jwpeterson/stork,tophmatthews/buck,whiplash01/slug,EHKong/BLUCHE,shortlab/hognose,vityurkiv/Ox,delcmo/swe_explicit,ragusa/MySV_Marco_laptop,amburan/gecko,nsc-galtenco/storkNsc,aladshaw3/dgosprey,gledoux94/beaver,adamLange/slug,andybond13/asiago | make_new_elk_module.py | make_new_elk_module.py | #!/usr/bin/env python
# This script is for creating a new herd animal. Just run this script
# from the "stork" directory supplying a new animal name and it should
# create a complete application template built with support for both
# MOOSE and ELK. Enjoy!
import os, sys, string, re, subprocess
from optparse import OptionParser
from shutil import copytree, ignore_patterns
global_ignores = ['.svn', '.git']
global_app_name = ''
def renameFiles(app_path):
    """Walk app_path, rewriting 'stork' both inside files and in file names."""
    pattern = re.compile(r'(stork)(.*)', re.I)
    for dirpath, dirnames, filenames in os.walk(app_path):
        # Prune ignored directories in place so os.walk never descends
        # into them.
        for ignore in global_ignores:
            if ignore in dirnames:
                dirnames.remove(ignore)
        for filename in filenames:
            # Always rewrite 'stork' occurrences inside the file contents.
            replaceNameInContents(dirpath + '/' + filename)
            # Rename the file itself when its name begins with 'stork'.
            match = pattern.match(filename)
            if match is not None:
                renamed = replacementFunction(match)
                os.rename(dirpath + '/' + filename, dirpath + '/' + renamed + match.group(2))
def replaceNameInContents(filename):
    """Rewrite every case-variant of 'stork' inside `filename`.

    The new contents are written to a sibling temp file which then
    replaces the original, preserving the original permission bits.
    """
    with open(filename) as f:
        text = f.read()

    # Substitute each match through the case-aware replacement function.
    text = re.compile(r'(stork)', re.I).sub(replacementFunction, text)

    # Preserve permissions (e.g. the executable bit on scripts).
    mode = os.stat(filename).st_mode

    tmp_name = filename + '~tmp'
    with open(tmp_name, 'w') as f:
        f.write(text)
    os.chmod(tmp_name, mode)
    os.rename(tmp_name, filename)
def replacementFunction(match):
# There are 3 "case" cases
# Case 1: all lower case
if match.group(1) == 'stork':
return global_app_name
# Case 2: all upper case
if match.group(1) == 'STORK':
return string.upper(global_app_name)
# Case 3: First letter is capitalized
if match.group(1) == 'Stork':
name = global_app_name.replace("_", " ")
name = name.title()
name = name.replace(" ", "")
return name
print match.group(0) + "\nBad Case Detected!"
sys.exit(1)
def printUsage():
    """Print command-line usage and exit (status 0)."""
    print './make_new_elk_module.py <module name> <elk dir>'
    sys.exit()
def makeElkModule(elk_dir):
    # Copy the whole template tree into <elk_dir>/<app name>, skipping
    # VCS metadata and this generator script itself.
    copytree('.', elk_dir + "/" + global_app_name, ignore=ignore_patterns('.svn', '.git', 'make_new*'))
if __name__ == '__main__':
    parser = OptionParser()
    (global_options, args) = parser.parse_args()

    # Two positional arguments are required: the module name and the
    # destination elk directory.
    if len(args) != 2:
        printUsage()

    if not os.path.exists(args[1]):
        print "Unable to access ", args[1]
        sys.exit()

    # Module name is normalized to lower case before templating.
    global_app_name = string.lower(args[0])

    renameFiles('.')
    makeElkModule(args[1])
    print 'Your elk module should be ready!\nYou need to edit the following files to include your new module in elk:\n\t$(ELK_DIR)/elk_modules.mk\n\t$(ELK_DIR)/src/base/ElkApp.C\n'
| lgpl-2.1 | Python | |
0d12fe35e0c7a31987d83737d22bfc9f54e72709 | Add Binary | feigaochn/leetcode | add-binary.py | add-binary.py | # author: Fei Gao
#
# Add Binary
#
# Given two binary strings, return their sum (also a binary string).
# For example,
# a = "11"
# b = "1"
# Return "100".
class Solution:
    # @param a, a string
    # @param b, a string
    # @return a string
    def addBinary(self, a, b):
        """Return the binary-string sum of binary strings `a` and `b`."""
        total = int(a, 2) + int(b, 2)
        return format(total, 'b')
def main():
    """Quick manual check: print the sum 101b + 1b."""
    print(Solution().addBinary('101', '1'))


if __name__ == '__main__':
    main()
| mit | Python | |
dd30bed54205eb3639e8af0e2cf879e7cf319701 | add solution for Symmetric Tree | zhyu/leetcode,zhyu/leetcode | src/symmetricTree.py | src/symmetricTree.py | # Definition for a binary tree node
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    # @param root, a tree node
    # @return a boolean
    def isSymmetric(self, root):
        """Return True when the tree is a mirror image of itself.

        Iterative variant: walks mirrored node pairs with an explicit
        stack instead of recursion.
        """
        if not root:
            return True
        stack = [(root.left, root.right)]
        while stack:
            left, right = stack.pop()
            if left is None or right is None:
                # Symmetric only if both sides are absent.
                if left is not right:
                    return False
                continue
            if left.val != right.val:
                return False
            # Outer children mirror each other, as do inner children.
            stack.append((left.left, right.right))
            stack.append((left.right, right.left))
        return True
| mit | Python | |
6ece957e5317a9f54499714f9a7cb9bca221d4e5 | Add a simple script that runs the pipeline for the single specified user | sunil07t/e-mission-server,sunil07t/e-mission-server,shankari/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server,shankari/e-mission-server,e-mission/e-mission-server | bin/debug/intake_single_user.py | bin/debug/intake_single_user.py | import json
import logging
import argparse
import numpy as np
import uuid
import emission.pipeline.intake_stage as epi
import emission.core.wrapper.user as ecwu
if __name__ == '__main__':
    # Fixed seed so any randomized pipeline steps are reproducible.
    np.random.seed(61297777)

    parser = argparse.ArgumentParser(prog="intake_single_user")
    # Exactly one way of identifying the user must be supplied.
    group = parser.add_mutually_exclusive_group(required=True)
    group.add_argument("-e", "--user_email")
    group.add_argument("-u", "--user_uuid")

    args = parser.parse_args()
    # Resolve the UUID either directly or by email lookup.
    if args.user_uuid:
        sel_uuid = uuid.UUID(args.user_uuid)
    else:
        sel_uuid = ecwu.User.fromEmail(args.user_email).uuid

    epi.run_intake_pipeline("single", [sel_uuid])
| bsd-3-clause | Python | |
4c5f750801cef0424fd93432b688fb74b079f4c5 | Add migration to backfill recipient counts | tsotetsi/textily-web,reyrodrigues/EU-SMS,pulilab/rapidpro,reyrodrigues/EU-SMS,pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,tsotetsi/textily-web,reyrodrigues/EU-SMS,tsotetsi/textily-web,ewheeler/rapidpro,ewheeler/rapidpro,tsotetsi/textily-web,tsotetsi/textily-web,ewheeler/rapidpro,pulilab/rapidpro,ewheeler/rapidpro | temba/msgs/migrations/0037_backfill_recipient_counts.py | temba/msgs/migrations/0037_backfill_recipient_counts.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('msgs', '0036_auto_20151103_1014'),
]
def backfill_recipient_counts(apps, schema):
Broadcast = apps.get_model('msgs', 'Broadcast')
Msg = apps.get_model('msgs', 'Msg')
# get all broadcasts with 0 recipients
for broadcast in Broadcast.objects.filter(recipient_count=0):
# set to # of msgs
broadcast.recipient_count = Msg.objects.filter(broadcast=broadcast).count()
if recipient_count > 0:
broadcast.save()
print "Updated %d to %d recipients" % (broadcast.id, broadcast.recipient_count)
operations = [
migrations.RunPython(backfill_recipient_counts)
]
| agpl-3.0 | Python | |
b44f13bfa1ac8b3c1bd24e528fc7874a06df0121 | Add script that creates a filtered list of required packages | DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python | dev_tools/src/d1_dev/update-requirements-txt.py | dev_tools/src/d1_dev/update-requirements-txt.py | #!/usr/bin/env python
import shutil
import d1_dev.util
import os
import pip._internal.utils.misc
import re
REQUIREMENTS_FILENAME = 'requirements.txt'
# Modules in my dev environment that are not required by the stack
MODULE_FILTER_REGEX_LIST = {
'beautifulsoup',
'black',
'bs4',
'dataone.*',
'ete3',
'Flask',
'logging-tree',
'PyQt.*',
'pyqt5',
'python-magic',
'redbaron',
}
def main():
    """Regenerate requirements.txt from the active environment.

    The existing file is preserved as requirements.txt.bak (any stale
    backup is removed first), then the filtered, sorted requirement list
    is written in its place.
    """
    repo_dir = d1_dev.util.find_repo_root()
    req_path = os.path.join(repo_dir, REQUIREMENTS_FILENAME)
    backup_path = req_path + ".bak"
    try:
        os.remove(backup_path)
    except FileNotFoundError:
        pass
    shutil.move(req_path, backup_path)
    write_reqs(req_path, sorted(get_reqs()))
def get_reqs():
    """Return requirement strings for installed local packages, minus the
    filtered development-only modules."""
    # noinspection PyProtectedMember
    dists = pip._internal.utils.misc.get_installed_distributions(local_only=True)
    return [
        str(dist.as_requirement())
        for dist in dists
        if not is_filtered_package(dist.project_name)
    ]
def is_filtered_package(project_name):
    """Return True when `project_name` matches any dev-only filter regex.

    Logs the decision to stdout. Fixed: the 'Filtered' message previously
    passed `filter_rx` as a second format argument with no matching
    placeholder, so the matched pattern was silently dropped.
    """
    for filter_rx in MODULE_FILTER_REGEX_LIST:
        if re.match(filter_rx, project_name, re.IGNORECASE):
            print('Filtered: {} (matched {})'.format(project_name, filter_rx))
            return True
    print('Included: {}'.format(project_name))
    return False
def write_reqs(req_path, req_list):
    """Write one requirement string per line to `req_path`.

    The file always ends with a trailing newline.
    """
    with open(req_path, 'w') as f:
        f.write("\n".join(req_list) + "\n")
# Script entry point.
if __name__ == '__main__':
    main()
| apache-2.0 | Python | |
869e0d41a498698b3c785af7c86dc2bc831e0791 | Create three-words.py | Pouf/CodingCompetition,Pouf/CodingCompetition | CheckiO/three-words.py | CheckiO/three-words.py | def checkio(words):
return '111' in ''.join('1' if w.isalpha() else '0' for w in words.split())
| mit | Python | |
f7d95d4df21bc442261723298f9889bd093feb97 | add spaceapi module | guiniol/py3status,Spirotot/py3status,Andrwe/py3status,goto-bus-stop/py3status,docwalter/py3status,ultrabug/py3status,ultrabug/py3status,Shir0kamii/py3status,tobes/py3status,schober-ch/py3status,tobes/py3status,vvoland/py3status,guiniol/py3status,sethwoodworth/py3status,Andrwe/py3status,ultrabug/py3status,valdur55/py3status,alexoneill/py3status,Zopieux/py3status,UmBsublime/py3status,vjousse/py3status,jazmit/py3status,valdur55/py3status,valdur55/py3status,jantuomi/py3status,hburg1234/py3status | py3status/modules/spaceapi.py | py3status/modules/spaceapi.py | # -*- coding: utf-8 -*-
"""
This module shows if your favorite hackerspace is open or not
Last modified: 2015-02-01
Author: @timmszigat
License: WTFPL http://www.wtfpl.net/txt/copying/
"""
from time import time
import datetime
import json
import urllib.request
import codecs
class Py3status:
    """
    Configuration Parameters:
        - cache_timeout: Set timeout between calls in seconds
        - url: URL to SpaceAPI json file of your space
        - open_text: text if space is open, strftime parameters will be translated
        - open_color: color if space is open
        - closed_text: text if space is closed, strftime parameters will be translated
        - closed_color: color if space is closed
    """

    # Defaults; py3status overrides these attributes from the user config.
    cache_timeout = 60
    url = 'http://status.chaospott.de/status.json'
    open_text = 'open since %H:%M'
    open_color = None
    closed_text = 'closed since %H:%M'
    closed_color = None

    def __init__(self):
        pass

    def check(self, i3s_output_list, i3s_config):
        """Fetch the SpaceAPI JSON and build the i3bar response dict."""
        response = {
            'name': 'spaceapi',
            'cached_until': time() + self.cache_timeout
        }

        try:
            # Fall back to i3status colors when none are configured.
            if not self.open_color:
                self.open_color = i3s_config['color_good']
            if not self.closed_color:
                self.closed_color = ''

            json_file = urllib.request.urlopen(self.url)
            try:
                reader = codecs.getreader("utf-8")
                data = json.load(reader(json_file))
            finally:
                # Fixed: the handle was leaked when json.load raised.
                json_file.close()

            if data['state']['open'] == True:
                response['full_text'] = self.open_text
                response['short_text'] = '%H:%M'
                if self.open_color:
                    response['color'] = self.open_color
            else:
                response['full_text'] = self.closed_text
                # Fixed: key was misspelled 'short_test'.
                response['short_text'] = ''
                if self.closed_color:
                    response['color'] = self.closed_color

            # Substitute the time of the last status change into the text.
            dt = datetime.datetime.fromtimestamp(data['state']['lastchange'])
            response['full_text'] = dt.strftime(response['full_text'])
        except Exception:
            # Network/parse failure: show nothing rather than crash the bar
            # (was a bare `except:`, which also swallowed SystemExit).
            response['full_text'] = ''

        return response
return response
if __name__ == "__main__":
    """
    Test this module by calling it directly.
    """
    from time import sleep
    x = Py3status()
    while True:
        # Poll once per second and print the raw response dict.
        print(x.check([], {}))
        sleep(1)
| bsd-3-clause | Python | |
651c44b51a26733dde22e82a80b0668302e5df52 | implement a base class for backends | alfredodeza/merfi | merfi/backends/base.py | merfi/backends/base.py | from merfi import base, util
from tambo import Transport
class BaseBackend(base.BaseCommand):
options = []
parser = None
def parse_args(self):
self.parser = Transport(self.argv, options=self.options)
self.parser.catch_help = self.help()
self.parser.parse_args()
self.path = util.infer_path(self.parser.unkown_commands)
self.check_dependency()
self.sign()
def sign(self):
raise NotImplemented()
| mit | Python | |
7086ce47e4a2b6611596d177cc5adb166b382f48 | Create cryptography.py | CriticalD20/Cryptography,morganmeliment/Cryptography,voidJeff/Cryptography,VinzentM/Cryptography,VinzentM/Cryptography,phstearns/Cryptography,dina-hertog/Cryptography,kezarberger/Cryptography,sarahdunbar/Cryptography,TheBigBlueBlob/Cryptography,HHStudent/Cryptography,sawyerhanlon/Cryptography,HaginCodes/Cryptography,adamglueck/Cryptography,kezarberger/Cryptography,phstearns/Cryptography,CANDYISLIFE/Cryptography,danielwilson2017/Cryptography,ryankynor/Cryptography,DRBarnum/Cryptography,HHS-IntroProgramming/Cryptography,willcampbel/Cryptography,CriticalD20/Cryptography,adamglueck/Cryptography,APikielny/Cryptography,anoushkaalavilli/Cryptography,tesssny/Cryptography,nilskingston/Cryptography,RDanilek/Cryptography,ChubbyPotato/Cryptography,DanielPinigin/Cryptography,DRBarnum/Cryptography,DanielPinigin/Cryptography,sarahdunbar/Cryptography,nilskingston/Cryptography,HHStudent/Cryptography,jasminelou/Cryptography,ryankynor/Cryptography,HaginCodes/Cryptography,samuelpych/Cryptography,morganmeliment/Cryptography,Funjando/Cryptography,eliwoloshin/Cryptography,anoushkaalavilli/Cryptography,samuelpych/Cryptography,TheBigBlueBlob/Cryptography,CANDYISLIFE/Cryptography,SSupattapone/Cryptography,HHS-IntroProgramming/Cryptography,willcampbel/Cryptography,jasminelou/Cryptography,APikielny/Cryptography,SSupattapone/Cryptography,emma-supattapone/Cryptography,Funjando/Cryptography,danielwilson2017/Cryptography,RDanilek/Cryptography,dina-hertog/Cryptography,tesssny/Cryptography,ChubbyPotato/Cryptography,emma-supattapone/Cryptography,eliwoloshin/Cryptography,sawyerhanlon/Cryptography,voidJeff/Cryptography | cryptography.py | cryptography.py | """
| mit | Python | |
2eb5ba178e3bed422a2cb7437362b30df717103e | remove dbcred file from staging interface | fedspendingtransparency/data-act-validator,fedspendingtransparency/data-act-broker-backend,chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend,fedspendingtransparency/data-act-broker-backend,chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend | dataactvalidator/interfaces/validatorStagingInterface.py | dataactvalidator/interfaces/validatorStagingInterface.py | from dataactcore.models.baseInterface import BaseInterface
from dataactcore.config import CONFIG_DB
class ValidatorStagingInterface(BaseInterface):
""" Manages all interaction with the staging database """
dbName = CONFIG_DB['staging_db_name']
Session = None
engine = None
session = None
def __init__(self):
super(ValidatorStagingInterface,self).__init__()
@staticmethod
def getDbName():
""" Return database name"""
return ValidatorStagingInterface.dbName
def dropTable(self,table):
"""
Args:
table: Table to be dropped
Returns:
True if successful
"""
self.runStatement("".join(["DROP TABLE ",table]))
self.session.commit()
def tableExists(self,table):
""" True if table exists, false otherwise """
return self.engine.dialect.has_table(self.engine.connect(),table)
def countRows(self,table):
""" Returns number of rows in the specified table """
if(self.tableExists(table)):
response = (self.runStatement("".join(["SELECT COUNT(*) FROM ",table]))).fetchone()[0]
# Try to prevent blocking
self.session.close()
return response
else:
return 0
@staticmethod
def getTableName(jobId):
""" Get the staging table name based on the job ID """
return "".join(["job",str(jobId)]) | from sqlalchemy.exc import ResourceClosedError
from dataactcore.models.baseInterface import BaseInterface
class ValidatorStagingInterface(BaseInterface):
""" Manages all interaction with the staging database """
dbName = "staging"
credFileName = "dbCred.json"
Session = None
engine = None
session = None
def __init__(self):
self.dbConfigFile = self.getCredFilePath()
super(ValidatorStagingInterface,self).__init__()
@staticmethod
def getDbName():
""" Return database name"""
return ValidatorStagingInterface.dbName
def dropTable(self,table):
"""
Args:
table: Table to be dropped
Returns:
True if successful
"""
self.runStatement("".join(["DROP TABLE ",table]))
self.session.commit()
def tableExists(self,table):
""" True if table exists, false otherwise """
return self.engine.dialect.has_table(self.engine.connect(),table)
def countRows(self,table):
""" Returns number of rows in the specified table """
if(self.tableExists(table)):
response = (self.runStatement("".join(["SELECT COUNT(*) FROM ",table]))).fetchone()[0]
# Try to prevent blocking
self.session.close()
return response
else:
return 0
@staticmethod
def getTableName(jobId):
""" Get the staging table name based on the job ID """
return "".join(["job",str(jobId)]) | cc0-1.0 | Python |
06b1113c74821b144a2e31e55020f2db2fcd44a2 | Add contacts example script. | xtuple/xtuple-python-rest-client-example | contacts.py | contacts.py | #!/usr/bin/python2.4
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple command-line sample that demonstrates xTuple 'Service Account'
OAuth 2.0 REST API scenario.
Lists all the Contacts in the xTuple database. Service accounts are created in
the xTuple Mobile Web Client. See the documentation for more information:
https://github.com/xtuple/xtuple/wiki/OAuth-2.0-Service-Accounts-Scenario
Command Line Usage Run:
$ python contacts.py
####################
TODO:
Set all the OAuth 2.0 client settings and Discovery Document URL below.
####################
"""
__author__ = 'ben@xtuple.com (bendiy)'
import httplib2
import pprint
import sys
from apiclient.discovery import build
from oauth2client.client import SignedJwtAssertionCredentials
from apiclient.model import JsonModel
def main(argv):
# Load the key in PKCS 12 format that you downloaded from the xTuple Mobile
# Web Client OAuth 2.0 Client registration workspace when you created your
# Service account client. Place the file in the same directory as this file.
f = file('privatekey.p12', 'rb')
key = f.read()
f.close()
# Create an httplib2.Http object to handle our HTTP requests and authorize it
# with the Credentials. Note that the first parameter, service_account_name,
# is the Client ID created for the Service Account.
####################
# TODO: Set these parameters to match your OAuth 2.0 Client values.
####################
credentials = SignedJwtAssertionCredentials('example_f8ad6a5f-883b-4d41-ea6a-1971af1919e8', #service_account_name
key, #private_key
'https://your-demo.xtuplecloud.com/your-db-name/auth', #scope
private_key_password='notasecret',
user_agent=None,
token_uri='https://your-demo.xtuplecloud.com/your-db-name/oauth/token',
revoke_uri='https://your-demo.xtuplecloud.com/your-db-name/oauth/revoke-token',
prn='admin'
)
####################
# TODO: On production, do not disable_ssl_certificate_validation. Development only!!!
####################
http = httplib2.Http(disable_ssl_certificate_validation=True)
http = credentials.authorize(http)
# By default, JsonModel is used and it sets alt_param = 'json'. The xTuple API
# does not support an 'alt' query parameter, so we strip it off the model.
# @See: google-api-python-client\apiclient\model.py _build_query()
features = []
model = JsonModel('dataWrapper' in features)
model.alt_param = None
# Fetch the Discovery Document and build a service for the resources.
####################
# TODO: Set the discoveryServiceUrl parameter to match your OAuth 2.0 Client value.
# Optionally, use a difference resource than contact.
####################
service = build("contact", #serviceName
"v1alpha1", #version
http=http,
discoveryServiceUrl="https://your-demo.xtuplecloud.com/your-db-name/discovery/{apiVersion}/apis/{api}/{apiVersion}/rest",
model=model
)
# List all the Contacts.
lists = service.Contact().list().execute(http=http)
pprint.pprint(lists)
if __name__ == '__main__':
main(sys.argv)
| apache-2.0 | Python | |
6f2529d1891b5c256394b9c8aa991b25a029b5f1 | Add a migration to load users and buckets | alphagov/backdrop,alphagov/backdrop,alphagov/backdrop | migrations/004_load_seed_file.py | migrations/004_load_seed_file.py | """
Load initial user and bucket data from seed files.
"""
import logging
import os
import subprocess
import sys
log = logging.getLogger(__name__)
def up(db):
names = db.collection_names()
if "users" in names:
log.info("users collection already created")
return
if "buckets" in names:
log.info("buckets collection already created")
return
invoke = os.path.join(os.path.dirname(sys.executable), "invoke")
subprocess.call([invoke, "load_seed"])
| mit | Python | |
3dbe5ce617d882dc74a1b95e830634dc0d0f800c | Add examples from the ORM tutorial | uwydoc/the-practices,uwydoc/the-practices,uwydoc/the-practices,uwydoc/the-practices,uwydoc/the-practices,uwydoc/the-practices,uwydoc/the-practices | python/sqlalchemy/tutorial.py | python/sqlalchemy/tutorial.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Examples from the ORM tutorial
#
from __future__ import print_function, division
from sqlalchemy import Column, Integer, String, Sequence, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, backref
Base = declarative_base()
class User(Base):
__tablename__ = 'user'
id = Column(Integer, Sequence('user_id_seq'), primary_key=True)
name = Column(String(50))
fullname = Column(String(50))
password = Column(String(12))
def __repr__(self):
return "<User(name='%s', fullname='%s', password='%s')>" % (self.name,
self.fullname, self.password)
class Address(Base):
__tablename__ = 'address'
id = Column(Integer, Sequence('address_id_seq'), primary_key=True)
email_address = Column(String(50))
user_id = Column(Integer, ForeignKey('user.id'))
user = relationship('User', backref=backref('addresses', order_by=id))
def __repr__(self):
return "<Address(email_address='%s')>" % self.email_address
if __name__ == '__main__':
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker
engine = create_engine('sqlite:///tutorial.db')
# Create the tables if not exist
Base.metadata.create_all(engine)
# Create a session, do some insert and query
Session = sessionmaker()
session = Session(bind=engine)
ed = User(name='ed', fullname='Ed Jacson', password='edpass')
session.add(ed)
session.commit()
# Query
for row in session.query(User).all():
print(row)
| mit | Python | |
b9da5732579dce0f25a413cbfe4936b8ac024aa5 | move gzip classes into gzip | kitsuyui/bamboo-crawler,kitsuyui/bamboo-crawler,kitsuyui/bamboo-crawler | bamboo_crawler/gzip/__init__.py | bamboo_crawler/gzip/__init__.py | import gzip
from ..interfaces.deserializer import Deserializer
from ..interfaces.serializer import Serializer
class GzipSerializer(Serializer[bytes, bytes]):
def serialize(self, value: bytes) -> bytes:
return gzip.compress(value)
class GzipDeserializer(Deserializer[bytes, bytes]):
def deserialize(self, value: bytes) -> bytes:
return gzip.decompress(value)
| bsd-3-clause | Python | |
99282d42a3948b9ed45b02df657c344667ec0cf2 | Add a migration for directive_sections -> relationships | AleksNeStu/ggrc-core,jmakov/ggrc-core,hyperNURb/ggrc-core,josthkko/ggrc-core,hasanalom/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,edofic/ggrc-core,kr41/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core,hyperNURb/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,jmakov/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core,jmakov/ggrc-core,uskudnik/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,edofic/ggrc-core,NejcZupec/ggrc-core,hyperNURb/ggrc-core,plamut/ggrc-core,uskudnik/ggrc-core,AleksNeStu/ggrc-core,hyperNURb/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,hasanalom/ggrc-core,NejcZupec/ggrc-core,hasanalom/ggrc-core,kr41/ggrc-core,prasannav7/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,jmakov/ggrc-core,hyperNURb/ggrc-core,VinnieJohns/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,uskudnik/ggrc-core,hasanalom/ggrc-core,VinnieJohns/ggrc-core,uskudnik/ggrc-core,AleksNeStu/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,kr41/ggrc-core,uskudnik/ggrc-core | src/ggrc/migrations/versions/20150521125008_324d461206_migrate_directive_sections_to_.py | src/ggrc/migrations/versions/20150521125008_324d461206_migrate_directive_sections_to_.py | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: anze@reciprocitylabs.com
# Maintained By: anze@reciprocitylabs.com
"""Migrate directive_sections to relationships
Revision ID: 324d461206
Revises: a2fc29a71f3
Create Date: 2015-05-21 12:50:08.987209
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '324d461206'
down_revision = 'a2fc29a71f3'
def upgrade():
sql = """
REPLACE INTO relationships (
modified_by_id, created_at, updated_at, source_id,
source_type, destination_id, destination_type, context_id
)
SELECT dc.modified_by_id, dc.created_at, dc.updated_at,
dc.section_id as source_id, 'Clause' as source_type,
dc.directive_id as destination_id,
IFNULL(d.kind, "Policy") as destination_type,
dc.context_id
FROM directive_sections as dc JOIN directives as d ON dc.directive_id = d.id;
"""
op.execute(sql)
op.drop_constraint(
'directive_sections_ibfk_2',
'directive_sections',
type_='foreignkey')
op.drop_constraint(
'directive_sections_ibfk_3',
'directive_sections',
type_='foreignkey')
def downgrade():
op.create_foreign_key(
'directive_sections_ibfk_2',
'directive_sections',
'sections',
['section_id'],
['id'])
op.create_foreign_key(
'directive_sections_ibfk_3',
'directive_sections',
'directives',
['directive_id'],
['id'])
| apache-2.0 | Python | |
5b9d9f531e3544f6d3dfe0a2e48dcaaebf132921 | Test case for RPC HTTP handler. | supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer | test/services/appmanager/test_http.py | test/services/appmanager/test_http.py | import time
import requests
from weavelib.messaging import Receiver
from weavelib.rpc import RPCServer, ServerAPI
from weavelib.services import BaseService
from weaveserver.core.services import ServiceManager
from weaveserver.services.appmanager import ApplicationService
AUTH = {
"auth1": {
"type": "SYSTEM",
"appid": "appmgr"
},
"auth2": {
"appid": "appid2",
"package": "p"
}
}
class DummyService(BaseService):
def __init__(self, token):
super(DummyService, self).__init__(token)
self.rpc_server = RPCServer("name", "desc", [
ServerAPI("api1", "desc2", [], self.api1),
], self)
def api1(self):
return "OK"
def on_service_start(self):
self.rpc_server.start()
def on_service_stop(self):
self.rpc_server.stop()
class TestApplicationService(object):
def setup_class(cls):
cls.service_manager = ServiceManager()
cls.service_manager.apps = AUTH
cls.service_manager.start_services(["messaging"])
cls.appmgr = ApplicationService("auth1", {"apps": AUTH})
cls.appmgr.exited.set()
cls.appmgr.on_service_start()
# Wait till it starts.
receiver = Receiver("/_system/root_rpc/request")
while True:
try:
receiver.start()
break
except:
time.sleep(1)
def teardown_class(cls):
cls.service_manager.stop()
cls.appmgr.on_service_stop()
def setup_method(self):
self.dummy_service = DummyService("auth2")
self.dummy_service.service_start()
def teardown_method(self):
self.dummy_service.service_stop()
def test_http_rpc(self):
obj = {
"package_name": "p",
"rpc_name": "name",
"api_name": "api1",
"args": [],
"kwargs": {}
}
url = "http://localhost:5000/api/rpc"
for _ in range(1):
res = requests.post(url, json=obj).json()
assert res == "OK"
| mit | Python | |
892c89c8ae953b84720cc4d617d772f1e777af82 | add @erfannoury's poisson disk sampling code | WangDequan/fast-bird-part-localization,yassersouri/fast-bird-part-localization | src/poisson_disk.py | src/poisson_disk.py | """
@author: Erfan Noury, https://github.com/erfannoury
"""
import numpy as np
from numpy.random import random as rnd
class PoissonDiskSampler(object):
def __init__(self, width, height, radius, k=20):
"""
This class is for sampling points in a 2-D region
such that no two points are closer to each other
than `radius`.
This is code is based on Bostocks implementation (http://bl.ocks.org/mbostock/19168c663618b7f07158) which in turn is based on (https://www.jasondavies.com/poisson-disc/)
Parameters
==========
width: int
width of the 2-D region
height: int
height of the 2-D region
radius: float
minimum distance between two arbitrary sampled points
k: int
maximum number of samples before rejection
"""
self.width = width
self.height = height
self.radius = radius
self.k = k
self.cell_size = self.radius * 1.0 / np.sqrt(2.0)
self.grid_width = int(np.ceil(self.width / self.cell_size))
self.grid_height = int(np.ceil(self.height / self.cell_size))
self.grid = [-1] * (self.grid_height * self.grid_width)
self.queue = []
self.samples = []
def get_sample(self):
"""
Returns an array of sample points sampled in the specified region using Bridson's Poisson-disk sampling algorithm
Returns
=======
samples: list of tuples of two ints
A list containing the coordinates sampled on a 2-d region such that no two samples points have distance less than `radius`.
"""
# initialize with a seed point
self.__sample__(rnd() * self.width, rnd() * self.height)
while len(self.queue) > 0:
idx = int(rnd() * len(self.queue))
p = self.queue[idx]
new_inserted = False
for j in xrange(self.k):
theta = 2 * np.pi * rnd()
# radius <= r <= 2 * radius
r = np.sqrt(3 * rnd() * self.radius**2 + self.radius**2)
x = p[0] + r * np.cos(theta)
y = p[1] + r * np.sin(theta)
if (0 <= x < self.width) and (0 <= y < self.height) and self.__far__(x, y):
self.__sample__(x, y)
new_inserted = True
break
# remove point from active list
if not new_inserted:
self.queue = self.queue[:idx] + self.queue[idx+1:]
self.samples.append(p)
return self.samples
def __far__(self, x, y):
i = int(y / self.cell_size)
j = int(x / self.cell_size)
i0 = np.max([i - 2, 0])
j0 = np.max([j - 2, 0])
i1 = np.min([i + 3, self.grid_height])
j1 = np.min([j + 3, self.grid_width])
for j in xrange(j0, j1):
for i in xrange(i0, i1):
if self.grid[i * self.grid_width + j] != -1:
dx = self.grid[i * self.grid_width + j][0] - x
dy = self.grid[i * self.grid_width + j][1] - y
if (dx**2 + dy**2) < (self.radius**2):
return False
return True
def __sample__(self, x, y):
p = (x, y)
self.queue.append(p)
idx = int(self.grid_width * np.floor(y / self.cell_size) + np.floor(x / self.cell_size))
self.grid[idx] = p
| mit | Python | |
8ba5b29200520d853791943341d41798ff80a248 | Change meta option for Github | lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin | src/repository/migrations/0003_auto_20170524_1503.py | src/repository/migrations/0003_auto_20170524_1503.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-05-24 15:03
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('repository', '0002_auto_20170522_2021'),
]
operations = [
migrations.AlterModelOptions(
name='github',
options={'verbose_name': 'github project', 'verbose_name_plural': 'github projects'},
),
]
| bsd-3-clause | Python | |
811c1ed7324075970f0009d691866d1d47de43a2 | add a setup.py to make this a nice official package | google/mysql-tools,google/mysql-tools | setup.py | setup.py | #!/usr/bin/python2.4
#
# Copyright 2006 Google Inc. All Rights Reserved.
from distutils.core import setup
setup(name="google-mysql-tools",
description="Google MySQL Tools",
url="http://code.google.com/p/google-mysql-tools",
version="0.1",
packages=["gmt"],
scripts=["mypgrep.py", "compact_innodb.py"])
| apache-2.0 | Python | |
193aa3ff7ef4219fd29a0ea40a8c0d2e5467de75 | Add setup.py script | miguelsousa/MutatorMath,moyogo/mutatormath,LettError/MutatorMath,anthrotype/MutatorMath | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name = "MutatorMath",
version = "1.8",
description = "Python for piecewise linear interpolation in multiple dimensions with multiple, arbitrarily placed, masters.",
author = "Erik van Blokland",
author_email = "erik@letterror.com",
url = "https://github.com/LettError/MutatorMath",
license = "BSD 3 Clause",
packages = [
"mutatorMath",
"mutatorMath.objects",
"mutatorMath.ufo",
],
package_dir = {"":"Lib"},
)
| bsd-3-clause | Python | |
21a0948eb1d25e9126e2940cbc7d0496181d6a93 | Add Django version trove classifiers. | grzes/djangae,asendecka/djangae,grzes/djangae,kirberich/djangae,asendecka/djangae,kirberich/djangae,armirusco/djangae,grzes/djangae,armirusco/djangae,potatolondon/djangae,chargrizzle/djangae,potatolondon/djangae,armirusco/djangae,kirberich/djangae,asendecka/djangae,chargrizzle/djangae,chargrizzle/djangae | setup.py | setup.py | import os
from setuptools import setup, find_packages
NAME = 'djangae'
PACKAGES = find_packages()
DESCRIPTION = 'Django integration with Google App Engine'
URL = "https://github.com/potatolondon/djangae"
LONG_DESCRIPTION = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
AUTHOR = 'Potato London Ltd.'
EXTRAS = {
"test": ["webtest"],
}
setup(
name=NAME,
version='0.9.1',
packages=PACKAGES,
# metadata for upload to PyPI
author=AUTHOR,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
keywords=["django", "Google App Engine", "GAE"],
url=URL,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Framework :: Django :: 1.7',
'Framework :: Django :: 1.8',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
include_package_data=True,
# dependencies
extras_require=EXTRAS,
tests_require=EXTRAS['test'],
)
| import os
from setuptools import setup, find_packages
NAME = 'djangae'
PACKAGES = find_packages()
DESCRIPTION = 'Django integration with Google App Engine'
URL = "https://github.com/potatolondon/djangae"
LONG_DESCRIPTION = open(os.path.join(os.path.dirname(__file__), 'README.md')).read()
AUTHOR = 'Potato London Ltd.'
EXTRAS = {
"test": ["webtest"],
}
setup(
name=NAME,
version='0.9.1',
packages=PACKAGES,
# metadata for upload to PyPI
author=AUTHOR,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
keywords=["django", "Google App Engine", "GAE"],
url=URL,
classifiers=[
'Development Status :: 3 - Alpha',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
include_package_data=True,
# dependencies
extras_require=EXTRAS,
tests_require=EXTRAS['test'],
)
| bsd-3-clause | Python |
f1ae87bd9df2c3d70db980ea5e721223b545da5f | Add setup.py | fubaz/djheroku,fubaz/djheroku | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name='Djheroku',
version='0.1',
description='Some helper functionality for binding Heroku configuration to Django',
author='Ferrix Hovi',
author_email='ferrix+git@ferrix.fi',
url='http://github.com/ferrix/djheroku/',
packages=['djheroku']
)
| mit | Python | |
6f57426a6a3881816506868f8278e252e5b0e5cd | Add setup.py file. | DanielAndreasen/ObservationTools,iastro-pt/ObservationTools | setup.py | setup.py | try:
from setuptools import setup
except ImportError:
from distutils.core import setup
config = {
'description': 'A set of tools to plan astronomical observations.',
'author': 'iastro-pt',
'url': 'https://github.com/iastro-pt/ObservationTools',
'download_url': 'https://github.com/iastro-pt/ObservationTools',
'author_email': 'daniel.andreasen@astro.up.pt',
'version': '0.1',
'license': 'MIT',
'setup_requires': ['pytest-runner'],
'tests_require': ['pytest', 'hypothesis'],
# "PyAstronomy" when issue fixed.
'install_requires': ["numpy", "astropy", "scipy", "matplotlib",
"astropy", "argparse", "ephem"],
'extras_require': {
'dev': ['check-manifest'],
'tests': ['pytest', 'coverage', 'pytest-cov', 'python-coveralls', 'hypothesis'],
'docs': ['sphinx >= 1.4'],
},
'packages': [],
'package_data': {
# Inlcude the data files:
'': ['data/*']},
'scripts': ["visibility.py", "rv.py"],
'name': 'ObservationTools',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
"classifiers": [
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering :: Astronomy',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Natural Language :: English',
],
# What does your project relate to?
"keywords": ['Astronomy', 'Observation'],
}
setup(**config)
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.