commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13 values | lang stringclasses 23 values |
|---|---|---|---|---|---|---|---|---|
bf93b3b4c8965e31e5b9b8ebdbf3f1b1d258e15e | Add a new script to simplify profiling of cvs2svn.py. Document in the script how to use kcachegrind to view the results. | YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,wbond/subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,YueLinHo/Subversion,wbond/subversion,wbond/subversion,wbond/subversion,wbond/subversion | tools/cvs2svn/profile-cvs2svn.py | tools/cvs2svn/profile-cvs2svn.py | #!/usr/bin/env python
#
# Use this script to profile cvs2svn.py using Python's hotshot profiler.
#
# The profile data is stored in cvs2svn.hotshot. To view the data using
# hotshot, run the following in python:
#
# import hotshot.stats
# stats = hotshot.stats.load('cvs2svn.hotshot')
# stats.strip_dirs()
# stats.sort_stats('time', 'calls')
# stats.print_stats(20)
#
# It is also possible (and a lot better) to use kcachegrind to view the data.
# To do so, you must first convert the data to the cachegrind format using
# hotshot2cachegrind, which you can download from the following URL:
#
# http://kcachegrind.sourceforge.net/cgi-bin/show.cgi/KcacheGrindContribPython
#
# Convert the data using the following command:
#
# hotshot2cachegrind -o cachegrind.out cvs2svn.hotshot
#
# Depending on the size of the repository, this can take a long time. When
# the conversion is done, simply open cachegrind.out in kcachegrind.
import cvs2svn, hotshot
prof = hotshot.Profile('cvs2svn.hotshot')
prof.runcall(cvs2svn.main)
prof.close()
| apache-2.0 | Python | |
61f06365254c57ced68beb83714164186164d939 | add solutin for LRU Cache | zhyu/leetcode,zhyu/leetcode | src/LRUCache.py | src/LRUCache.py | class LRUCache:
class ListNode:
def __init__(self, val):
self.val = val
self.next = None
self.prev = None
# @param capacity, an integer
def __init__(self, capacity):
self.capacity = capacity
self.size = 0
self.cache = {}
self.head = self.ListNode(-1)
self.tail = self.ListNode(-1)
self.head.next = self.tail
self.tail.prev = self.head
# @return an integer
def get(self, key):
if key in self.cache:
self._refresh(self.cache[key][1])
return self.cache[key][0]
return -1
# @param key, an integer
# @param value, an integer
# @return nothing
def set(self, key, value):
if key not in self.cache:
self._refresh(self.ListNode(key))
self.size += 1
else:
self._refresh(self.cache[key][1])
self.cache[key] = [value, self.head.next]
if self.size > self.capacity:
node = self.tail.prev
del self.cache[node.val]
self._delete_node(node)
self.size = self.capacity
def _refresh(self, node):
if self.head.next == node:
return
if node.next:
self._delete_node(node)
node.prev = self.head
node.next = self.head.next
self.head.next = node
node.next.prev = node
def _delete_node(self, node):
node.prev.next = node.next
node.next.prev = node.prev
| mit | Python | |
3a0fdcf51e1db8abab900a6cc1b4596d0dc4b054 | automate fab process | ianjuma/errand-runner,ianjuma/errand-runner,ianjuma/errand-runner,ianjuma/errand-runner | automata.py | automata.py | import pexpect
import getpass
version = raw_input('Version: ')
secret = getpass.getpass('Enter Passphrase: ')
github_username = 'ianjuma'
clean = pexpect.spawn('fab clean')
clean.expect('Passphrase for private key:')
clean.send(secret)
deploy = pexpect.spawn('fab deploy:%s' %(version,))
deploy.expect('Passphrase for private key:')
deploy.sendline(secret)
deploy.expect("Username for 'https://github.com':")
deploy.sendline(github_username)
deploy.expect("Password for 'https://ianjuma@github.com':")
deploy.sendline(secret)
| apache-2.0 | Python | |
5ea95763c541b30a4b3f9ef5dbfa201b24ae5293 | Create get_gg_list_result.py | ericlzyu/yingxiao,ericlzyu/yingxiao,ericlzyu/yingxiao | get_gg_list_result.py | get_gg_list_result.py | import time
from splinter import Browser
def splinter(url,browser):
#login 126 email websize
browser.visit(url)
#wait web element loading
time.sleep(5)
#fill in account and password
browser.find_by_id('idInput').fill('xxxxxx')
browser.find_by_id('pwdInput').fill('xxxxx')
#click the button of login
browser.find_by_id('loginBtn').click()
time.sleep(8)
#close the window of brower
def getAll(browser):
element_list = browser.find_by_id("ires")
rel = element_list.find_by_tag('h3')
for i in rel:
print i.find_by_tag("a").text
e = i.find_by_tag("a")
print e['href'] #ok!!
data.append(e["href"])
def visitGG(url,text):
browser.visit(url)
time.sleep(2)
browser.fill('q',text)
browser.find_by_name('btnK').click()
if browser.is_text_present(text):
print 'yes, found it'
else:
print 'no. didn t find it'
getAll(browser)
print data
print "next page ========="
#next page
browser.find_by_id('pnnext').click()
time.sleep(2)
if browser.is_text_present(text):
print 'yes, found it'
else:
print 'no. didn t find it'
getAll(browser)
print data
def visitFB(url):
#1. visit
browser.visit(url)
time.sleep(2)
#2. common
if __name__ == '__main__':
data=[]
browser = Browser('chrome')
text = "pet site:facebook.com"
visitGG("http://www.google.com",text)
time.sleep(2)
for i in data:
print "start action"
visitFB(i)
#websize3 ='http://www.126.com'
#splinter(websize3)
#browser.quit()
| mit | Python | |
3095142aa814e51e8fcde4d53633a93a54a7574f | Index main label reference | hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel,hasgeek/funnel | migrations/versions/e679554261b2_main_label_index.py | migrations/versions/e679554261b2_main_label_index.py | """Main label index
Revision ID: e679554261b2
Revises: e2be4ab896d3
Create Date: 2019-05-09 18:55:24.472216
"""
# revision identifiers, used by Alembic.
revision = 'e679554261b2'
down_revision = 'e2be4ab896d3'
from alembic import op
import sqlalchemy as sa # NOQA
def upgrade():
op.create_index(op.f('ix_label_main_label_id'), 'label', ['main_label_id'], unique=False)
def downgrade():
op.drop_index(op.f('ix_label_main_label_id'), table_name='label')
| agpl-3.0 | Python | |
ff51c695b516ea7e16518779c66ebd827b4f6230 | Clean up Encode | OCForks/phantomjs,you21979/phantomjs,Andrey-Pavlov/phantomjs,r3b/phantomjs,iradul/phantomjs,ramanajee/phantomjs,iver333/phantomjs,grevutiu-gabriel/phantomjs,jguyomard/phantomjs,sxhao/phantomjs,tinfoil/phantomjs,djmaze/phantomjs,tmuelle2/phantomjs,woodpecker1/phantomjs,zhengyongbo/phantomjs,avinashkunuje/phantomjs,jefleponot/phantomjs,peakji/phantomjs,aljscott/phantomjs,jguyomard/phantomjs,fentas/phantomjs,pataquets/phantomjs,Vitallium/phantomjs,forzi/phantomjs_stradivari_fork,ChrisAntaki/phantomjs,PeterWangPo/phantomjs,webmull/phantomjs,delighted/phantomjs,pigshell/nhnick,toanalien/phantomjs,joomel1/phantomjs,lseyesl/phantomjs,MaDKaTZe/phantomjs,zhengyongbo/phantomjs,tianzhihen/phantomjs,grevutiu-gabriel/phantomjs,jdar/phantomjs-modified,bukalov/phantomjs,tmuelle2/phantomjs,woodpecker1/phantomjs,zhengyongbo/phantomjs,MeteorAdminz/phantomjs,nin042/phantomjs,danigonza/phantomjs,aljscott/phantomjs,fxtentacle/phantomjs,StevenBlack/phantomjs,toanalien/phantomjs,Vitallium/phantomjs,eugene1g/phantomjs,tianzhihen/phantomjs,Lkhagvadelger/phantomjs,asrie/phantomjs,lseyesl/phantomjs,delighted/phantomjs,jguyomard/phantomjs,iradul/phantomjs,peakji/phantomjs,revolutionaryG/phantomjs,zhengyongbo/phantomjs,dparshin/phantomjs,nin042/phantomjs,viewdy/phantomjs2,revolutionaryG/phantomjs,sxhao/phantomjs,bkrukowski/phantomjs,Dinamize/phantomjs,christoph-buente/phantomjs,Tomtomgo/phantomjs,gskachkov/phantomjs,forzi/phantomjs_stradivari_fork,mark-ignacio/phantomjs,mark-ignacio/phantomjs,klim-iv/phantomjs-qt5,jillesme/phantomjs,jorik041/phantomjs,tianzhihen/phantomjs,wuxianghou/phantomjs,avinashkunuje/phantomjs,delighted/phantomjs,eugene1g/phantomjs,tianzhihen/phantomjs,bukalov/phantomjs,ramanajee/phantomjs,ye11ow/phantomjs,christoph-buente/phantomjs,webmull/phantomjs,Vitallium/phantomjs,smasala/phantomjs,dongritengfei/phantomjs,dhendo/phantomjs,eceglov/phantomjs,smasala/phantomjs,raff/phantomjs,tmuelle2/phantomjs,shinate/phantom
js,AladdinSonni/phantomjs,bmotlaghFLT/FLT_PhantomJS,djmaze/phantomjs,angelman/phantomjs,youprofit/phantomjs,apanda/phantomjs-intercept,chauhanmohit/phantomjs,jkenn99/phantomjs,MaDKaTZe/phantomjs,woodpecker1/phantomjs,liorvh/phantomjs,rishilification/phantomjs,iver333/phantomjs,fentas/phantomjs,neraliu/tpjs,mapbased/phantomjs,bukalov/phantomjs,jjyycchh/phantomjs,JamesMGreene/phantomjs,mattvick/phantomjs,peakji/phantomjs,zackw/phantomjs,attilahorvath/phantomjs,thomasrogers03/phantomjs,zhulin2609/phantomjs,delighted/phantomjs,kyroskoh/phantomjs,Observer-Wu/phantomjs,shinate/phantomjs,Observer-Wu/phantomjs,dparshin/phantomjs,RobertoMalatesta/phantomjs,danigonza/phantomjs,hexid/phantomjs,JamesMGreene/phantomjs,liorvh/phantomjs,zackw/phantomjs,asrie/phantomjs,r3b/phantomjs,sharma1nitish/phantomjs,bkrukowski/phantomjs,mattvick/phantomjs,sharma1nitish/phantomjs,chauhanmohit/phantomjs,dongritengfei/phantomjs,dparshin/phantomjs,asrie/phantomjs,pcarrier-packaging/deb-phantomjs,chylli/phantomjs,AladdinSonni/phantomjs,cirrusone/phantom2,paulfitz/phantomjs,woodpecker1/phantomjs,klim-iv/phantomjs-qt5,jorik041/phantomjs,fentas/phantomjs,ariya/phantomjs,matepeter90/phantomjs,toanalien/phantomjs,cloudflare/phantomjs,Dinamize/phantomjs,eceglov/phantomjs,bprodoehl/phantomjs,zhulin2609/phantomjs,Dinamize/phantomjs,petermat/phantomjs,bkrukowski/phantomjs,eceglov/phantomjs,markhu/phantomjs,jguyomard/phantomjs,jorik041/phantomjs,iradul/phantomjs-clone,sporttech/phantomjs,cloudflare/phantomjs,brandingbrand/phantomjs,zhulin2609/phantomjs,dparshin/phantomjs,gitromand/phantomjs,apanda/phantomjs-intercept,matepeter90/phantomjs,admetricks/phantomjs,djmaze/phantomjs,ixiom/phantomjs,you21979/phantomjs,kyroskoh/phantomjs,tianzhihen/phantomjs,dongritengfei/phantomjs,rishilification/phantomjs,asrie/phantomjs,grevutiu-gabriel/phantomjs,jdar/phantomjs-modified,bjko/phantomjs,ChrisAntaki/phantomjs,hexid/phantomjs,thomasrogers03/phantomjs,jorik041/phantomjs,djmaze/phantomjs,VinceZK/phantomjs,Klaudit/phan
tomjs,fxtentacle/phantomjs,mapbased/phantomjs,bprodoehl/phantomjs,saisai/phantomjs,Deepakpatle/phantomjs,jjyycchh/phantomjs,VinceZK/phantomjs,bjko/phantomjs,iradul/phantomjs,unb-libraries/phantomjs,admetricks/phantomjs,NickelMedia/phantomjs,chauhanmohit/phantomjs,lattwood/phantomjs,pbrazdil/phantomjs,eceglov/phantomjs,smasala/phantomjs,saisai/phantomjs,ChrisAntaki/phantomjs,nin042/phantomjs,dhendo/phantomjs,farhi-naz/phantomjs,vietch2612/phantomjs,revolutionaryG/phantomjs,Klaudit/phantomjs,jkenn99/phantomjs,NickelMedia/phantomjs,jillesme/phantomjs,revolutionaryG/phantomjs,saisai/phantomjs,ixiom/phantomjs,mattvick/phantomjs,mattvick/phantomjs,pigshell/nhnick,raff/phantomjs,iver333/phantomjs,mattvick/phantomjs,linjeffrey/phantomjs,JingZhou0404/phantomjs,ChrisAntaki/phantomjs,cirrusone/phantom2,ezoic/phantomjs,peakji/phantomjs,cesarmarinhorj/phantomjs,mark-ignacio/phantomjs,klim-iv/phantomjs-qt5,bjko/phantomjs,smasala/phantomjs,cloudflare/phantomjs,wxkdesky/phantomjs,christoph-buente/phantomjs,StevenBlack/phantomjs,Deepakpatle/phantomjs,ChrisAntaki/phantomjs,bmotlaghFLT/FLT_PhantomJS,pcarrier-packaging/deb-phantomjs,admetricks/phantomjs,tinfoil/phantomjs,tmuelle2/phantomjs,attilahorvath/phantomjs,Klaudit/phantomjs,Observer-Wu/phantomjs,AladdinSonni/phantomjs,Lkhagvadelger/phantomjs,chylli/phantomjs,Medium/phantomjs-1,avinashkunuje/phantomjs,klickagent/phantomjs,vietch2612/phantomjs,eceglov/phantomjs,delighted/phantomjs,klickagent/phantomjs,yoki/phantomjs,xsyntrex/phantomjs,sharma1nitish/phantomjs,ChrisAntaki/phantomjs,r3b/phantomjs,houzhenggang/phantomjs,bjko/phantomjs,nin042/phantomjs,zackw/phantomjs,lseyesl/phantomjs,unb-libraries/phantomjs,cirrusone/phantom2,Andrey-Pavlov/phantomjs,wuxianghou/phantomjs,Vitallium/phantomjs,aljscott/phantomjs,ChrisAntaki/phantomjs,Tomtomgo/phantomjs,fentas/phantomjs,houzhenggang/phantomjs,martonw/phantomjs,sharma1nitish/phantomjs,grevutiu-gabriel/phantomjs,petermat/phantomjs,zhulin2609/phantomjs,bettiolo/phantomjs,linjeffrey/phantomjs
,mapbased/phantomjs,attilahorvath/phantomjs,aljscott/phantomjs,dongritengfei/phantomjs,iradul/phantomjs-clone,bukalov/phantomjs,JamesMGreene/phantomjs,martonw/phantomjs,bkrukowski/phantomjs,mapbased/phantomjs,ramanajee/phantomjs,Medium/phantomjs-1,pataquets/phantomjs,bukalov/phantomjs,etiennekruger/phantomjs-qt5,OCForks/phantomjs,sporttech/phantomjs,mattvick/phantomjs,bprodoehl/phantomjs,klim-iv/phantomjs-qt5,Klaudit/phantomjs,Deepakpatle/phantomjs,revolutionaryG/phantomjs,raff/phantomjs,bprodoehl/phantomjs,raff/phantomjs,rishilification/phantomjs,asrie/phantomjs,cesarmarinhorj/phantomjs,pataquets/phantomjs,JamesMGreene/phantomjs,ChrisAntaki/phantomjs,chirilo/phantomjs,likaiwalkman/phantomjs,chylli/phantomjs,DocuSignDev/phantomjs,StevenBlack/phantomjs,petermat/phantomjs,fentas/phantomjs,kinwahlai/phantomjs-ghostdriver,jorik041/phantomjs,houzhenggang/phantomjs,Deepakpatle/phantomjs,AladdinSonni/phantomjs,avinashkunuje/phantomjs,brandingbrand/phantomjs,Medium/phantomjs-1,admetricks/phantomjs,Observer-Wu/phantomjs,lseyesl/phantomjs,iver333/phantomjs,cloudflare/phantomjs,NickelMedia/phantomjs,bjko/phantomjs,jefleponot/phantomjs,RobertoMalatesta/phantomjs,zhengyongbo/phantomjs,vegetableman/phantomjs,markhu/phantomjs,vietch2612/phantomjs,cesarmarinhorj/phantomjs,xsyntrex/phantomjs,iradul/phantomjs-clone,apanda/phantomjs-intercept,wxkdesky/phantomjs,dparshin/phantomjs,gitromand/phantomjs,bmotlaghFLT/FLT_PhantomJS,joomel1/phantomjs,VinceZK/phantomjs,yoki/phantomjs,pigshell/nhnick,pataquets/phantomjs,cirrusone/phantom2,forzi/phantomjs_stradivari_fork,jillesme/phantomjs,Medium/phantomjs-1,revolutionaryG/phantomjs,S11001001/phantomjs,paulfitz/phantomjs,martonw/phantomjs,vegetableman/phantomjs,brandingbrand/phantomjs,cloudflare/phantomjs,ChrisAntaki/phantomjs,wxkdesky/phantomjs,gskachkov/phantomjs,gskachkov/phantomjs,chauhanmohit/phantomjs,angelman/phantomjs,joomel1/phantomjs,S11001001/phantomjs,MaDKaTZe/phantomjs,kyroskoh/phantomjs,unb-libraries/phantomjs,jguyomard/phantomjs,p
ataquets/phantomjs,viewdy/phantomjs2,vietch2612/phantomjs,likaiwalkman/phantomjs,Lkhagvadelger/phantomjs,MaDKaTZe/phantomjs,youprofit/phantomjs,MeteorAdminz/phantomjs,tinfoil/phantomjs,likaiwalkman/phantomjs,jjyycchh/phantomjs,thomasrogers03/phantomjs,asrie/phantomjs,pbrazdil/phantomjs,pbrazdil/phantomjs,skyeckstrom/phantomjs,NickelMedia/phantomjs,pigshell/nhnick,paulfitz/phantomjs,jdar/phantomjs-modified,zackw/phantomjs,chylli/phantomjs,bjko/phantomjs,eugene1g/phantomjs,iradul/phantomjs,PeterWangPo/phantomjs,Observer-Wu/phantomjs,chirilo/phantomjs,NickelMedia/phantomjs,jorik041/phantomjs,martonw/phantomjs,neraliu/tpjs,bukalov/phantomjs,iradul/phantomjs-clone,fxtentacle/phantomjs,jkburges/phantomjs,you21979/phantomjs,chauhanmohit/phantomjs,iver333/phantomjs,apanda/phantomjs-intercept,jkenn99/phantomjs,eceglov/phantomjs,thomasrogers03/phantomjs,gskachkov/phantomjs,Lkhagvadelger/phantomjs,tmuelle2/phantomjs,NickelMedia/phantomjs,neraliu/tainted-phantomjs,lseyesl/phantomjs,Klaudit/phantomjs,hexid/phantomjs,lattwood/phantomjs,Klaudit/phantomjs,Lkhagvadelger/phantomjs,likaiwalkman/phantomjs,pigshell/nhnick,zhulin2609/phantomjs,gskachkov/phantomjs,pbrazdil/phantomjs,vegetableman/phantomjs,RobertoMalatesta/phantomjs,apanda/phantomjs-intercept,sporttech/phantomjs,avinashkunuje/phantomjs,lattwood/phantomjs,saisai/phantomjs,dparshin/phantomjs,tinfoil/phantomjs,delighted/phantomjs,toanalien/phantomjs,astefanutti/phantomjs,wuxianghou/phantomjs,DocuSignDev/phantomjs,asrie/phantomjs,MaDKaTZe/phantomjs,Dinamize/phantomjs,vegetableman/phantomjs,bukalov/phantomjs,attilahorvath/phantomjs,linjeffrey/phantomjs,unb-libraries/phantomjs,vietch2612/phantomjs,Tomtomgo/phantomjs,neraliu/tainted-phantomjs,iradul/phantomjs-clone,djmaze/phantomjs,eugene1g/phantomjs,chylli/phantomjs,neraliu/tpjs,iradul/phantomjs-clone,you21979/phantomjs,nin042/phantomjs,jdar/phantomjs-modified,eceglov/phantomjs,zhulin2609/phantomjs,StevenBlack/phantomjs,ramanajee/phantomjs,attilahorvath/phantomjs,Andrey-Pavlov/p
hantomjs,brandingbrand/phantomjs,grevutiu-gabriel/phantomjs,jdar/phantomjs-modified,farhi-naz/phantomjs,sxhao/phantomjs,jefleponot/phantomjs,vietch2612/phantomjs,woodpecker1/phantomjs,jillesme/phantomjs,angelman/phantomjs,farhi-naz/phantomjs,nin042/phantomjs,webmull/phantomjs,kyroskoh/phantomjs,angelman/phantomjs,avinashkunuje/phantomjs,dongritengfei/phantomjs,dongritengfei/phantomjs,martonw/phantomjs,martonw/phantomjs,OCForks/phantomjs,bprodoehl/phantomjs,ezoic/phantomjs,Tomtomgo/phantomjs,xsyntrex/phantomjs,Lochlan/phantomjs,matepeter90/phantomjs,webmull/phantomjs,houzhenggang/phantomjs,MeteorAdminz/phantomjs,JamesMGreene/phantomjs,dhendo/phantomjs,jillesme/phantomjs,tinfoil/phantomjs,Observer-Wu/phantomjs,pbrazdil/phantomjs,jkburges/phantomjs,RobertoMalatesta/phantomjs,martonw/phantomjs,christoph-buente/phantomjs,vietch2612/phantomjs,martonw/phantomjs,JamesMGreene/phantomjs,bettiolo/phantomjs,chirilo/phantomjs,delighted/phantomjs,MaDKaTZe/phantomjs,astefanutti/phantomjs,bjko/phantomjs,raff/phantomjs,likaiwalkman/phantomjs,webmull/phantomjs,matepeter90/phantomjs,dongritengfei/phantomjs,webmull/phantomjs,OCForks/phantomjs,PeterWangPo/phantomjs,christoph-buente/phantomjs,tmuelle2/phantomjs,lseyesl/phantomjs,cesarmarinhorj/phantomjs,djmaze/phantomjs,joomel1/phantomjs,MaDKaTZe/phantomjs,StevenBlack/phantomjs,joomel1/phantomjs,StevenBlack/phantomjs,christoph-buente/phantomjs,VinceZK/phantomjs,ramanajee/phantomjs,saisai/phantomjs,bettiolo/phantomjs,jkburges/phantomjs,S11001001/phantomjs,chauhanmohit/phantomjs,jorik041/phantomjs,jdar/phantomjs-modified,klim-iv/phantomjs-qt5,markhu/phantomjs,asrie/phantomjs,RobertoMalatesta/phantomjs,zhengyongbo/phantomjs,nicksay/phantomjs,kinwahlai/phantomjs-ghostdriver,eceglov/phantomjs,yoki/phantomjs,Klaudit/phantomjs,Andrey-Pavlov/phantomjs,klickagent/phantomjs,VinceZK/phantomjs,xsyntrex/phantomjs,gitromand/phantomjs,r3b/phantomjs,ye11ow/phantomjs,dongritengfei/phantomjs,thomasrogers03/phantomjs,pigshell/nhnick,djmaze/phantomjs,mattvi
ck/phantomjs,neraliu/tpjs,ixiom/phantomjs,dhendo/phantomjs,jkenn99/phantomjs,jkenn99/phantomjs,etiennekruger/phantomjs-qt5,admetricks/phantomjs,christoph-buente/phantomjs,PeterWangPo/phantomjs,linjeffrey/phantomjs,zackw/phantomjs,smasala/phantomjs,youprofit/phantomjs,Observer-Wu/phantomjs,shinate/phantomjs,attilahorvath/phantomjs,grevutiu-gabriel/phantomjs,aljscott/phantomjs,angelman/phantomjs,thomasrogers03/phantomjs,VinceZK/phantomjs,apanda/phantomjs-intercept,forzi/phantomjs_stradivari_fork,bmotlaghFLT/FLT_PhantomJS,markhu/phantomjs,christoph-buente/phantomjs,cirrusone/phantom2,NickelMedia/phantomjs,Medium/phantomjs-1,likaiwalkman/phantomjs,jkburges/phantomjs,Tomtomgo/phantomjs,OCForks/phantomjs,wxkdesky/phantomjs,sporttech/phantomjs,brandingbrand/phantomjs,avinashkunuje/phantomjs,unb-libraries/phantomjs,kyroskoh/phantomjs,jkenn99/phantomjs,admetricks/phantomjs,toanalien/phantomjs,jguyomard/phantomjs,JingZhou0404/phantomjs,nicksay/phantomjs,ixiom/phantomjs,bukalov/phantomjs,eugene1g/phantomjs,zackw/phantomjs,tinfoil/phantomjs,danigonza/phantomjs,matepeter90/phantomjs,djmaze/phantomjs,danigonza/phantomjs,lseyesl/phantomjs,pbrazdil/phantomjs,sxhao/phantomjs,nin042/phantomjs,StevenBlack/phantomjs,VinceZK/phantomjs,rishilification/phantomjs,RobertoMalatesta/phantomjs,yoki/phantomjs,ramanajee/phantomjs,markhu/phantomjs,asrie/phantomjs,liorvh/phantomjs,delighted/phantomjs,smasala/phantomjs,woodpecker1/phantomjs,tinfoil/phantomjs,rishilification/phantomjs,klickagent/phantomjs,hexid/phantomjs,astefanutti/phantomjs,ramanajee/phantomjs,jguyomard/phantomjs,DocuSignDev/phantomjs,avinashkunuje/phantomjs,sharma1nitish/phantomjs,attilahorvath/phantomjs,MeteorAdminz/phantomjs,attilahorvath/phantomjs,yoki/phantomjs,bkrukowski/phantomjs,jjyycchh/phantomjs,DocuSignDev/phantomjs,joomel1/phantomjs,youprofit/phantomjs,avinashkunuje/phantomjs,iver333/phantomjs,jkenn99/phantomjs,toanalien/phantomjs,jorik041/phantomjs,mark-ignacio/phantomjs,eugene1g/phantomjs,Klaudit/phantomjs,JamesMGree
ne/phantomjs,you21979/phantomjs,dparshin/phantomjs,pbrazdil/phantomjs,tianzhihen/phantomjs,ye11ow/phantomjs,sxhao/phantomjs,smasala/phantomjs,ixiom/phantomjs,r3b/phantomjs,mattvick/phantomjs,cesarmarinhorj/phantomjs,jdar/phantomjs-modified,JingZhou0404/phantomjs,bkrukowski/phantomjs,jkenn99/phantomjs,tianzhihen/phantomjs,farhi-naz/phantomjs,ixiom/phantomjs,chauhanmohit/phantomjs,ye11ow/phantomjs,Andrey-Pavlov/phantomjs,pbrazdil/phantomjs,paulfitz/phantomjs,avinashkunuje/phantomjs,OCForks/phantomjs,viewdy/phantomjs2,AladdinSonni/phantomjs,lseyesl/phantomjs,skyeckstrom/phantomjs,ariya/phantomjs,linjeffrey/phantomjs,ye11ow/phantomjs,vegetableman/phantomjs,revolutionaryG/phantomjs,neraliu/tainted-phantomjs,djmaze/phantomjs,kinwahlai/phantomjs-ghostdriver,tinfoil/phantomjs,ramanajee/phantomjs,skyeckstrom/phantomjs,bmotlaghFLT/FLT_PhantomJS,zhengyongbo/phantomjs,pbrazdil/phantomjs,forzi/phantomjs_stradivari_fork,pataquets/phantomjs,jillesme/phantomjs,bprodoehl/phantomjs,chylli/phantomjs,kinwahlai/phantomjs-ghostdriver,sporttech/phantomjs,bkrukowski/phantomjs,RobertoMalatesta/phantomjs,kinwahlai/phantomjs-ghostdriver,JingZhou0404/phantomjs,Andrey-Pavlov/phantomjs,iver333/phantomjs,cesarmarinhorj/phantomjs,chauhanmohit/phantomjs,lattwood/phantomjs,fentas/phantomjs,fxtentacle/phantomjs,bettiolo/phantomjs,chirilo/phantomjs,ixiom/phantomjs,bettiolo/phantomjs,jorik041/phantomjs,sxhao/phantomjs,matepeter90/phantomjs,you21979/phantomjs,sxhao/phantomjs,vegetableman/phantomjs,smasala/phantomjs,Vitallium/phantomjs,sporttech/phantomjs,petermat/phantomjs,attilahorvath/phantomjs,tmuelle2/phantomjs,raff/phantomjs,AladdinSonni/phantomjs,brandingbrand/phantomjs,markhu/phantomjs,neraliu/tainted-phantomjs,paulfitz/phantomjs,PeterWangPo/phantomjs,youprofit/phantomjs,skyeckstrom/phantomjs,neraliu/tainted-phantomjs,gitromand/phantomjs,gitromand/phantomjs,gskachkov/phantomjs,saisai/phantomjs,jorik041/phantomjs,mattvick/phantomjs,jjyycchh/phantomjs,jkburges/phantomjs,NickelMedia/phantomjs,astefa
nutti/phantomjs,Dinamize/phantomjs,grevutiu-gabriel/phantomjs,petermat/phantomjs,joomel1/phantomjs,Tomtomgo/phantomjs,ariya/phantomjs,astefanutti/phantomjs,fxtentacle/phantomjs,iradul/phantomjs-clone,iver333/phantomjs,bkrukowski/phantomjs,pbrazdil/phantomjs,wuxianghou/phantomjs,StevenBlack/phantomjs,saisai/phantomjs,AladdinSonni/phantomjs,Tomtomgo/phantomjs,peakji/phantomjs,DocuSignDev/phantomjs,Lkhagvadelger/phantomjs,tmuelle2/phantomjs,bkrukowski/phantomjs,Klaudit/phantomjs,Lkhagvadelger/phantomjs,DocuSignDev/phantomjs,MeteorAdminz/phantomjs,sharma1nitish/phantomjs,kyroskoh/phantomjs,neraliu/tpjs,jdar/phantomjs-modified,klickagent/phantomjs,woodpecker1/phantomjs,nin042/phantomjs,S11001001/phantomjs,rishilification/phantomjs,peakji/phantomjs,astefanutti/phantomjs,JingZhou0404/phantomjs,wuxianghou/phantomjs,MaDKaTZe/phantomjs,revolutionaryG/phantomjs,neraliu/tainted-phantomjs,Lochlan/phantomjs,Lkhagvadelger/phantomjs,angelman/phantomjs,OCForks/phantomjs,saisai/phantomjs,chylli/phantomjs,chylli/phantomjs,gskachkov/phantomjs,ariya/phantomjs,likaiwalkman/phantomjs,zackw/phantomjs,kyroskoh/phantomjs,jefleponot/phantomjs,neraliu/tpjs,iradul/phantomjs-clone,danigonza/phantomjs,MeteorAdminz/phantomjs,mark-ignacio/phantomjs,joomel1/phantomjs,sharma1nitish/phantomjs,farhi-naz/phantomjs,toanalien/phantomjs,likaiwalkman/phantomjs,angelman/phantomjs,Lochlan/phantomjs,wxkdesky/phantomjs,klickagent/phantomjs,gitromand/phantomjs,nicksay/phantomjs,OCForks/phantomjs,sxhao/phantomjs,vegetableman/phantomjs,nicksay/phantomjs,bettiolo/phantomjs,lattwood/phantomjs,bettiolo/phantomjs,farhi-naz/phantomjs,sharma1nitish/phantomjs,xsyntrex/phantomjs,pigshell/nhnick,petermat/phantomjs,woodpecker1/phantomjs,liorvh/phantomjs,Observer-Wu/phantomjs,Lochlan/phantomjs,rishilification/phantomjs,r3b/phantomjs,liorvh/phantomjs,DocuSignDev/phantomjs,wuxianghou/phantomjs,bmotlaghFLT/FLT_PhantomJS,fentas/phantomjs,klim-iv/phantomjs-qt5,forzi/phantomjs_stradivari_fork,jkburges/phantomjs,dparshin/phantomjs,
neraliu/tpjs,OCForks/phantomjs,sxhao/phantomjs,cloudflare/phantomjs,angelman/phantomjs,mattvick/phantomjs,Lochlan/phantomjs,aljscott/phantomjs,Andrey-Pavlov/phantomjs,ixiom/phantomjs,youprofit/phantomjs,pataquets/phantomjs,brandingbrand/phantomjs,aljscott/phantomjs,NickelMedia/phantomjs,wuxianghou/phantomjs,attilahorvath/phantomjs,chirilo/phantomjs,admetricks/phantomjs,Lochlan/phantomjs,ezoic/phantomjs,tmuelle2/phantomjs,dhendo/phantomjs,lattwood/phantomjs,Tomtomgo/phantomjs,houzhenggang/phantomjs,raff/phantomjs,paulfitz/phantomjs,Klaudit/phantomjs,StevenBlack/phantomjs,woodpecker1/phantomjs,klickagent/phantomjs,gitromand/phantomjs,Dinamize/phantomjs,Deepakpatle/phantomjs,chauhanmohit/phantomjs,ChrisAntaki/phantomjs,yoki/phantomjs,chylli/phantomjs,zhulin2609/phantomjs,admetricks/phantomjs,MaDKaTZe/phantomjs,petermat/phantomjs,bprodoehl/phantomjs,ixiom/phantomjs,klickagent/phantomjs,iradul/phantomjs,cirrusone/phantom2,gskachkov/phantomjs,jkburges/phantomjs,zhengyongbo/phantomjs,kyroskoh/phantomjs,hexid/phantomjs,fxtentacle/phantomjs,jillesme/phantomjs,christoph-buente/phantomjs,pataquets/phantomjs,farhi-naz/phantomjs,delighted/phantomjs,linjeffrey/phantomjs,kyroskoh/phantomjs,peakji/phantomjs,delighted/phantomjs,martonw/phantomjs,mattvick/phantomjs,Deepakpatle/phantomjs,mapbased/phantomjs,vietch2612/phantomjs,paulfitz/phantomjs,sporttech/phantomjs,dongritengfei/phantomjs,you21979/phantomjs,tinfoil/phantomjs,iradul/phantomjs,chirilo/phantomjs,mark-ignacio/phantomjs,dparshin/phantomjs,cesarmarinhorj/phantomjs,asrie/phantomjs,lseyesl/phantomjs,PeterWangPo/phantomjs,saisai/phantomjs,MeteorAdminz/phantomjs,lattwood/phantomjs,yoki/phantomjs,yoki/phantomjs,AladdinSonni/phantomjs,etiennekruger/phantomjs-qt5,StevenBlack/phantomjs,jjyycchh/phantomjs,mapbased/phantomjs,JingZhou0404/phantomjs,klim-iv/phantomjs-qt5,bkrukowski/phantomjs,thomasrogers03/phantomjs,r3b/phantomjs,iver333/phantomjs,revolutionaryG/phantomjs,OCForks/phantomjs,jjyycchh/phantomjs,MeteorAdminz/phantomjs,Vinc
eZK/phantomjs,pigshell/nhnick,pcarrier-packaging/deb-phantomjs,cesarmarinhorj/phantomjs,eugene1g/phantomjs,zhulin2609/phantomjs,fxtentacle/phantomjs,liorvh/phantomjs,Lochlan/phantomjs,etiennekruger/phantomjs-qt5,PeterWangPo/phantomjs,tianzhihen/phantomjs,lattwood/phantomjs,mark-ignacio/phantomjs,chylli/phantomjs,tianzhihen/phantomjs,iradul/phantomjs-clone,xsyntrex/phantomjs,viewdy/phantomjs2,dparshin/phantomjs,fxtentacle/phantomjs,iver333/phantomjs,S11001001/phantomjs,jguyomard/phantomjs,nicksay/phantomjs,Deepakpatle/phantomjs,farhi-naz/phantomjs,brandingbrand/phantomjs,Vitallium/phantomjs,JamesMGreene/phantomjs,fentas/phantomjs,jkburges/phantomjs,linjeffrey/phantomjs,asrie/phantomjs,you21979/phantomjs,youprofit/phantomjs,Lochlan/phantomjs,Dinamize/phantomjs,jkburges/phantomjs,skyeckstrom/phantomjs,Lochlan/phantomjs,cirrusone/phantom2,martonw/phantomjs,jkenn99/phantomjs,MeteorAdminz/phantomjs,Medium/phantomjs-1,matepeter90/phantomjs,markhu/phantomjs,pcarrier-packaging/deb-phantomjs,bukalov/phantomjs,jefleponot/phantomjs,vietch2612/phantomjs,Dinamize/phantomjs,dhendo/phantomjs,sharma1nitish/phantomjs,Lochlan/phantomjs,likaiwalkman/phantomjs,toanalien/phantomjs,astefanutti/phantomjs,eceglov/phantomjs,paulfitz/phantomjs,bjko/phantomjs,mapbased/phantomjs,cesarmarinhorj/phantomjs,chirilo/phantomjs,mark-ignacio/phantomjs,pataquets/phantomjs,jkburges/phantomjs,neraliu/tainted-phantomjs,danigonza/phantomjs,ezoic/phantomjs,paulfitz/phantomjs,smasala/phantomjs,lattwood/phantomjs,RobertoMalatesta/phantomjs,chylli/phantomjs,angelman/phantomjs,liorvh/phantomjs,neraliu/tainted-phantomjs,bjko/phantomjs,farhi-naz/phantomjs,mapbased/phantomjs,linjeffrey/phantomjs,ramanajee/phantomjs,grevutiu-gabriel/phantomjs,wxkdesky/phantomjs,cesarmarinhorj/phantomjs,ramanajee/phantomjs,JingZhou0404/phantomjs,PeterWangPo/phantomjs,unb-libraries/phantomjs,Vitallium/phantomjs,gitromand/phantomjs,woodpecker1/phantomjs,brandingbrand/phantomjs,Tomtomgo/phantomjs,zhulin2609/phantomjs,iradul/phantomjs-cl
one,revolutionaryG/phantomjs,S11001001/phantomjs,etiennekruger/phantomjs-qt5,zhulin2609/phantomjs,ariya/phantomjs,bettiolo/phantomjs,hexid/phantomjs,skyeckstrom/phantomjs,houzhenggang/phantomjs,liorvh/phantomjs,neraliu/tainted-phantomjs,pcarrier-packaging/deb-phantomjs,shinate/phantomjs,apanda/phantomjs-intercept,pigshell/nhnick,bjko/phantomjs,eceglov/phantomjs,grevutiu-gabriel/phantomjs,kyroskoh/phantomjs,etiennekruger/phantomjs-qt5,saisai/phantomjs,ramanajee/phantomjs,JamesMGreene/phantomjs,MaDKaTZe/phantomjs,markhu/phantomjs,petermat/phantomjs,vegetableman/phantomjs,mapbased/phantomjs,eceglov/phantomjs,viewdy/phantomjs2,ezoic/phantomjs,AladdinSonni/phantomjs,NickelMedia/phantomjs,chauhanmohit/phantomjs,PeterWangPo/phantomjs,klim-iv/phantomjs-qt5,zhulin2609/phantomjs,dongritengfei/phantomjs,bettiolo/phantomjs,bmotlaghFLT/FLT_PhantomJS,ezoic/phantomjs,webmull/phantomjs,gitromand/phantomjs,JingZhou0404/phantomjs,chirilo/phantomjs,neraliu/tpjs,forzi/phantomjs_stradivari_fork,linjeffrey/phantomjs,S11001001/phantomjs,webmull/phantomjs,zhengyongbo/phantomjs,bukalov/phantomjs,admetricks/phantomjs,astefanutti/phantomjs,bprodoehl/phantomjs,toanalien/phantomjs,NickelMedia/phantomjs,etiennekruger/phantomjs-qt5,bprodoehl/phantomjs,lseyesl/phantomjs,ixiom/phantomjs,Deepakpatle/phantomjs,paulfitz/phantomjs,skyeckstrom/phantomjs,zhengyongbo/phantomjs,nin042/phantomjs,gskachkov/phantomjs,pataquets/phantomjs,youprofit/phantomjs,christoph-buente/phantomjs,dparshin/phantomjs,PeterWangPo/phantomjs,youprofit/phantomjs,klickagent/phantomjs,viewdy/phantomjs2,rishilification/phantomjs,AladdinSonni/phantomjs,Medium/phantomjs-1,lattwood/phantomjs,jefleponot/phantomjs,smasala/phantomjs,revolutionaryG/phantomjs,likaiwalkman/phantomjs,bukalov/phantomjs,bmotlaghFLT/FLT_PhantomJS,saisai/phantomjs,wxkdesky/phantomjs,djmaze/phantomjs,S11001001/phantomjs,OCForks/phantomjs,peakji/phantomjs,pbrazdil/phantomjs,skyeckstrom/phantomjs,klickagent/phantomjs,viewdy/phantomjs2,DocuSignDev/phantomjs,Andrey-P
avlov/phantomjs,unb-libraries/phantomjs,webmull/phantomjs,Andrey-Pavlov/phantomjs,jefleponot/phantomjs,mark-ignacio/phantomjs,paulfitz/phantomjs,nicksay/phantomjs,viewdy/phantomjs2,angelman/phantomjs,VinceZK/phantomjs,tianzhihen/phantomjs,Andrey-Pavlov/phantomjs,joomel1/phantomjs,fxtentacle/phantomjs,thomasrogers03/phantomjs,Dinamize/phantomjs,eugene1g/phantomjs,chirilo/phantomjs,vietch2612/phantomjs,bmotlaghFLT/FLT_PhantomJS,Lkhagvadelger/phantomjs,pataquets/phantomjs,eugene1g/phantomjs,petermat/phantomjs,pigshell/nhnick,ezoic/phantomjs,avinashkunuje/phantomjs,mapbased/phantomjs,klim-iv/phantomjs-qt5,pcarrier-packaging/deb-phantomjs,bettiolo/phantomjs,Deepakpatle/phantomjs,sharma1nitish/phantomjs,pigshell/nhnick,iver333/phantomjs,eugene1g/phantomjs,thomasrogers03/phantomjs,attilahorvath/phantomjs,djmaze/phantomjs,zackw/phantomjs,tmuelle2/phantomjs,aljscott/phantomjs,Tomtomgo/phantomjs,matepeter90/phantomjs,vietch2612/phantomjs,wuxianghou/phantomjs,cirrusone/phantom2,jillesme/phantomjs,DocuSignDev/phantomjs,bmotlaghFLT/FLT_PhantomJS,cirrusone/phantom2,woodpecker1/phantomjs,tinfoil/phantomjs,ChrisAntaki/phantomjs,farhi-naz/phantomjs,linjeffrey/phantomjs,wxkdesky/phantomjs,delighted/phantomjs,Lkhagvadelger/phantomjs,farhi-naz/phantomjs,Medium/phantomjs-1,yoki/phantomjs,RobertoMalatesta/phantomjs,jkenn99/phantomjs,fentas/phantomjs,tinfoil/phantomjs,toanalien/phantomjs,ezoic/phantomjs,fentas/phantomjs,grevutiu-gabriel/phantomjs,fxtentacle/phantomjs,Lkhagvadelger/phantomjs,raff/phantomjs,jdar/phantomjs-modified,webmull/phantomjs,Vitallium/phantomjs,hexid/phantomjs,shinate/phantomjs,neraliu/tpjs,wuxianghou/phantomjs,ye11ow/phantomjs,ye11ow/phantomjs,peakji/phantomjs,wxkdesky/phantomjs,ariya/phantomjs,xsyntrex/phantomjs,astefanutti/phantomjs,apanda/phantomjs-intercept,dhendo/phantomjs,ye11ow/phantomjs,Deepakpatle/phantomjs,admetricks/phantomjs,forzi/phantomjs_stradivari_fork,yoki/phantomjs,toanalien/phantomjs,Observer-Wu/phantomjs,VinceZK/phantomjs,nicksay/phantomjs,kinwah
lai/phantomjs-ghostdriver,rishilification/phantomjs,liorvh/phantomjs,wuxianghou/phantomjs,nicksay/phantomjs,mark-ignacio/phantomjs,Andrey-Pavlov/phantomjs,kyroskoh/phantomjs,lseyesl/phantomjs,Observer-Wu/phantomjs,JamesMGreene/phantomjs,rishilification/phantomjs,jguyomard/phantomjs,bkrukowski/phantomjs,admetricks/phantomjs,pcarrier-packaging/deb-phantomjs,r3b/phantomjs,liorvh/phantomjs,jkburges/phantomjs,bmotlaghFLT/FLT_PhantomJS,unb-libraries/phantomjs,webmull/phantomjs,jjyycchh/phantomjs,unb-libraries/phantomjs,shinate/phantomjs,sxhao/phantomjs,wxkdesky/phantomjs,dongritengfei/phantomjs,jdar/phantomjs-modified,Tomtomgo/phantomjs,AladdinSonni/phantomjs,jillesme/phantomjs,klickagent/phantomjs,kinwahlai/phantomjs-ghostdriver,sporttech/phantomjs,thomasrogers03/phantomjs,jillesme/phantomjs,neraliu/tainted-phantomjs,jkenn99/phantomjs,Vitallium/phantomjs,Observer-Wu/phantomjs,youprofit/phantomjs,StevenBlack/phantomjs,hexid/phantomjs,iradul/phantomjs,jorik041/phantomjs,wxkdesky/phantomjs,gitromand/phantomjs,wuxianghou/phantomjs,apanda/phantomjs-intercept,shinate/phantomjs,r3b/phantomjs,danigonza/phantomjs,skyeckstrom/phantomjs,houzhenggang/phantomjs,joomel1/phantomjs,zackw/phantomjs,chirilo/phantomjs,tmuelle2/phantomjs,sharma1nitish/phantomjs,JingZhou0404/phantomjs,vegetableman/phantomjs,chauhanmohit/phantomjs,ixiom/phantomjs,nin042/phantomjs,bettiolo/phantomjs,Lochlan/phantomjs,you21979/phantomjs,ezoic/phantomjs,jjyycchh/phantomjs,liorvh/phantomjs,zhengyongbo/phantomjs,likaiwalkman/phantomjs,gitromand/phantomjs,shinate/phantomjs,shinate/phantomjs,chirilo/phantomjs,apanda/phantomjs-intercept,VinceZK/phantomjs,lattwood/phantomjs,rishilification/phantomjs,MaDKaTZe/phantomjs,iradul/phantomjs,JingZhou0404/phantomjs,nin042/phantomjs,smasala/phantomjs,you21979/phantomjs,bjko/phantomjs,jjyycchh/phantomjs,christoph-buente/phantomjs,viewdy/phantomjs2,Klaudit/phantomjs,youprofit/phantomjs,nicksay/phantomjs,Medium/phantomjs-1,matepeter90/phantomjs,RobertoMalatesta/phantomjs,dhendo/p
hantomjs,fentas/phantomjs,jefleponot/phantomjs,raff/phantomjs,mark-ignacio/phantomjs,mapbased/phantomjs,angelman/phantomjs,jillesme/phantomjs,yoki/phantomjs,petermat/phantomjs,houzhenggang/phantomjs,xsyntrex/phantomjs,sxhao/phantomjs,shinate/phantomjs,JingZhou0404/phantomjs,joomel1/phantomjs,fxtentacle/phantomjs,cloudflare/phantomjs,iradul/phantomjs-clone,jjyycchh/phantomjs,danigonza/phantomjs,aljscott/phantomjs,markhu/phantomjs,linjeffrey/phantomjs,aljscott/phantomjs,houzhenggang/phantomjs,petermat/phantomjs,RobertoMalatesta/phantomjs,pcarrier-packaging/deb-phantomjs,grevutiu-gabriel/phantomjs,cloudflare/phantomjs,JamesMGreene/phantomjs,cesarmarinhorj/phantomjs,jefleponot/phantomjs,viewdy/phantomjs2,thomasrogers03/phantomjs,you21979/phantomjs,viewdy/phantomjs2,PeterWangPo/phantomjs,Deepakpatle/phantomjs,xsyntrex/phantomjs,bprodoehl/phantomjs,dhendo/phantomjs,bprodoehl/phantomjs,apanda/phantomjs-intercept,iradul/phantomjs,neraliu/tpjs,jdar/phantomjs-modified,aljscott/phantomjs,shinate/phantomjs,martonw/phantomjs | python/pyphantomjs/encoding.py | python/pyphantomjs/encoding.py | '''
This file is part of the PyPhantomJS project.
Copyright (C) 2011 James Roe <roejames12@hotmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import codecs
class Encode(object):
    """Hold a validated codec name, falling back to a default when unknown."""

    def __init__(self, encoding, default):
        # Keep the requested encoding only if the codecs registry knows
        # it; otherwise fall back to the supplied default.
        try:
            codecs.lookup(encoding)
        except LookupError:
            self.encoding = default
        else:
            self.encoding = encoding

    @property
    def name(self):
        """Canonical codec name as registered with the codecs module."""
        return codecs.lookup(self.encoding).name
| '''
This file is part of the PyPhantomJS project.
Copyright (C) 2011 James Roe <roejames12@hotmail.com>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import codecs
class Encode(object):
    """Hold a validated codec name, falling back to a default when unknown.

    Exposes the chosen encoding both as given (``encoding``) and
    lower-cased (``_encoding``).
    """

    def __init__(self, encoding, default):
        # Use the requested encoding only when the codecs registry knows it.
        try:
            codecs.lookup(encoding)
        except LookupError:
            chosen = default
        else:
            chosen = encoding
        self.encoding = chosen
        self._encoding = chosen.lower()

    @property
    def name(self):
        """Canonical codec name as registered with the codecs module."""
        return codecs.lookup(self.encoding).name
| bsd-3-clause | Python |
815d758f74e01bc7a460e211ffb9cb81fedb9726 | add 0002 | Yrthgze/prueba-sourcetree2,Yrthgze/prueba-sourcetree2,Show-Me-the-Code/python,Jaccorot/python,starlightme/python,Yrthgze/prueba-sourcetree2,haiyangd/python-show-me-the-code-,luoxufeiyan/python,Yrthgze/prueba-sourcetree2,JiYouMCC/python,luoxufeiyan/python,Show-Me-the-Code/python,JiYouMCC/python,renzongxian/Show-Me-the-Code,renzongxian/Show-Me-the-Code,Show-Me-the-Code/python,Jaccorot/python,Yrthgze/prueba-sourcetree2,renzongxian/Show-Me-the-Code,renzongxian/Show-Me-the-Code,JiYouMCC/python,starlightme/python,haiyangd/python-show-me-the-code-,Yrthgze/prueba-sourcetree2,Jaccorot/python,starlightme/python,JiYouMCC/python,renzongxian/Show-Me-the-Code,Show-Me-the-Code/python,Jaccorot/python,luoxufeiyan/python,Show-Me-the-Code/python,JiYouMCC/python,luoxufeiyan/python,haiyangd/python-show-me-the-code-,starlightme/python,haiyangd/python-show-me-the-code-,haiyangd/python-show-me-the-code-,Jaccorot/python,luoxufeiyan/python,Show-Me-the-Code/python,starlightme/python | Jaccorot/0002/0002.py | Jaccorot/0002/0002.py | #!/usr/local/bin/python
#coding=utf-8
#第 0002 题:将 0001 题生成的 200 个激活码(或者优惠券)保存到 MySQL 关系型数据库中。
import uuid
import MySQLdb
def create_code(num, length):
    """Generate `num` unique activation codes, each `length` characters long.

    Codes are prefixes of UUID1 hex strings (32 hex chars, so `length`
    should be <= 32 for the prefix to be meaningful).  A set gives O(1)
    duplicate detection instead of the original O(n) list scan per
    candidate.

    Returns a list of `num` distinct code strings.
    """
    codes = set()
    while len(codes) < num:
        # uuid1().hex is str(uuid1()) with the dashes already removed
        codes.add(uuid.uuid1().hex[:length])
    return list(codes)
def save_to_mysql(num_list):
    """Persist the generated activation codes into MySQL.

    Creates database ``activecode_db`` and table ``active_codes`` if they
    do not already exist, then bulk-inserts every code in ``num_list``.

    NOTE(review): credentials are hard-coded and assume a local MySQL
    server on port 3306 -- confirm before reuse.
    """
    conn = MySQLdb.connect(host='localhost', user='root', passwd='aaaa', port=3306)
    cur = conn.cursor()
    sql_create_database = 'create database if not exists activecode_db'
    cur.execute(sql_create_database)
    conn.select_db("activecode_db")
    sql_create_table = 'create table if not exists active_codes(active_code char(32))'
    cur.execute(sql_create_table)
    # executemany parameterises each value (%s), so codes are escaped safely
    cur.executemany('insert into active_codes values(%s)', num_list)
    conn.commit()
    cur.close()
    conn.close()


# Generate 200 codes of 20 characters each and store them.
code_num = create_code(200, 20)
#print code_num
save_to_mysql(code_num)
| mit | Python | |
c3e8a9a60410ca4494038ba9f3a774b960a8a29e | Create quiz3.py | jeimynoriega/uip-prog3 | Laboratorios/quiz3.py | Laboratorios/quiz3.py |
segundos = 0
while chance < 6:
mints_seg = int (("ingrese el tiempo en segundos:"))
chance +=1
if mints_seg / 60
segundos =60 time_seg%60
print (segundos)
| mit | Python | |
71a6d0a032896f4ef2e9a4cda541d142f2c48171 | Add unittests for environment handler. | atmtools/typhon,atmtools/typhon | typhon/tests/test_environment.py | typhon/tests/test_environment.py | # -*- coding: utf-8 -*-
"""Testing the environment/configuration handler.
"""
import os
from copy import copy
import pytest
from typhon import environment
class TestEnvironment:
    """Testing the environment handler."""

    def setup_method(self):
        """Run all test methods with an empty environment."""
        # Save a (shallow) copy of the real environment, then rebind
        # os.environ to an empty dict so each test starts from a clean
        # slate.  Presumably environment.environ proxies os.environ
        # dynamically for this to work -- TODO confirm.
        self.env = copy(os.environ)
        os.environ = {}

    def teardown_method(self):
        """Restore old environment."""
        os.environ = self.env

    def test_get_environment_variables(self):
        """Test if environment variables are considered."""
        # Writes through os.environ must be visible via environment.environ.
        os.environ['TYPHON_ENV_TEST'] = 'TEST_VALUE'
        assert environment.environ['TYPHON_ENV_TEST'] == 'TEST_VALUE'

    def test_set_environment_variables(self):
        """Test if environment variables are updated."""
        # Writes through environment.environ must be visible via os.environ.
        environment.environ['TYPHON_ENV_TEST'] = 'TEST_VALUE'
        assert os.environ['TYPHON_ENV_TEST'] == 'TEST_VALUE'

    def test_undefined_variable(self):
        """Test behavior for undefined variables."""
        # Lookup of an unset variable is expected to raise KeyError.
        with pytest.raises(KeyError):
            environment.environ['TYPHON_ENV_TEST']
| mit | Python | |
d84a0b0d50fb4d01b2a2354d5317afd181f1053c | Add Random Forest Regression in Python | a-holm/MachinelearningAlgorithms,a-holm/MachinelearningAlgorithms | Regression/RandomForestRegression/regularRandomForestRegression.py | Regression/RandomForestRegression/regularRandomForestRegression.py | # -*- coding: utf-8 -*-
"""Random Forest Regression for machine learning.
Random forest algorithm is a supervised classification algorithm. As the name
suggest, this algorithm creates the forest with a number of decision trees.
In general, the more trees in the forest the more robust the forest looks like.
In the same way in the random forest classifier, the higher the number of trees
in the forest gives the high accuracy results.
Example:
$ python regularRandomForestRegression.py
Todo:
*
"""
# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.ensemble import RandomForestRegressor
# from sklearn.preprocessing import StandardScaler
# from sklearn.model_selection import train_test_split
# Importing the dataset
dataset = pd.read_csv('Position_Salaries.csv')
features = dataset.iloc[:, 1:2].values
labels = dataset.iloc[:, 2].values
# Splitting the Dataset into a Training set and a Test set
"""feature_train, feature_test, label_train, label_test = train_test_split(
features, labels, test_size=0.2)
"""
# Feature scaling, normalize scale is important. Especially on algorithms
# involving euclidian distance. Two main feature scaling formulas are:
# Standardisation: x_stand = (x-mean(x))/(standard_deviation(x))
# Normalisation: x_norm = (x-min(x))/(max(x)-min(x))
"""sc_feature = StandardScaler()
feature_train = sc_feature.fit_transform(feature_train)
feature_test = sc_feature.transform(feature_test)
sc_labels = StandardScaler()
labels_train = sc_labels.fit_transform(labels_train)
labels_test = sc_labels.transform(labels_test)
"""
# Fit the Random Forest Regression to the dataset
regressor = RandomForestRegressor(n_estimators=310, random_state=0)
regressor.fit(features, labels)
# Predict new result with the Random Forest Regression
y_pred = regressor.predict(6.5)
# Visualising the regression results with smoother curve
x_grid = np.arange(min(features), max(features), 0.01)
x_grid = x_grid.reshape((len(x_grid), 1))
plt.scatter(features, labels, color='r')
plt.plot(x_grid, regressor.predict(x_grid), color='b')
plt.title('Truth or Bluff (Random Forest Regression)')
plt.xlabel('Position level')
plt.ylabel('Salary')
plt.show()
| mit | Python | |
8bdab0460cf280a63538e8c56650a90109cda283 | add PermMissingElem.py - working | mickeyshaughnessy/Codility-examples | PermMissinElem.py | PermMissinElem.py | def solution(A):
euler = (len(A) + 1) * (len(A) + 2) / 2
return euler - sum(A)
| mit | Python | |
8f60b540e44fd13787c11303d81f570861c74bcf | make M5_PATH a real search path | vovojh/gem5,pombredanne/http-repo.gem5.org-gem5-,vovojh/gem5,pombredanne/http-repo.gem5.org-gem5-,pombredanne/http-repo.gem5.org-gem5-,vovojh/gem5,pombredanne/http-repo.gem5.org-gem5-,vovojh/gem5,pombredanne/http-repo.gem5.org-gem5-,pombredanne/http-repo.gem5.org-gem5-,vovojh/gem5,vovojh/gem5,pombredanne/http-repo.gem5.org-gem5-,vovojh/gem5 | configs/common/SysPaths.py | configs/common/SysPaths.py | # Copyright (c) 2006 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ali Saidi
import os, sys
from os.path import isdir, join as joinpath
from os import environ as env
config_path = os.path.dirname(os.path.abspath(__file__))
config_root = os.path.dirname(config_path)
def searchpath(path, file):
    # Return the first existing `file` under any directory in `path`.
    for p in path:
        f = joinpath(p, file)
        if os.path.exists(f):
            return f
    raise IOError, "Can't find file '%s' on path." % file

def disk(file):
    # Locate a disk image on the lazily-initialised disk search path.
    system()
    return searchpath(disk.path, file)

def binary(file):
    # Locate a system binary on the lazily-initialised binary search path.
    system()
    return searchpath(binary.path, file)

def script(file):
    # Locate a boot script (always under the config tree).
    system()
    return searchpath(script.path, file)

def system():
    # Lazily initialise the search paths from $M5_PATH (colon separated),
    # falling back to the historical default locations.
    if not system.path:
        try:
            path = env['M5_PATH'].split(':')
        except KeyError:
            path = [ '/dist/m5/system', '/n/poolfs/z/dist/m5/system' ]

        # filter out non-existent directories
        system.path = filter(os.path.isdir, path)

        if not system.path:
            raise IOError, "Can't find a path to system files."

    if not binary.path:
        binary.path = [joinpath(p, 'binaries') for p in system.path]
    if not disk.path:
        disk.path = [joinpath(p, 'disks') for p in system.path]
    if not script.path:
        script.path = [joinpath(config_root, 'boot')]

# Function attributes double as cached module-level state (Python 2 idiom);
# None means "not yet initialised by system()".
system.path = None
binary.path = None
disk.path = None
script.path = None
| # Copyright (c) 2006 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Ali Saidi
import os, sys
from os.path import isdir, join as joinpath
from os import environ as env
config_path = os.path.dirname(os.path.abspath(__file__))
config_root = os.path.dirname(config_path)
def disk(file):
system()
return joinpath(disk.dir, file)
def binary(file):
system()
return joinpath(binary.dir, file)
def script(file):
system()
return joinpath(script.dir, file)
def system():
if not system.dir:
try:
path = env['M5_PATH'].split(':')
except KeyError:
path = [ '/dist/m5/system', '/n/poolfs/z/dist/m5/system' ]
for system.dir in path:
if os.path.isdir(system.dir):
break
else:
raise ImportError, "Can't find a path to system files."
if not binary.dir:
binary.dir = joinpath(system.dir, 'binaries')
if not disk.dir:
disk.dir = joinpath(system.dir, 'disks')
if not script.dir:
script.dir = joinpath(config_root, 'boot')
system.dir = None
binary.dir = None
disk.dir = None
script.dir = None
| bsd-3-clause | Python |
32d12ae035d1c8cebd3a163f9e35c538628e5bc7 | Add test_message.py | pvital/patchew,pvital/patchew,pvital/patchew,pvital/patchew | tests/test_message.py | tests/test_message.py | #!/usr/bin/env python3
#
# Copyright 2016 Red Hat, Inc.
#
# Authors:
# Fam Zheng <famz@redhat.com>
#
# This work is licensed under the MIT License. Please see the LICENSE file or
# http://opensource.org/licenses/MIT.
import sys
import os
import time
import datetime
from patchewtest import PatchewTestCase, main
class ProjectTest(PatchewTestCase):
    # Exercises Message.get_age() human-readable age formatting.

    def setUp(self):
        self.create_superuser()

    def test_0_second(self):
        # A message dated "now" should report an age of exactly "0 second".
        from api.models import Message
        message = Message()
        message.date = datetime.datetime.utcnow()
        age = message.get_age()
        self.assertEqual(age, "0 second")

    def test_now(self):
        # A message dated slightly in the future (100s ahead) reports "now".
        # NOTE(review): this uses fromtimestamp() (naive local time) while
        # test_0_second uses utcnow(); presumably get_age() compares
        # against UTC -- confirm, as the mix can misbehave off-UTC.
        from api.models import Message
        message = Message()
        dt = datetime.datetime.fromtimestamp(time.time() + 100)
        message.date = dt
        age = message.get_age()
        self.assertEqual(age, "now")

    def test_1_day(self):
        # 25 hours in the past is expected to round down to "1 day".
        from api.models import Message
        message = Message()
        dt = datetime.datetime.fromtimestamp(time.time() - 3600 * 25)
        message.date = dt
        age = message.get_age()
        self.assertEqual(age, "1 day")

if __name__ == '__main__':
    main()
| mit | Python | |
a965c542e8a2ea4bb74e522eae34161d8a6c3efa | Add minimal product test | ooz/epages-rest-python,ooz/epages-rest-python | tests/test_product.py | tests/test_product.py | # -*- coding: utf-8 -*-
import unittest
import os
from context import epages
class TestProduct(unittest.TestCase):
client = None
product_service = None
product_id = None
@classmethod
def setUpClass(cls):
host = os.environ['EPAGES_HOST']
shop = os.environ['EPAGES_SHOP']
token = os.environ['EPAGES_TOKEN']
TestProduct.client = epages.HTTPClient(host, shop, token)
TestProduct.product_service = epages.ProductService(TestProduct.client)
def setUp(self):
payload = {
"productNumber": "1337",
"name": "epages rest API test product",
"shortDescription": "Awesome product",
"description": "This is a brand new product",
"manufacturer": "Awesome Products Company",
"price": 13.37,
}
params = {
"locale": "en_GB",
"currency": "EUR",
}
try:
response = TestProduct.client.post(u"/products", params=params, json=payload)
TestProduct.product_id = response["productId"]
except epages.RESTError, error:
print(unicode(error))
def test_shop(self):
pass
def tearDown(self):
try:
if TestProduct.product_id is not None:
status_code = TestProduct.client.delete(u"/products/" + TestProduct.product_id)
self.assertEquals(status_code, 204, "DELETE on product should yield 204!")
except epages.RESTError, error:
print(unicode(error))
@classmethod
def tearDownClass(cls):
pass
if __name__ == '__main__':
unittest.main()
| mit | Python | |
cf8ff340597d29431eaed8265a67205a1b021ee7 | add host_evacuate task | unitedstack/rock,unitedstack/rock | rock/tasks/host_evacuate.py | rock/tasks/host_evacuate.py | from flow_utils import BaseTask
from actions import NovaAction
from server_evacuate import ServerEvacuateTask
import logging
class HostEvacuateTask(BaseTask,NovaAction):
def execute(self, host):
n_client = self._get_client()
evacuated_host = host
evacuable_servers = n_client.servers.list(
search_opts={'host':evacuated_host,
'all_tenants':1})
evacuated_servers = list()
for server in evacuable_servers:
logging.debug("Processing %s" % server)
if hasattr(server,'id'):
response = ServerEvacuateTask().execute(server.id,True)
if response['accepted']:
logging.info("Evacuated %s from %s: %s" %
(response["uuid"], evacuated_host, response["reason"]))
evacuated_servers.append(server)
else:
logging.error("Evacuation of %s on %s failed: %s" %
(response["uuid"], evacuated_host, response["reason"]))
time.sleep(2)
else:
logging.error("Could not evacuate instance: %s" % server.to_dict())
| apache-2.0 | Python | |
ceb3c0535f2701d595d440552d60da876d7cd0b8 | Move some functions from 'model.utils' to 'core.xrf_utils' | NSLS-II-HXN/PyXRF,NSLS-II-HXN/PyXRF,NSLS-II/PyXRF | pyxrf/core/xrf_utils.py | pyxrf/core/xrf_utils.py | import xraylib
def parse_compound_formula(compound_formula):
r"""
Parses the chemical formula of a compound and returns the dictionary,
which contains element name, atomic number, number of atoms and mass fraction
in the compound.
Parameters
----------
compound_formula: str
chemical formula of the compound in the form ``FeO2``, ``CO2`` or ``Fe``.
Element names must start with capital letter.
Returns
-------
dictionary of dictionaries, data on each element in the compound: key -
sybolic element name, value - a dictionary that contains ``AtomicNumber``,
``nAtoms`` and ``massFraction`` of the element. The elements are sorted
in the order of growing atomic number.
Raises
------
RuntimeError is raised if compound formula cannot be parsed
"""
xraylib.SetErrorMessages(0) # This is supposed to stop XRayLib from printing
# internal error messages, but it doesn't work
try:
compound_data = xraylib.CompoundParser(compound_formula)
except SystemError:
msg = f"Invalid chemical formula '{compound_formula}' is passed, parsing failed"
raise RuntimeError(msg)
# Now create more manageable structure
compound_dict = {}
for e_an, e_mf, e_na in zip(compound_data["Elements"],
compound_data["massFractions"],
compound_data["nAtoms"]):
e_name = xraylib.AtomicNumberToSymbol(e_an)
compound_dict[e_name] = {"AtomicNumber": e_an,
"nAtoms": e_na,
"massFraction": e_mf}
return compound_dict
def split_compound_mass(compound_formula, compound_mass):
    r"""
    Compute the mass of each element in a compound from its total mass.

    Parameters
    ----------
    compound_formula: str
        chemical formula of the compound in the form ``FeO2``, ``CO2`` or
        ``Fe``. Element names must start with capital letter.
    compound_mass: float
        total mass of the compound

    Returns
    -------
    dictionary: key - symbolic element name, value - mass of the element

    Raises
    ------
    RuntimeError is raised if compound formula cannot be parsed
    """
    # Each element's share of the total is its mass fraction in the compound.
    return {
        element: info["massFraction"] * compound_mass
        for element, info in parse_compound_formula(compound_formula).items()
    }
| bsd-3-clause | Python | |
2a6ec396512c435413f6e3848d1448af839fa9a6 | Add unittests for FindQuery | winguru/graphite-api,winguru/graphite-api | tests/test_storage.py | tests/test_storage.py | import time
from graphite_api.storage import FindQuery
from . import TestCase
class StorageTestCase(TestCase):
    # Checks FindQuery's repr() formatting of time bounds.

    def test_find_query(self):
        # Unbounded ends render as '*'; numeric bounds render via time.ctime().
        end = int(time.time())
        start = end - 3600
        query = FindQuery('collectd', None, None)
        self.assertEqual(repr(query), '<FindQuery: collectd from * until *>')

        query = FindQuery('collectd', start, None)
        self.assertEqual(repr(query), '<FindQuery: collectd from %s until *>'
                         % time.ctime(start))

        query = FindQuery('collectd', None, end)
        self.assertEqual(repr(query), '<FindQuery: collectd from * until %s>'
                         % time.ctime(end))
e4396938425bc27fc730d580a6cd4ee6e3fd09e9 | Remove v1.0 and v1.1 API from version info. | vivekanand1101/neutron,vbannai/neutron,JianyuWang/neutron,yanheven/neutron,takeshineshiro/neutron,oeeagle/quantum,aristanetworks/neutron,armando-migliaccio/neutron,mattt416/neutron,suneeth51/neutron,NeCTAR-RC/neutron,ykaneko/neutron,pnavarro/neutron,aristanetworks/arista-ovs-quantum,ykaneko/quantum,yamahata/tacker,glove747/liberty-neutron,leeseuljeong/leeseulstack_neutron,kaiweifan/vse-lbaas-plugin-poc,adelina-t/neutron,Stavitsky/neutron,CiscoSystems/neutron,paninetworks/neutron,MaximNevrov/neutron,citrix-openstack-build/neutron,liqin75/vse-vpnaas-plugin,cernops/neutron,aristanetworks/arista-ovs-quantum,waltBB/neutron_read,FreescaleSemiconductor/quantum,igor-toga/local-snat,rossella/neutron,oeeagle/quantum,openstack/neutron,chitr/neutron,Stavitsky/neutron,silenci/neutron,mmnelemane/neutron,ntt-sic/neutron,neoareslinux/neutron,CiscoSystems/vespa,yamahata/neutron,kaiweifan/vse-lbaas-plugin-poc,liqin75/vse-vpnaas-plugin,ykaneko/quantum,bgxavier/neutron,silenci/neutron,ykaneko/neutron,SmartInfrastructures/neutron,kaiweifan/vse-lbaas-plugin-poc,vijayendrabvs/ssl-neutron,leeseuljeong/leeseulstack_neutron,watonyweng/neutron,sasukeh/neutron,bigswitch/neutron,gopal1cloud/neutron,vbannai/neutron,Metaswitch/calico-neutron,Comcast/neutron,shahbazn/neutron,sajuptpm/neutron-ipam,ntt-sic/neutron,rdo-management/neutron,ykaneko/quantum,armando-migliaccio/neutron,gkotton/neutron,netscaler/neutron,psiwczak/quantum,kaiweifan/neutron,FreescaleSemiconductor/quantum,alexandrucoman/vbox-neutron-agent,barnsnake351/neutron,skyddv/neutron,sajuptpm/neutron-ipam,wenhuizhang/neutron,yuewko/neutron,Brocade-OpenSource/OpenStack-DNRM-Neutron,netscaler/neutron,kaiweifan/vse-lbaas-plugin-poc,rdo-management/neutron,cisco-openstack/neutron,leeseulstack/openstack,Juniper/contrail-dev-neutron,CiscoSystems/neutron,aristanetworks/arista-ovs-quantum,jumpojoy/neutron,noironetworks/neutron,wolverineav/neutron,beagles/neut
ron_hacking,FreescaleSemiconductor/quantum,apporc/neutron,javaos74/neutron,yanheven/neutron,openstack/neutron,eayunstack/neutron,eonpatapon/neutron,rickerc/neutron_audit,gkotton/neutron,armando-migliaccio/neutron,leeseuljeong/leeseulstack_neutron,watonyweng/neutron,paninetworks/neutron,ntt-sic/neutron,tpaszkowski/quantum,tpaszkowski/quantum,swdream/neutron,Comcast/neutron,apporc/neutron,waltBB/neutron_read,dhanunjaya/neutron,kaiweifan/neutron,aristanetworks/arista-ovs-quantum,mandeepdhami/neutron,tpaszkowski/quantum,jacknjzhou/neutron,asgard-lab/neutron,yuewko/neutron,projectcalico/calico-neutron,CiscoSystems/quantum,jerryz1982/neutron,CiscoSystems/vespa,zhhf/charging,JianyuWang/neutron,yamahata/tacker,cloudbase/neutron,jerryz1982/neutron,vijayendrabvs/hap,glove747/liberty-neutron,rossella/neutron,barnsnake351/neutron,adelina-t/neutron,rickerc/neutron_audit,Comcast/neutron,CiscoSystems/vespa,gopal1cloud/neutron,wolverineav/neutron,yamt/neutron,beagles/neutron_hacking,blueboxgroup/neutron,bgxavier/neutron,vivekanand1101/neutron,shahbazn/neutron,dhanunjaya/neutron,psiwczak/quantum,gkotton/neutron,Juniper/neutron,psiwczak/quantum,cloudbase/neutron-virtualbox,vijayendrabvs/ssl-neutron,antonioUnina/neutron,huntxu/neutron,sebrandon1/neutron,yamt/neutron,dims/neutron,JioCloud/neutron,liqin75/vse-vpnaas-plugin,takeshineshiro/neutron,alexandrucoman/vbox-neutron-agent,yamt/neutron,bigswitch/neutron,MaximNevrov/neutron,rossella/neutron,ykaneko/quantum,klmitch/neutron,citrix-openstack-build/neutron,magic0704/neutron,sebrandon1/neutron,armando-migliaccio/neutron,vveerava/Openstack,aristanetworks/neutron,kaiweifan/neutron,vveerava/Openstack,huntxu/neutron,yamahata/neutron,mmnelemane/neutron,rickerc/neutron_audit,openstack/neutron,NeCTAR-RC/neutron,JioCloud/neutron,blueboxgroup/neutron,yamt/neutron,redhat-openstack/neutron,vbannai/neutron,cisco-openstack/neutron,tpaszkowski/quantum,SamYaple/neutron,miyakz1192/neutron,klmitch/neutron,vijayendrabvs/hap,leeseulstack/openstack,beagles
/neutron_hacking,cloudbase/neutron-virtualbox,redhat-openstack/neutron,magic0704/neutron,virtualopensystems/neutron,Metaswitch/calico-neutron,Juniper/neutron,miyakz1192/neutron,CiscoSystems/quantum,netscaler/neutron,igor-toga/local-snat,skyddv/neutron,SamYaple/neutron,jacknjzhou/neutron,noironetworks/neutron,mahak/neutron,vijayendrabvs/ssl-neutron,vveerava/Openstack,CiscoSystems/quantum,rossella/neutron,CiscoSystems/neutron,neoareslinux/neutron,Brocade-OpenSource/OpenStack-DNRM-Neutron,liqin75/vse-vpnaas-plugin,zhhf/charging,eayunstack/neutron,SmartInfrastructures/neutron,blueboxgroup/neutron,chitr/neutron,asgard-lab/neutron,mandeepdhami/neutron,FreescaleSemiconductor/quantum,cernops/neutron,Juniper/contrail-dev-neutron,yamahata/neutron,sasukeh/neutron,mahak/neutron,vijayendrabvs/hap,infobloxopen/neutron,javaos74/neutron,virtualopensystems/neutron,projectcalico/calico-neutron,virtualopensystems/neutron,Brocade-OpenSource/OpenStack-DNRM-Neutron,citrix-openstack-build/neutron,mattt416/neutron,sajuptpm/neutron-ipam,yamahata/tacker,swdream/neutron,dims/neutron,infobloxopen/neutron,leeseulstack/openstack,ykaneko/neutron,wenhuizhang/neutron,suneeth51/neutron,mahak/neutron,psiwczak/quantum,pnavarro/neutron,Juniper/neutron,antonioUnina/neutron,zhhf/charging,cloudbase/neutron,Juniper/contrail-dev-neutron,jumpojoy/neutron,eonpatapon/neutron | quantum/api/versions.py | quantum/api/versions.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Citrix Systems.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import webob.dec
from quantum.api.views import versions as versions_view
from quantum import wsgi
LOG = logging.getLogger(__name__)
class Versions(object):
    # WSGI application that reports the available Quantum API versions.

    @classmethod
    def factory(cls, global_config, **local_config):
        # PasteDeploy-style app factory entry point.
        return cls()

    @webob.dec.wsgify(RequestClass=wsgi.Request)
    def __call__(self, req):
        """Respond to a request for all Quantum API versions."""
        version_objs = [
            {
                "id": "v2.0",
                "status": "CURRENT",
            },
        ]

        # Version discovery is only served from the root path.
        if req.path != '/':
            return webob.exc.HTTPNotFound()

        builder = versions_view.get_view_builder(req)
        versions = [builder.build(version) for version in version_objs]
        response = dict(versions=versions)
        # Attribute hints used by the XML serializer; ignored for JSON.
        metadata = {
            "application/xml": {
                "attributes": {
                    "version": ["status", "id"],
                    "link": ["rel", "href"],
                }
            }
        }

        # Serialize in whichever content type the client negotiated.
        content_type = req.best_match_content_type()
        body = (wsgi.Serializer(metadata=metadata).
                serialize(response, content_type))

        response = webob.Response()
        response.content_type = content_type
        response.body = body
        return response
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 Citrix Systems.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import webob.dec
from quantum.api.views import versions as versions_view
from quantum import wsgi
LOG = logging.getLogger(__name__)
class Versions(object):
@classmethod
def factory(cls, global_config, **local_config):
return cls()
@webob.dec.wsgify(RequestClass=wsgi.Request)
def __call__(self, req):
"""Respond to a request for all Quantum API versions."""
version_objs = [
{
"id": "v1.0",
"status": "DEPRECATED",
},
{
"id": "v1.1",
"status": "CURRENT",
},
{
"id": "v2.0",
"status": "PROPOSED",
},
]
if req.path != '/':
return webob.exc.HTTPNotFound()
builder = versions_view.get_view_builder(req)
versions = [builder.build(version) for version in version_objs]
response = dict(versions=versions)
metadata = {
"application/xml": {
"attributes": {
"version": ["status", "id"],
"link": ["rel", "href"],
}
}
}
content_type = req.best_match_content_type()
body = (wsgi.Serializer(metadata=metadata).
serialize(response, content_type))
response = webob.Response()
response.content_type = content_type
response.body = body
return response
| apache-2.0 | Python |
5db291b8a745f8dc640e7cdc7a274535abcc63af | Create rPiEinkQR.py | bhive01/rPiEinkQR | rPiEinkQR.py | rPiEinkQR.py | import os
from PIL import Image
from epyper.displayCOGProcess import Display
from epyper.displayController import DisplayController
# code to create QR code of good size for eink screen
# qrencode -o qrcode.png -s 7 -l L -v 1 -m 1 "TestThree003"
QRname = "qrencode -o qrcode.png -s 7 -l L -v 1 -m 1 \"" + "TestThree04" + "\""
os.system(QRname)
imgName = "qrcode.png"
im = Image.open(imgName)
convertname = "convert " + imgName + " -background white -gravity center -extent 264x176 "+ imgName
os.system(convertname)
im = Image.open(imgName)
#create DisplayController instance specifying display type as an argument
display = DisplayController(Display.EPD_TYPE_270)
#display it!
display.displayImg(im)
| unlicense | Python | |
dff76b6518b1de1be56def7469180d841a9e6121 | Create __init__.py | emeric254/gala-stri-website,emeric254/gala-stri-website,emeric254/gala-stri-website | Tools/__init__.py | Tools/__init__.py | # -*- coding: utf-8 -*-
| mit | Python | |
a152c7c48baa0f1c82e7d84bebbee674eb4f2761 | Add command to queue expired tiles | tilezen/tilequeue,mapzen/tilequeue | tilequeue/commands.py | tilequeue/commands.py | from tilequeue.queues import make_sqs_queue
from tilequeue.tile import explode_with_parents
from tilequeue.tile import parse_expired_coord_string
import argparse
import os
def add_aws_cred_options(arg_parser):
arg_parser.add_argument('--aws_access_key_id')
arg_parser.add_argument('--aws_secret_access_key')
return arg_parser
def enqueue_arg_parser():
parser = argparse.ArgumentParser()
parser = add_aws_cred_options(parser)
parser.add_argument('--queue',
required=True,
help='Name of aws sqs queue, should already exist.',
)
parser.add_argument('--expired-tiles-file',
required=True,
help='Path to file containing list of expired tiles. Should be one per line, <zoom>/<column>/<row>',
)
return parser
def assert_aws_config(args):
if (args.aws_access_key_id is not None or
args.aws_secret_access_key is not None):
# assert that if either is specified, both are specified
assert (args.aws_access_key_id is not None and
args.aws_secret_access_key is not None), 'Must specify both aws key and secret'
else:
assert 'AWS_ACCESS_KEY_ID' in os.environ, 'Missing AWS_ACCESS_KEY_ID config'
assert 'AWS_SECRET_ACCESS_KEY' in os.environ, 'Missing AWS_SECRET_ACCESS_KEY config'
def enqueue_process_main():
parser = enqueue_arg_parser()
args = parser.parse_args()
assert_aws_config(args)
queue = make_sqs_queue(
args.queue, args.aws_access_key_id, args.aws_secret_access_key)
expired_tiles = []
with open(args.expired_tiles_file) as f:
for line in f:
line = line.strip()
if not line:
continue
coord = parse_expired_coord_string(line)
if coord is None:
print 'Could not parse coordinate from line: ' % line
continue
expired_tiles.append(coord)
print 'Number of expired tiles: %d' % len(expired_tiles)
exploded_coords = explode_with_parents(expired_tiles)
print 'Number of total expired tiles with all parents: %d' % len(exploded_coords)
print 'Queuing ... '
# sort in any way?
# zoom level strategy?
# only enqueue work for zooms > 10 if in metro extract area?
# exploded_coords is a set, but enqueue_batch expects a list for slicing
exploded_coords = list(exploded_coords)
queue.enqueue_batch(list(exploded_coords))
print 'Queuing ... Done'
if __name__ == '__main__':
enqueue_process_main()
| mit | Python | |
707781ac58318af002cc1e75d8c31839d4e66e77 | add module to support search result export | cvast/arches,cvast/arches,cvast/arches,archesproject/arches,archesproject/arches,cvast/arches,archesproject/arches,archesproject/arches | arches/app/utils/geos_to_pyshp.py | arches/app/utils/geos_to_pyshp.py | from django.contrib.gis.geos import MultiPoint
from django.contrib.gis.geos import MultiLineString
from django.contrib.gis.geos import MultiPolygon
def convert_geom(geos_geom):
if geos_geom.geom_type == 'Point':
multi_geom = MultiPoint(geos_geom)
shp_geom = [[c for c in multi_geom.coords]]
if geos_geom.geom_type == 'LineString':
multi_geom = MultiLineString(geos_geom)
shp_geom = [c for c in multi_geom.coords]
if geos_geom.geom_type == 'Polygon':
multi_geom = MultiPolygon(geos_geom)
shp_geom = [c[0] for c in multi_geom.coords]
if geos_geom.geom_type == 'MultiPoint':
shp_geom = [[c for c in geos_geom.coords]]
if geos_geom.geom_type == 'MultiLineString':
shp_geom = [c for c in geos_geom.coords]
if geos_geom.geom_type == 'MultiPolygon':
shp_geom = [c[0] for c in geos_geom.coords]
return shp_geom | agpl-3.0 | Python | |
3e0903ba2f74d5f73241d1ffc5056f2a77c709e0 | Add a simple test for SetupPrometheusEndpointOnPortRange | obytes/django-prometheus,korfuri/django-prometheus,obytes/django-prometheus,korfuri/django-prometheus | tests/test_exports.py | tests/test_exports.py | #!/usr/bin/env python
from django_prometheus.exports import SetupPrometheusEndpointOnPortRange
import unittest
class ExportTest(unittest.TestCase):
def testPortRange(self):
port_range = [8000, 8001]
SetupPrometheusEndpointOnPortRange(port_range)
SetupPrometheusEndpointOnPortRange(port_range)
if __name__ == 'main':
unittest.main()
| apache-2.0 | Python | |
a4ce015943da37335114aa8b384f2ee7371f6446 | Test in app-factory. | kennethreitz/flask-sslify | tests/test_factory.py | tests/test_factory.py | from flask import Flask
from flask_sslify import SSLify
from pytest import fixture
class AppFactoryContext(object):
def __init__(self):
self.sslify = SSLify()
self.app = None
self.appctx = None
def __enter__(self):
self.app = self.create_app()
self.appctx = self.app.app_context()
self.appctx.push()
return self.appctx
def __exit__(self, exc_type, exc_value, exc_tb):
self.appctx.pop()
self.app = None
self.appctx = None
def create_app(self):
app = Flask(__name__)
app.config['DEBUG'] = False
app.config['TESTING'] = False
app.config['SERVER_NAME'] = 'example.com'
app.config['SSLIFY_PERMANENT'] = True
self.sslify.init_app(app)
app.add_url_rule('/', 'home', self.view_home)
return app
def view_home(self):
return 'home'
@fixture
def app_factory():
context = AppFactoryContext()
with context:
yield context
def test_config(app_factory):
assert app_factory.sslify.hsts_include_subdomains is False
assert app_factory.sslify.permanent is True
assert app_factory.sslify.skip_list is None
def test_redirection(app_factory):
client = app_factory.app.test_client()
r = client.get('/')
assert r.status_code == 301
assert r.headers['Location'] == 'https://example.com/'
def test_hsts_header(app_factory):
client = app_factory.app.test_client()
r = client.get('/', base_url='https://example.com')
assert r.status_code == 200
assert r.data == 'home'
assert r.headers['Strict-Transport-Security'] == 'max-age=31536000'
| bsd-2-clause | Python | |
a866b7e2de7e76e8bfb3b1feb22d7692afa5111d | Add test exposing stale promise job store cache (connected to #817) | BD2KGenomics/slugflow,BD2KGenomics/slugflow | src/toil/test/src/promisesTest.py | src/toil/test/src/promisesTest.py | # Copyright (C) 2015 UCSC Computational Genomics Lab
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from toil.job import Job
from toil.test import ToilTest
class CachedUnpicklingJobStoreTest(ToilTest):
"""
https://github.com/BD2KGenomics/toil/issues/817
"""
def test(self):
"""
Runs two identical Toil workflows with different job store paths
"""
for _ in range(2):
options = Job.Runner.getDefaultOptions(self._getTestJobStorePath())
options.logLevel = "INFO"
root = Job.wrapJobFn(parent)
value = Job.Runner.startToil(root, options)
def parent(job):
return job.addChildFn(child).rv()
def child():
return 1
| apache-2.0 | Python | |
e99807f81dea6bac82f373a210af0c4f26b61334 | test - Test for exception on syntax error | DinoTools/python-overpy,DinoTools/python-overpy | tests/test_request.py | tests/test_request.py | import pytest
import overpy
class TestQuery(object):
def test_syntax_error(self):
with pytest.raises(overpy.exception.OverpassBadRequest):
api = overpy.Overpass()
# Missing ; after way(1)
api.query((
"way(1)"
"out body;"
)) | mit | Python | |
69c5015a1a9dc3233530d691d20befa529f7c880 | Create lookupAndStoreTweets.py utility. | MichaelCurrin/twitterverse,MichaelCurrin/twitterverse | app/utils/insert/lookupAndStoreTweets.py | app/utils/insert/lookupAndStoreTweets.py | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Lookup and Store Tweets utility.
"""
import argparse
import os
import sys
# Allow imports to be done when executing this file directly.
appDir = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.path.pardir, os.path.pardir))
sys.path.insert(0, appDir)
from lib import tweets
from lib.twitter import auth
def main():
"""
Command-line interface to lookup tweet GUIDs and store them in the db.
"""
parser = argparse.ArgumentParser(description="Lookup and Store Tweets"
" utility.")
parser.add_argument('tweetGuids',
nargs='+',
help="List of one or more tweet GUIDS to lookup and"
" store in the db, separated by spaces. "
" Profiles are also stored so that tweets can be"
" linked to them.")
args = parser.parse_args()
APIConn = auth.getAppOnlyConnection()
tweets.lookupTweetGuids(APIConn, args.tweetGuids)
if __name__ == '__main__':
main()
| mit | Python | |
bb2644fc14dd92cf54c6a22da6fe3a66f89535e6 | Create VNSautoRotator.py | Robbie1977/NRRDtools,Robbie1977/NRRDtools | VNSautoRotator.py | VNSautoRotator.py | import numpy as np
import sys, os
import nrrd
if (len(sys.argv) < 2):
print 'Error: missing arguments!'
print 'e.g. python VNSautoRotator.py imageIn.nrrd [ImageOut.nrrd]'
print 'rotate RPI to LPS orientation for CMTK (as it doesn't like RPI)'
else:
print 'Processing %s...'% (str(sys.argv[1]))
data1, header1 = nrrd.read(str(sys.argv[1]))
print header1
header1['space'] = 'left-posterior-superior'
header1.pop("space dimension", None)
print header1
if (np.sum(np.sum(data1,(0,1))[-100:]) < np.sum(np.sum(data1,(0,1))[:100])):
print 'RPI orientation detected so rotating to LPS...'
data2 = np.flip(data1, (0, 2))
data1 = data2
print 'saving...'
if (len(sys.argv) == 3):
nrrd.write(str(sys.argv[2]), data1, header1)
print 'saved to ' + str(sys.argv[2])
else:
nrrd.write(str(sys.argv[1]).replace('.nrrd','_LPS.nrrd'), data1, header1)
print 'saved to ' + str(sys.argv[1]).replace('.nrrd','_LPS.nrrd')
| mit | Python | |
16b73476daedaf1b111e900c0db947dcdab1c9a6 | Add zits crawler | datagutten/comics,jodal/comics,klette/comics,klette/comics,klette/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,jodal/comics,datagutten/comics | comics/crawler/crawlers/zits.py | comics/crawler/crawlers/zits.py | from comics.crawler.utils.lxmlparser import LxmlParser
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Zits'
language = 'en'
url = 'http://www.arcamax.com/zits'
start_date = '1997-07-01'
history_capable_days = 14
schedule = 'Mo,Tu,We,Tu,Fr,Sa,Su'
time_zone = -5
rights = 'Jerry Scott and Jim Borgman'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.parse_feed('http://www.arcamax.com/zits/channelfeed')
for entry in self.feed.entries:
if entry.title.endswith(self.pub_date.strftime('%-1m/%-1d/%Y')):
self.web_url = entry.link
break
if not self.web_url:
return
page = LxmlParser(self.web_url)
self.url = page.src('p.m0 img')
| agpl-3.0 | Python | |
9b0d6239bf73dce4cc981f13dd16d3c5f46b40b3 | Create dominick.py | mduckles/CodeClub | dominick.py | dominick.py | mit | Python | ||
5ffdaf778157d112c26b96020408f80ec3820e02 | Create __init__.py | thegreathippo/crispy | crispy/actions/__init__.py | crispy/actions/__init__.py | from crispy.actions.core import *
| mit | Python | |
fb9915a481e3161325eb5200db2232e6e34b2acc | Add support for Jawbone | foauth/foauth.org,foauth/foauth.org,foauth/foauth.org | services/jawbone.py | services/jawbone.py | from oauthlib.common import add_params_to_uri
import foauth.providers
class Jawbone(foauth.providers.OAuth2):
# General info about the provider
provider_url = 'https://jawbone.com/'
docs_url = 'https://jawbone.com/up/developer/endpoints'
category = 'Fitness'
# URLs to interact with the API
authorize_url = 'https://jawbone.com/auth/oauth2/auth'
access_token_url = 'https://jawbone.com/auth/oauth2/token'
api_domain = 'jawbone.com'
available_permissions = [
(None, 'Read your name and profile picture'),
('extended_read', 'Read your age, gender, weight, and height'),
('location_read', "Read the places you've visited"),
('friends_read', 'Read your list of friends'),
('mood_read', 'Read your mood'),
('mood_write', 'Write to your mood'),
('move_read', 'Read your moves and workouts'),
('move_write', 'Write to your movies and create a workout'),
('sleep_read', 'Read your sleep data'),
('sleep_write', 'Write to your sleep data'),
('meal_read', 'Read your meals'),
('meal_write', 'Write to your meals'),
('weight_read', 'Read your body metrics'),
('weight_write', 'Write to your body metrics'),
('cardiac_read', 'Read your heart data'),
('cardiac_write', 'Write your heart data'),
('generic_event_read', 'Read all other types of events'),
('generic_event_write', 'Write to all other types of events'),
]
def get_authorize_params(self, redirect_uri, scopes):
# Always request at least user information
scopes.append('basic_read')
return super(Jawbone, self).get_authorize_params(redirect_uri, scopes)
def get_user_id(self, key):
r = self.api(key, self.api_domain, u'/nudge/api/v.1.1/users/@me')
return r.json()[u'data'][u'xid']
| bsd-3-clause | Python | |
4f50891c1a7d918010dbcecd640bb4b83f7bd2a3 | ADD taobao login | yueyoum/social-oauth,bopo/social-oauth | socialoauth/sites/taobao.py | socialoauth/sites/taobao.py | # -*- coding: utf-8 -*-
from socialoauth.sites.base import OAuth2
class TaoBao(OAuth2):
AUTHORIZE_URL = 'https://oauth.taobao.com/authorize'
ACCESS_TOKEN_URL = 'https://oauth.taobao.com/token'
TAOBAO_API_URL = 'https://eco.taobao.com/router/rest'
def build_api_url(self, url):
return self.TAOBAO_API_URL
def build_api_data(self, **kwargs):
data = {
'access_token': self.access_token,
'v': 2.0,
'format':'json'
}
data.update(kwargs)
return data
def parse_token_response(self, res):
self.uid = res['taobao_user_id']
self.access_token = res['access_token']
self.expires_in = res['expires_in']
self.refresh_token = res['refresh_token']
res = self.api_call_get(method='taobao.user.buyer.get',
fields='nick,avatar')
user = res['user_buyer_get_response']['user']
self.name = user['nick']
self.avatar = user['avatar']
self.avatar_large = "" | mit | Python | |
20eb83e4e8e0391c9efaca7f30a80220f9a14e9a | Add codelists management tools | markbrough/maedi-projects,markbrough/maedi-projects,markbrough/maedi-projects | maediprojects/query/codelists.py | maediprojects/query/codelists.py | from maediprojects import db, models
import datetime
def create_code(data):
codelistcode = models.CodelistCode()
for attr, val in data.items():
setattr(codelistcode, attr, val)
db.session.add(codelistcode)
db.session.commit()
return codelistcode
def update_attr(data):
codelistcode = models.CodelistCode.query.filter_by(
code = data['code'],
codelist_code = data["codelist_code"]
).first()
setattr(codelistcode, data['attr'], data['value'])
db.session.add(codelistcode)
db.session.commit()
return True
def delete_code(data):
codelistcode = models.CodelistCode.query.filter_by(
code = data['code'],
codelist_code = data["codelist_code"]
).first()
db.session.delete(codelistcode)
db.session.commit()
return True
| agpl-3.0 | Python | |
3b74b2c0c8f06cd7262cd9dd9093a82038a23d59 | Create saxparser.py | RDBinns/datactrl | saxparser.py | saxparser.py | #!/usr/bin/python
import sys
import xml.sax
import io
import MySQLdb
class MyHandler(xml.sax.ContentHandler):
def __init__(self):
xml.sax.ContentHandler.__init__(self)
self.db = MySQLdb.connect(host="localhost", user="root", passwd="trowel", db="registerdb2011")
self.cursor = self.db.cursor()
self.buffer = []
self.ctrlId = 0
self.purposeId = 0
def getCharacters(self):
data = ''.join(self.buffer).strip()
self.buffer = []
return data.strip()
def characters(self, name):
self.buffer.append(name)
def endElement(self, name):
data = self.getCharacters()
if name == "DATA_CTLR_NAME":
self.ctrlId = self.ctrlId +1
self.insertDatactrl(data)
elif name == "OTHER_NAME":
self.insertOthername(data)
elif name == "PURPOSE" and data != "":
self.purposeId = self.purposeId +1
self.insertPurpose(data)
elif name == "PURPOSE_TEXT":
self.insertPurposeOthername(data)
elif name == "CLASS":
self.insertPurposeClass(data)
elif name == "RECIPIENT":
self.insertPurposeRecipient(data)
elif name == "TRANSFER":
self.insertPurposeTransfer(data)
elif name == "SUBJECT":
self.insertPurposeSubject(data)
def insertDatactrl(self, data):
self.cursor.execute('insert into datactrl(datactrl_id, datactrl_name) values("%s", "%s")' % (self.ctrlId, data))
self.db.commit()
sys.stdout.write("inserted datactrl %s %s\n" % (self.ctrlId, data))
def insertOthername(self, data):
self.cursor.execute('insert into datactrl_othernames(datactrl_id, othername) values("%s", "%s")' % (self.ctrlId, data))
def insertPurpose(self, data):
self.cursor.execute('insert into purpose(purpose_id, datactrl_id, purpose_name) values("%s", "%s", "%s")' % (self.purposeId, self.ctrlId, data))
def insertPurposeClass(self, data):
self.cursor.execute('insert into purpose_classes(purpose_id, datactrl_id, class) values("%s", "%s", "%s")' % (self.purposeId, self.ctrlId, data))
def insertPurposeOthername(self, data):
self.cursor.execute('insert into purpose_othernames(purpose_id, datactrl_id, othername) values("%s", "%s", "%s")' % (self.purposeId, self.ctrlId, data))
def insertPurposeRecipient(self, data):
self.cursor.execute('insert into purpose_recipients(purpose_id, datactrl_id, recipient) values("%s", "%s", "%s")' % (self.purposeId, self.ctrlId, data))
def insertPurposeSubject(self, data):
self.cursor.execute('insert into purpose_subjects(purpose_id, datactrl_id, subject) values("%s", "%s", "%s")' % (self.purposeId, self.ctrlId, data))
def insertPurposeTransfer(self, data):
self.cursor.execute('insert into purpose_transfers(purpose_id, datactrl_id, transfer) values("%s", "%s", "%s")' % (self.purposeId, self.ctrlId, data))
handler = MyHandler()
stream = io.open("register_31072011.xml", "r")
xml.sax.parse(stream, handler)
| apache-2.0 | Python | |
1c810e9026f2d2c7ce3722d89a0cd7d333904e0f | add ipdb.py for easier debugging | dmr/Ldtools | examples/ipdb.py | examples/ipdb.py | """
This module provides a quick n dirty way to get a debug ipython shell.
2 ways to achieve that:
1. call set_trace() will immediately stop your program at that position
2. import ipdb will overwrite sys.excepthook with ipdb.info. This will
provide the ipython shell
"""
import sys
from IPython.core.debugger import Pdb
from IPython.core.shellapp import InteractiveShellApp
from IPython.core import ipapi
shell = InteractiveShellApp(argv=[''])
def_colors = ipapi.get().colors
def set_trace():
frame = sys._getframe().f_back
Pdb(def_colors).set_trace(frame)
# Post-Mortem interface, copied from pdb
def post_mortem(t=None):
# handling the default
if t is None:
# sys.exc_info() returns (type, value, traceback) if an exception is
# being handled, otherwise it returns None
t = sys.exc_info()[2]
if t is None:
raise ValueError("A valid traceback must be passed if no "
"exception is being handled")
# added def_colors here for ipython colors
p = Pdb(def_colors)
#p.reset()
p.interaction(None, t)
# code snippet from http://code.activestate.com/recipes/65287-automatically-start-the-debugger-on-an-exception/
def info(type, value, tb):
if hasattr(sys, 'ps1') or not sys.stderr.isatty():
# we are in interactive mode or we don't have a tty-like
# device, so we call the default hook
sys.__excepthook__(type, value, tb)
else:
import traceback
# we are NOT in interactive mode, print the exception...
traceback.print_exception(type, value, tb)
print
# ...then start the debugger in post-mortem mode.
# pdb.pm() does pdb.post_mortem
post_mortem(sys.last_traceback)
sys.excepthook = info | bsd-2-clause | Python | |
1e3011f728dc522ba82abf3526dfb50c9d874558 | Create invertJulesRT_new.py | braghiere/Thesis | chapter4/Minimising/invertJulesRT_new.py | chapter4/Minimising/invertJulesRT_new.py | #!/usr/bin/env python
import sys
import os
from copy import copy
import numpy as np
import matplotlib.pyplot as plt
import scipy.optimize as opt
from runJulesRTStruct import runJulesRTStruct
class julesRTData():
def __init__(self):
self.lai=float()
self.leafT=float()
self.leafR=float()
self.soilR=float()
self.sza=float()
self.obsUncert=float()
self.obsVal=float()
self.diffuse=False
self.unifrom=True
self.obsType="fapar"
def julesRT_wrapper( argList, data ):
"""Call the Jules 2S model using a list variable
Also implement any specific relationships between
variables here (e.g. leaf_r=leaf_t)
argList is stuff to be minimised (i.e. structural parameters)
controlVars are other arguments required to be passed
szaList are the solar angles at which to evaluate the model
"""
astruc=argList[0]
bstruc=argList[1]
julesReturn=runJulesRTStruct(
astruc=astruc,
bstruc=bstruc,
lai=data.lai,
leafT=data.leafT,
leafR=data.leafR,
soilR=data.soilR,
sza=data.sza,
diffuse=data.diffuse,
uniform=data.uniform)
if data.obsType=='fapar':
return julesReturn[0]
else:
return julesReturn[1]
def costFunction( params, controlData, Xprior=None, Xuncert=None ):
"""Var-type cost function for JULES
"""
n=len(controlData)
Ymodel=np.zeros(n)
Yobs=np.zeros(n)
R=np.zeros((n,n))
#compute the modelled albedo/fAPAR values
for (i,data) in enumerate(controlData):
Ymodel[i]=julesRT_wrapper( params, data )
Yobs[i]=data.obsVal
R[i,i]=1./(data.obsUncert**2)
#VAR term one (obs):
diff=Ymodel-Yobs
cost=0.5*np.dot(diff,np.dot(R,diff.T))
if Xprior != None:
#compute B matrix
B=np.diag(1./(np.array(Xuncert)**2))
#VAR term two:
diff=np.array(params)-np.array(Xprior)
cost+=0.5*np.dot(diff,np.dot(B,diff.T))
return cost
def fminJulesRT( initParams, controls, Xprior=None, Xuncert=None ):
'''Run the chosen minimisers over the data
'''
xOpt=opt.fmin( costFunction, initParams, args=(controls, Xprior, Xuncert ), disp=True, maxfun=10000)
return xOpt
def solveJulesStruct(controlData, initParams=np.array([0.0,0.0])):
''' An example function for running the minimiser
'''
ret=fminJulesRT( initParams, controlData, Xprior=None, Xuncert=None )
print ret
return ret
def addTwinObs(controlData,astruc=1.0,bstruc=0.0):
'''Add dummy (or "twin") observations into the control
data given a value for the structure parameters
'''
for (i,data) in enumerate(controlData):
controlData[i].obsVal=julesRT_wrapper( [astruc,bstruc], data )
#print controlData[i].obsVal
if __name__=="__main__":
controlData=[]
controlData.append(julesRTData())
controlData[-1].lai=2.0
controlData[-1].leafT=0.1
controlData[-1].leafR=0.15
controlData[-1].soilR=0.05
controlData[-1].sza=30.
controlData[-1].diffuse=False
controlData[-1].uniform=True
controlData[-1].obsType='fapar'
controlData[-1].obsVal=0.0
controlData[-1].obsUncert=1.0
controlData.append(julesRTData())
controlData[-1].lai=2.0
controlData[-1].leafT=0.1
controlData[-1].leafR=0.15
controlData[-1].soilR=0.05
controlData[-1].sza=10.
controlData[-1].diffuse=False
controlData[-1].uniform=True
controlData[-1].obsType='fapar'
controlData[-1].obsVal=0.0
controlData[-1].obsUncert=1.0
controlData.append(julesRTData())
controlData[-1].lai=2.0
controlData[-1].leafT=0.1
controlData[-1].leafR=0.15
controlData[-1].soilR=0.05
controlData[-1].sza=60.
controlData[-1].diffuse=False
controlData[-1].uniform=True
controlData[-1].obsType='fapar'
controlData[-1].obsVal=0.0
controlData[-1].obsUncert=1.0
addTwinObs(controlData,astruc=1.0,bstruc=0.0)
solveJulesStruct(controlData)
| apache-2.0 | Python | |
da4436ec5ec3c982e42e9f85749ac8c8cf8b8a94 | add codegen submodule | ellisonbg/altair,altair-viz/altair,jakevdp/altair | altair/codegen.py | altair/codegen.py | """
Object for generating Python code calls
"""
class CodeGen(object):
def __init__(self, name, args=None, kwargs=None, methods=None):
self.name = name
self.args = (args or [])
self.kwargs = (kwargs or {})
self.methods = (methods or [])
def to_str(self, tablevel=0, tabsize=4):
"""Return a string representation of the code"""
def get_str(obj, tablevel=tablevel, tabsize=tabsize):
if isinstance(obj, CodeGen):
return obj.to_str(tablevel=tablevel, tabsize=tabsize)
else:
return str(obj)
args = [get_str(arg) for arg in self.args]
kwargs = [((tablevel + tabsize) * ' '
+ '{0}={1}'.format(k, get_str(v, tablevel + tabsize)))
for k, v in sorted(self.kwargs.items())]
if kwargs:
kwargs = kwargs + [tablevel * ' ']
if not kwargs and not args:
call = '{0}()'.format(self.name)
elif not kwargs:
call = '{0}({1})'.format(self.name, ', '.join(args))
elif not args:
call = '{0}(\n{1})'.format(self.name, ',\n'.join(kwargs))
else:
call = '{0}({1}{2})'.format(self.name, ', '.join(args),
',\n'.join([''] + kwargs))
for method in self.methods:
call += '.{0}'.format(get_str(method))
return call
def __str__(self):
return self.to_str()
def rename(self, newname):
self.name = newname
return self
| bsd-3-clause | Python | |
9d20d1f87f509ce51fde5c51460ff0b17c051ca1 | Create pytest_setup.py | IlfirinPL/robotframework-MarcinKoperski,IlfirinPL/robotframework-MarcinKoperski,IlfirinPL/robotframework-MarcinKoperski,IlfirinPL/robotframework-MarcinKoperski | utils/pytest_setup.py | utils/pytest_setup.py | pip install -U pytest-xdist
pip install -U parameterized
pip install -U pytest-flake8
pip install -U pytest-html
| mit | Python | |
7039e4f25d8eecdf2d5d2b4a4a769e05c5075222 | Fix description of 'api_read_full_member' permission | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | bluebottle/members/migrations/0020_auto_20171031_1048.py | bluebottle/members/migrations/0020_auto_20171031_1048.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2017-10-31 09:48
from __future__ import unicode_literals
from django.db import migrations
def rename_full_member_permission(apps, schema_editor):
Permission = apps.get_model('auth', 'Permission')
perm = Permission.objects.get(codename='api_read_full_member')
perm.name = 'Can view full members through the API'
perm.save()
class Migration(migrations.Migration):
dependencies = [
('members', '0019_auto_20170824_1812'),
]
operations = [
migrations.RunPython(rename_full_member_permission)
]
| bsd-3-clause | Python | |
4b4e9bc8f9605519b12d4da25dc6822baa629d2e | Add test_core | mph-/lcapy | unit_tests/test_core.py | unit_tests/test_core.py | from lcapy import *
import unittest
import sympy as sym
s = sym.var('s')
class LcapyTester(unittest.TestCase):
"""Unit tests for lcapy
"""
def test_sExpr(self):
"""Lcapy: check sExpr
"""
a = sExpr('(s+2)/(s-2)')
self.assertEqual(a.N, sExpr('s+2'), "N incorrect.")
self.assertEqual(a.D, sExpr('s-2'), "D incorrect.")
| lgpl-2.1 | Python | |
30be74075e761f932a10ea0806a08991b8fd9cb4 | Add script to list nodes without an external ID | ScriptRock/content,ScriptRock/content,ScriptRock/content,ScriptRock/content,ScriptRock/content,ScriptRock/content | code/python/find-nodes-without-external-id.py | code/python/find-nodes-without-external-id.py | #!/usr/bin/env python
import httplib
import urllib
import json
import ssl
import argparse
import re
parser = argparse.ArgumentParser(description='Find any node that does not have an external ID set.')
parser.add_argument('--target-url', required=True, help='URL for the UpGuard instance. This should be the hostname only (appliance.upguard.org instead of https://appliance.upguard.org)')
parser.add_argument('--api-key', required=True, help='API key for the UpGuard instance')
parser.add_argument('--secret-key', required=True, help='Secret key for the UpGuard instance')
parser.add_argument('--insecure', action='store_true', help='Ignore SSL certificate check?')
parser.add_argument('--per-page', type=int, default=10, help='Number of nodes to retrieve in each call. (Default: 100)')
args = parser.parse_args()
# Initializations
browser = None
def getNodes(browser, method, endpoint, page=1, per_page=100):
"""
Return a JSON-parsed dictionary of nodes
"""
get_headers = {
"Authorization": "Token token=\"{}{}\"".format(args.api_key, args.secret_key),
"Accept": "application/json"}
browser.request("GET", "{}?page={}&per_page={}".format(endpoint, page, per_page), '', get_headers)
response = browser.getresponse()
if response.status >= 400:
raise httplib.HTTPException("{}: {}".format(str(response.status), str(response.reason)))
return json.loads(response.read())
try:
# Setup browser object
url = args.target_url
if 'http' in url:
# URL needs to be a hostname, so remove 'https://'
url = re.sub('https?:\/\/', '', url)
browser = httplib.HTTPConnection(url)
if args.insecure:
context = ssl._create_unverified_context()
browser = httplib.HTTPSConnection(url, context=context)
page = 1
nodes = getNodes(browser, "GET", "/api/v2/nodes.json", page=page, per_page=args.per_page)
print "Searching for nodes without an external ID..."
while nodes:
for node in nodes:
if not node['external_id']:
print "{} (hostname: {})".format(node['name'])
page += 1
nodes = getNodes(browser, "GET", "/api/v2/nodes.json", page=page, per_page=args.per_page)
except httplib.HTTPException as h:
print h.message;
finally:
if browser:
browser.close()
| mit | Python | |
f12af379ec31b8c14bf871768c558c81bad95301 | Add grains for the cloud metadata server | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/grains/metadata.py | salt/grains/metadata.py | # -*- coding: utf-8 -*-
'''
Grains from cloud metadata servers at 169.254.169.254
.. versionadded:: Nitrogen
:depends: requests
'''
from __future__ import absolute_import
# Import python libs
import os
import socket
# Import salt libs
import salt.utils.http as http
# metadata server information
IP = '169.254.169.254'
HOST = 'http://{0}/'.format(IP)
def __virtual__():
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(1)
result = sock.connect_ex((IP, 80))
if result != 0:
return False
if http.query(os.path.join(HOST, 'latest/'), status=True).get('status') != 200:
return False
return True
def _search(prefix="latest/"):
'''
Recursively look up all grains in the metadata server
'''
ret = {}
for line in http.query(os.path.join(HOST, prefix))['body'].split('\n'):
if line.endswith('/'):
ret[line[:-1]] = _search(prefix=os.path.join(prefix, line))
elif '=' in line:
key, value = line.split('=')
ret[value] = _search(prefix=os.path.join(prefix, key))
else:
ret[line] = http.query(os.path.join(HOST, prefix, line))['body']
return ret
def metadata():
    # Public grains entry point: return the whole metadata tree as a
    # nested dict, starting at the default "latest/" prefix.
    return _search()
| apache-2.0 | Python | |
101189c319c2d0fadc97fd1077a87c11ab159a12 | add a new test folder. | myinxd/sim21ps | Test/new_module.py | Test/new_module.py | #!/usr/bin/env python3
#
# Copyright (c) 2016 Zhixian MA <zxma_sjtu@qq.com>
# MIT license
"""
A test script to learn the grammar and code style of Python, and try to make interesting docstrings.
"""
import os
import sys
import argparse
import logging
import numpy as np
from astropy.io import fits
import fg21sim
from fg21sim.configs import configs
from fg21sim.utils import setup_logging
| mit | Python | |
6183347fc0f0309bf2c700f75b5b51f7cdbda1b4 | Create Detect_Faces.py | mavlyutovrus/person_detection | src/Detect_Faces.py | src/Detect_Faces.py | __author__ = 'Guggi'
import urllib2
import urllib
import unirest
import json
from poster.encode import multipart_encode
from poster.streaminghttp import register_openers
from PIL import Image, ImageDraw, ImageFont
# Install poster's streaming multipart handlers into urllib2.
register_openers()
api_key = "YOUR_API_KEY" #you need to exchange the YOUR_API_KEY with your own API key from animetrics
mashape_key = "YOUR_MASHAPE_KEY"  # Mashape proxy key; replace before running
def detect_faces(image_fname):
    # Upload the image to the Animetrics /detect endpoint (via the Mashape
    # proxy) and return the parsed JSON response describing detected faces.
    # NOTE(review): the opened image file handle is never closed explicitly.
    data, headers = multipart_encode({"image": open(image_fname, "rb"), "selector": "FACE"})
    headers["X-Mashape-Key"] = mashape_key
    request = urllib2.Request("https://animetrics.p.mashape.com/detect?api_key=" + api_key, data, headers)
    response = urllib2.urlopen(request).read()
    return json.loads(response)
# returned_json = detect_faces("13.JPG")
# print json.dumps(returned_json, indent=4, sort_keys=True)
def draw_text(img, text, top, left, text_height):
    """Paste *text* onto *img* at (left, top), scaled to *text_height* pixels."""
    from PIL import Image, ImageDraw, ImageFont
    font = ImageFont.load_default()
    base_draw = ImageDraw.Draw(img)
    # Render the label on its own black canvas first so it can be scaled.
    label = Image.new("RGB", base_draw.textsize(text, font=font), "black")
    label_draw = ImageDraw.Draw(label)
    label_draw.text((0, 0), text, font=font, fill=(0, 255, 0))
    del label_draw
    # Scale the label to the requested height, preserving the aspect ratio.
    scaled_width = int((float(text_height) / label.size[1]) * label.size[0])
    label = label.resize((scaled_width, text_height))
    img.paste(label, (left, top, left + label.size[0], top + label.size[1]))
    del base_draw
    return img
def show_faces(big_image, left, top, height, width, text):
    """Outline a face bounding box on *big_image* and caption it with *text*."""
    right = left + width
    bottom = top + height
    edges = [
        (left, top, left, bottom),      # left edge
        (left, top, right, top),        # top edge
        (right, top, right, bottom),    # right edge
        (left, bottom, right, bottom),  # bottom edge
    ]
    draw = ImageDraw.Draw(big_image)
    for segment in edges:
        draw.line(segment, fill='green', width=5)
    del draw
    if text:
        # Caption goes just below the box, rendered 50 px tall.
        big_image = draw_text(big_image, text, bottom, left, 50)
    return big_image
def get_timestamp():
    """Return the current local time formatted as 'YYYY-MM-DD-HH-MM-SS'."""
    import datetime
    # strftime already returns a string, so the old str() round-trip was
    # redundant, and datetime.now() replaces the time.time() ->
    # fromtimestamp() detour with a single call.
    return datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
def make_filename(person_name):
    """Build a timestamped JPEG file name for *person_name*."""
    pieces = [get_timestamp(), "_", person_name, ".jpg"]
    return "".join(pieces)
def recognize_and_save_person(image_fname):
    # Detect all faces in the image, ask the Animetrics /recognize endpoint
    # to match each face against the "Designers" gallery, draw the best
    # candidate on the image, and save the result under recognized/.
    faces_data = detect_faces(image_fname)["images"][0]
    if not faces_data["faces"]:
        print "NO FACES DETECTED! Finishing..."
        return
    im = Image.open(image_fname)
    for face in faces_data["faces"]:
        # Build the /recognize query URI for this face's bounding box; the
        # whitespace introduced by the triple-quoted layout is stripped below.
        query_uri = """https://animetrics.p.mashape.com/recognize?
        api_key=%s&
        gallery_id=%s&
        image_id=%s&
        height=%d&
        width=%d&
        topLeftX=%d&
        topLeftY=%d
        """ % (api_key, "Designers", faces_data["image_id"],
               face["height"], face["width"], face["topLeftX"], face["topLeftY"])
        query_uri = query_uri.replace("\t", "").replace(" ", "").replace("\n", "")
        response = unirest.get(query_uri,
                               headers={
                                   "X-Mashape-Key": mashape_key,
                                   "Accept": "application/json"
                               }
                               )
        # Pick the gallery candidate with the highest probability.
        candidates_probs = response.body["images"][0]["candidates"]
        max_prob, selected_candidate = max([(prob, cand_name) for cand_name, prob in candidates_probs.items()])
        im = show_faces(im, face["topLeftX"], face["topLeftY"], face["height"], face["width"], selected_candidate + ":" + str(max_prob)[:4])
        # NOTE(review): the sleep presumably throttles API calls between
        # faces - confirm against the service's rate limits.
        import time
        time.sleep(3)
    im.save("recognized/" + image_fname.split("/")[-1])
import os
folder = "GoPro/"
for fname in os.listdir(folder):
if fname.endswith(".jpg") or fname.endswith(".JPG") or fname.endswith(".JPEG"):
print fname
recognize_and_save_person(folder + fname)
| apache-2.0 | Python | |
0f66e2cdcf653ea772a726ef2a5be0d12eeb1372 | add converter for Nifti volumes (anything Nibabel can read) | HumanBrainProject/neuroglancer-scripts | volume_to_raw_chunks.py | volume_to_raw_chunks.py | #! /usr/bin/env python3
#
# Copyright (c) 2016, 2017, Forschungszentrum Juelich GmbH
# Author: Yann Leprince <y.leprince@fz-juelich.de>
#
# This software is made available under the MIT licence, see LICENCE.txt.
import gzip
import json
import os
import os.path
import sys
import numpy as np
import nibabel
import nibabel.orientations
RAW_CHUNK_PATTERN = "{key}/{0}-{1}/{2}-{3}/{4}-{5}"
def volume_to_raw_chunks(info, volume):
    """Slice *volume* into gzip-compressed Neuroglancer raw chunks.

    info: parsed "info" JSON structure; only the first scale is used.
    volume: array indexed (X, Y, Z[, channel]) whose shape matches
        scales[0]["size"] and num_channels.
    Chunks are written under scales[0]["key"] following RAW_CHUNK_PATTERN.
    """
    assert len(info["scales"][0]["chunk_sizes"]) == 1  # more not implemented
    chunk_size = info["scales"][0]["chunk_sizes"][0]  # in order x, y, z
    size = info["scales"][0]["size"]  # in order x, y, z
    # Output is forced little-endian regardless of host byte order.
    dtype = np.dtype(info["data_type"]).newbyteorder("<")
    num_channels = info["num_channels"]
    # Normalize the input to 4-D (X, Y, Z, channel).
    if volume.ndim < 4:
        volume = np.atleast_3d(volume)[:, :, :, np.newaxis]
    elif volume.ndim > 4:
        raise ValueError("Volumes with more than 4 dimensions not supported")
    # Volume given by nibabel are using Fortran indexing (X, Y, Z, T)
    assert volume.shape[:3] == tuple(size)
    assert volume.shape[3] == num_channels
    # Iterate over all chunk indices; the min() caps each slice so edge
    # chunks may be smaller than chunk_size.
    for x_chunk_idx in range((size[0] - 1) // chunk_size[0] + 1):
        x_slicing = np.s_[chunk_size[0] * x_chunk_idx:
                          min(chunk_size[0] * (x_chunk_idx + 1), size[0])]
        for y_chunk_idx in range((size[1] - 1) // chunk_size[1] + 1):
            y_slicing = np.s_[chunk_size[1] * y_chunk_idx:
                              min(chunk_size[1] * (y_chunk_idx + 1), size[1])]
            for z_chunk_idx in range((size[2] - 1) // chunk_size[2] + 1):
                z_slicing = np.s_[chunk_size[2] * z_chunk_idx:
                                  min(chunk_size[2] * (z_chunk_idx + 1), size[2])]
                # Transpose to (channel, Z, Y, X) memory order for the
                # raw chunk encoding.
                chunk = np.moveaxis(volume[x_slicing, y_slicing, z_slicing, :],
                                    (0, 1, 2, 3), (3, 2, 1, 0))
                assert chunk.size == ((x_slicing.stop - x_slicing.start) *
                                      (y_slicing.stop - y_slicing.start) *
                                      (z_slicing.stop - z_slicing.start) *
                                      num_channels)
                chunk_name = RAW_CHUNK_PATTERN.format(
                    x_slicing.start, x_slicing.stop,
                    y_slicing.start, y_slicing.stop,
                    z_slicing.start, z_slicing.stop,
                    key=info["scales"][0]["key"])
                os.makedirs(os.path.dirname(chunk_name), exist_ok=True)
                with gzip.open(chunk_name + ".gz", "wb") as f:
                    f.write(chunk.astype(dtype).tobytes())
def volume_file_to_raw_chunks(volume_filename):
    """Convert from neuro-imaging formats to pre-computed raw chunks"""
    # The "info" JSON describing the output layout must already exist in
    # the current working directory.
    with open("info") as f:
        info = json.load(f)
    img = nibabel.load(volume_filename)
    affine = img.affine
    # Determine the reorientation needed to bring the voxel grid to RAS+.
    ornt = nibabel.orientations.io_orientation(affine)
    print("Detected input axis orientations {0}+"
          .format("".join(nibabel.orientations.ornt2axcodes(ornt))))
    # NOTE(review): new_affine is computed but never used - presumably it
    # was meant to be recorded alongside the chunks; confirm.
    new_affine = affine * nibabel.orientations.inv_ornt_aff(ornt, img.shape)
    sys.stderr.write("Now loading volume... ")
    sys.stderr.flush()
    # Apply the reorientation so chunks come out in RAS+ axis order.
    volume = nibabel.orientations.apply_orientation(img.get_data(), ornt)
    sys.stderr.write("done.\n")
    print("Loaded volume has data type {0}, chunks will be saved with {1}"
          .format(volume.dtype.name, info["data_type"]))
    volume_to_raw_chunks(info, volume)
def parse_command_line(argv):
    """Build the argument parser and parse *argv* (argv[0] is skipped)."""
    import argparse
    description = """\
Convert from neuro-imaging formats to Neuroglancer pre-computed raw chunks

The affine transformation on the input volume (as read by Nibabel) is to point
to a RAS+ oriented space. Chunks are saved in RAS+ order (X from left to Right,
Y from posterior to Anterior, Z from inferior to Superior).
"""
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument("volume_filename")
    return parser.parse_args(argv[1:])
def main(argv):
    """The script's entry point."""
    args = parse_command_line(argv)
    # Propagate the converter's result if truthy, otherwise exit status 0.
    return volume_file_to_raw_chunks(args.volume_filename) or 0
if __name__ == "__main__":
    sys.exit(main(sys.argv))
| mit | Python | |
3d624b5693a753ee8ecdd6f979eaa3d17736dca7 | Create Syllabifier.py | LBenzahia/cltk,LBenzahia/cltk,TylerKirby/cltk,D-K-E/cltk,TylerKirby/cltk,cltk/cltk,kylepjohnson/cltk,diyclassics/cltk | cltk/corpus/middle_english/Syllabifier.py | cltk/corpus/middle_english/Syllabifier.py | """
Sonority hierarchy for Middle English
"""
# Characters grouped by sonority rank; rank 1 is assigned to the vowels
# and the rank grows through nasals, stops, fricatives and approximants
# up to 6 for the lateral.
_SONORITY_CLASSES = (
    (1, 'aæeiouy'),
    (2, 'mn'),
    (3, 'pbdgtkð'),
    (4, 'cfshvxþ'),
    (5, 'rƿ'),
    (6, 'l'),
)
Syllabifier = {letter: rank
               for rank, letters in _SONORITY_CLASSES
               for letter in letters}
| mit | Python | |
bace8f65e696211db5a6ffa2cefc70d2e061b950 | Add Support for /r/greentext | Fillll/reddit2telegram,nsiregar/reddit2telegram,nsiregar/reddit2telegram,Fillll/reddit2telegram | greentext/app.py | greentext/app.py | #encoding:utf-8
from utils import get_url, weighted_random_subreddit
from utils import SupplyResult
# Subreddit used as the content source; the mapping is
# 'subreddit_name': sampling probability weight.
subreddit = weighted_random_subreddit({
    'greentext': 1.0,
    # To pull content from several subreddits, add more
    # 'subreddit': probability entries here, e.g.
    # 'any_other_subreddit': 0.02
})
# Telegram channel with @reddit2telegram_bot as an admin
t_channel = '@r_greentext'
def send_post(submission, r2t):
    """Forward *submission* to the Telegram channel via *r2t*.

    Returns SupplyResult.SUCCESSFULLY when content was sent,
    SupplyResult.DO_NOT_WANT_THIS_SUBMISSION to move on to the next
    submission, or the result of r2t.send_gif_img for media posts.
    """
    media_kind, media_url, media_ext = get_url(submission)
    title = submission.title
    link = submission.shortlink
    text = '{}\n{}'.format(title, link)
    if media_kind == 'album':
        # Albums: announce with the album URL first, then send every item.
        base_url = submission.url
        text = '{}\n{}\n\n{}'.format(title, base_url, link)
        r2t.send_text(text)
        r2t.send_album(media_url)
        return SupplyResult.SUCCESSFULLY
    if media_kind in ('gif', 'img'):
        # Skip media that has already appeared in the channel.
        if r2t.dup_check_and_mark(media_url) is True:
            return SupplyResult.DO_NOT_WANT_THIS_SUBMISSION
        return r2t.send_gif_img(media_kind, media_url, media_ext, text)
    # Plain text submissions and any other content are rejected outright.
    return SupplyResult.DO_NOT_WANT_THIS_SUBMISSION
| mit | Python | |
b1af12dfc111c6550c166d00fdabf7fa707bfc1b | Create main.py | erocs/2017Challenges,erocs/2017Challenges,m181190/2017Challenges,m181190/2017Challenges,erocs/2017Challenges,mindm/2017Challenges,m181190/2017Challenges,DakRomo/2017Challenges,Tursup/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,popcornanachronism/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,erocs/2017Challenges,DakRomo/2017Challenges,erocs/2017Challenges,mindm/2017Challenges,DakRomo/2017Challenges,Tursup/2017Challenges,erocs/2017Challenges,Tursup/2017Challenges,m181190/2017Challenges,DakRomo/2017Challenges,popcornanachronism/2017Challenges,Tursup/2017Challenges,popcornanachronism/2017Challenges,erocs/2017Challenges,erocs/2017Challenges,m181190/2017Challenges,DakRomo/2017Challenges,popcornanachronism/2017Challenges,erocs/2017Challenges,DakRomo/2017Challenges,popcornanachronism/2017Challenges,DakRomo/2017Challenges,DakRomo/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,popcornanachronism/2017Challenges,m181190/2017Challenges,DakRomo/2017Challenges,popcornanachronism/2017Challenges,paper-squares/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,DakRomo/2017Challenges,DakRomo/2017Challenges,popcornanachronism/2017Challenges,DakRomo/2017Challenges,m181190/2017Challenges,Tursup/2017Challenges,popcornanachronism/2017Challenges,m181190/2017Challenges,erocs/2017Challenges,erocs/2017Challenges,mindm/2017Challenges,erocs/2017Challenges,erocs/2017Challenges,DakRomo/2017Challenges,Tursup/2017Challenges,erocs/2017Challenges,popcornanachronism/2017Challenges,paper-squares/2017Challenges,m181190/2017Challenges,popcornanachronism/2017Challenges,m181190/2017Challenges,Tursup/2017Challenges,paper-squares/2017Challenges,DakRomo/2017Challenges,mindm/2017Challenges,popcornanachronism/2017Challenges,popcornanachronism/2017Challenges,m181190/2017Challenges,mindm/2017Challenges,popcornanachronism/2017Challenges,mindm/2017Challenges | challenge_0/python/wost/main.py | 
challenge_0/python/wost/main.py | '''
Written in Python 3.6
'''
def main(text):
    # Print a hello-world greeting followed by the caller-supplied text.
    # The f-string requires Python 3.6+ (noted in the module docstring).
    print(f"Hello world, additional text: {text}")
if __name__ == "__main__":
    main(input("What would you like to say?"))
| mit | Python | |
14aba0695514866439164f48fe1f66390719431f | Add selcet_gamma.py (authored by Amnon) | EmbrietteH/American-Gut,wasade/American-Gut,JWDebelius/American-Gut,mortonjt/American-Gut,wasade/American-Gut,biocore/American-Gut,EmbrietteH/American-Gut,biocore/American-Gut,JWDebelius/American-Gut | scripts/select_gamma.py | scripts/select_gamma.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
Created on Fri Oct 18 10:13:48 2013
@author: amnon
### 80 char max please
Look at all the gammaproteobacteria and select candidate contamination sequence
OTUs
output: a list of sorted gammaproteobacteria (or other) otuids, according to
mean frequency
"""
import sys
import argparse
import numpy as np
# to load a BIOM table
from biom.parse import parse_biom_table
from biom.util import biom_open
def TestAll(biomfile, outputfile, taxonomyclass, taxonomyname, level):
    """Write candidate contamination OTUs of a taxonomic group to a file.

    Normalizes the BIOM table per sample, collects the mean relative
    frequency of every OTU whose taxonomy entry at position
    *taxonomyclass* equals *taxonomyname*, and writes tab-separated
    (otu_id, mean frequency, cumulative frequency) lines from the most
    to the least frequent OTU while the cumulative frequency is still
    >= *level*.
    """
    t = parse_biom_table(biom_open(biomfile, 'U'))
    t2 = t.normObservationBySample()
    class_idx = taxonomyclass
    # Collect (otu_id, mean frequency) for observations in the target taxon.
    odat = []
    for values, ids, metadata in t2.iterObservations():
        tname = metadata['taxonomy'][class_idx].lstrip()
        if tname == taxonomyname:
            odat.append((ids, np.mean(values)))
    if not odat:
        # No OTUs matched: write an empty output file instead of crashing
        # on the cumulative-sum seed below.
        open(outputfile, 'w').close()
        return
    # Sort ascending by mean frequency, then accumulate a running sum so
    # each entry carries (id, frequency, cumulative frequency counted
    # from the rarest OTU upward).
    odat.sort(key=lambda tup: tup[1])
    csum = [(odat[0][0], odat[0][1], odat[0][1])]
    for cval in odat[1:]:
        csum.append((cval[0], cval[1], csum[-1][2] + cval[1]))
    # Emit from most to least frequent, keeping entries whose cumulative
    # frequency is still at or above the threshold.  The 'with' block
    # guarantees the file is closed even if a write fails.
    csum.reverse()
    with open(outputfile, 'w') as snames:
        for cval in csum:
            if cval[2] >= level:
                snames.write(cval[0] + "\t" + str(cval[1]) + "\t" + str(cval[2]) + '\n')
def main(argv):
    """Parse command-line options from *argv* and run the OTU selection."""
    arg_parser = argparse.ArgumentParser(description='Select Gammaproteobacteria (or other group) contamination candidates')
    arg_parser.add_argument('-i', '--biom', help='biom file of the experiment')
    arg_parser.add_argument('-o', '--output', help='output file name')
    arg_parser.add_argument('-c', '--classpos', help='class of taxonomy name (0-kingdom,1-phylum etc.', default=2)
    arg_parser.add_argument('-t', '--taxonomy', help='taxonomy name (including c__ or equivalent)', default='c__Gammaproteobacteria')
    arg_parser.add_argument('-l', '--level', help='minimal cumulative level for OTUs to filter (use 0 to get all of them)', default='0.03')
    opts = arg_parser.parse_args(argv)
    # Coerce the string-typed options before delegating to the worker.
    TestAll(opts.biom, opts.output, int(opts.classpos), opts.taxonomy, float(opts.level))
if __name__ == "__main__":
    main(sys.argv[1:])
| bsd-3-clause | Python | |
83fe6892c5b061f5fbba64c9f870f30c80b1a12a | create word bigram matrix | juditacs/dsl,juditacs/dsl | dsl/features/word_level.py | dsl/features/word_level.py | import logging
from os import path
from argparse import ArgumentParser
from featurize import Tokenizer, BigramModel
def parse_args(argv=None):
    """Parse command-line options.

    *argv* defaults to None, in which case argparse reads sys.argv[1:]
    exactly as before; passing an explicit list makes the function
    usable (and testable) without touching the process arguments.
    """
    p = ArgumentParser()
    p.add_argument('--train', type=str)
    p.add_argument('--test', type=str)
    p.add_argument('--raw-matrix-dir', type=str)
    p.add_argument('--workdir', type=str)
    p.add_argument('--topn', type=int, default=100)
    p.add_argument('--threshold', type=int, default=2)
    return p.parse_args(argv)
def get_paths(base, workdir):
    """Map the pipeline's well-known file names to locations under
    *base* (raw matrices and labels) or *workdir* (derived outputs)."""
    locations = (
        (base, (
            'train_raw.mtx', 'test_raw.mtx',
            'train.labels', 'train.labels.int',
            'test.labels', 'test.labels.int',
            'frequent_features', 'labeldict', 'featdict',
        )),
        (workdir, (
            'top_corr_features',
            'train_top_corr.mtx', 'test_top_corr.mtx',
            'train_dense.mtx', 'test_dense.mtx',
        )),
    )
    paths = {}
    for root, names in locations:
        for name in names:
            paths[name] = path.join(root, name)
    return paths
def main():
    # Configure verbose logging, then run the word-bigram pipeline:
    # build/load the train matrix, keep the most label-correlated
    # features, and project both train and test data onto them.
    FORMAT = '%(asctime)s %(levelname)s %(message)s'
    logging.basicConfig(format=FORMAT)
    logging.getLogger().setLevel(logging.DEBUG)
    args = parse_args()
    paths = get_paths(args.raw_matrix_dir, args.workdir)
    t = Tokenizer(ws_norm=True, filter_punct=True)
    b_train = BigramModel(t, padding=True)
    b_train.paths = paths
    # Train side: build, select top features by Pearson correlation, save.
    b_train.load_or_build_train(args.train, args.threshold)
    b_train.choose_top_pearson(args.topn)
    b_train.save_top_corr_features()
    b_train.to_filtered_matrix()
    b_train.save_matrix(paths['train_top_corr.mtx'])
    b_train.save_as_dense_matrix(paths['train_dense.mtx'])
    # Test side: reuse the selected features on the held-out data.
    b_train.load_or_build_test(args.test)
    b_train.to_filtered_matrix()
    b_train.save_matrix(paths['test_top_corr.mtx'])
    b_train.save_as_dense_matrix(paths['test_dense.mtx'])
if __name__ == '__main__':
    main()
| mit | Python | |
fec8de91954230b44b717f4b3d5a3a774c108fdf | Create monitor.py | mosscylium/forestfloor,mosscylium/forestfloor | assets/monitor.py | assets/monitor.py | #!/usr/bin/env python
import sqlite3
import os
import time
import glob
# global variables
# speriod: sampling period in seconds (referenced by the commented-out
# polling loop in main); dbname: SQLite file holding the "temps" table.
speriod=(15*60)-1
dbname='/var/www/templog.db'
# store the temperature in the database
def log_temperature(temp):
    """Insert *temp* with the current timestamp into the temps table."""
    conn = sqlite3.connect(dbname)
    try:
        curs = conn.cursor()
        curs.execute("INSERT INTO temps values(datetime('now'), (?))", (temp,))
        # commit the changes
        conn.commit()
    finally:
        # Close the connection even if the INSERT or commit fails.
        conn.close()
# display the contents of the database
def display_data():
    """Print every (timestamp, temperature) row stored in the database."""
    conn = sqlite3.connect(dbname)
    try:
        curs = conn.cursor()
        for row in curs.execute("SELECT * FROM temps"):
            # print(...) is valid in both Python 2 and 3 for one argument.
            print(str(row[0]) + " " + str(row[1]))
    finally:
        # Close the connection even if the query fails.
        conn.close()
# get temperature
# returns None on error, or the temperature as a float
def get_temp(devicefile):
    """Read a 1-wire sensor device file and return the temperature in C.

    Returns None when the file cannot be read or the sensor reports a
    bad CRC (the first line does not end with "YES").
    """
    try:
        # 'with' guarantees the file is closed; the previous bare
        # 'except:' is narrowed to actual I/O failures.
        with open(devicefile, 'r') as fileobj:
            lines = fileobj.readlines()
    except (IOError, OSError):
        return None
    # get the status from the end of line 1 (e.g. "... crc=57 YES")
    status = lines[0][-4:-1]
    # is the status is ok, get the temperature from line 2
    if status == "YES":
        print(status)
        # Line 2 ends with "t=<millidegrees>", e.g. "t=21500" -> 21.5 C.
        tempstr = lines[1][-6:-1]
        tempvalue = float(tempstr) / 1000
        print(tempvalue)
        return tempvalue
    else:
        print("There was an error.")
        return None
# main function
# This is where the program starts
def main():
    """Read the 1-wire temperature sensor once and log it to the database."""
    # enable kernel modules for the 1-wire GPIO thermometer
    os.system('sudo modprobe w1-gpio')
    os.system('sudo modprobe w1-therm')
    # search for a device file that starts with 28 (DS18B20 family code)
    devicelist = glob.glob('/sys/bus/w1/devices/28*')
    # glob returns a list, so test for emptiness; the old
    # "devicelist == ''" comparison could never be true.
    if not devicelist:
        return None
    # append /w1_slave to the device file
    w1devicefile = devicelist[0] + '/w1_slave'
    # get the temperature from the device file
    temperature = get_temp(w1devicefile)
    if temperature is None:
        # Sometimes reads fail on the first attempt, so retry once.
        temperature = get_temp(w1devicefile)
    print("temperature=" + str(temperature))
    # Store the temperature in the database
    log_temperature(temperature)
if __name__=="__main__":
    main()
| apache-2.0 | Python | |
81b178677a3c217f62be85bf16964a1f0717930f | fix #1 | idf/commons-util-py | commons_util/os_utils/memory.py | commons_util/os_utils/memory.py | __author__ = 'Danyang'
| apache-2.0 | Python | |
cd1ed470e319c6aa5d2ed5206d6fb6fba63876ee | add k-fold splitter | kavinyao/SKBPR,kavinyao/SKBPR | splitter.py | splitter.py | """
Stuff which splits dataset into train and test sets.
"""
class KFoldSplitter(object):
    """Splitter that splits a table into k groups of (almost) equal size.
    Before using this splitter, make sure the table to split has a `group_id` column.
    Sample usage:
    >>> splitter.split('query')
    >>> while splitter.more_rounds():
    >>>     splitter.next_round()
    >>>     # use query_train and query_test
    """
    def __init__(self, dbm, k):
        """
        @param dbm a DatabaseManager
        @param k the number of folds to split into, k should be > 1
        """
        self.dbm = dbm
        self.k = k
        self.current_table = ''
        self.current_round = 1
    def split(self, table):
        """After splitting, 1 <= table.group_id <= k"""
        # Assign each row a random fold via MySQL-style FLOOR(RAND()).
        # NOTE(review): table names are interpolated into the SQL; callers
        # must only pass trusted identifiers.
        self.current_table = table
        self.dbm.begin()
        self.dbm.query('UPDATE %s SET group_id = FLOOR(1 + RAND()*%d)' % (table, self.k))
        self.dbm.commit()
    def more_rounds(self):
        # True while there is still a fold left to hold out.
        return self.current_round <= self.k
    def next_round(self):
        """Prepare table_train and table_test tables.
        They are **actually** views: test = the held-out fold for this
        round, train = all other folds."""
        self.dbm.begin()
        self.dbm.query('CREATE OR REPLACE VIEW %s_test AS SELECT * FROM %s WHERE group_id = %d' % (self.current_table, self.current_table, self.current_round))
        self.dbm.query('CREATE OR REPLACE VIEW %s_train AS SELECT * FROM %s WHERE group_id != %d' % (self.current_table, self.current_table, self.current_round))
        self.dbm.commit()
        # don't forget to increment round otherwise client might get stuck in infinite loop
        self.current_round += 1
| mit | Python | |
6119f7998d918d3b38f129b7afd720f9a35e35c1 | Add script for fetching metadata from audio file | voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts | audio-metadata.py | audio-metadata.py | #! /usr/bin/env python
import os
import sys
import re
import tempfile
def getVideoDetails(filepath):
    # Run ffmpeg on *filepath* and scrape duration/bitrate plus the first
    # video and audio stream parameters from its stderr banner.
    # NOTE(review): requires an "ffmpeg" binary on PATH, and the banner
    # format differs between ffmpeg versions - verify against the
    # installed build.  NamedTemporaryFile defaults to binary mode, so
    # under Python 3 the str comparisons below would fail; this code
    # presumably targets Python 2 - confirm.
    tmpf = tempfile.NamedTemporaryFile()
    # ffmpeg prints stream info on stderr; redirect it into the temp file.
    os.system("ffmpeg -i \"%s\" 2> %s" % (filepath, tmpf.name))
    lines = tmpf.readlines()
    tmpf.close()
    metadata = {}
    for l in lines:
        l = l.strip()
        if l.startswith('Duration'):
            metadata['duration'] = re.search('Duration: (.*?),', l).group(0).split(':',1)[1].strip(' ,')
            metadata['bitrate'] = re.search("bitrate: (\d+ kb/s)", l).group(0).split(':')[1].strip()
        if l.startswith('Stream #0:0'):
            # First stream is assumed to be the video stream - confirm.
            metadata['video'] = {}
            metadata['video']['codec'], metadata['video']['profile'] = \
                [e.strip(' ,()') for e in re.search('Video: (.*? \(.*?\)),? ', l).group(0).split(':')[1].split('(')]
            metadata['video']['resolution'] = re.search('([1-9]\d+x\d+)', l).group(1)
            metadata['video']['bitrate'] = re.search('(\d+ kb/s)', l).group(1)
            metadata['video']['fps'] = re.search('(\d+ fps)', l).group(1)
        if l.startswith('Stream #0:1'):
            # Second stream is assumed to be the audio stream - confirm.
            metadata['audio'] = {}
            metadata['audio']['codec'] = re.search('Audio: (.*?) ', l).group(1)
            metadata['audio']['frequency'] = re.search(', (.*? Hz),', l).group(1)
            metadata['audio']['bitrate'] = re.search(', (\d+ kb/s)', l).group(1)
    return metadata
if __name__ == '__main__':
    # Expect exactly one argument: the media file to inspect.
    if len(sys.argv) != 2:
        print("Usage: ./getVideoDetails.py <filepath(absolute or relative)>")
        sys.exit("Syntax Error")
    print( getVideoDetails(sys.argv[1]) )
| mit | Python | |
21df69e2b2be4d59b5c8257d7efbf27a75eeb8dd | Add priming_output example | tgarc/python-sounddevice,spatialaudio/python-sounddevice,dholl/python-sounddevice,tgarc/python-sounddevice,spatialaudio/python-sounddevice,dholl/python-sounddevice | examples/priming_output.py | examples/priming_output.py | #!/usr/bin/env python3
"""Test priming output buffer.
See http://www.portaudio.com/docs/proposals/020-AllowCallbackToPrimeStream.html
Note that this is only supported in some of the host APIs.
"""
import sounddevice as sd
def callback(indata, outdata, frames, time, status):
    # Stream callback: always output silence; on the priming pass verify
    # that PortAudio flags an input underflow and delivers zeroed input,
    # then stop the stream on the first regular (non-priming) call.
    outdata.fill(0)
    if status.priming_output:
        assert status.input_underflow, 'input underflow flag should be set'
        assert not indata.any(), 'input buffer should be filled with zeros'
        print('Priming output buffer!')
        outdata[0] = 1
    else:
        print('Not priming, I quit!')
        raise sd.CallbackStop
with sd.Stream(channels=2, callback=callback,
               prime_output_buffers_using_stream_callback=True) as stream:
    # Poll until the callback raises CallbackStop after the priming pass.
    while stream.active:
        sd.sleep(100)
| mit | Python | |
98398398f590c3a98733193fc0ea45a1948edd0e | Add example to compare layers in a char-rnn task. | lmjohns3/theanets,chrinide/theanets | examples/recurrent-text.py | examples/recurrent-text.py | #!/usr/bin/env python
import climate
import matplotlib.pyplot as plt
import numpy as np
import theanets
import utils
climate.enable_default_logging()
COLORS = ['#d62728', '#1f77b4', '#2ca02c', '#9467bd', '#ff7f0e',
          '#e377c2', '#8c564b', '#bcbd22', '#7f7f7f', '#17becf']
# Moby Dick from Project Gutenberg is the character-level training corpus.
URL = 'http://www.gutenberg.org/cache/epub/2701/pg2701.txt'
with open(utils.find('moby.txt', URL)) as handle:
    text = theanets.recurrent.Text(handle.read().lower().replace('\n', ' '))
# Fixed 10-character seed used to sample text from every model.
seed = text.encode(text.text[200000:200010])
# Train one classifier per recurrent layer type and plot the loss curves.
for i, layer in enumerate((
        dict(form='rnn', activation='sigmoid'),
        dict(form='gru', activation='sigmoid'),
        dict(form='scrn', activation='linear'),
        dict(form='lstm'),
        dict(form='mrnn', activation='sigmoid', factors=len(text.alpha)),
        dict(form='clockwork', activation='linear', periods=(1, 2, 4, 8, 16)))):
    losses = []
    layer.update(size=100)
    net = theanets.recurrent.Classifier([
        1 + len(text.alpha), layer, 1000, 1 + len(text.alpha)])
    for tm, _ in net.itertrain(text.classifier_batches(30, 16),
                               min_improvement=0.99,
                               validate_every=50,
                               patience=0,
                               algo='rmsprop',
                               learning_rate=0.0001):
        # Abort this layer's run if training diverges.
        if np.isnan(tm['loss']):
            break
        # Show a sample continuation of the seed plus current accuracy.
        print('{}|{} ({:.1f}%)'.format(
            text.decode(seed),
            text.decode(net.predict_sequence(seed, 30)),
            100 * tm['acc']))
        losses.append(tm['loss'])
    plt.plot(losses, label=layer['form'], alpha=0.7, color=COLORS[i])
# Tidy up the axes and render the comparison plot.
plt.gca().xaxis.tick_bottom()
plt.gca().yaxis.tick_left()
plt.gca().spines['top'].set_color('none')
plt.gca().spines['right'].set_color('none')
plt.gca().spines['bottom'].set_position(('outward', 6))
plt.gca().spines['left'].set_position(('outward', 6))
plt.gca().set_ylabel('Loss')
plt.gca().set_xlabel('Training Epoch')
plt.gca().grid(True)
plt.legend()
plt.show()
| mit | Python | |
f41dc1eb966da1505d4dedd00034debf79774807 | add tests | JiangTao11/object_model | smalltalk_like/tests.py | smalltalk_like/tests.py | from obj_model import Class, Instance, TYPE, OBJECT
def test_creation():
    # Run all object-model tests in sequence.
    test_attribute()
    test_subclass()
    test_callmethod()
def test_attribute():
    # Attribute reads/writes on the object model must mirror plain Python.
    # Python Code
    class A(object):
        pass
    obj = A()
    obj.a = 1
    assert obj.a == 1
    obj.b = 2
    assert obj.b == 2
    obj.a = 3
    assert obj.a == 3
    # Object Model Code
    A = Class(name='A', base_class=OBJECT, fields={}, metaclass=TYPE)
    obj = Instance(A)
    obj.write_attribute('a', 1)
    assert obj.read_attribute('a') == 1
    obj.write_attribute('b', 2)
    assert obj.read_attribute('b') == 2
    obj.write_attribute('a', 3)
    assert obj.read_attribute('a') == 3
def test_subclass():
    # isinstance checks through the object model must mirror plain Python,
    # including walking the whole base-class chain up to OBJECT.
    # Python Code
    class A(object):
        pass
    class B(A):
        pass
    obj_b = B()
    assert isinstance(obj_b, B) and isinstance(obj_b, A) and isinstance(obj_b, object)
    assert not isinstance(obj_b, type)
    # Object Model Code
    A = Class(name='A', base_class=OBJECT, fields={}, metaclass=TYPE)
    B = Class(name='B', base_class=A, fields={}, metaclass=TYPE)
    obj_b = Instance(B)
    assert obj_b.isinstance(B) and obj_b.isinstance(A) and obj_b.isinstance(OBJECT)
    assert not obj_b.isinstance(TYPE)
def test_callmethod():
    # Method dispatch through the object model (fields holding plain
    # functions, invoked via call_method) must mirror Python methods.
    # Python Code
    class A(object):
        def m1(self):
            return self.a
        def m2(self, n):
            return self.a + n
    obj = A()
    obj.a = 1
    assert obj.m1() == 1
    assert obj.m2(3) == 4
    # Object Model Code
    def m1_A(self):
        return self.read_attribute('a')
    def m2_A(self, n):
        return self.read_attribute('a') + n
    A = Class(name='A', base_class=OBJECT, fields={'m1_A': m1_A, 'm2_A': m2_A}, metaclass=TYPE)
    obj = Instance(A)
    obj.write_attribute('a', 1)
    assert obj.call_method('m1_A') == 1
    assert obj.call_method('m2_A', 3) == 4
if __name__ == '__main__':
test_creation() | mit | Python | |
6c9cf71064cf8a0c47147efeb742b2d66caa1c47 | add stub models file | dimagi/commcare-hq,qedsoftware/commcare-hq,gmimano/commcaretest,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,gmimano/commcaretest,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq | corehq/apps/toggle_ui/models.py | corehq/apps/toggle_ui/models.py | # Stub models file
from couchdbkit.ext.django.schema import Document
class _(Document): pass | bsd-3-clause | Python | |
01c8d16df94ce558593b29974e30aa96679c6862 | add stylize.py for feed-forward mode | tonypeng/tensorstyle | stylize.py | stylize.py | """
Copyright 2016-present Tony Peng
Load a trained feed-forward model to stylize an image.
"""
import nets
import numpy as np
import tensorflow as tf
import utils
import time
MODEL_PATH = 'models/trained/Udnie'
CONTENT_IMAGE_PATH = 'runs/Udnie/content_small.jpg'
OUTPUT_IMAGE_PATH = 'runs/Udnie/styled4.jpg'
content_image = utils.read_image(CONTENT_IMAGE_PATH)
with tf.Session() as sess:
    # Batch dimension of 1: the network styles a single image per run.
    x = tf.placeholder(tf.float32, shape=(1, ) + content_image.shape)
    stylzr = nets.stylzr(x)
    # load the latest trained checkpoint into the session
    model = tf.train.latest_checkpoint(MODEL_PATH)
    saver = tf.train.Saver()
    saver.restore(sess, model)
    # run the content image through the network and time the forward pass
    start_time = time.time()
    styled_image = stylzr.eval(feed_dict={x: np.array([content_image])})
    print("eval: "+str(time.time() - start_time)+"s")
    # drop the batch dimension before writing the image to disk
    styled_image = styled_image.reshape(styled_image.shape[1:])
    utils.write_image(styled_image, OUTPUT_IMAGE_PATH)
| mit | Python | |
544c9cf63f54ca9e77fa37ab5e529791f9e00c3c | Create sysinfo.py | jadams/sysinfo,scensorECHO/sysinfo | sysinfo.py | sysinfo.py | #!/usr/bin/env python3
if __name__ == '__main__':
    # A bare "print" is a no-op expression in Python 3; call the function
    # so the script actually emits output when run.
    print()
| mit | Python | |
64e028ed51c8cd485586623b295391c00526f5f9 | add speed test example | jrversteegh/flexx,zoofIO/flexx,jrversteegh/flexx,zoofIO/flexx | flexx/ui/examples/speed_test.py | flexx/ui/examples/speed_test.py | """
This little app runs some speed tests by sending binary data over the
websocket (from JS to Py and back), and measuring the time it costs to
do this.
Note that the data is buffered by the websocket (and to some extend in Flexx'
event system), so when multiple messages are send in quick succession, the
last message appears to take a relatively long time.
Also note that when sending singular messages, you also measure the time
of some of Flexx' event loop iterations (on both ends).
Websockets apparently limit the size of messages to somewhere between
5 and 10 MiB. Perhaps Flexx should chunk long messages.
On my machine, with Firefox, it takes about 1.4 seconds to send 100 MiB
messages to Python and back.
"""
from flexx import app, event, ui
class SpeedTest(app.PyComponent):
    # Python-side component: owns the widget and echoes whatever binary
    # payload the JS side sends, so a full round trip can be timed.
    def init(self):
        self.widget = SpeedTestWidget(self)
    @event.action
    def echo(self, data):
        # Bounce the payload straight back to the widget in the browser.
        self.widget.receive_data(data)
class SpeedTestWidget(ui.Widget):
    # Browser-side widget (transpiled to JS by Flexx/PScript): builds the
    # UI, sends Uint8Array payloads to Python and times the round trips.
    # NOTE(review): "global window, perf_counter" resolves to browser
    # globals after PScript transpilation, not to Python names - confirm.
    def init(self, pycomp):
        self.pycomp = pycomp
        self._start_time = 0
        self._start_times = []
        with ui.VBox():
            with ui.HBox() as self.buttons:
                ui.Button(text='1 x 1 MiB roundtrip')
                ui.Button(text='1 x 5 MiB roundtrip')
                ui.Button(text='10 x 1 MiB roundtrip')
                ui.Button(text='10 x 5 MiB roundtrip')
                ui.Button(text='100 x 1 MiB roundtrip')
                ui.Button(text='100 x 5 MiB roundtrip')
            self.progress = ui.ProgressBar()
            self.status = ui.Label(text='Status: waiting for button press ...',
                                   wrap=1, flex=1, style='overflow-y:scroll;')
    @event.reaction('buttons.children*.mouse_down')
    def run_test(self, *events):
        # Parse the clicked button's caption ("<count> x <size> MiB ...")
        # into a list of payload sizes and fire all requests back-to-back.
        global window, perf_counter
        self.status.set_text('Test results: ')
        self.progress.set_value(0)
        tests = []
        for ev in events:
            if isinstance(ev.source, ui.Button):
                sze = 5 if '5' in ev.source.text else 1
                n = int(ev.source.text.split(' ')[0])
                for i in range(n):
                    tests.append(sze)
        self.progress.set_max(len(tests))
        self._start_time = perf_counter()
        for n in tests:
            data = window.Uint8Array(n * 1024 * 1024)
            self.send_data(data)
    @event.action
    def send_data(self, data):
        # Record the send time, then ship the payload to the Python side.
        global perf_counter
        self._start_times.append(perf_counter())
        self.pycomp.echo(data)
    @event.action
    def receive_data(self, data):
        # Pop the matching send time, report this round trip, and - once
        # every payload is back - report the total elapsed time.
        global perf_counter
        t = perf_counter() - self._start_times.pop(0)
        mib = len(data) / 1024 / 1024
        text = 'Received %i MiB in %s seconds.' % (mib, str(t)[:5])
        self.status.set_html(self.status.html + ' ' + text)
        self.progress.set_value(self.progress.value + 1)
        if len(self._start_times) == 0:
            t = perf_counter() - self._start_time
            text = 'Total time %s.' % str(t)[:5]
            self.status.set_html(self.status.html + ' ' + text)
if __name__ == '__main__':
    # Launch the app in a browser window and enter the Flexx event loop.
    m = app.launch(SpeedTest, 'chrome-browser')
    app.run()
| bsd-2-clause | Python | |
475ea65cce34b7af03a7355e16d95104292aa7fb | Create suntimes.py | ioangogo/Suntimes | suntimes.py | suntimes.py | #! /bin/python
# -*- coding: UTF-8 -*-
import urllib2, json, datetime, time
import dateutil.parser
global latitude
global longitude
api=json.loads(urllib2.urlopen("http://freegeoip.net/json/").read().decode("UTF-8"))
latitude=str(api['latitude'])
longitude=str(api["longitude"])
def getsunrise(lat="", lng="", formatted=1):
if lat=="" or lng == "":
lat=latitude
lng=longitude
url="http://api.sunrise-sunset.org/json?lat=" + lat + "&lng=" + lng + "&formatted=" + str(formatted)
print url
sunapi=urllib2.urlopen(url)
return json.loads(sunapi.read().decode("UTF-8"))['results']['sunrise']
def getsunset(lat="", lng="", formatted="1"):
if lat=="" or lng == "":
lat=latitude
lng=longitude
sunapi=urllib2.urlopen("http://api.sunrise-sunset.org/json?lat=" + lat + "&lng=" + lng + "&formatted=" + str(formatted))
return json.loads(sunapi.read().decode("UTF-8"))['results']['sunset']
def nighttrue(lat="", lng=""):
sunrise = dateutil.parser.parse(getsunrise(lat, lng, 0).replace("+00:00",""))
sunset = dateutil.parser.parse(getsunset(lat, lng, 0).replace("+00:00",""))
timenow = datetime.datetime.now()
if sunrise >= timenow >= sunset ==False:
return False
else:
return True
if __name__ == '__main__':
bools=nighttrue()
if bools == True:
print "night time"
elif bools == False:
print "day"
else:
print bools
| bsd-3-clause | Python | |
561b1b0bf1950bac54bc9c079daf6c09b3f87158 | Create pd.py | jacksu/machine-learning | src/ml/pd.py | src/ml/pd.py | #encoding=utf8
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
s = pd.Series([1,3,5,np.nan,6,8])
print(s)
dates = pd.date_range('20130101', periods=6)
print(dates)
#创建DataFrame
df = pd.DataFrame(np.random.randn(6,4), index=dates, columns=list('ABCD'))
print(df)
#通过字典创建DataFrame
f2 = pd.DataFrame({ 'A' : 1.,
'B' : pd.Timestamp('20130102'),
'C' : pd.Series(1,index=list(range(4)),dtype='float32'),
'D' : np.array([3] * 4,dtype='int32'),
'E' : pd.Categorical(["test","train","test","train"]),
'F' : 'foo' })
print(f2)
#探索数据
print("前五行:",df.head())
print("后三行:",df.tail(3))
print("index: ",df.index)
print("columns: ",df.columns)
print("values: ",df.values)
print("describe: ",df.describe())
print("转置:",df.T)
print("按照axis排列:",df.sort_index(axis=0, ascending=False))
print("按照某列排序:",df.sort_values(by='B'))
| mit | Python | |
f9317419417ec348b6520ce6aecf852a391d4b01 | Add importers module init | qurami/po2strings | po2strings/importers/__init__.py | po2strings/importers/__init__.py | # -*- coding: utf-8 -*- | mit | Python | |
124190aae0f39885011a5f12667d2348ffa32d09 | add invoke task to remve trailing ws | jrversteegh/flexx,zoofIO/flexx,zoofIO/flexx,jrversteegh/flexx | tasks/ws.py | tasks/ws.py | import os
from invoke import task
from ._config import ROOT_DIR, NAME
def trim_py_files(directory):
for root, dirs, files in os.walk(directory):
for fname in files:
filename = os.path.join(root, fname)
if fname.endswith('.py'):
with open(filename, 'rb') as f:
code = f.read().decode()
lines = [line.rstrip() for line in code.splitlines()]
while lines and not lines[-1]:
lines.pop(-1)
lines.append('') # always end with a newline
with open(filename, 'wb') as f:
f.write('\n'.join(lines).encode())
@task
def ws(ctx):
""" Remove trailing whitespace from all py files.
"""
trim_py_files(os.path.join(ROOT_DIR, 'flexx'))
trim_py_files(os.path.join(ROOT_DIR, 'flexxamples'))
trim_py_files(os.path.join(ROOT_DIR, 'tasks'))
| bsd-2-clause | Python | |
9f6952e0c46795bb704c9169cd71fdf18d952ebf | Add ChEBI client | bgyori/indra,jmuhlich/indra,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,bgyori/indra,pvtodorov/indra,bgyori/indra,sorgerlab/indra,johnbachman/belpy,pvtodorov/indra,jmuhlich/indra,jmuhlich/indra,sorgerlab/belpy,johnbachman/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/indra,sorgerlab/belpy,johnbachman/belpy,sorgerlab/indra | indra/databases/chebi_client.py | indra/databases/chebi_client.py | import os
import csv
from functools32 import lru_cache
chebi_to_pubchem_file = os.path.dirname(os.path.abspath(__file__)) + \
'/../resources/chebi_to_pubchem.tsv'
try:
fh = open(chebi_to_pubchem_file, 'rt')
rd = csv.reader(fh, delimiter='\t')
chebi_pubchem = {}
for row in rd:
chebi_pubchem[row[0]] = row[1]
except IOError:
chebi_pubchem = {}
def get_pubchem_id(chebi_id):
return chebi_pubchem.get(chebi_id)
| bsd-2-clause | Python | |
29e0644f5becc9833743f35aaa07011863fa9a12 | add gas art | KingPixil/ice,KingPixil/ice | src/art/gas/__init__.py | src/art/gas/__init__.py | import math
import colorsys
import png
from ...loader import load
from ...seed import generateSeed
from ...random import random, randomNoise2D
from ...pixel import generatePixel
# Configuration
width = 1000 # Width
height = 1000 # Height
xs = 700 # Filled width
ys = 700 # Filled height
xo = int((width - xs) / 2) # X filled offset margins
yo = int((height - ys) / 2) # Y filled offset margins
data = [] # Image data
lums = {} # Color lightnesses
# Put
def put(x, y):
global data
global lums
x = x * 3
y = height - y - 1
lum = lums.get((x, y))
if lum is None:
lum = lums[(x, y)] = 0.1
else:
lum = lums[(x, y)] = lum + 0.1
if lum > 0.7:
lum = 0.7
colorHue = (color2 - color1) * randomNoise2D(x / 500, y / 500) + color1
(r, g, b) = colorsys.hls_to_rgb(colorHue, lum, 1.0)
row = data[y]
row[x] = r * 255.0
row[x + 1] = g * 255.0
row[x + 2] = b * 255.0
# Clear
def putClear(x, y):
global data
x = x * 3
y = height - y - 1
row = data[y]
row[x] = 0.0
row[x + 1] = 0.0
row[x + 2] = 0.0
# Generate
def generate():
global data
global color1
global color2
global lums
p = [] # Points
o = 10 # Point offset
tt = 1000 # Total time
# Initialize
seedText = generateSeed()
data = []
for y in range(height):
current = []
for x in range(width):
current.append(0.0)
current.append(0.0)
current.append(0.0)
data.append(current)
# Colors
color1 = float(random()) / float(0xFFFFFFFFFFFFFFFF)
color2 = color1 + ((float(4.0 * random()) / (21.0 * float(0xFFFFFFFFFFFFFFFF))) + (1.0 / 7.0))
if color2 > 1.0:
color2 = 1.0
lums = {}
# Points
for x in range(xo, xo + xs, o):
pcol = []
p.append(pcol)
for y in range(yo, yo + ys, o):
pcol.append([x, y])
# Movement
for t in range(tt):
for pcol in p:
for pc in pcol:
x = pc[0]
y = pc[1]
va = 2 * math.pi * randomNoise2D(x / 500, y / 500)
vx = math.cos(va)
vy = math.sin(va)
pc[0] = x + vx
pc[1] = y + vy
put(int(x % width), int(y % height))
load(t / (tt - 1))
# Clear
for x in range(0, width):
for y in range(0, height):
if x < xo or x >= (xo + xs) or y < yo or y >= (yo + ys):
putClear(x, y)
# Write
f = open("art.png", "wb")
w = png.Writer(width, height)
w.write(f, data)
f.close()
return seedText
| mit | Python | |
3826858481c4f9bbf8d887fa390322f8190c96e2 | Add module to list ip addresses | alexoneill/py3status,valdur55/py3status,Andrwe/py3status,tobes/py3status,Andrwe/py3status,ultrabug/py3status,tobes/py3status,ultrabug/py3status,ultrabug/py3status,guiniol/py3status,guiniol/py3status,valdur55/py3status,vvoland/py3status,docwalter/py3status,valdur55/py3status | py3status/modules/net_iplist.py | py3status/modules/net_iplist.py | # -*- coding: utf-8 -*-
"""
Display the list of current IPs. This excludes loopback IPs and displays
"no connection" if there is no connection.
Configuration parameters
ignore: list of IPs to ignore. Can use shell style wildcards.
(default: ['127.*'])
no_connection: string to display if there are no non-ignored IPs
(default: 'no connection')
separator: string to use between IPs.
(default: ' ')
"""
# import your useful libs here
import socket
import struct
import fcntl
import array
from fnmatch import fnmatch
class Py3status:
cache_timeout = 30
separator = ' '
no_connection = 'no connection'
ignore = ['127.*']
def __init__(self):
pass
def ip_list(self):
response = {
'cached_until': self.py3.time_in(seconds=self.cache_timeout),
'full_text': ''
}
ip = []
ifaces = self._list_ifaces()
for iface in ifaces:
addr = self._get_ip(iface)
add = True
for ignore in self.ignore:
if fnmatch(addr, ignore):
add = False
break
if add:
ip.append(addr)
if len(ip) == 0:
response['full_text'] = self.no_connection
response['color'] = self.py3.COLOR_BAD
else:
response['full_text'] = self.separator.join(ip)
response['color'] = self.py3.COLOR_GOOD
return response
def _list_ifaces(self):
SIOCGIFCONF = 0x8912
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sockfd = sock.fileno()
max_possible = 128 # arbitrary. raise if needed.
data = max_possible * 32
names = array.array('B', [0]) * data
outbytes = struct.unpack('iL', fcntl.ioctl(sockfd, SIOCGIFCONF,
struct.pack('iL', data,
names.buffer_info()[0])))[0]
namestr = names.tostring()
lst = []
for i in range(0, outbytes, 40):
name = namestr[i:i+16].split(b'\x00', 1)[0]
lst.append(name)
return lst
def _get_ip(self, iface):
SIOCGIFADDR = 0x8915
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sockfd = sock.fileno()
ifreq = struct.pack('16sH14s', iface, socket.AF_INET, b'\x00'*14)
try:
res = fcntl.ioctl(sockfd, SIOCGIFADDR, ifreq)
except:
return None
ip = struct.unpack('16sH2x4s8x', res)[2]
return socket.inet_ntoa(ip)
if __name__ == "__main__":
"""
Test this module by calling it directly.
"""
from py3status.module_test import module_test
module_test(Py3status)
| bsd-3-clause | Python | |
66ad00861f7143e35ab80674295fa5bf7998cfa5 | Create pytabcomplete.py | TingPing/plugins,TingPing/plugins | HexChat/pytabcomplete.py | HexChat/pytabcomplete.py | from __future__ import print_function
import hexchat
__module_name__ = "PythonTabComplete"
__module_author__ = "TingPing"
__module_version__ = "0"
__module_description__ = "Tab completes modules in Interactive Console"
lastmodule = ''
lastcomplete = 0
lasttext = ''
def keypress_cb(word, word_eol, userdata):
global lastmodule
global lastcomplete
global lasttext
if not word[0] == '65289': # Tab
return
if not hexchat.get_info('channel') == '>>python<<':
return
text = hexchat.get_info('inputbox')
#pos = hexchat.get_prefs('state_cursor') # TODO: allow completing mid line
if not text:# or not pos:
return
try:
module = text.split(' ')[-1].split('.')[0]
except IndexError:
return
if lastmodule != module:
lastcomplete = 0
lasttext = text
lastmodule = module
try:
exec('import {}'.format(module)) # Has to be imported to dir() it
completes = eval('dir({})'.format(module))
if lastcomplete + 1 < len(completes):
lastcomplete = lastcomplete + 1
else:
lastcomplete = 0
except (NameError, SyntaxError, ImportError):
return
if lasttext[-1] != '.':
sep = '.'
else:
sep = ''
newtext = lasttext + sep + completes[lastcomplete]
hexchat.command('settext {}'.format(newtext))
hexchat.command('setcursor {}'.format(len(newtext)))
def unload_cb(userdata):
print(__module_name__, 'version', __module_version__, 'unloaded.')
hexchat.hook_print('Key Press', keypress_cb)
hexchat.hook_unload(unload_cb)
print(__module_name__, 'version', __module_version__, 'loaded.')
| mit | Python | |
248023106d4e881110a646e9d078ecad4f58e24d | Add a Python program which reads from a pipe and writes the data it gets to syslog. | tonnerre/pipelogger | pipelogger.py | pipelogger.py | #!/usr/bin/env python
#
import argparse
import os
import syslog
parser = argparse.ArgumentParser(
description='Syslog messages as read from a pipe')
parser.add_argument('-i', '--ident',
help='Use the given identifier for syslogging',
required=True)
parser.add_argument('pipe', help='Pipe file to read log records from')
args = parser.parse_args()
syslog.openlog(args.ident, 0)
if not os.path.exists(args.pipe):
os.mkfifo(args.pipe)
while os.path.exists(args.pipe):
f = open(args.pipe, 'r')
for l in f:
syslog.syslog(l)
f.close()
syslog.closelog()
| bsd-3-clause | Python | |
85d29ef779687a3b9db5333ce9921fc20e66b985 | Create test_get.py | luoweis/python,luoweis/python | test_get.py | test_get.py | #!/usr/bin/env python
# -*- coding=utf-8 -*-
#以get明文的方式传递数据
import urllib
import urllib2
values={}
values['username'] = "1016903103@qq.com"
values['password']="XXXX"
data = urllib.urlencode(values)
url = "http://passport.csdn.net/account/login"
geturl = url + "?"+data #字符串合并
request = urllib2.Request(geturl)
response = urllib2.urlopen(request)
print response.read()
| apache-2.0 | Python | |
66201e6d73a909bc0ad932ad4b5de9d2ce30d4fe | add Blob class | PhloxAR/phloxar,PhloxAR/phloxar | PhloxAR/features/blob.py | PhloxAR/features/blob.py | # -*- coding:utf-8 -*-
from __future__ import division, print_function
from __future__ import absolute_import, unicode_literals
from PhloxAR.base import math
from PhloxAR.base import sss
from PhloxAR.base import *
from PhloxAR.features.feature import Feature
from PhloxAR.color import Color
from PhloxAR.image import Image
class Blob(Feature):
pass | apache-2.0 | Python | |
0b06fb26fa5393e4ba80e2942ebba34d9f9fa4de | Create 1st Python script | Robinlovelace/r-vs-python-geo,Robinlovelace/r-vs-python-geo | Python/spatial-basics.py | Python/spatial-basics.py | from shapely.wkt import loads
g = loads('POINT (0.0 0.0)')
| mit | Python | |
a61d37449f8000a83942513f2ad71151ef26822d | Add unit tests for synapse.cells | vertexproject/synapse,vivisect/synapse,vertexproject/synapse,vertexproject/synapse | synapse/tests/test_cells.py | synapse/tests/test_cells.py | import synapse.axon as s_axon
import synapse.cells as s_cells
import synapse.cryotank as s_cryotank
from synapse.tests.common import *
class CellTest(SynTest):
def test_cell_cryo(self):
with self.getTestDir() as dirn:
with s_cells.cryo(dirn) as cryo:
self.isinstance(cryo, s_cryotank.CryoCell)
def test_cell_axon(self):
with self.getTestDir() as dirn:
with s_cells.axon(dirn) as axon:
self.isinstance(axon, s_axon.AxonCell)
| apache-2.0 | Python | |
7454abdfba5d37d81dc3ad4bf7fb2f63bc552f38 | Add wsgi file | BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit,BenMotz/cubetoolkit | toolkit.wsgi | toolkit.wsgi | import os
import sys
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
sys.path.append(os.path.abspath("."))
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler()
| agpl-3.0 | Python | |
ea5d2be685d7b144e29fa7d362f290a0569875cb | add radio.py | HSU-MilitaryLogisticsClub/pysatcatcher,HAYASAKA-Ryosuke/pysatcatcher | radio.py | radio.py | # -*- coding: utf-8 -*-
import unittest
import serial
class IC911:
def connect(self, radioport):
print "IC911"
#self._ser=serial.Serial(radioport,38400)
def chengefreq(self,freqvalue):
priansumble = "FE"*2
receiveaddress = "60"
sendeaddress = "E0"
command="05"
subcommand="00"
data = str(freqvalue)
postansumble = "FD"
sendcommand = priansumble+receiveaddress+sendeaddress+command+subcommand+data+postansumble
self._ser.sendvalue(sendcommand)
def getfreq(self):
priansumble = "FE"*2
receiveaddress = "60"
sendeaddress = "E0"
command="03"
subcommand="00"
data=""
postansumble = "FD"
sendcommand = priansumble+receiveaddress+sendeaddress+command+subcommand+data+postansumble
self._ser.sendvalue(sendcommand)
def changemode(self,mode):
priansumble = "FE"*2
receiveaddress = "60"
sendeaddress = "E0"
command="06"
subcommand=""
if mode == "LSB":
subcommand = "00"
elif mode == "USB":
subcommand = "01"
elif mode == "CW":
subcommand = "03"
elif mode == "FM":
subcommand = "04"
else:
subcommand = "04"
data=""
postansumble = "FD"
sendcommand = priansumble+receiveaddress+sendeaddress+command+subcommand+data+postansumble
self._ser.sendvalue(sendcommand)
def getmode(self):
priansumble = "FE"*2
receiveaddress = "60"
sendeaddress = "E0"
command="04"
subcommand="00"
data=""
postansumble = "FD"
sendcommand = priansumble+receiveaddress+sendeaddress+command+subcommand+data+postansumble
self._ser.sendvalue(sendcommand)
class IC910:
def connect(self, radioport):
print radioport
#self._ser=serial.Serial(radioport,38400)
def chengefreq(self,freqvalue):
priansumble = "FE"*2
receiveaddress = "60"
sendeaddress = "E0"
command="05"
subcommand="00"
data = str(freqvalue)
postansumble = "FD"
sendcommand = priansumble+receiveaddress+sendeaddress+command+subcommand+data+postansumble
self._ser.sendvalue(sendcommand)
def getfreq(self):
priansumble = "FE"*2
receiveaddress = "60"
sendeaddress = "E0"
command="03"
subcommand="00"
data=""
postansumble = "FD"
sendcommand = priansumble+receiveaddress+sendeaddress+command+subcommand+data+postansumble
self._ser.sendvalue(sendcommand)
def changemode(self,mode):
priansumble = "FE"*2
receiveaddress = "60"
sendeaddress = "E0"
command="06"
subcommand=""
if mode == "LSB":
subcommand = "00"
elif mode == "USB":
subcommand = "01"
elif mode == "CW":
subcommand = "03"
elif mode == "FM":
subcommand = "04"
else:
subcommand = "04"
data=""
postansumble = "FD"
sendcommand = priansumble+receiveaddress+sendeaddress+command+subcommand+data+postansumble
self._ser.sendvalue(sendcommand)
def getmode(self):
priansumble = "FE"*2
receiveaddress = "60"
sendeaddress = "E0"
command="04"
subcommand="00"
data=""
postansumble = "FD"
sendcommand = priansumble+receiveaddress+sendeaddress+command+subcommand+data+postansumble
self._ser.sendvalue(sendcommand)
class Radio(object):
def __init__(self,radiomodel):
if radiomodel == "IC910":
self._radio = IC910()
if radiomodel == "IC911":
self._radio = IC911()
def connect(self,radioport):
self._radio.connect(radioport)
class testradio(unittest.TestCase):
def testradioconnect(self):
radio = Radio("IC910")
radio.connect("connect")
unittest.main()
| mit | Python | |
324161f37b54aee71de801b4206f925c967d11d4 | Add a couple of simple tests and fix typo | tbabej/tasklib,robgolding63/tasklib,robgolding/tasklib | tasklib/tests.py | tasklib/tests.py | import shutil
import tempfile
import unittest
import uuid
from .task import TaskWarrior
class TasklibTest(unittest.TestCase):
def setUp(self):
self.tmp = tempfile.mkdtemp()
self.tw = TaskWarrior(data_location=self.tmp)
def tearDown(self):
shutil.rmtree(self.tmp)
class TaskFilterTest(TasklibTest):
def test_all_empty(self):
self.assertEqual(len(self.tw.tasks.all()), 0)
def test_all_non_empty(self):
self.tw.execute_command(['add', 'test task'])
self.assertEqual(len(self.tw.tasks.all()), 1)
self.assertEqual(self.tw.tasks.all()[0]['description'], 'test task')
self.assertEqual(self.tw.tasks.all()[0]['status'], 'pending')
| bsd-3-clause | Python | |
94013176a1dfe7724106ec2deed5f650b71b8f65 | Create basic admin interface... | CentreForResearchInAppliedLinguistics/clic,CentreForCorpusResearch/clic,CentreForResearchInAppliedLinguistics/clic,CentreForCorpusResearch/clic,CentreForResearchInAppliedLinguistics/clic,CentreForCorpusResearch/clic | clic/web/admin.py | clic/web/admin.py | # from __future__ import absolute_import # help python find modules within clic package (see John H email 09.04.2014)
from flask import Flask, render_template
from flask.ext.security import Security, SQLAlchemyUserDatastore, \
UserMixin, RoleMixin, login_required
from flask.ext.admin.contrib import sqla
from flask_admin import Admin, BaseView, expose
from flask_mail import Mail
from flask_admin.contrib.sqla import ModelView
from wtforms.fields import PasswordField
from models import db, Annotation, Category, Role, User, List
# app = Flask(__name__, static_url_path='')
app = Flask('clic.web', static_url_path='')
app.config["SQLALCHEMY_DATABASE_URI"] = "postgresql://jdejoode:isabelle@localhost/annotation_dev"
app.config["DEBUG"] = True
# when testing = True, the login_required decorator is disabled.
app.config["TESTING"] = True
app.config["SECRET_KEY"] = "qdfmkqj fmqksjfdm k"
app.config['MAIL_SERVER'] = 'smtp.qsdfqsdfqskjdfmlqsjdfmlkjjqsdf.com'
app.config['MAIL_PORT'] = 465
app.config['MAIL_USE_SSL'] = True
app.config['MAIL_USERNAME'] = 'username'
app.config['MAIL_PASSWORD'] = 'password'
app.config['SECURITY_REGISTERABLE'] = True
app.config['SECURITY_TRACKABLE'] = True
app.config['SECURITY_RECOVERABLE'] = True
mail = Mail(app)
db.init_app(app)
# Setup Flask-Security
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
security = Security(app, user_datastore)
# Taken from: https://github.com/sasaporta/flask-security-admin-example/blob/master/main.py
# Customized User model for SQL-Admin
class UserAdmin(sqla.ModelView):
# Don't display the password on the list of Users
column_exclude_list = list = ('password',)
# Don't include the standard password field when creating or editing a User (but see below)
form_excluded_columns = ('password',)
# Automatically display human-readable names for the current and available Roles when creating or editing a User
column_auto_select_related = True
# Prevent administration of Users unless the currently logged-in user has the "admin" role
def is_accessible(self):
return current_user.has_role('admin')
# On the form for creating or editing a User, don't display a field corresponding to the model's password field.
# There are two reasons for this. First, we want to encrypt the password before storing in the database. Second,
# we want to use a password field (with the input masked) rather than a regular text field.
def scaffold_form(self):
# Start with the standard form as provided by Flask-Admin. We've already told Flask-Admin to exclude the
# password field from this form.
form_class = super(UserAdmin, self).scaffold_form()
# Add a password field, naming it "password2" and labeling it "New Password".
form_class.password2 = PasswordField('New Password')
return form_class
# This callback executes when the user saves changes to a newly-created or edited User -- before the changes are
# committed to the database.
def on_model_change(self, form, model, is_created):
# If the password field isn't blank...
if len(model.password2):
# ... then encrypt the new password prior to storing it in the database. If the password field is blank,
# the existing password in the database will be retained.
model.password = utils.encrypt_password(model.password2)
# Customized Role model for SQL-Admin
class RoleAdmin(sqla.ModelView):
# Prevent administration of Roles unless the currently logged-in user has the "admin" role
def is_accessible(self):
return current_user.has_role('admin')
class MyAnnotations(BaseView):
@expose('/')
@login_required
def index(self):
return self.render('concordance-results.html')
@app.route('/')
@login_required
def home():
return render_template('test.html')
class AnnotationModelView(ModelView):
column_filters = ('notes',)
column_searchable_list = ('notes',)
admin = Admin(app)
# admin.add_view(MyAnnotations(name="Test"))
admin.add_view(AnnotationModelView(Annotation, db.session))
admin.add_view(ModelView(Category, db.session))
admin.add_view(ModelView(User, db.session))
admin.add_view(ModelView(Role, db.session))
admin.add_view(ModelView(List, db.session))
# Add Flask-Admin views for Users and Roles
# admin.add_view(UserAdmin(User, db.session))
# admin.add_view(RoleAdmin(Role, db.session))
if __name__ == "__main__":
app.run()
| mit | Python | |
86d51e36ca0f5772717d72d4729fb331a0066636 | Fix smoke tests to delete resources synchronously. | rakeshmi/tempest,flyingfish007/tempest,tonyli71/tempest,neerja28/Tempest,Mirantis/tempest,BeenzSyed/tempest,rzarzynski/tempest,manasi24/jiocloud-tempest-qatempest,alinbalutoiu/tempest,afaheem88/tempest,flyingfish007/tempest,Tesora/tesora-tempest,Tesora/tesora-tempest,pczerkas/tempest,akash1808/tempest,sebrandon1/tempest,ebagdasa/tempest,yamt/tempest,tudorvio/tempest,varunarya10/tempest,FujitsuEnablingSoftwareTechnologyGmbH/tempest,CiscoSystems/tempest,vedujoshi/tempest,bigswitch/tempest,armando-migliaccio/tempest,jamielennox/tempest,cloudbase/lis-tempest,NexusIS/tempest,zsoltdudas/lis-tempest,cloudbase/lis-tempest,Juniper/tempest,tonyli71/tempest,danielmellado/tempest,adkerr/tempest,JioCloud/tempest,jamielennox/tempest,masayukig/tempest,dkalashnik/tempest,Vaidyanath/tempest,adkerr/tempest,rzarzynski/tempest,rakeshmi/tempest,hayderimran7/tempest,afaheem88/tempest,queria/my-tempest,JioCloud/tempest,manasi24/tempest,neerja28/Tempest,redhat-cip/tempest,afaheem88/tempest_neutron,afaheem88/tempest_neutron,ntymtsiv/tempest,roopali8/tempest,armando-migliaccio/tempest,nunogt/tempest,xbezdick/tempest,manasi24/tempest,jaspreetw/tempest,Juraci/tempest,izadorozhna/tempest,citrix-openstack/build-tempest,manasi24/jiocloud-tempest-qatempest,hayderimran7/tempest,varunarya10/tempest,Lilywei123/tempest,LIS/lis-tempest,itskewpie/tempest,Lilywei123/tempest,nunogt/tempest,masayukig/tempest,alinbalutoiu/tempest,openstack/tempest,vmahuli/tempest,CiscoSystems/tempest,danielmellado/tempest,dkalashnik/tempest,ntymtsiv/tempest,cisco-openstack/tempest,bigswitch/tempest,xbezdick/tempest,openstack/tempest,pandeyop/tempest,hpcloud-mon/tempest,eggmaster/tempest,zsoltdudas/lis-tempest,BeenzSyed/tempest,citrix-openstack/build-tempest,vedujoshi/tempest,Juraci/tempest,itskewpie/tempest,redhat-cip/tempest,ebagdasa/tempest,cisco-openstack/tempest,vedujoshi/os_tempest,tudorvio/tempest,pczerkas/tempest,queria/my
-tempest,citrix-openstack-build/tempest,izadorozhna/tempest,akash1808/tempest,NexusIS/tempest,pandeyop/tempest,citrix-openstack-build/tempest,jaspreetw/tempest,FujitsuEnablingSoftwareTechnologyGmbH/tempest,hpcloud-mon/tempest,Mirantis/tempest,eggmaster/tempest,roopali8/tempest,LIS/lis-tempest,vmahuli/tempest,Vaidyanath/tempest,yamt/tempest,Juniper/tempest,vedujoshi/os_tempest,sebrandon1/tempest | tempest/smoke.py | tempest/smoke.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack, LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from tempest import test
LOG = logging.getLogger(__name__)
class SmokeTest(object):
"""
Base test case class mixin for "smoke tests"
Smoke tests are tests that have the following characteristics:
* Test basic operations of an API, typically in an order that
a regular user would perform those operations
* Test only the correct inputs and action paths -- no fuzz or
random input data is sent, only valid inputs.
* Use only the default client tool for calling an API
"""
pass
class DefaultClientSmokeTest(test.DefaultClientTest, SmokeTest):
"""
Base smoke test case class that provides the default clients to
access the various OpenStack APIs.
"""
@classmethod
def tearDownClass(cls):
# NOTE(jaypipes): Because smoke tests are typically run in a specific
# order, and because test methods in smoke tests generally create
# resources in a particular order, we destroy resources in the reverse
# order in which resources are added to the smoke test class object
while cls.resources:
thing = cls.resources.pop()
LOG.debug("Deleting %r from shared resources of %s" %
(thing, cls.__name__))
# OpenStack resources are assumed to have a delete()
# method which destroys the resource...
thing.delete()
def is_deletion_complete():
# Deletion testing is only required for objects whose
# existence cannot be checked via retrieval.
if isinstance(thing, dict):
return True
try:
thing.get()
except Exception as e:
# Clients are expected to return an exception
# called 'NotFound' if retrieval fails.
if e.__class__.__name__ == 'NotFound':
return True
raise
return False
# Block until resource deletion has completed or timed-out
test.call_until_true(is_deletion_complete, 10, 1)
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack, LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from tempest import test
LOG = logging.getLogger(__name__)
class SmokeTest(object):
"""
Base test case class mixin for "smoke tests"
Smoke tests are tests that have the following characteristics:
* Test basic operations of an API, typically in an order that
a regular user would perform those operations
* Test only the correct inputs and action paths -- no fuzz or
random input data is sent, only valid inputs.
* Use only the default client tool for calling an API
"""
pass
class DefaultClientSmokeTest(test.DefaultClientTest, SmokeTest):
"""
Base smoke test case class that provides the default clients to
access the various OpenStack APIs.
"""
@classmethod
def tearDownClass(cls):
# NOTE(jaypipes): Because smoke tests are typically run in a specific
# order, and because test methods in smoke tests generally create
# resources in a particular order, we destroy resources in the reverse
# order in which resources are added to the smoke test class object
if not cls.resources:
return
thing = cls.resources.pop()
while True:
LOG.debug("Deleting %r from shared resources of %s" %
(thing, cls.__name__))
# Resources in novaclient all have a delete() method
# which destroys the resource...
thing.delete()
if not cls.resources:
return
thing = cls.resources.pop()
| apache-2.0 | Python |
24f16c8e012000a86ccba564fb0be84504b60824 | Use Jacoco to create a code coverage report; update build scripts to run it. | marcinkwiatkowski/buck,vine/buck,brettwooldridge/buck,facebook/buck,1yvT0s/buck,illicitonion/buck,brettwooldridge/buck,siddhartharay007/buck,romanoid/buck,dsyang/buck,ilya-klyuchnikov/buck,daedric/buck,vschs007/buck,zhan-xiong/buck,janicduplessis/buck,dsyang/buck,1yvT0s/buck,shybovycha/buck,rowillia/buck,dsyang/buck,liuyang-li/buck,rowillia/buck,vschs007/buck,pwz3n0/buck,Addepar/buck,raviagarwal7/buck,marcinkwiatkowski/buck,dsyang/buck,vschs007/buck,dushmis/buck,zpao/buck,Distrotech/buck,rhencke/buck,mogers/buck,janicduplessis/buck,darkforestzero/buck,brettwooldridge/buck,dushmis/buck,dsyang/buck,marcinkwiatkowski/buck,bocon13/buck,facebook/buck,zhuxiaohao/buck,Distrotech/buck,SeleniumHQ/buck,dushmis/buck,mogers/buck,mikekap/buck,Distrotech/buck,luiseduardohdbackup/buck,bocon13/buck,illicitonion/buck,rmaz/buck,rowillia/buck,pwz3n0/buck,clonetwin26/buck,mikekap/buck,LegNeato/buck,sdwilsh/buck,1yvT0s/buck,zhuxiaohao/buck,tgummerer/buck,rowillia/buck,JoelMarcey/buck,luiseduardohdbackup/buck,raviagarwal7/buck,mnuessler/buck,OkBuilds/buck,shs96c/buck,rhencke/buck,clonetwin26/buck,rowillia/buck,k21/buck,kageiit/buck,zhuxiaohao/buck,mikekap/buck,dushmis/buck,shs96c/buck,ilya-klyuchnikov/buck,stuhood/buck,clonetwin26/buck,mogers/buck,Learn-Android-app/buck,zpao/buck,daedric/buck,vschs007/buck,marcinkwiatkowski/buck,bocon13/buck,tgummerer/buck,rmaz/buck,illicitonion/buck,OkBuilds/buck,shs96c/buck,sdwilsh/buck,neonichu/buck,illicitonion/buck,romanoid/buck,JoelMarcey/buck,liuyang-li/buck,hgl888/buck,stuhood/buck,Addepar/buck,zhan-xiong/buck,LegNeato/buck,justinmuller/buck,Distrotech/buck,davido/buck,lukw00/buck,SeleniumHQ/buck,sdwilsh/buck,robbertvanginkel/buck,tgummerer/buck,daedric/buck,facebook/buck,illicitonion/buck,hgl888/buck,OkBuilds/buck,siddhartharay007/buck,brettwooldridge/buck,shs96c/buck,mnuessler/buck,janicduplessis/buck,marcinkwiatkowski/buck,
1yvT0s/buck,Addepar/buck,MarkRunWu/buck,LegNeato/buck,grumpyjames/buck,JoelMarcey/buck,raviagarwal7/buck,facebook/buck,bocon13/buck,davido/buck,justinmuller/buck,kageiit/buck,darkforestzero/buck,mogers/buck,davido/buck,JoelMarcey/buck,dushmis/buck,mnuessler/buck,kageiit/buck,SeleniumHQ/buck,justinmuller/buck,vine/buck,MarkRunWu/buck,lukw00/buck,raviagarwal7/buck,mread/buck,pwz3n0/buck,shybovycha/buck,liuyang-li/buck,Dominator008/buck,LegNeato/buck,JoelMarcey/buck,liuyang-li/buck,illicitonion/buck,bocon13/buck,liuyang-li/buck,zhan-xiong/buck,brettwooldridge/buck,k21/buck,artiya4u/buck,justinmuller/buck,kageiit/buck,SeleniumHQ/buck,grumpyjames/buck,janicduplessis/buck,rmaz/buck,illicitonion/buck,daedric/buck,grumpyjames/buck,facebook/buck,raviagarwal7/buck,artiya4u/buck,dushmis/buck,Addepar/buck,illicitonion/buck,JoelMarcey/buck,artiya4u/buck,shs96c/buck,davido/buck,pwz3n0/buck,artiya4u/buck,shs96c/buck,MarkRunWu/buck,pwz3n0/buck,dsyang/buck,SeleniumHQ/buck,neonichu/buck,lukw00/buck,Distrotech/buck,rhencke/buck,saleeh93/buck-cutom,OkBuilds/buck,SeleniumHQ/buck,brettwooldridge/buck,LegNeato/buck,hgl888/buck,rowillia/buck,tgummerer/buck,illicitonion/buck,raviagarwal7/buck,Addepar/buck,Distrotech/buck,marcinkwiatkowski/buck,Addepar/buck,darkforestzero/buck,tgummerer/buck,hgl888/buck,tgummerer/buck,zhuxiaohao/buck,Distrotech/buck,clonetwin26/buck,bocon13/buck,clonetwin26/buck,vine/buck,Learn-Android-app/buck,ilya-klyuchnikov/buck,grumpyjames/buck,LegNeato/buck,brettwooldridge/buck,Addepar/buck,zhuxiaohao/buck,LegNeato/buck,daedric/buck,zhan-xiong/buck,pwz3n0/buck,romanoid/buck,illicitonion/buck,Distrotech/buck,darkforestzero/buck,1yvT0s/buck,robbertvanginkel/buck,robbertvanginkel/buck,sdwilsh/buck,mnuessler/buck,OkBuilds/buck,JoelMarcey/buck,bocon13/buck,neonichu/buck,Heart2009/buck,JoelMarcey/buck,grumpyjames/buck,Learn-Android-app/buck,saleeh93/buck-cutom,MarkRunWu/buck,SeleniumHQ/buck,clonetwin26/buck,mnuessler/buck,mikekap/buck,vschs007/buck,rhencke/buck,bocon13/buck,
vine/buck,raviagarwal7/buck,nguyentruongtho/buck,bocon13/buck,dushmis/buck,robbertvanginkel/buck,marcinkwiatkowski/buck,sdwilsh/buck,stuhood/buck,brettwooldridge/buck,pwz3n0/buck,ilya-klyuchnikov/buck,Dominator008/buck,romanoid/buck,robbertvanginkel/buck,siddhartharay007/buck,marcinkwiatkowski/buck,vine/buck,sdwilsh/buck,dsyang/buck,zhan-xiong/buck,ilya-klyuchnikov/buck,rhencke/buck,artiya4u/buck,bocon13/buck,dsyang/buck,liuyang-li/buck,mread/buck,shs96c/buck,janicduplessis/buck,rhencke/buck,k21/buck,marcinkwiatkowski/buck,k21/buck,dpursehouse/buck,dsyang/buck,neonichu/buck,shybovycha/buck,romanoid/buck,daedric/buck,Heart2009/buck,vschs007/buck,raviagarwal7/buck,LegNeato/buck,JoelMarcey/buck,sdwilsh/buck,Addepar/buck,ilya-klyuchnikov/buck,zhan-xiong/buck,tgummerer/buck,nguyentruongtho/buck,mread/buck,illicitonion/buck,lukw00/buck,mogers/buck,lukw00/buck,JoelMarcey/buck,dushmis/buck,romanoid/buck,LegNeato/buck,brettwooldridge/buck,vschs007/buck,pwz3n0/buck,zhan-xiong/buck,robbertvanginkel/buck,grumpyjames/buck,clonetwin26/buck,Heart2009/buck,zpao/buck,Learn-Android-app/buck,sdwilsh/buck,luiseduardohdbackup/buck,luiseduardohdbackup/buck,siddhartharay007/buck,grumpyjames/buck,darkforestzero/buck,shybovycha/buck,SeleniumHQ/buck,zhuxiaohao/buck,vschs007/buck,davido/buck,nguyentruongtho/buck,romanoid/buck,OkBuilds/buck,grumpyjames/buck,mikekap/buck,tgummerer/buck,Dominator008/buck,darkforestzero/buck,zhan-xiong/buck,Dominator008/buck,janicduplessis/buck,pwz3n0/buck,1yvT0s/buck,rmaz/buck,vine/buck,mikekap/buck,rmaz/buck,stuhood/buck,stuhood/buck,hgl888/buck,lukw00/buck,siddhartharay007/buck,daedric/buck,robbertvanginkel/buck,zhuxiaohao/buck,luiseduardohdbackup/buck,darkforestzero/buck,romanoid/buck,mikekap/buck,vschs007/buck,janicduplessis/buck,stuhood/buck,shybovycha/buck,JoelMarcey/buck,dpursehouse/buck,mread/buck,LegNeato/buck,liuyang-li/buck,janicduplessis/buck,nguyentruongtho/buck,zhan-xiong/buck,romanoid/buck,shybovycha/buck,illicitonion/buck,vine/buck,Dominator008/b
uck,artiya4u/buck,brettwooldridge/buck,k21/buck,daedric/buck,romanoid/buck,janicduplessis/buck,artiya4u/buck,marcinkwiatkowski/buck,dsyang/buck,sdwilsh/buck,rmaz/buck,darkforestzero/buck,clonetwin26/buck,Heart2009/buck,shybovycha/buck,vschs007/buck,dpursehouse/buck,justinmuller/buck,SeleniumHQ/buck,justinmuller/buck,Learn-Android-app/buck,ilya-klyuchnikov/buck,zhan-xiong/buck,rowillia/buck,romanoid/buck,Distrotech/buck,robbertvanginkel/buck,vschs007/buck,liuyang-li/buck,lukw00/buck,rmaz/buck,vschs007/buck,rowillia/buck,darkforestzero/buck,rmaz/buck,MarkRunWu/buck,mikekap/buck,Addepar/buck,mikekap/buck,sdwilsh/buck,raviagarwal7/buck,rhencke/buck,rmaz/buck,1yvT0s/buck,LegNeato/buck,OkBuilds/buck,lukw00/buck,ilya-klyuchnikov/buck,k21/buck,darkforestzero/buck,liuyang-li/buck,justinmuller/buck,dpursehouse/buck,Distrotech/buck,artiya4u/buck,neonichu/buck,vine/buck,daedric/buck,daedric/buck,rhencke/buck,rhencke/buck,dushmis/buck,MarkRunWu/buck,mikekap/buck,sdwilsh/buck,shs96c/buck,clonetwin26/buck,lukw00/buck,MarkRunWu/buck,SeleniumHQ/buck,shs96c/buck,siddhartharay007/buck,shs96c/buck,robbertvanginkel/buck,sdwilsh/buck,marcinkwiatkowski/buck,raviagarwal7/buck,zhuxiaohao/buck,luiseduardohdbackup/buck,mogers/buck,MarkRunWu/buck,shybovycha/buck,dpursehouse/buck,k21/buck,nguyentruongtho/buck,shybovycha/buck,davido/buck,grumpyjames/buck,Dominator008/buck,facebook/buck,zpao/buck,liuyang-li/buck,hgl888/buck,zhan-xiong/buck,dpursehouse/buck,vschs007/buck,Addepar/buck,zpao/buck,facebook/buck,artiya4u/buck,mogers/buck,justinmuller/buck,tgummerer/buck,Learn-Android-app/buck,Dominator008/buck,rowillia/buck,davido/buck,mread/buck,tgummerer/buck,neonichu/buck,janicduplessis/buck,zpao/buck,shs96c/buck,SeleniumHQ/buck,OkBuilds/buck,grumpyjames/buck,clonetwin26/buck,dpursehouse/buck,siddhartharay007/buck,OkBuilds/buck,janicduplessis/buck,hgl888/buck,kageiit/buck,zpao/buck,brettwooldridge/buck,davido/buck,LegNeato/buck,rowillia/buck,stuhood/buck,robbertvanginkel/buck,ilya-klyuchnikov/buck,n
eonichu/buck,Addepar/buck,dsyang/buck,romanoid/buck,OkBuilds/buck,justinmuller/buck,dsyang/buck,justinmuller/buck,davido/buck,k21/buck,ilya-klyuchnikov/buck,kageiit/buck,davido/buck,JoelMarcey/buck,nguyentruongtho/buck,mnuessler/buck,vine/buck,clonetwin26/buck,SeleniumHQ/buck,Heart2009/buck,mogers/buck,Dominator008/buck,siddhartharay007/buck,grumpyjames/buck,brettwooldridge/buck,sdwilsh/buck,tgummerer/buck,k21/buck,Learn-Android-app/buck,raviagarwal7/buck,hgl888/buck,kageiit/buck,mogers/buck,illicitonion/buck,1yvT0s/buck,LegNeato/buck,rmaz/buck,lukw00/buck,marcinkwiatkowski/buck,Dominator008/buck,justinmuller/buck,rhencke/buck,janicduplessis/buck,OkBuilds/buck,saleeh93/buck-cutom,davido/buck,1yvT0s/buck,k21/buck,shs96c/buck,darkforestzero/buck,shybovycha/buck,nguyentruongtho/buck,shs96c/buck,shybovycha/buck,mread/buck,mogers/buck,zhuxiaohao/buck,bocon13/buck,mnuessler/buck,OkBuilds/buck,luiseduardohdbackup/buck,liuyang-li/buck,mikekap/buck,Addepar/buck,stuhood/buck,daedric/buck,clonetwin26/buck,hgl888/buck,marcinkwiatkowski/buck,raviagarwal7/buck,saleeh93/buck-cutom,darkforestzero/buck,mogers/buck,vine/buck,mikekap/buck,Heart2009/buck,SeleniumHQ/buck,pwz3n0/buck,rowillia/buck,grumpyjames/buck,rmaz/buck,dushmis/buck,davido/buck,Heart2009/buck,Learn-Android-app/buck,k21/buck,Addepar/buck,k21/buck,robbertvanginkel/buck,raviagarwal7/buck,Heart2009/buck,ilya-klyuchnikov/buck,JoelMarcey/buck,rmaz/buck,luiseduardohdbackup/buck,lukw00/buck,brettwooldridge/buck,saleeh93/buck-cutom,stuhood/buck,darkforestzero/buck,mnuessler/buck,Dominator008/buck,ilya-klyuchnikov/buck,rmaz/buck,saleeh93/buck-cutom,rhencke/buck,robbertvanginkel/buck,Distrotech/buck,pwz3n0/buck,artiya4u/buck,dpursehouse/buck,OkBuilds/buck,robbertvanginkel/buck,dsyang/buck,romanoid/buck,Learn-Android-app/buck,luiseduardohdbackup/buck,Dominator008/buck,zhan-xiong/buck,k21/buck,Heart2009/buck,justinmuller/buck,vine/buck,artiya4u/buck,shybovycha/buck,Dominator008/buck,davido/buck,stuhood/buck,ilya-klyuchnikov/buck,
justinmuller/buck,daedric/buck,siddhartharay007/buck,clonetwin26/buck,bocon13/buck,shybovycha/buck,neonichu/buck,rowillia/buck,mnuessler/buck,Learn-Android-app/buck,Learn-Android-app/buck,daedric/buck,zhan-xiong/buck,neonichu/buck,stuhood/buck | scripts/assert_code_coverage.py | scripts/assert_code_coverage.py | #!/usr/bin/env python
import xml.etree.ElementTree as ElementTree
import sys
# This parses buck-out/gen/jacoco/code-coverage/index.html after
# `buck test --all --code-coverage --code-coverage-format xml --no-results-cache`
# has been run.
PATH_TO_CODE_COVERAGE_XML = 'buck-out/gen/jacoco/code-coverage/coverage.xml'
# If the code coverage for the project drops below this threshold,
# fail the build. This is designed to far enough below our current
# standards (80% coverage) that this should be possible to sustain
# given the inevitable ebb and flow of the code coverage level.
CODE_COVERAGE_BY_LINE_GOAL = 78
def is_covered_package_name(package_name):
    """Return True if *package_name* should count toward coverage metrics.

    Third-party code and code generated by javacc are excluded.
    """
    # Only first-party Buck packages participate in the coverage report.
    in_buck_tree = package_name.startswith('com/facebook/buck/')
    # TODO: This package has a mix of handwritten and autogenerated code.
    # This script should be updated to include the handwritten code in the
    # metrics.
    is_generated = package_name == 'com/facebook/buck/apple/xcode/xcconfig'
    return in_buck_tree and not is_generated
def calculate_code_coverage():
    """Parse the JaCoCo XML report and print a per-package line-coverage table.

    Packages are listed highest coverage first (ties broken by name), followed
    by an aggregate TOTAL row.  Rows at or above CODE_COVERAGE_BY_LINE_GOAL are
    colored green, the rest yellow.

    Returns:
        The aggregate line-coverage percentage as a float rounded to 2 places.
    """
    root = ElementTree.parse(PATH_TO_CODE_COVERAGE_XML)
    line_coverage = []
    max_package_name = 0
    total_line_covered = 0
    total_line = 0
    for element in root.findall('.//package'):
        package_name = element.attrib['name']
        if not is_covered_package_name(package_name):
            continue
        max_package_name = max(max_package_name, len(package_name))
        for counter in element.findall('./counter'):
            counter_type = counter.attrib.get('type')
            missed = int(counter.attrib.get('missed'))
            covered = int(counter.attrib.get('covered'))
            percentage = round(100 * covered / float(missed + covered), 2)
            if counter_type == 'LINE':
                total_line_covered += covered
                total_line += missed + covered
                line_coverage.append({'package_name': package_name,
                                      'percentage': percentage})

    def sort_key(item):
        # High percentage first; ties broken lexicographically by name.
        # (Replaces the old Python-2-only cmp= comparator with a key function.)
        return (-item['percentage'], item['package_name'])

    def label_with_padding(label):
        return label + ' ' * (max_package_name - len(label)) + ' '

    def print_separator():
        print('-' * (max_package_name + 7))

    # Print header.
    print(label_with_padding('PACKAGE') + 'LINE')
    print_separator()
    # Print rows sorted by line coverage then package name.
    line_coverage.sort(key=sort_key)
    for item in line_coverage:
        package_name = item['package_name']
        percentage = item['percentage']
        color = '\033[92m' if percentage >= CODE_COVERAGE_BY_LINE_GOAL else '\033[93m'
        print('%s%s%s%%\033[0m' % (color, label_with_padding(package_name), percentage))
    # Print aggregate numbers.
    total_line_percentage = round(100 * total_line_covered / float(total_line), 2)
    color = '\033[92m' if total_line_percentage >= CODE_COVERAGE_BY_LINE_GOAL else '\033[93m'
    print_separator()
    print('%s%s%s%%\033[0m' % (color, label_with_padding('TOTAL'), total_line_percentage))
    return total_line_percentage
def is_code_coverage_met():
    """Return whether the aggregate line coverage meets the project goal."""
    return calculate_code_coverage() >= CODE_COVERAGE_BY_LINE_GOAL
def main():
    """Exit with status 1 when the code-coverage goal is missed, 0 otherwise."""
    goal_met = is_code_coverage_met()
    if not goal_met:
        sys.exit(1)


if __name__ == '__main__':
    main()
| apache-2.0 | Python | |
ce8f335b8b52d682cd233a96529201a4c537e88d | Add Python 3.5 | cle1109/scot,scot-dev/scot,scot-dev/scot,mbillingr/SCoT,cle1109/scot,cbrnr/scot,mbillingr/SCoT,cbrnr/scot | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
# codecs.open is used so the ``encoding`` argument also works on Python 2.
from codecs import open

from scot import __version__ as ver

# The PyPI long description is taken straight from the README.
with open('README.md', encoding='utf-8') as readme:
    long_description = readme.read()

setup(
    name='scot',
    version=ver,
    description='EEG/MEG Source Connectivity Toolbox',
    long_description=long_description,
    url='https://github.com/scot-dev/scot',
    author='SCoT Development Team',
    author_email='scotdev@googlegroups.com',
    license='MIT',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Science/Research',
        'Topic :: Scientific/Engineering',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5'
    ],
    keywords='source connectivity EEG MEG ICA',
    packages=['scot', 'scot.eegtopo', 'scot.external'],
    package_data={'scot': ['scot.ini']}
)
| #!/usr/bin/env python
from setuptools import setup
from codecs import open
from scot import __version__ as ver
with open('README.md', encoding='utf-8') as readme:
long_description = readme.read()
setup(
name='scot',
version=ver,
description='EEG/MEG Source Connectivity Toolbox',
long_description=long_description,
url='https://github.com/scot-dev/scot',
author='SCoT Development Team',
author_email='scotdev@googlegroups.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Science/Research',
'Topic :: Scientific/Engineering',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='source connectivity EEG MEG ICA',
packages=['scot', 'scot.eegtopo', 'scot.external'],
package_data={'scot': ['scot.ini']}
)
| mit | Python |
fc70bf43639f34d92b21c66269ee2e15da9f0e5c | Fix missing dev dependency | dargueta/binobj | setup.py | setup.py | import setuptools
import sys

# Thwart installation for unsupported versions of Python. `pip` didn't start
# enforcing `python_requires` until 9.0.
if sys.version_info < (3, 4):
    raise RuntimeError('Unsupported Python version: ' + sys.version)

setuptools.setup(
    author='Diego Argueta',
    author_email='dargueta@users.noreply.github.com',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python :: 3 :: Only',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy',
    ],
    description='A Python library for reading and writing structured binary data.',
    # Optional dependency sets: ``pip install binobj[dev]`` / ``binobj[test]``.
    extras_require={
        'dev': [
            'bumpversion==0.5.*',
            'detox>=0.10',
            'pylint>=2.0',
            'Sphinx>=1.6',
            'tox>=2.8, <3.0', # 3.x breaks some plugins
            'tox-pyenv>=1.0',
        ],
        'test': [
            'pytest>=3.0, !=3.2.0, !=3.3.0',
            'pytest-cov>=2.0',
            'pytest-mock>=1.3.0',
            'pytest-randomly>=1.0',
        ],
    },
    license='BSD 3-Clause License',
    name='binobj',
    python_requires='>=3.4',
    packages=setuptools.find_packages(
        exclude=['docs', 'docs.*', 'tests', 'tests.*']),
    url='https://www.github.com/dargueta/binobj',
    version='0.4.4'
)
| import setuptools
import sys
# Thwart installation for unsupported versions of Python. `pip` didn't start
# enforcing `python_requires` until 9.0.
if sys.version_info < (3, 4):
raise RuntimeError('Unsupported Python version: ' + sys.version)
setuptools.setup(
author='Diego Argueta',
author_email='dargueta@users.noreply.github.com',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
description='A Python library for reading and writing structured binary data.',
extras_require={
'dev': [
'bumpversion==0.5.*',
'detox>=0.10',
'Sphinx>=1.6',
'tox>=2.8, <3.0', # 3.x breaks some plugins
'tox-pyenv>=1.0',
],
'test': [
'pytest>=3.0, !=3.2.0, !=3.3.0',
'pytest-cov>=2.0',
'pytest-mock>=1.3.0',
'pytest-randomly>=1.0',
],
},
license='BSD 3-Clause License',
name='binobj',
python_requires='>=3.4',
packages=setuptools.find_packages(
exclude=['docs', 'docs.*', 'tests', 'tests.*']),
url='https://www.github.com/dargueta/binobj',
version='0.4.4'
)
| bsd-3-clause | Python |
a17efdceeeec0932ff403ebeb6f787ea8b08a3a4 | Add print lists function practice problem | HKuz/Test_Code | Problems/printLists.py | Problems/printLists.py | #!/Applications/anaconda/envs/Python3/bin
def main():
    """Exercise pretty_print_lists() on a couple of sample lists."""
    samples = [
        ["puppy", "kitten", "lion cub"],
        ["lettuce",
         "bacon",
         "turkey",
         "mayonnaise",
         "tomato",
         "white bread"],
    ]
    for sample in samples:
        pretty_print_lists(sample)
def pretty_print_lists(l):
    """Print *l* as an English list ("a, b, and c") and return the string.

    Fixes the edge cases the naive index loop got wrong: an empty list
    prints an empty line (instead of nothing sensible), and a single item
    is printed without a dangling "and".  Returning the formatted string
    (previously always None) is backward compatible for callers that
    ignore the return value.
    """
    if not l:
        output = ""
    elif len(l) == 1:
        output = "{}".format(l[0])
    else:
        # Every item but the last is followed by ", "; the last item is
        # prefixed with "and" (Oxford-comma style, matching the original
        # output for lists of two or more items).
        output = "".join("{}, ".format(item) for item in l[:-1])
        output += "and {}".format(l[-1])
    print(output)
    return output
if __name__ == '__main__':
main()
| mit | Python | |
9bbea15cd6832f9a0a75a05775fcf2a12297f8c8 | Update setup.py | refinery29/chassis,refinery29/chassis | setup.py | setup.py | """Chassis: Opinionated REST Framework."""
# NOTE(review): distutils is deprecated (PEP 632); consider setuptools.setup.
from distutils.core import setup

setup(
    name='chassis',
    version='0.0.5',
    packages=['chassis'],
    description="Tornado framework for self-documenting JSON RESTful APIs.",
    author="Refinery 29",
    author_email="chassis-project@refinery29.com",
    url="https://github.com/refinery29/chassis",
    # download_url must be kept in sync with ``version`` above.
    download_url="https://github.com/refinery29/chassis/archive/v0.0.5.tar.gz",
    keywords=['Tornado', 'RESTful', 'REST', 'API', 'JSON', 'framework'],
    classifiers=[
        'Development Status :: 2 - Pre-Alpha',
        'Environment :: Web Environment',
        'License :: OSI Approved :: MIT License',
        'Intended Audience :: Developers',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.2',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
        'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
        'Topic :: Software Development :: Libraries :: Application Frameworks'
    ],
    long_description="""\
Chassis is Refinery29's framework layer on top of Tornado for rapidly
building performant, self-documenting JSON-based REST APIs.
"""
)
| """Chassis: Opinionated REST Framework."""
from distutils.core import setup
setup(
name='chassis',
version='0.0.5',
packages=['chassis'],
description="Tornado framework for self-documenting JSON RESTful APIs.",
author="Refinery 29",
author_email="chassis-project@refinery29.com",
url="https://github.com/refinery29/chassis",
download_url="https://github.com/refinery29/chassis/archive/v0.0.5.tar.gz",
keywords=['Tornado', 'RESTful', 'REST', 'API', 'JSON', 'framework'],
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Environment :: Web Environment',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: HTTP Servers',
'Topic :: Software Development :: Libraries :: Application Frameworks'
],
long_description="""\
Chassis is Refinery29's framework layer on top of Tornado for rapidly
building performant, self-documenting JSON-based REST APIs.
"""
)
| mit | Python |
14ff724cd05f51973af9ede47d9f8cfe2a1ce908 | Add optional flag to setuptools extension (#78) | agronholm/cbor2,agronholm/cbor2,agronholm/cbor2 | setup.py | setup.py | import sys
import platform
from pkg_resources import parse_version
from setuptools import setup, Extension

# The C accelerator is only built on CPython, and only where the
# platform/libc combination is known to work (see the condition below).
cpython = platform.python_implementation() == 'CPython'
is_glibc = platform.libc_ver()[0] == 'glibc'
windows = sys.platform.startswith('win')
if is_glibc:
    glibc_ver = platform.libc_ver()[1]
    libc_ok = parse_version(glibc_ver) >= parse_version('2.9')
else:
    libc_ok = not windows
min_win_version = windows and sys.version_info >= (3, 5)
min_unix_version = not windows and sys.version_info >= (3, 3)

# Enable GNU features for libc's like musl, should have no effect
# on Apple/BSDs
if libc_ok:
    gnu_flag = ['-D_GNU_SOURCE']
else:
    gnu_flag = []

if cpython and ((min_unix_version and libc_ok) or min_win_version):
    _cbor2 = Extension(
        '_cbor2',
        # math.h routines are built-in to MSVCRT
        libraries=['m'] if not windows else [],
        extra_compile_args=['-std=c99'] + gnu_flag,
        sources=[
            'source/module.c',
            'source/encoder.c',
            'source/decoder.c',
            'source/tags.c',
            'source/halffloat.c',
        ],
        # optional=True lets installation proceed (falling back to the pure
        # Python implementation) when the extension fails to build.
        optional=True
    )
    kwargs = {'ext_modules': [_cbor2]}
else:
    kwargs = {}

setup(
    use_scm_version={
        'version_scheme': 'post-release',
        'local_scheme': 'dirty-tag'
    },
    setup_requires=[
        'setuptools >= 40.7.0',
        'setuptools_scm >= 1.7.0'
    ],
    **kwargs
)
| import sys
import platform
from pkg_resources import parse_version
from setuptools import setup, Extension
cpython = platform.python_implementation() == 'CPython'
is_glibc = platform.libc_ver()[0] == 'glibc'
windows = sys.platform.startswith('win')
if is_glibc:
glibc_ver = platform.libc_ver()[1]
libc_ok = parse_version(glibc_ver) >= parse_version('2.9')
else:
libc_ok = not windows
min_win_version = windows and sys.version_info >= (3, 5)
min_unix_version = not windows and sys.version_info >= (3, 3)
# Enable GNU features for libc's like musl, should have no effect
# on Apple/BSDs
if libc_ok:
gnu_flag = ['-D_GNU_SOURCE']
else:
gnu_flag = []
if cpython and ((min_unix_version and libc_ok) or min_win_version):
_cbor2 = Extension(
'_cbor2',
# math.h routines are built-in to MSVCRT
libraries=['m'] if not windows else [],
extra_compile_args=['-std=c99'] + gnu_flag,
sources=[
'source/module.c',
'source/encoder.c',
'source/decoder.c',
'source/tags.c',
'source/halffloat.c',
]
)
kwargs = {'ext_modules': [_cbor2]}
else:
kwargs = {}
setup(
use_scm_version={
'version_scheme': 'post-release',
'local_scheme': 'dirty-tag'
},
setup_requires=[
'setuptools >= 40.7.0',
'setuptools_scm >= 1.7.0'
],
**kwargs
)
| mit | Python |
34643864e52f3231aa40256bc160569af234e8e7 | Add setup.py | eduardoklosowski/vdlkino-python | setup.py | setup.py | from setuptools import find_packages, setup
# Read the version from the package itself so it is defined in one place.
version = __import__('vdlkino').__version__

setup(
    name='vdlkino',
    # NOTE(review): "comunicate" typo in the description is user-facing on
    # PyPI; it is a runtime string, so left untouched here.
    version=version,
    description='Library in Python for comunicate computer with Arduino running VDLKino',
    author='Eduardo Klosowski',
    author_email='eduardo_klosowski@yahoo.com',
    license='MIT',
    packages=['vdlkino'],
    zip_safe=False,
    extras_require={
        # Serial-port transport is optional: ``pip install vdlkino[serial]``.
        'serial': ['pyserial'],
    },
)
| mit | Python | |
8ec65137efcf1f8cf37923b916e7496e10027edc | Bump version. | prakharjain09/qds-sdk-py,adeshr/qds-sdk-py,jainavi/qds-sdk-py,tanishgupta1/qds-sdk-py-1,vrajat/qds-sdk-py,rohitpruthi95/qds-sdk-py,yogesh2021/qds-sdk-py,msumit/qds-sdk-py,qubole/qds-sdk-py | setup.py | setup.py | import os
import sys

from setuptools import setup

INSTALL_REQUIRES = ['python_cjson', 'requests >=1.0.3', 'boto >=2.1.1']

# argparse only joined the standard library in Python 2.7.
if sys.version_info < (2, 7, 0):
    INSTALL_REQUIRES.append('argparse>=1.1')


def read(fname):
    # Return the contents of *fname* relative to this setup.py.
    return open(os.path.join(os.path.dirname(__file__), fname)).read()


setup(
    name = "qds_sdk",
    version = "1.0.16",
    author = "Qubole",
    author_email = "dev@qubole.com",
    description = ("Python SDK for coding to the Qubole Data Service API"),
    keywords = "qubole sdk api",
    url = "http://packages.python.org/qds_sdk",
    packages=['qds_sdk'],
    scripts=['bin/qds.py'],
    install_requires=INSTALL_REQUIRES,
    long_description="[Please visit the project page at https://github.com/qubole/qds-sdk-py]\n\n" + read('README.rst')
)
| import os
import sys
from setuptools import setup
INSTALL_REQUIRES = ['python_cjson', 'requests >=1.0.3', 'boto >=2.1.1']
if sys.version_info < (2, 7, 0):
INSTALL_REQUIRES.append('argparse>=1.1')
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "qds_sdk",
version = "1.0.15b1",
author = "Qubole",
author_email = "dev@qubole.com",
description = ("Python SDK for coding to the Qubole Data Service API"),
keywords = "qubole sdk api",
url = "http://packages.python.org/qds_sdk",
packages=['qds_sdk'],
scripts=['bin/qds.py'],
install_requires=INSTALL_REQUIRES,
long_description="[Please visit the project page at https://github.com/qubole/qds-sdk-py]\n\n" + read('README.rst')
)
| apache-2.0 | Python |
42a287d23a1153df636c193695615d99b7c75e4d | Test stopping all running file backups | uroni/urbackup-server-python-web-api-wrapper | test/stop_all.py | test/stop_all.py | import urbackup_api
# Connect to the local UrBackup server with the hard-coded test credentials.
server = urbackup_api.urbackup_server("http://127.0.0.1:55414/x", "admin", "foo")

# Full and resumed-full *file* backups are the ones this test cancels.
file_backup_actions = (server.action_full_file, server.action_resumed_full_file)

for action in server.get_actions():
    if action["action"] in file_backup_actions:
        print("Running full file backup: " + action["name"])
        print("Stopping...")
        server.stop_action(action)
a9cc03c02b6d8571efd563e04f2cb774f4c3e7bf | add original walk.py | a301-teaching/cpsc189 | lib/walk.py | lib/walk.py | # File: os-path-walk-example-2.py
#http://effbot.org/librarybook/os-path/os-path-walk-example-2.py
import os
def index(directory):
    """Like os.listdir, but traverses directory trees.

    Every entry found (files and directories) is printed and collected;
    symlinked directories are not descended into, which avoids cycles.
    """
    pending = [directory]
    collected = []
    while pending:
        current = pending.pop()
        for entry in os.listdir(current):
            print(current, entry)
            path = os.path.join(current, entry)
            collected.append(path)
            if os.path.isdir(path) and not os.path.islink(path):
                pending.append(path)
    return collected
# Demo: index the current directory, echoing every path found.
# (``print file`` was Python 2-only syntax — a SyntaxError on Python 3;
# the single-argument call form below behaves identically on both.
# The loop variable is also renamed so it no longer shadows a builtin.)
for name in index("."):
    print(name)
## .\aifc-example-1.py
## .\anydbm-example-1.py
## .\array-example-1.py
## ...
| cc0-1.0 | Python | |
5fc17b6c0f4d9d9862df63c330b257a8ec6932af | Add a test of switching back and forth between Decider() values (specifically 'MD5' and 'timestamp-match'), copied from back when this functionality was configured with the SourceSignatures() function. | azverkan/scons,azverkan/scons,azverkan/scons,azverkan/scons,azverkan/scons | test/Decider/switch-rebuild.py | test/Decider/switch-rebuild.py | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test that switching Decider() types between MD5 and timestamp-match
does not cause unnecessary rebuilds.
"""
import TestSCons
test = TestSCons.TestSCons(match=TestSCons.match_re_dotall)

# SConstruct template; the Decider() argument is filled in per phase below.
base_sconstruct_contents = """\
Decider('%s')
def build(env, target, source):
    open(str(target[0]), 'wt').write(open(str(source[0]), 'rt').read())
B = Builder(action=build)
env = Environment(BUILDERS = { 'B' : B })
env.B(target='switch.out', source='switch.in')
"""

def write_SConstruct(test, sig_type):
    # Regenerate the SConstruct with the given Decider() setting.
    contents = base_sconstruct_contents % sig_type
    test.write('SConstruct', contents)

# Build first MD5 checksums.
write_SConstruct(test, 'MD5')
test.write('switch.in', "switch.in\n")
switch_out_switch_in = test.wrap_stdout(r'build\(\["switch.out"\], \["switch.in"\]\)\n')
test.run(arguments='switch.out', stdout=switch_out_switch_in)
test.up_to_date(arguments='switch.out')

# Now rebuild with timestamp-match. Because we always store timestamps,
# even when making the decision based on MD5 checksums, the build is
# still up to date.
write_SConstruct(test, 'timestamp-match')
test.up_to_date(arguments='switch.out')

# Now switch back to MD5 checksums. When we rebuilt with the timestamp,
# it wiped out the MD5 value (because the point of timestamps is to not
# open up and checksum the contents), so the file is considered *not*
# up to date and must be rebuilt to generate a checksum.
write_SConstruct(test, 'MD5')
test.not_up_to_date(arguments='switch.out')

# And just for good measure, make sure that we now rebuild in response
# to a content change.
test.write('switch.in', "switch.in 2\n")
test.run(arguments='switch.out', stdout=switch_out_switch_in)
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| mit | Python | |
3a9445c6b3053d492c12bbf808d251c6da55632a | Add a test for the builtin __import__ function. | pozetroninc/micropython,pfalcon/micropython,micropython/micropython-esp32,AriZuu/micropython,torwag/micropython,HenrikSolver/micropython,Timmenem/micropython,pozetroninc/micropython,ryannathans/micropython,tralamazza/micropython,blazewicz/micropython,oopy/micropython,lowRISC/micropython,alex-robbins/micropython,bvernoux/micropython,bvernoux/micropython,pramasoul/micropython,tralamazza/micropython,henriknelson/micropython,lowRISC/micropython,chrisdearman/micropython,blazewicz/micropython,Timmenem/micropython,dmazzella/micropython,AriZuu/micropython,adafruit/circuitpython,MrSurly/micropython,HenrikSolver/micropython,henriknelson/micropython,adafruit/circuitpython,bvernoux/micropython,selste/micropython,pramasoul/micropython,selste/micropython,MrSurly/micropython-esp32,adafruit/circuitpython,Timmenem/micropython,oopy/micropython,MrSurly/micropython-esp32,ryannathans/micropython,blazewicz/micropython,deshipu/micropython,MrSurly/micropython-esp32,infinnovation/micropython,tobbad/micropython,adafruit/micropython,trezor/micropython,blazewicz/micropython,micropython/micropython-esp32,tralamazza/micropython,adafruit/circuitpython,SHA2017-badge/micropython-esp32,PappaPeppar/micropython,pozetroninc/micropython,selste/micropython,SHA2017-badge/micropython-esp32,selste/micropython,pramasoul/micropython,ryannathans/micropython,lowRISC/micropython,pfalcon/micropython,kerneltask/micropython,MrSurly/micropython,micropython/micropython-esp32,selste/micropython,kerneltask/micropython,torwag/micropython,adafruit/micropython,swegener/micropython,alex-robbins/micropython,pfalcon/micropython,tobbad/micropython,torwag/micropython,oopy/micropython,deshipu/micropython,infinnovation/micropython,micropython/micropython-esp32,trezor/micropython,MrSurly/micropython,TDAbboud/micropython,TDAbboud/micropython,lowRISC/micropython,Timmenem/micropython,lowRISC/micropython,pozetroninc/micropython,blazewicz/micr
opython,dmazzella/micropython,alex-robbins/micropython,tobbad/micropython,torwag/micropython,SHA2017-badge/micropython-esp32,pramasoul/micropython,adafruit/circuitpython,TDAbboud/micropython,adafruit/micropython,Timmenem/micropython,chrisdearman/micropython,alex-robbins/micropython,pozetroninc/micropython,trezor/micropython,oopy/micropython,swegener/micropython,PappaPeppar/micropython,swegener/micropython,AriZuu/micropython,henriknelson/micropython,bvernoux/micropython,tralamazza/micropython,alex-robbins/micropython,PappaPeppar/micropython,adafruit/circuitpython,swegener/micropython,bvernoux/micropython,infinnovation/micropython,MrSurly/micropython-esp32,deshipu/micropython,infinnovation/micropython,henriknelson/micropython,infinnovation/micropython,kerneltask/micropython,torwag/micropython,dmazzella/micropython,kerneltask/micropython,HenrikSolver/micropython,ryannathans/micropython,chrisdearman/micropython,AriZuu/micropython,pfalcon/micropython,MrSurly/micropython,tobbad/micropython,HenrikSolver/micropython,micropython/micropython-esp32,trezor/micropython,chrisdearman/micropython,tobbad/micropython,MrSurly/micropython,SHA2017-badge/micropython-esp32,HenrikSolver/micropython,trezor/micropython,pfalcon/micropython,oopy/micropython,TDAbboud/micropython,PappaPeppar/micropython,ryannathans/micropython,PappaPeppar/micropython,TDAbboud/micropython,chrisdearman/micropython,adafruit/micropython,deshipu/micropython,deshipu/micropython,MrSurly/micropython-esp32,kerneltask/micropython,henriknelson/micropython,adafruit/micropython,SHA2017-badge/micropython-esp32,dmazzella/micropython,swegener/micropython,AriZuu/micropython,pramasoul/micropython | tests/import/builtin_import.py | tests/import/builtin_import.py | # test calling builtin import function
# NOTE(review): this looks like an expected-output test (the printed lines
# are presumably compared against a recorded .exp file) — do not change the
# print() strings without updating the expected output.

# basic test
__import__('builtins')

# first arg should be a string
try:
    __import__(1)
except TypeError:
    print('TypeError')

# level argument should be non-negative
try:
    __import__('xyz', None, None, None, -1)
except ValueError:
    print('ValueError')
| mit | Python | |
6dcd913e794edbac28d98988d0936262d4663b9f | create input function | viraintel/OWASP-Nettacker,viraintel/OWASP-Nettacker,viraintel/OWASP-Nettacker,viraintel/OWASP-Nettacker | core/get_input.py | core/get_input.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from core.compatible import version
from core.alert import __input_msg
def __input(msg, default):
    """Prompt the user with *msg* and read one line of input.

    Returns *default* when the user enters an empty line or when reading
    fails (EOF, interrupt, ...) — input errors are deliberately swallowed
    so callers always receive a usable value.
    """
    # Fix: ``version() is 2`` identity-compared an int, which only works via
    # CPython's small-int caching; use ``==``.  Selecting the reader once
    # also removes the duplicated try/except branch per Python version.
    reader = raw_input if version() == 2 else input
    try:
        data = reader(__input_msg(msg))
    except:  # noqa: E722 — best-effort by design: any failure -> default
        return default
    return default if data == '' else data
| apache-2.0 | Python | |
a72a0674a6db3880ed699101be3c9c46671989f0 | Add a primitive pythonic wrapper. | cfe316/atomic,ezekial4/atomic_neu,ezekial4/atomic_neu | xxdata_11.py | xxdata_11.py | import os
import _xxdata_11
# Dimensioning arguments forwarded verbatim to _xxdata_11.xxdata_11 via
# **parameters in read_scd below.
# NOTE(review): these presumably mirror the array bounds compiled into
# the wrapped ADAS xxdata_11 Fortran routine -- confirm against its
# source before changing any value.
parameters = {
    'isdimd' : 200,
    'iddimd' : 40,
    'itdimd' : 50,
    'ndptnl' : 4,
    'ndptn' : 128,
    'ndptnc' : 256,
    'ndcnct' : 100
}
def read_scd(filename):
    """Read an ADAS scd file via the _xxdata_11 extension module.

    The wrapped routine is handed a file descriptor number and the file
    is additionally symlinked to fort.<fd> -- apparently so the Fortran
    side can open it as a logical unit (TODO confirm against the
    Fortran source).

    Returns whatever _xxdata_11.xxdata_11 returns for the file.
    """
    fd = open(filename, 'r')
    fortran_filename = 'fort.%d' % fd.fileno()
    os.symlink(filename, fortran_filename)
    try:
        iclass = 2  # class number for scd files
        return _xxdata_11.xxdata_11(fd.fileno(), iclass, **parameters)
    finally:
        # Always remove the fort.<n> symlink and close the file, even if
        # the reader raises; the original leaked both on error and never
        # closed the file handle at all.
        os.unlink(fortran_filename)
        fd.close()
if __name__ == '__main__':
    # Smoke test (Python 2 print syntax): read a sample carbon scd file
    # and show the first element of the returned data.
    out = read_scd('scd96_c.dat')
    print out[0]
| mit | Python | |
b6cd59f800b254d91da76083546ab7c10689df5f | Add unit test to enforce unique file names. | rdo-management/tripleo-image-elements,rdo-management/tripleo-image-elements,openstack/tripleo-image-elements,radez/tripleo-image-elements,radez/tripleo-image-elements,openstack/tripleo-image-elements | tests/test_no_dup_filenames.py | tests/test_no_dup_filenames.py | # Copyright 2014 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import glob
import os
import testtools
class TestNoDupFilenames(testtools.TestCase):

    def test_no_dup_filenames(self):
        """Hook script file names must be unique across all elements."""
        topdir = os.path.normpath(os.path.dirname(__file__) + '/../')
        elements_glob = os.path.join(topdir, "elements", "*")

        hook_dirs = ['block-device.d', 'cleanup.d', 'extra-data.d',
                     'finalise.d', 'install.d', 'post-install.d',
                     'pre-install.d', 'root.d']
        seen = []

        for element_dir in glob.iglob(elements_glob):
            for hook_dir in hook_dirs:
                for entry in glob.iglob(os.path.join(element_dir,
                                                     hook_dir, "*")):
                    if os.path.isdir(entry):
                        continue
                    # Path relative to the element, e.g. 'install.d/10-foo'.
                    relative = entry[len(element_dir) + 1:]
                    self.assertFalse(relative in seen,
                                     'Duplicate file name found %s' % relative)
                    seen.append(relative)
| apache-2.0 | Python | |
e81fd02cc7431ea01416126b88a22b4bba9b755e | Test - add cmake test tool | sarahmarshy/project_generator,ohagendorf/project_generator,0xc0170/project_generator,project-generator/project_generator | tests/test_tools/test_cmake.py | tests/test_tools/test_cmake.py | # Copyright 2015 0xc0170
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import yaml
import shutil
from unittest import TestCase
from project_generator.generate import Generator
from project_generator.project import Project
from project_generator.settings import ProjectSettings
from project_generator.tools.cmake import CMakeGccArm
from .simple_project import project_1_yaml, projects_1_yaml
class TestProject(TestCase):
    """Tests for the CMake (GCC ARM) export tool."""

    def setUp(self):
        """Create a scratch workspace holding the yaml project files."""
        if not os.path.exists('test_workspace'):
            os.makedirs('test_workspace')
        # write project file
        project_path = os.path.join(os.getcwd(),
                                    'test_workspace/project_1.yaml')
        with open(project_path, 'wt') as f:
            f.write(yaml.dump(project_1_yaml, default_flow_style=False))
        # write projects file
        projects_path = os.path.join(os.getcwd(),
                                     'test_workspace/projects.yaml')
        with open(projects_path, 'wt') as f:
            f.write(yaml.dump(projects_1_yaml, default_flow_style=False))
        self.project = next(Generator(projects_1_yaml).generate('project_1'))
        self.cmake = CMakeGccArm(self.project.project, ProjectSettings())

    def tearDown(self):
        # Remove everything the test created; ignore_errors keeps teardown
        # from failing when a directory was never produced.
        shutil.rmtree('test_workspace', ignore_errors=True)
        shutil.rmtree('generated_projects', ignore_errors=True)

    def test_export_project(self):
        """Exporting should succeed and produce project files on disk."""
        result = self.project.generate('cmake_gcc_arm', False)
        projectfiles = self.project.get_generated_project_files('cmake_gcc_arm')
        # unittest assertions give useful diagnostics on failure, unlike
        # bare assert (which is also stripped under ``python -O``).
        self.assertEqual(result, 0)
        self.assertTrue(projectfiles)
| apache-2.0 | Python | |
15102368281837ace7e67ad915f2ff9c4c4a1ac3 | remove package alias tool | openspending/os-conductor,openspending/os-authz-service,openspending/os-authz-service,openspending/os-authz-service,openspending/os-conductor | tools/remove_packages_alias.py | tools/remove_packages_alias.py | import os
import sys
import logging
import urllib3
from elasticsearch import Elasticsearch, NotFoundError
from os_package_registry import PackageRegistry
from sqlalchemy import MetaData, create_engine
urllib3.disable_warnings()
logging.root.setLevel(logging.INFO)

if __name__ == "__main__":
    # usage: remove_packages_alias.py <target-index>
    # Drops the 'packages' alias from the given Elasticsearch index.
    if len(sys.argv) < 2:
        # Fail with a clear usage message instead of an IndexError.
        sys.exit('usage: %s <target-index>' % sys.argv[0])
    es_host = os.environ['OS_ELASTICSEARCH_ADDRESS']
    es = Elasticsearch(hosts=[es_host], use_ssl='https' in es_host)
    target_index = sys.argv[1]
    es.indices.delete_alias(target_index, 'packages')
| mit | Python | |
d53cff101248b9c90f5d2ae3f93d0e4933d03266 | add a manifest (.cvmfspublished) abstraction class | reneme/python-cvmfsutils,reneme/python-cvmfsutils | cvmfs/manifest.py | cvmfs/manifest.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created by René Meusel
This file is part of the CernVM File System auxiliary tools.
"""
import datetime
class UnknownManifestField(Exception):
    """Raised when .cvmfspublished contains an unrecognised key character.

    Deriving from Exception is required for the class to be raisable on
    Python 3; the original plain class could not be raised at all there.
    """

    def __init__(self, key_char):
        Exception.__init__(self, key_char)
        self.key_char = key_char  # the offending one-character field key

    def __str__(self):
        return self.key_char
class ManifestValidityError(Exception):
    """Raised when a .cvmfspublished manifest lacks a mandatory field.

    The original definition did not derive from Exception, so calling
    Exception.__init__ on its instances raised a TypeError and the class
    could not be raised on Python 3 at all.
    """

    def __init__(self, message):
        Exception.__init__(self, message)
class Manifest:
    """ Wraps information from .cvmfspublished """

    def __init__(self, manifest_file):
        """ Initializes a Manifest object from a file pointer to .cvmfspublished

        manifest_file -- open text-mode file object.  Key/value lines are
                         parsed until EOF or the '--' separator line;
                         everything after the separator (the signature
                         part) is ignored.

        Raises ManifestValidityError when a mandatory field is missing
        and UnknownManifestField for an unrecognised key character.
        """
        for line in manifest_file.readlines():
            if not line:
                continue
            if line[0:2] == "--":
                break  # signature section follows; stop parsing
            self._read_line(line)
        self._check_validity()

    def __str__(self):
        return "<Manifest for " + self.repository_name + ">"

    def __repr__(self):
        return self.__str__()

    def _read_line(self, line):
        """ Parse lines that appear in .cvmfspublished """
        key_char = line[0]
        # Strip only the trailing line break.  The previous slice
        # line[1:-1] dropped the last *character*, which corrupted the
        # value whenever the final line had no terminating newline.
        data = line[1:].rstrip('\r\n')
        if key_char == "C":
            self.root_catalog = data
        elif key_char == "X":
            self.certificate = data
        elif key_char == "H":
            self.history_database = data
        elif key_char == "T":
            # NOTE: fromtimestamp yields naive local time, not UTC.
            self.last_modified = datetime.datetime.fromtimestamp(int(data))
        elif key_char == "R":
            self.root_hash = data
        elif key_char == "D":
            self.ttl = int(data)
        elif key_char == "S":
            self.revision = int(data)
        elif key_char == "N":
            self.repository_name = data
        elif key_char == "L":
            self.unknown_field1 = data # TODO: ask Jakob what L means
        else:
            raise UnknownManifestField(key_char)

    def _check_validity(self):
        """ Checks that all mandatory fields are found in .cvmfspublished """
        if not hasattr(self, 'root_catalog'):
            raise ManifestValidityError("Manifest lacks a root catalog entry")
        if not hasattr(self, 'root_hash'):
            raise ManifestValidityError("Manifest lacks a root hash entry")
        if not hasattr(self, 'ttl'):
            raise ManifestValidityError("Manifest lacks a TTL entry")
        if not hasattr(self, 'revision'):
            raise ManifestValidityError("Manifest lacks a revision entry")
        if not hasattr(self, 'repository_name'):
            raise ManifestValidityError("Manifest lacks a repository name")
| bsd-3-clause | Python | |
66fcd6ab9d8703b2588bc2605278a5e056356de5 | add top level bot class with basic outline of execution | sassoftware/mirrorball,sassoftware/mirrorball | updatebot/bot.py | updatebot/bot.py | #
# Copyright (c) 2008 rPath, Inc.
#
# This program is distributed under the terms of the Common Public License,
# version 1.0. A copy of this license should have been distributed with this
# source file in a file called LICENSE. If it is not present, the license
# is always available at http://www.rpath.com/permanent/licenses/CPL-1.0.
#
# This program is distributed in the hope that it will be useful, but
# without any warranty; without even the implied warranty of merchantability
# or fitness for a particular purpose. See the Common Public License for
# full details.
#
"""
Module for driving the update process.
"""
import logging
import repomd
from rpmvercmp import rpmvercmp
from rpmimport import rpmsource
from updatebot import util
from updatebot import build
from updatebot import update
from updatebot import advise
from updatebot.errors import *
log = logging.getLogger('updatebot.bot')
class Bot(object):
    """
    Top level object for driving update process.

    Wires together the rpm metadata source, updater, advisor and builder
    components and runs one full update cycle via run().
    """
    def __init__(self, cfg):
        # cfg must provide at least: repositoryPaths, repositoryUrl,
        # topGroup and targetLabel (used in _populateRpmSource and run).
        self._cfg = cfg
        # Maps repository path -> repomd.Client, filled by _populateRpmSource.
        self._clients = {}
        self._rpmSource = rpmsource.RpmSource()
        self._updater = update.Updater(self._cfg, self._rpmSource)
        self._advisor = advise.Advisor(self._cfg, self._rpmSource)
        self._builder = build.Builder(self._cfg)
    def _populateRpmSource(self):
        """
        Populate the rpm source data structures.

        Loads metadata for every configured repository path into
        self._rpmSource and caches the repomd client per path.
        """
        for repo in self._cfg.repositoryPaths:
            log.info('loading %s/%s repository data'
                     % (self._cfg.repositoryUrl, repo))
            client = repomd.Client(self._cfg.repositoryUrl + '/' + repo)
            self._rpmSource.loadFromClient(client, repo)
            self._clients[repo] = client
        self._rpmSource.finalize()
    def run(self):
        """
        Update the conary repository from the yum repositories.

        Sequence: load yum metadata, compute pending updates, verify
        advisories exist, commit source updates, build the packages and
        the top group, promote the group to the target label, then send
        the advisories.
        """
        log.info('starting update')
        # Populate rpm source object from yum metadata.
        self._populateRpmSource()
        # Get troves to update and send advisories.
        toAdvise, toUpdate = self._updater.getUpdates()
        # Check to see if advisories exist for all required packages.
        self._advisor.check(toAdvise)
        # Update sources.
        for nvf, srcPkg in toUpdate:
            self._updater.update(nvf, srcPkg)
        # Make sure to build everything in the toAdvise list, there may be
        # sources that have been updated, but not built.
        buildTroves = [ x[0] for x in toAdvise ]
        trvMap = self._builder.build(buildTroves)
        # Build group.
        grpTrvMap = self._builder.build(self._cfg.topGroup)
        # Promote group.
        # FIXME: should be able to get the new versions of packages from
        #        promote.
        helper = self._updater.getConaryHelper()
        # NOTE(review): newTroves is currently unused -- see the FIXME below.
        newTroves = helper.promote(grpTrvMap.values(), self._cfg.targetLabel)
        # FIXME: build a trvMap from source tove nvf to new binary trove nvf
        # Send advisories.
        self._advisor.send(trvMap, toAdvise)
        log.info('update completed successfully')
        log.info('updated %s packages and sent %s advisories'
                 % (len(toUpdate), len(toAdvise)))
| apache-2.0 | Python | |
8d2510fd38d946813b96798c745772641f19a5e7 | Create 10MinEmail.py | wolfdale/10MinutesEmailWrapper | 10MinEmail.py | 10MinEmail.py | from bs4 import BeautifulSoup
import threading
import urllib
# Fetch the 10 Minute Mail front page (Python 2 urllib); the site
# generates a throwaway e-mail address on each visit.
web=urllib.urlopen('http://www.my10minutemail.com/')
soup=BeautifulSoup(web)
# Print the first <p> element's text -- presumably the generated address;
# confirm against the site's current markup.
print soup.p.string
print 'Email Valid For 10 minutes'
# Block on Enter so the console stays open while the address is copied.
raw_input()
# NOTE(review): dead code below -- an unfinished one-minute-left reminder
# timer that was never enabled.
#def alarm():
#    print 'One Minute is Left'
#t = threading.Timer(60.0, alarm)
#t.start()
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.