commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
b4b3d133c53db0f182949d1e31b27c87818d3a0c | Create quiz_corrected.py | homelessrobot/IPND-Stage-2 | quiz_corrected.py | quiz_corrected.py | #code for the project final quiz.code
#I'll break it down into single pieces and work from there
#0. Write three quizzes: easy, medium, hard and write blanks for each.
# Easy quiz level and answers
# Quiz texts: each level is a fill-in-the-blank paragraph with four numbered
# blanks, a parallel list of correct answers, and the placeholder tokens.
easy_level = '''The internet is basically a huge ___1___ of computers that
can all communicate with each other. When a user visits a webpage via their
___2___, their computer sends a request via the web to a ___3___ to request
files and documents. These documents are then translated by the browser
and displayed to the user. The protocol used to translate what the user
requests into a language that the computer understands and vice versa is
called ___4___.'''
# answers[i] is the (lowercase) answer for placeholders[i]
easy_answers = ["network", "browser", "server", "http"]
easy_placeholders = ["___1___", "___2___", "___3___", "___4___"]
# Medium quiz level and corresponding answer
medium_level = '''HTML, CSS, and Javascript all have different purposes
which can be explained by an analogy to a house: ___1___ is like the structure
of the house - where are walls placed, which room will serve as kitchen,
bedroom, etc.
___2___ is like the style of your house - what style is the carpeting? What
color are the walls? Etc.
___3___ is like gadgets in the house - TV remote, garage doors opener, etc.
The tree-like structure of the code is called document ___4___ model.'''
medium_answers = ["html", "css", "javascript", "object"]
medium_placeholders = ["___1___", "___2___", "___3___", "___4___"]
# hard level quiz and corresponding answers.
hard_level = '''HTML stands for ___1___ Markup Language. HTML documents
form the majority of the content on the web. HTML documents contain text
content which describes "what you see" and ___2___ which describes "how it
looks".
HTML documents are made of HTML ___3___. When writing HTML, we tell browsers
the type of each element by using HTML ___4___.'''
hard_answers = ["hypertext", "markup", "elements", "tags" ]
hard_placeholders = ["___1___", "___2___", "___3___", "___4___"]
# Instruction line shown before each quiz.
explanation = '\n' + '''Please fill in the blanks.''' + '\n'
#1. Beginning - Selecting a difficulty level and displaying quiz
#1.1. Prompt user to select a difficulty level upon opening the program - use raw_input.
#will use .lower() to make sure the user doesn't need to worry about upper or lower case.
#if she inputs a wrong difficulty level (not easy, medium or hard), the while loop will ensure she is prompted again.
#Depending on the chosen level, the respective quiz will be chosen.
#actually, the quiz will not be printed here, but the selection will take place in this step. print is in the next step.
def choose_level():
    """Prompt until the player picks a valid difficulty, then run that quiz."""
    # Map each difficulty name to its (quiz text, placeholders, answers).
    quizzes = {
        'easy': (easy_level, easy_placeholders, easy_answers),
        'medium': (medium_level, medium_placeholders, medium_answers),
        'hard': (hard_level, hard_placeholders, hard_answers),
    }
    # .lower() lets the player type any capitalisation; re-prompt until the
    # reply names a known level.
    level = raw_input('Please select a level: easy, medium, or hard').lower()
    while level not in quizzes:
        level = raw_input('Please select a level: easy, medium, or hard').lower()
    text, blanks, answers = quizzes[level]
    prompt_quiz(text, blanks, answers)
#2. Check answers and progress through the blanks
#first, the short explanation sentence will be displayed to explain to the user what she needs to do.
#then the level selected in choose_level will be displayed and the user is prompted to fill in blank one.
#again, .lower() makes sure the answer is not case sensitive
#since there are only 4 blanks, I will insert a while loop to stop prompting the user to fill in more after blank 4 has been
#correctly answered.
#Again, raw_input will be used to gather user input. A simple if/else statement will validate the input as true or false
#if true, user will be presented next question. If false, user will be prompted to answer that blank again.
#the number starts at 0 and it adds 1 each time the user inputs a correct question, all the way up to 4, where the
#while loop makes sure to stop the prompting. At the same time, when 4 is reach, the if statement makes sure the user
#is notified that she has finished the quiz. That's it!
def prompt_quiz(level, placeholders, answers):
print explanation
print level
number = 0
total_blanks = 4
while number < total_blanks:
answer_input = raw_input('\n' + 'What is the correct answer for blank' + placeholders[number] + '?' + '\n').lower()
if answer_input == answers[number]:
print '\n' + 'Great job!!!' + '\n' '\n' + 'Now move on to the next blank!' + '\n'
print level
number += +1
else:
print 'oops, please try again'
if number == total_blanks:
print '\n' +' CONGRATS, you have finished the quiz!!!' + '\n'
# Entry point: ask for a difficulty and run the chosen quiz.
choose_level()
| bsd-2-clause | Python | |
d80388591e3a55969688957b7c1bbd9bcda40296 | Create social_feedback_counter.py compatible with hatebu, fb_like & tweet | shiraco/social_feedback_counter | social_feedback_counter.py | social_feedback_counter.py | # coding:utf-8
import urllib
import json
class SocialFeadbackCounter(object):
    """Look up social-feedback counts (Hatena bookmarks, Facebook shares,
    tweet count) for a single URL via the services' public JSON APIs."""

    def __init__(self, url):
        self.url = url

    def _fetch_json(self, api_url):
        # Shared helper: GET the endpoint and decode its JSON payload.
        return json.loads(urllib.urlopen(api_url).read(), encoding='utf-8')

    def hatebu(self):
        payload = self._fetch_json('http://b.hatena.ne.jp/entry/json/' + self.url)
        # The API answers literal "null" (decoded to None) when the count is 0.
        if payload is None:
            return 0
        return int(payload['count'])

    def fb_like(self):
        payload = self._fetch_json('http://graph.facebook.com/' + self.url)
        try:
            return payload['shares']
        except KeyError:
            # A URL with zero shares has no 'shares' key at all.
            return 0

    def tweet(self):
        payload = self._fetch_json('http://urls.api.twitter.com/1/urls/count.json?url=' + self.url)
        return payload['count']
if __name__ == '__main__':
    # Smoke test: print the three counts for Google Japan's front page.
    u = SocialFeadbackCounter('https://www.google.co.jp/')
    print u.hatebu(), u.fb_like(), u.tweet()
| mit | Python | |
1866bb1ad5f5c4338c2173327d620e92c2ba5043 | Create basic PodSixNet server | thebillington/pygame_multiplayer_server | server.py | server.py | from PodSixNet.Channel import Channel
from PodSixNet.Server import Server
from time import sleep
#Create the channel to deal with our incoming requests from the client
#A new channel is created every time a client connects
class ClientChannel(Channel):
    """Per-client PodSixNet channel; one instance exists per connection."""

    #Create a function that will respond to every request from the client
    def Network(self, data):
        #Print the contents of the packet
        print(data)
#Create a new server for our game
# BUG FIX: this was declared with `def GameServer(Server):` instead of
# `class GameServer(Server):`, so `GameServer()` below raised TypeError
# (missing argument) and channelClass/Connected were mere locals, never
# used by PodSixNet. It must be a class deriving from PodSixNet's Server.
class GameServer(Server):
    """PodSixNet server that hands each incoming connection a ClientChannel."""

    #Set the channel to deal with incoming requests
    channelClass = ClientChannel

    #Function to deal with new connections
    def Connected(self, channel, addr):
        print("New connection: {}".format(channel))
#Start the server, but only if the file wasn't imported
if __name__ == "__main__":
    print("Server starting on LOCALHOST...\n")
    #Create a server
    s = GameServer()
    #Pump the server at regular intervals (check for new requests)
    while True:
        s.Pump()
        # 0.1 ms sleep keeps the polling loop from pegging a CPU core.
        sleep(0.0001)
| mit | Python | |
0cc0b16a6f29d31c3c2b3e2ad4eb313b010f7806 | test addBuilds() method | red-hat-storage/errata-tool,mmuzila/errata-tool,ktdreyer/errata-tool,ktdreyer/errata-tool,red-hat-storage/errata-tool,mmuzila/errata-tool | errata_tool/tests/test_add_builds.py | errata_tool/tests/test_add_builds.py | import requests
class TestAddBuilds(object):
    """Tests for Erratum.addBuilds() against a mocked requests.post.

    `mock_post` and `advisory` are pytest fixtures supplied elsewhere in
    the test suite.
    """

    def test_add_builds_url(self, monkeypatch, mock_post, advisory):
        # addBuilds() must POST to the erratum's add_builds endpoint.
        monkeypatch.setattr(requests, 'post', mock_post)
        advisory.addBuilds(['ceph-10.2.3-17.el7cp'], release='RHEL-7-CEPH-2')
        assert mock_post.response.url == 'https://errata.devel.redhat.com/api/v1/erratum/26175/add_builds' # NOQA: E501

    def test_builds_data(self, monkeypatch, mock_post, advisory):
        # The JSON body must pair each build with the requested release.
        monkeypatch.setattr(requests, 'post', mock_post)
        advisory.addBuilds(['ceph-10.2.3-17.el7cp'], release='RHEL-7-CEPH-2')
        expected = {
            "product_version": "RHEL-7-CEPH-2",
            "build": "ceph-10.2.3-17.el7cp",
        }
        assert mock_post.kwargs['json'] == [expected]
| mit | Python | |
fbbd6526612bbb450c5c4c1ecffd21e32f4c98c6 | Add simple server | ThibWeb/jean-giono | server.py | server.py | import SimpleHTTPServer
import SocketServer
# Port the static-file server listens on.
PORT = 8000
# Serve files from the current working directory.
Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
# '' binds every local interface.
httpd = SocketServer.TCPServer(('', PORT), Handler)
print "serving at port", PORT
httpd.serve_forever()
| cc0-1.0 | Python | |
5cfcd8fe88fc56bd5738c97152d37be1478560a9 | Add server.py | icersong/twisted-connect-proxy,fmoo/twisted-connect-proxy | server.py | server.py | from twisted.web.proxy import Proxy, ProxyRequest
from twisted.internet.protocol import Protocol, ClientFactory
import urlparse
from twisted.python import log
class ConnectProxyRequest(ProxyRequest):
    """HTTP ProxyRequest handler (factory) that supports CONNECT"""

    connectedProtocol = None

    def process(self):
        # CONNECT gets the tunnelling path; everything else is plain proxying.
        if self.method != 'CONNECT':
            ProxyRequest.process(self)
        else:
            self.processConnectRequest()

    def fail(self, message, body):
        """Answer the client with a 501 and close out the request."""
        self.setResponseCode(501, message)
        self.responseHeaders.addRawHeader("Content-Type", "text/html")
        self.write(body)
        self.finish()

    def splitHostPort(self, hostport, default_port):
        """Split 'host[:port]' into (host, port); fall back to default_port
        when the port part is absent or not an integer."""
        pieces = hostport.split(':', 1)
        chosen_port = default_port
        if len(pieces) == 2:
            try:
                chosen_port = int(pieces[1])
            except ValueError:
                pass
        return pieces[0], chosen_port

    def processConnectRequest(self):
        parsed = urlparse.urlparse(self.uri)
        host, port = self.splitHostPort(parsed.netloc or parsed.path,
                                        self.ports.get(parsed.scheme))
        if port is None:
            self.fail("Bad CONNECT Request",
                      "Unable to parse port from URI: %s" % self.uri)
            return
        # TODO provide an API to set proxy connect timeouts
        factory = ConnectProxyClientFactory(host, port, self)
        self.reactor.connectTCP(host, port, factory)
class ConnectProxy(Proxy):
    """HTTP Server Protocol that supports CONNECT"""
    requestFactory = ConnectProxyRequest
    # Set by ConnectProxyClient.connectionMade() once the remote end is up.
    connectedRemote = None

    def requestDone(self, request):
        # For CONNECT, keep the channel open and wire it to the remote peer
        # instead of finishing the normal HTTP request cycle.
        if request.method == 'CONNECT' and self.connectedRemote is not None:
            self.connectedRemote.connectedClient = self
        else:
            Proxy.requestDone(self, request)

    def connectionLost(self, reason):
        # Tear down the remote half of the tunnel along with the client side.
        if self.connectedRemote is not None:
            print "Proxy connection lost!", reason
            self.connectedRemote.transport.loseConnection()
        Proxy.connectionLost(self, reason)

    def dataReceived(self, data):
        if self.connectedRemote is None:
            # Still speaking HTTP with the client.
            Proxy.dataReceived(self, data)
        else:
            # Once proxy is connected, forward all bytes received
            # from the original client to the remote server.
            self.connectedRemote.transport.write(data)
class ConnectProxyClient(Protocol):
    """Remote-server side of the CONNECT tunnel."""
    # Set by ConnectProxy.requestDone() after the 200 reply has been sent.
    connectedClient = None

    def connectionMade(self):
        # Tell the waiting client the tunnel is open (200) and finish the
        # HTTP exchange; raw byte forwarding takes over from here.
        self.factory.request.channel.connectedRemote = self
        self.factory.request.setResponseCode(200, "CONNECT OK")
        self.factory.request.setHeader('X-Connected-IP',
                                       self.transport.realAddress[0])
        self.factory.request.setHeader('Content-Length', '0')
        self.factory.request.finish()

    def connectionLost(self, reason):
        # Remote end went away: drop the client half of the tunnel too.
        if self.connectedClient is not None:
            print "Client Connection lost!", reason
            self.connectedClient.transport.loseConnection()

    def dataReceived(self, data):
        if self.connectedClient is not None:
            # Forward all bytes from the remote server back to the
            # original connected client
            self.connectedClient.transport.write(data)
        else:
            log.msg("UNEXPECTED DATA RECEIVED:", data)
class ConnectProxyClientFactory(ClientFactory):
    """Builds the outbound leg of a CONNECT tunnel for one proxy request."""

    protocol = ConnectProxyClient

    def __init__(self, host, port, request):
        self.host = host
        self.port = port
        self.request = request

    def clientConnectionFailed(self, connector, reason):
        # Could not reach the origin server: report the failure to the client.
        self.request.fail("Gateway Error", str(reason))
if __name__ == '__main__':
    import sys
    log.startLogging(sys.stderr)

    import argparse
    ap = argparse.ArgumentParser()
    # Port 0 lets the OS pick an ephemeral free port.
    ap.add_argument('port', default=0, nargs='?', type=int)
    ns = ap.parse_args()

    import twisted.web.http
    factory = twisted.web.http.HTTPFactory()
    factory.protocol = ConnectProxy

    import twisted.internet
    c = twisted.internet.reactor.listenTCP(ns.port, factory)
    twisted.internet.reactor.run()
| bsd-3-clause | Python | |
9e6bae8aa92ed0332efd689b6f43063b0569ef0a | add 16.py | bm5w/pychal | 16.py | 16.py | """Python challenge #16:
http://www.pythonchallenge.com/pc/return/mozart.html"""
import urllib2
from PIL import Image
# Puzzle image plus the HTTP basic-auth credentials for this challenge level.
url = 'http://www.pythonchallenge.com/pc/return/mozart.gif'
un = 'huge'
pw = 'file'
# Magenta marker colour searched for in the image's RGB pixel data.
pink = (255, 0, 255)
def main():
setup_auth_handler()
img = urllib2.urlopen(url)
im = Image.open(img)
rgb_im = im.convert('RGB')
seq = list(rgb_im.getdata())
output = []
for count, x in enumerate(seq):
if x == pink:
if seq[count-1] != pink:
output.append(count)
print len(output)
for count, x in enumerate(output):
if count > 0:
print x-output[count-1]
print len(output)
# matrix = rgb_im.load()
# for y in xrange(int(rgb_im.size[1])):
# import pdb; pdb.set_trace()
# for x in xrange(int(rgb_im.size[0])):
# print matrix[x, y]
# with open('temp{}.jpg'.format(str(i)), "wb") as file_handle:
# file_handle.write(file_content[i::5])
(249, 249, 249)
(255, 0, 255)
(255, 0, 255)
(255, 0, 255)
(255, 0, 255)
(255, 0, 255)
(252, 252, 252)
def setup_auth_handler():
    """Install a global urllib2 opener that sends HTTP basic auth for `url`."""
    credentials = urllib2.HTTPPasswordMgrWithDefaultRealm()
    # realm=None: send these credentials for any realm at this URL.
    credentials.add_password(None, url, un, pw)
    urllib2.install_opener(
        urllib2.build_opener(urllib2.HTTPBasicAuthHandler(credentials)))
if __name__ == '__main__':
    # Run the analysis when executed as a script.
    main()
| mit | Python | |
af320490aaa59d69faed9357d9690d945272bec5 | add empty file to test Slack integration | kvantos/intro_to_python_class | A2.py | A2.py | #!/usr/bin/env python3
| bsd-2-clause | Python | |
class Position(object):
    """One intersection on a Go board, holding a stone colour (or empty)."""

    # Colour name -> single-character glyph used when drawing the board.
    COLORS = {
        'black': '@',
        'white': 'O',
        'empty': '+',
    }

    class PositionError(Exception):
        """Raised when an unknown colour name is supplied."""
        pass

    def __init__(self, color):
        if color in self.COLORS:
            self._color = color
        else:
            raise self.PositionError('Color must be one of the following: {0}'.format(self.COLORS.keys()))

    def __eq__(self, other):
        # Two positions match when they hold the same colour.
        return self._color == other._color

    def __str__(self):
        return self.COLORS[self._color]

    def __repr__(self):
        return '<Position: {0}>'.format(self._color)
class Canvas(object):
    """A width x height grid with 1-based (x, y) accessors.

    Subclasses may override EMPTY (the default fill value) or _reset().
    """

    EMPTY = None

    class CanvasError(Exception):
        """Raised for coordinates outside the canvas."""
        pass

    def __init__(self, width, height):
        self._width = width
        self._height = height
        self._reset()

    def _reset(self, value=None):
        """Fill the whole grid with `value` (defaults to EMPTY).

        BUG FIX: previously `value = value or self.EMPTY`, which silently
        replaced any falsy fill value (0, '', False) with EMPTY; only None
        should mean "use the default".
        """
        value = self.EMPTY if value is None else value
        self._canvas = [
            [value for i in range(self._width)]
            for j in range(self._height)
        ]

    def _check_coords(self, x, y):
        # Valid coordinates are 1..width and 1..height inclusive.
        if (
            x < 1 or
            x > self._width or
            y < 1 or
            y > self._height
        ):
            raise self.CanvasError('Coordinates ({x}, {y}) are not within canvas dimensions {w}x{h}'.format(
                x=x, y=y, w=self._width, h=self._height
            ))

    def set(self, x, y, value):
        self._check_coords(x, y)
        self._canvas[y - 1][x - 1] = value

    def get(self, x, y):
        self._check_coords(x, y)
        return self._canvas[y - 1][x - 1]

    def __eq__(self, other):
        # Canvases compare equal when their cell contents match.
        return self._canvas == other._canvas
class BoardCanvas(Canvas):
    """Character canvas that renders a Board with grid lines and star points."""

    HOSHI = '*'
    # Board coordinates of the nine star points on a 19x19 goban.
    HOSHIS = [
        (4, 4),
        (10, 4),
        (16, 4),
        (4, 10),
        (10, 10),
        (16, 10),
        (4, 16),
        (10, 16),
        (16, 16),
    ]

    def __init__(self, board):
        self._board = board
        # '-' connectors between columns double the width (minus one).
        super(BoardCanvas, self).__init__(
            board._width * 2 - 1,
            board._height,
        )

    def _reset(self):
        rendered = []
        for row in self._board._canvas:
            rendered.append(list('-'.join(str(pos) for pos in row)))
        self._canvas = rendered
        # Mark any still-empty star point with the hoshi glyph.
        for x, y in self.HOSHIS:
            if self.get(x, y) == '+':
                self.set(x, y, self.HOSHI)

    def set(self, x, y, value):
        # Translate board x to canvas x (columns interleaved with '-').
        super(BoardCanvas, self).set(2 * x - 1, y, value)

    def get(self, x, y):
        return super(BoardCanvas, self).get(2 * x - 1, y)

    def __str__(self):
        return '\n'.join(''.join(row) for row in self._canvas)
class Board(Canvas):
    """A Go board: a Canvas whose cells are stone Positions."""

    BLACK = Position('black')
    WHITE = Position('white')
    EMPTY = Position('empty')

    class BoardError(Canvas.CanvasError):
        pass
# Demo: build a standard 19x19 board and its text renderer at import time.
b = Board(19, 19)
c = BoardCanvas(b)
| mit | Python | |
# Startup banner for the ParseBasic interpreter.
print "ParseBasic interpreter v0.1"
print "Copyright 2015 NETponents"
print "Licensed under MIT license"
print "Commercial use of this build is prohibited"
| mit | Python | |
c4015ed868b65ce5c7ed660c84e252a950294642 | Add basic functionality to query the (horrible) website. | kdungs/R1D2 | r1.py | r1.py | from datetime import date
import bs4
import itertools as it
import re
import requests
def grouper(iterable, n, fillvalue=None):
    """Collect data into fixed-length chunks: grouper('ABCDEF', 2) -> AB CD EF.

    The classic itertools recipe: n references to one iterator, zipped
    (Python 2 izip_longest pads the final short chunk with fillvalue).
    """
    args = [iter(iterable)] * n
    return it.izip_longest(fillvalue=fillvalue, *args)
def extract_name(bsitem):
return bsitem.find('span').text
def extract_price(bsitem):
reg = re.compile(r'CHF ([\d\.]+)')
return float(reg.findall(bsitem.text)[0])
def extract_table(response):
items = bs4.BeautifulSoup(response.text).find(
'table',
class_='menuRestaurant').findAll('table',
class_='HauteurMenu')
return [(extract_name(i), extract_price(i)) for i in items[1::2]]
def create_payload(page):
return {'fa_afficheSemaine_menurestaurant': 'Page {}'.format(page),
'fn_changeType': 2,
'fn_jourSemaine': '{}'.format(date.today()),
'fn_limite': 2 * page - 1,
'fn_refresh': 1,
'fn_numpage': page}
def split_days(items):
xs = [grouper(i, n) for i, n in zip(items, (3, 2, 2))]
return [list(it.chain(*i)) for i in zip(*xs)]
def get_menu():
URL1 = 'http://extranet.novae-restauration.ch/index.php?frame=1&x=d894ddae3c17b40b4fe7e16519f950f0&y=c7b3f79848b99a8e562a1df1d6285365&z=33'
URL2 = 'http://extranet.novae-restauration.ch/novae/traiteur/restauration/restaurant-cern.html?frame=1'
s = requests.Session()
return split_days([extract_table(s.get(URL1)), extract_table(
s.post(URL2,
data=create_payload(2))), extract_table(
s.post(URL2,
data=create_payload(3)))])
| mit | Python | |
7dede788648d5569587214722a2a128f419a7b8a | Create v1.py | GTeninbaum/pizzapi.py | v1.py | v1.py | print "This is Pizza Pi R Squared. What's it do? It lets you determine whether buying a small pizza or a large pizza is a better value in terms of cost per bite of pizza."
diameter_one = int(raw_input("What's the first pizza's diameter (in inches)?"))
cost_one = int(raw_input("How much does the first pizza cost? (in dollars and cents)"))
area_one = (diameter_one / 2) **2 * 3.14
total_one = cost_one / area_one
print "With this pizza, you're paying %s cents per square inch." % (total_one)
diameter_two = int(raw_input("What's the second pizza's diameter (in inches)?"))
cost_two = int(raw_input("How much does the second pizza cost? (in dollars and cents)"))
area_two = (diameter_two / 2) **2 * 3.14
total_two = cost_two / area_two
print "With this pizza, you're paying %s cents per square inch." % (total_two)
if total_one < total_two:
print "The first pizza is a better value. Buy that one!"
elif total_two < total_one:
print "The second pizza is the better deal - more pizza for the buck. Get that one!"
else:
print "Same deal - get whichever you'd like!"
#I'm a noob python programmer and this is my first script, other than those from "Learn Python the Hard Way."
#Suggestions/improvements/ideas appreciated!
#Feel free to drop me a note at gteninbaum@suffolk.edu.
| mit | Python | |
a998bd2686ab924035325d7288131a7141a457bb | Apply orphaned migration | dbinetti/barberscore,dbinetti/barberscore,barberscore/barberscore-api,barberscore/barberscore-api,barberscore/barberscore-api,dbinetti/barberscore-django,dbinetti/barberscore-django,barberscore/barberscore-api | project/apps/api/migrations/0010_remove_chart_song.py | project/apps/api/migrations/0010_remove_chart_song.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Schema migration: drop the orphaned Chart.song foreign key field."""

    dependencies = [
        ('api', '0009_auto_20150722_1041'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='chart',
            name='song',
        ),
    ]
| bsd-2-clause | Python | |
fe668b882d3c27f8f7bf7f8cf6d338bf3216310e | add testing script in temp location | mozilla-releng/scriptworker,mozilla-releng/scriptworker,escapewindow/scriptworker,escapewindow/scriptworker | aki.py | aki.py | #!/usr/bin/env python
# XXX this is a helpful script, but probably belongs in scriptworker/test/data
from __future__ import print_function
import aiohttp
import asyncio
from copy import deepcopy
import json
import logging
import pprint
import sys
from scriptworker.constants import DEFAULT_CONFIG
from scriptworker.context import Context
from scriptworker.cot.verify import ChainOfTrust, build_chain_of_trust
# Default task to inspect; overridable with the first CLI argument.
task_id = "S5pv1_I5SJWwGcjAFW1q6g"
if len(sys.argv) > 1:
    task_id = sys.argv[1]
loop = asyncio.get_event_loop()
context = Context()
# Taskcluster credentials come from the local scriptworker config file.
with open("/Users/asasaki/.scriptworker", "r") as fh:
    context.credentials = json.load(fh)['credentials']
context.queue = context.create_queue(context.credentials)
context.task = loop.run_until_complete(context.queue.task(task_id))
context.config = dict(deepcopy(DEFAULT_CONFIG))
context.config.update({
    'artifact_dir': '/tmp/artifacts',
    'base_gpg_home_dir': '/tmp/gpg',
})
log = logging.getLogger('scriptworker')
log.setLevel(logging.DEBUG)
logging.basicConfig()
with aiohttp.ClientSession() as session:
    context.session = session
    # NOTE(review): the chain is built for a hard-coded 'signing' task_id that
    # differs from the task fetched above -- confirm this is intentional.
    cot = ChainOfTrust(context, 'signing', task_id="J_RwqU2wR1iAegzl6bIVcg")
    loop.run_until_complete(build_chain_of_trust(cot))
    pprint.pprint(cot.dependent_task_ids())
    print("Cot task_id: {}".format(cot.task_id))
    for link in cot.links:
        print("task_id: {}".format(link.task_id))
        # print(link.cot_dir)
        # print(link.decision_task_id)
context.session.close()
context.queue.session.close()
loop.close()
| mpl-2.0 | Python | |
59b01485c70d42e32acb4c80efbe0e393ca8c437 | Add aes.py | catroll/clipboard,catroll/clipboard,catroll/clipboard,catroll/clipboard | aes.py | aes.py | # -*- coding: utf-8 -*-
import base64
from Crypto import Random
from Crypto.Cipher import AES
class AESCipher:
    """AES-256-CBC helper producing base64 ciphertext with a random IV prefix."""

    def __init__(self, key):
        # Pad/truncate unit; the key is forced to exactly 32 bytes (AES-256).
        self.bs = 32
        if len(key) >= 32:
            self.key = key[:32]
        else:
            self.key = self._pad(key)

    def encrypt(self, raw):
        raw = self._pad(raw)
        # Fresh random IV per message, shipped as the first AES block.
        iv = Random.new().read(AES.block_size)
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return base64.b64encode(iv + cipher.encrypt(raw))

    def decrypt(self, enc):
        enc = base64.b64decode(enc)
        # The IV occupies the first block of the decoded payload.
        iv = enc[:AES.block_size]
        cipher = AES.new(self.key, AES.MODE_CBC, iv)
        return self._unpad(cipher.decrypt(enc[AES.block_size:]))

    def _pad(self, s):
        """Pad the string to a multiple of self.bs (PKCS#7-style padding)."""
        pad_length = self.bs - len(s) % self.bs
        return s + pad_length * chr(pad_length)

    def _unpad(self, s):
        """Strip the padding characters, recovering the original string."""
        return s[:-ord(s[-1])]
def test(src, key='test_key'):
    # Round-trip src through encrypt/decrypt and report whether it survives.
    coder = AESCipher(key)
    enc = coder.encrypt(src)
    dec = coder.decrypt(enc)
    print '\n', '[[ TEST ]]'.center(70, '*'), '\n'
    print 'len: %-3d src: %s' % (len(src), repr(src))
    print 'len: %-3d enc: %s' % (len(enc), repr(enc))
    print 'Decrypt %s!' % ('Right' if dec == src else 'Wrong')


if __name__ == "__main__":
    # Exercise padding at every length from empty up to 9 characters.
    for i in range(0, 10):
        test('l' * i)
| mit | Python | |
68b8ad567545c7ec07f13089f2b3e4ecd4cc835e | Create api.py | repopreeth/trep | api.py | api.py | from flask import Flask
from flask.ext.restful import reqparse, abort, Api, Resource
from profile import Profile
# Flask app plus the Flask-RESTful wrapper that routes Resource classes.
app = Flask(__name__)
api = Api(app)
def abort_if_user_doesnt_exist(user):
    # 404 for handles that fail validation (isValid below is still a stub).
    if not isValid(user):
        abort(404, message="User {} doesn't exist".format(user))
# Argument validation
# NOTE(review): the parser is constructed but not used anywhere below yet.
parser = reqparse.RequestParser()
parser.add_argument('user', type=str)
def userInfo(user):
    # Build and return the profile payload for a twitter handle.
    return Profile(user).getProfile()
def isValid(user):
    # Stub: accepts every handle for now.
    return True
# User profile
# show a profile for a single twitter handle
class User(Resource):
    """Profile for a single twitter handle (routed at /handle/<user> below)."""

    def get(self, user):
        abort_if_user_doesnt_exist(user)
        return userInfo(user)
# Tweets
# shows up to 100 tweets from start
class Tweets(Resource):
    """Up to 100 tweets from `start`; unimplemented and not yet routed."""

    def get(self, user, start, end):
        # TODO paginate with cursor
        return None
##
## Setup the Api resource routing here
##
# TODO #api.add_resource(Tweets, '/tweets')
api.add_resource(User, '/handle/<string:user>')

if __name__ == '__main__':
    # Debug server for local development only.
    app.run(debug=True)
| apache-2.0 | Python | |
265bedb193f8615f99daa63c921b572408921605 | Add tests for quick sort | nbeck90/data_structures_2 | test_quick_sort.py | test_quick_sort.py | # -*- coding: utf-8 -*-
from quick_sort import quick_sort
def test_sorted():
    # Already-sorted input must come back unchanged.
    my_list = list(range(100))
    quick_sort(my_list)
    assert my_list == list(range(100))


def test_reverse():
    # Fully descending input still sorts ascending.
    my_list = list(range(100))[::-1]
    quick_sort(my_list)
    assert my_list == list(range(100))


def test_empty():
    # Empty list is a no-op, not an error.
    my_list = []
    quick_sort(my_list)
    assert my_list == []


def test_abc():
    # Strings sort lexicographically, whatever the starting order.
    my_list = ['a', 'b', 'c', 'd', 'e']
    quick_sort(my_list)
    assert my_list == ['a', 'b', 'c', 'd', 'e']
    my_list = ['e', 'd', 'c', 'b', 'a']
    quick_sort(my_list)
    assert my_list == ['a', 'b', 'c', 'd', 'e']


def test_unicode():
    # Python 2: the source-encoded 'π' is the two-byte str '\xcf\x80'.
    my_list = ['π']
    quick_sort(my_list)
    assert my_list == ['\xcf\x80']


def test_duplicate():
    # Duplicates are kept, not collapsed.
    my_list = [1, 2, 2, 5, 3]
    quick_sort(my_list)
    assert my_list == [1, 2, 2, 3, 5]


def test_combo():
    # Python 2 orders mixed types: numbers sort before strings.
    my_list = [42, 1, 'a', 500]
    quick_sort(my_list)
    assert my_list == [1, 42, 500, 'a']
    my_list = [42, '1', 'a', '500']
    quick_sort(my_list)
    assert my_list == [42, '1', '500', 'a']


def test_function():
    # quick_sort sorts in place and returns None.
    my_list = []
    new_list = [quick_sort(my_list)]
    assert new_list == [None]
| mit | Python | |
# Package version string.
VERSION = '0.5.2'
| mit | Python | |
426ef95ba1b2f3ac42c16a3594d186c4c9226a6e | add admin | byteweaver/django-referral,Chris7/django-referral | referral/admin.py | referral/admin.py | from django.contrib import admin
from models import Campaign, Referrer
class ReferrerInine(admin.TabularInline):
    # NOTE(review): class name looks like a typo for "ReferrerInline"; kept
    # as-is because it is referenced by CampaignAdmin below.
    model = Referrer
    extra = 0


class CampaignAdmin(admin.ModelAdmin):
    # Edit a campaign's referrers inline on the campaign change page.
    inlines = (ReferrerInine, )


class ReferrerAdmin(admin.ModelAdmin):
    list_display = ('name', 'campaign', 'creation_date')


admin.site.register(Campaign, CampaignAdmin)
admin.site.register(Referrer, ReferrerAdmin)
| mit | Python | |
e826c3e95b1a035484a5fa3ab05d5bc5e5a023bb | Add db_fsck.py which checks for some problems with the server and image store. | drougge/wellpapp-pyclient | db_fsck.py | db_fsck.py | #!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
from hashlib import md5
import Image
from os.path import exists
from os import stat, walk
import re
def usage(argv):
    # Print the option summary and terminate.
    # `exit` is the site-builtin; the __main__ block also imports sys.exit
    # into module scope.
    print "Usage:", argv[0], "-opts"
    print "Where opts can be:"
    print "\t-t Check thumb existance"
    print "\t-T Check thumb integrity (decodeability and timestamp)"
    print "\t-i Check image existance"
    print "\t-I Check image integrity (md5)"
    print "\t-f Check that all existing symlinks/thumbnails have a corresponding post"
    print "\t-s Check that all posts have the same tags they are findable by."
    print "\t-a All of the above"
    exit(1)
def check_images(integrity):
    """Return the set of post md5s whose image file is missing or corrupt.

    With integrity=True each file's content md5 is re-hashed and compared
    against the post's md5 (the filename is the content hash).
    """
    print "Checking images"
    bad = set()
    for m in posts:
        path = client.image_path(m)
        if not exists(path):
            print "Missing image", m
            bad.add(m)
            continue
        if integrity:
            if md5(file(path).read()).hexdigest() != m:
                print "Bad file", m
                bad.add(m)
    return bad
def check_thumbs(integrity):
    """Return the set of post md5s with missing, undecodable or stale thumbs.

    JPEG thumbs exist per configured integer size; "normal"/"large" are PNG
    thumbs carrying Thumb::URI / Thumb::MTime metadata that must match the
    source image's mtime.
    """
    print "Checking thumbs"
    # Integer entries select jpeg thumbs, string entries the png variants.
    sizes = map(int, client.cfg.thumb_sizes.split()) + ["normal", "large"]
    bad = set()
    for m in posts:
        thumbs = []
        for z in sizes:
            if isinstance(z, int):
                path = client.thumb_path(m, z)
            else:
                path = client.pngthumb_path(m, posts[m]["ext"], z)
            thumbs.append((path, z))
        for path, z in thumbs:
            if not exists(path):
                print m, "missing thumb", z
                bad.add(m)
        # Skip integrity checks for posts already known bad.
        if integrity and m not in bad:
            for path, z in thumbs:
                img = Image.open(path)
                try:
                    # img.verify() doesn't work on jpeg
                    # (and load() is pretty forgiving too, but that's
                    # probably a problem with the format.)
                    img.load()
                except Exception:
                    print m, "bad thumb", z
                    bad.add(m)
                    continue
                if isinstance(z, basestring):
                    # PNG thumbs must carry the freedesktop thumbnail keys.
                    if "Thumb::URI" not in img.info or "Thumb::MTime" not in img.info:
                        print m, "bad thumb", z
                        bad.add(m)
                        continue
                    t_mtime = int(img.info["Thumb::MTime"])
                    f_mtime = int(stat(client.image_path(m)).st_mtime)
                    if t_mtime != f_mtime:
                        print m, "outdated thumb", z
                        bad.add(m)
                        continue
    return bad
def _check_imagestore(msg, bad, dp, fns, name2md5):
    """Flag files in directory `dp` that are misplaced or have no post.

    name2md5 maps a file path to its post md5 (identity for jpeg paths,
    PNG-metadata lookup for png thumbs). Bad paths are added to `bad`.
    """
    # Files are bucketed by hash prefix encoded in the directory name.
    initial = dp[-4] + dp[-2:]
    for fn in fns:
        p = dp + "/" + fn
        if fn[:3] != initial:
            print "Misplaced", msg, p
            bad.add(p)
            continue
        if name2md5(p)[-32:] not in posts:
            print msg.title(), "without post", fn
            bad.add(p)
def _pngthumb2md5(fn):
    # PNG thumbs store the source md5 in their Thumb::URI metadata key.
    img = Image.open(fn)
    return img.info["Thumb::URI"][:32]
def check_imagestore():
    """Walk the image and thumb stores; return the set of stray file paths."""
    bad = set()
    print "Checking for stray images"
    for dp, dns, fns in walk(client.cfg.image_base):
        _check_imagestore("image", bad, dp, fns, str)
    print "Checking for stray thumbnails"
    jpegz = client.cfg.thumb_sizes.split()
    # jpeg thumbs are named by md5 (str); png thumbs need a metadata lookup.
    for z, name2md5 in zip(jpegz + ["normal", "large"], [str] * len(jpegz) + [_pngthumb2md5] * 2):
        print " " + z
        for dp, dns, fns in walk(client.cfg.thumb_base + "/" + z):
            _check_imagestore("thumb", bad, dp, fns, name2md5)
    return bad
def check_connectivity():
    """Cross-check tag<->post reachability on the server.

    Pass 1: every tag's strong/weak post counts must match search results,
    and every post found via a tag must actually carry that tag.
    Pass 2: every post must be findable by the conjunction of its own tags.
    """
    print "Checking tags"
    for guid in tags:
        # "!" = strong match, "~" = weak match in the search syntax.
        strong = client.search_post(guids=["!" + guid])
        weak = client.search_post(guids=["~" + guid])
        if len(strong) != tags[guid]["posts"] or len(weak) != tags[guid]["weak_posts"]:
            print "Post count mismatch on", guid
            print "\tclaims", tags[guid]["posts"], "+", tags[guid]["weak_posts"]
            print "\tfinds", len(strong), "+", len(weak)
        for res, prefix in ((strong, ""), (weak, "~")):
            for p in res:
                p = posts[p["md5"]]
                if prefix + guid not in p["tagguid"] + p["impltagguid"]:
                    print p["md5"], "reachable with", guid, "but not tagged"
    print "Checking posts"
    for m in posts:
        guids = map(lambda g: g if g[0] == "~" else "!" + g, posts[m]["tagguid"])
        # Search in batches of 16 guids at a time.
        while guids:
            p = client.search_post(guids=guids[:16])
            if m not in map(lambda f: f["md5"], p):
                print "Post", m, "not findable with all tags"
            guids = guids[16:]
if __name__ == '__main__':
    from sys import argv, exit
    from dbclient import dbclient
    # Parse -opts flags by hand; "a" expands to every known option.
    optchars = "tTiIfs"
    opts = ""
    for a in argv[1:]:
        if a[0] != "-": usage(argv)
        for c in a[1:]:
            if c in optchars and c not in opts:
                opts += c
            elif c == "a":
                opts = optchars
            else:
                usage(argv)
    if not opts: usage(argv)
    client = dbclient()
    # Load all posts (with tag guids and extension) and all tags up front.
    posts = client._search_post("SPFtagguid Fimplied Fext", ["tagguid", "implied", "ext"])
    print len(posts), "posts"
    posts = dict(map(lambda f: (f["md5"], f), posts))
    tags = client.find_tags("EAI", "")
    print len(tags), "tags"
    bad_images = None
    bad_thumbs = None
    stray = None
    connectivity = None
    # Lowercase flag enables the check; the uppercase variant adds the
    # deeper integrity pass.
    optlow = opts.lower()
    if "i" in optlow: bad_images = check_images("I" in opts)
    if "t" in optlow: bad_thumbs = check_thumbs("T" in opts)
    if "f" in opts: stray = check_imagestore()
    if "s" in opts: connectivity = check_connectivity()
| mit | Python | |
747800528b3709759738081ee580e380bf164c02 | add skeletons of new unit tests to be added | dongsenfo/pymatgen,montoyjh/pymatgen,gVallverdu/pymatgen,gmatteo/pymatgen,richardtran415/pymatgen,montoyjh/pymatgen,gVallverdu/pymatgen,fraricci/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,richardtran415/pymatgen,tschaume/pymatgen,richardtran415/pymatgen,richardtran415/pymatgen,blondegeek/pymatgen,vorwerkc/pymatgen,dongsenfo/pymatgen,blondegeek/pymatgen,tschaume/pymatgen,gmatteo/pymatgen,fraricci/pymatgen,davidwaroquiers/pymatgen,tschaume/pymatgen,montoyjh/pymatgen,fraricci/pymatgen,gVallverdu/pymatgen,montoyjh/pymatgen,vorwerkc/pymatgen,gVallverdu/pymatgen,mbkumar/pymatgen,tschaume/pymatgen,tschaume/pymatgen,blondegeek/pymatgen,dongsenfo/pymatgen,mbkumar/pymatgen,dongsenfo/pymatgen,vorwerkc/pymatgen,mbkumar/pymatgen,davidwaroquiers/pymatgen,davidwaroquiers/pymatgen,mbkumar/pymatgen,blondegeek/pymatgen,davidwaroquiers/pymatgen | pymatgen/analysis/defects/tests/test_compatibility.py | pymatgen/analysis/defects/tests/test_compatibility.py | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import unicode_literals
import unittest
from pymatgen.util.testing import PymatgenTest
class DefectCompatibilityTest(PymatgenTest):
    """Skeleton test suite for DefectCompatibility; every case is a stub."""

    def test_process_entry(self):
        pass

    def test_perform_all_corrections(self):
        pass

    def test_perform_freysoldt(self):
        pass

    def test_perform_kumagai(self):
        pass

    def test_run_bandfilling(self):
        pass

    def test_run_band_edge_shifting(self):
        pass

    def test_delocalization_analysis(self):
        pass

    def test_is_freysoldt_delocalized(self):
        pass

    def test_is_kumagai_delocalized(self):
        pass

    def test_is_final_relaxed_structure_delocalized(self):
        pass
| mit | Python | |
b68db14e5ecd2e8ccaaa0412798a8669232fb8e5 | Add constraint variables in solver | mpardalos/CS-Internal | solver.py | solver.py | from collections import namedtuple
from past import autotranslate
# python-constraint is python2, so we'll use python-future's autotranslate function
autotranslate(['constraint'])
import constraint
# periods is an int for how many periods per week are required for this subject
subject = namedtuple("subject", ['name', 'periods'])
def solve(subjects: list, max_students_per_class: int, periods_per_week: int):
    """Build the constraint problem for a weekly timetable.

    One solver variable is created per required period of each subject;
    each variable may take any slot number in 1..periods_per_week.

    Args:
        subjects ([subject]): subjects that should appear on the timetable
        max_students_per_class (int): class-size cap (accepted but not yet
            used by the current implementation)
        periods_per_week (int): total number of slots in the timetable
    """
    problem = constraint.Problem()
    slots = range(1, periods_per_week + 1)
    for subj in subjects:
        # Periods are numbered starting from 1.
        for slot in range(1, subj.periods + 1):
            var_name = '{0}-period{1}'.format(subj.name, slot)
            # A fresh Domain per variable: domains are pruned in place
            # by the solver, so they must not be shared.
            problem.addVariable(var_name, constraint.Domain(slots))
# Test data: a 20-slot week with three subjects, each at SL (2 periods)
# and HL (3 periods) level.
periods_per_week = 20

HistorySL = subject("HistorySL", 2)
HistoryHL = subject("HistoryHL", 3)
MathSL = subject("MathSL", 2)
MathHL = subject("MathHL", 3)
BiologySL = subject("BiologySL", 2)
BiologyHL = subject("BiologyHL", 3)

subjects = [
    HistorySL,
    HistoryHL,
    MathSL,
    MathHL,
    BiologySL,
    BiologyHL
]
max_students_per_class = 14

# NOTE(review): solve() currently only declares variables; no constraints
# are added and no solutions are requested yet.
solve(subjects, max_students_per_class, periods_per_week)
| mit | Python | |
a14d696cad5b3249997257298150977fa53f9cc8 | Add lc151_reverse_words_in_a_string.py | bowen0701/algorithms_data_structures | lc151_reverse_words_in_a_string.py | lc151_reverse_words_in_a_string.py | """Leetcode 151. Reverse Words in a String
Medium
Given an input string, reverse the string word by word.
Example 1:
Input: "the sky is blue"
Output: "blue is sky the"
Example 2:
Input: " hello world! "
Output: "world! hello"
Explanation: Your reversed string should not contain leading or trailing spaces.
Example 3:
Input: "a good example"
Output: "example good a"
Explanation: You need to reduce multiple spaces between two words to a single
space in the reversed string.
Note:
A word is defined as a sequence of non-space characters.
Input string may contain leading or trailing spaces. However, your reversed
string should not contain leading or trailing spaces.
You need to reduce multiple spaces between two words to a single space in the
reversed string.
"""
class Solution(object):
    def reverseWords(self, s):
        """Reverse the order of the words in ``s``.

        Words are maximal runs of non-space characters.  Leading and
        trailing whitespace is dropped, and runs of spaces between words
        collapse to a single space (both fall out of str.split(), which
        splits on arbitrary whitespace and discards empty fields).

        :type s: str
        :rtype: str
        """
        # Was an unimplemented stub (``pass``, i.e. returned None).
        return ' '.join(reversed(s.split()))
def main():
    """Run the three LeetCode 151 examples through Solution.reverseWords.

    The original body rebound ``s`` three times and never called the
    solution, so it exercised nothing; it also imported ``time`` without
    using it.
    """
    sol = Solution()
    cases = [
        'the sky is blue',    # Ans: "blue is sky the".
        '  hello world!  ',   # Ans: "world! hello".
        'a good example',     # Ans: "example good a".
    ]
    for s in cases:
        print('%r -> %r' % (s, sol.reverseWords(s)))
if __name__ == '__main__':
main()
| bsd-2-clause | Python | |
b80d7927225f172653922317ef5c96e90876588d | Create SemiSupervisedTSNE.py | lmcinnes/sstsne | sstsne/SemiSupervisedTSNE.py | sstsne/SemiSupervisedTSNE.py | bsd-2-clause | Python | ||
2fbcd2c5c47b4066e74619196dc333fa88a015d1 | isolate pipe operator overload code | h2non/paco | tests/pipe_test.py | tests/pipe_test.py | # -*- coding: utf-8 -*-
import asyncio
import pytest
import paco
from paco.pipe import overload
def test_pipe_operator_overload():
    """End-to-end check of paco's ``|`` pipeline composition.

    Pipeline over (1..10): keep x < 8 -> 1..7; double -> 2..14;
    dropwhile x < 10 drops the leading 2,4,6,8 -> 10,12,14;
    reduce(+, 0) sums them -> 36.
    """
    @asyncio.coroutine
    def filterer(x):
        return x < 8

    @asyncio.coroutine
    def mapper(x):
        return x * 2

    @asyncio.coroutine
    def drop(x):
        return x < 10

    @asyncio.coroutine
    def reducer(acc, x):
        return acc + x

    @asyncio.coroutine
    def task(numbers):
        # ``|`` is overloaded by paco to thread ``numbers`` through each stage.
        return (yield from (numbers
                            | paco.filter(filterer)
                            | paco.map(mapper)
                            | paco.dropwhile(drop)
                            | paco.reduce(reducer, initializer=0)))

    result = paco.run(task((1, 2, 3, 4, 5, 6, 7, 8, 9, 10)))
    assert result == 36
def test_overload_error():
    """overload() must reject non-callables and wrong arities."""
    with pytest.raises(TypeError, message='fn must be a callable object'):
        overload(None)

    # Bug fix: the two calls below passed ``messsage=`` (triple-s typo),
    # which is not a pytest.raises keyword and would itself raise a
    # TypeError instead of asserting on the expected ValueError.  The
    # first call in this function already used the correct ``message=``.
    with pytest.raises(ValueError,
                       message='invalid function signature or arity'):
        overload(lambda x: True)

    with pytest.raises(ValueError,
                       message='invalid function signature or arity'):
        overload(lambda x, y: True)
| mit | Python | |
fd6eea38f389a440f2c7d69e0de29677a64dbd2c | Add manual wifi table migration script. | mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea,therewillbecode/ichnaea,therewillbecode/ichnaea,therewillbecode/ichnaea,mozilla/ichnaea | ichnaea/scripts/migrate.py | ichnaea/scripts/migrate.py | """
Manual migration script to move networks from old single wifi table
to new sharded wifi table structure.
"""
from collections import defaultdict
import sys
import time
from ichnaea.config import read_config
from ichnaea.db import (
configure_db,
db_worker_session,
)
from ichnaea.models.wifi import (
Wifi,
WifiShard,
)
def migrate(db, batch=1000):
    """Move up to ``batch`` rows from the old single ``wifi`` table into
    the sharded WifiShard tables.

    Returns a ``(added, deleted, skipped)`` tuple: rows copied into a
    shard, rows removed from the old table, and rows skipped because a
    shard row with the same MAC already existed.
    """
    added = 0
    deleted = 0
    skipped = 0
    with db_worker_session(db, commit=True) as session:
        # Newest rows first, so repeated runs walk the table from the end.
        old_wifis = (session.query(Wifi)
                     .order_by(Wifi.id.desc())
                     .limit(batch)).all()
        # Group the new shard-model rows by their destination shard table.
        sharded = defaultdict(list)
        for old_wifi in old_wifis:
            shard = WifiShard.shard_model(old_wifi.key)
            sharded[shard].append(shard(
                mac=old_wifi.key,
                created=old_wifi.created,
                modified=old_wifi.modified,
                lat=old_wifi.lat,
                lon=old_wifi.lon,
                max_lat=old_wifi.max_lat,
                min_lat=old_wifi.min_lat,
                max_lon=old_wifi.max_lon,
                min_lon=old_wifi.min_lon,
                radius=old_wifi.range,
                samples=old_wifi.total_measures,
            ))
        moved_wifis = set()
        for shard, wifis in sharded.items():
            # One IN-query per shard to find MACs that already migrated.
            shard_macs = set([wifi.mac for wifi in wifis])
            existing = (session.query(shard.mac)
                        .filter(shard.mac.in_(list(shard_macs)))).all()
            existing = set([e.mac for e in existing])
            for wifi in wifis:
                if wifi.mac not in existing:
                    moved_wifis.add(wifi.mac)
                    session.add(wifi)
                    added += 1
                else:
                    skipped += 1
        if moved_wifis:
            # Delete only rows that were actually copied; skipped rows stay
            # in the old table.  synchronize_session=False is safe because
            # the session is committed right after.
            query = (session.query(Wifi)
                     .filter(Wifi.key.in_(list(moved_wifis))))
            deleted = query.delete(synchronize_session=False)
        else:
            deleted = 0
    return (added, deleted, skipped)
def main(db, repeat=1, batch=1000):
    """Run ``migrate`` ``repeat`` times, printing per-run stats and timing.

    Args:
        db: configured database handle passed through to migrate().
        repeat (int): number of migration batches to run.
        batch (int): rows per batch.
    """
    for _ in range(repeat):
        start = time.time()
        # Bug fix: the original used '%H:%m', which is hour:MONTH;
        # '%M' is the minutes directive.
        print('Start: %s' % time.strftime('%H:%M', time.gmtime(start)))
        added, deleted, skipped = migrate(db, batch=batch)
        end = int((time.time() - start) * 1000)
        print('Added: %s, Deleted: %s, Skipped: %s' % (
            added, deleted, skipped))
        print('Took: %s ms\n' % end)
    print('End')
if __name__ == '__main__':
    argv = sys.argv
    batch = 1000
    repeat = 1
    # CLI: "migrate.py [repeat] batch" — the LAST argument is the batch
    # size and the second-to-last (if given) is the repeat count.
    if len(argv) > 1:
        batch = int(argv[-1])
    if len(argv) > 2:
        repeat = int(argv[-2])

    app_config = read_config()
    db = configure_db(app_config.get('database', 'rw_url'))
    main(db, repeat=repeat, batch=batch)
| apache-2.0 | Python | |
d4c7869d62635eca3108d743c2bc12c9f394d68a | Add archive.File class, which allows downloading from archive.org | brycedrennan/internetarchive,JesseWeinstein/internetarchive,dattasaurabh82/internetarchive,jjjake/internetarchive,wumpus/internetarchive | tests/test_item.py | tests/test_item.py | import os, sys
inc_path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, inc_path)
import archive
def test_item():
    """Fetching an item by identifier exposes its metadata.

    NOTE: performs live network requests against archive.org.
    """
    item = archive.Item('stairs')
    assert item.metadata['metadata']['identifier'] == 'stairs'
def test_file():
    """Downloading a file yields a local copy of the advertised size.

    NOTE: performs live network requests against archive.org and writes
    'glogo.png' into the current directory (removed at the end; not
    cleaned up if an assertion fails mid-test).
    """
    item = archive.Item('stairs')
    filename = 'glogo.png'
    file = item.file(filename)
    # Guard: the download target must not already exist locally.
    assert not os.path.exists(filename)
    file.download()
    assert os.stat(filename).st_size == file.size
    os.unlink(filename)
| agpl-3.0 | Python | |
fa4b4de37b38f0ff800bbd2ac007ab6521720258 | Add test for box migration script | ticklemepierce/osf.io,haoyuchen1992/osf.io,Nesiehr/osf.io,wearpants/osf.io,zachjanicki/osf.io,acshi/osf.io,chennan47/osf.io,ZobairAlijan/osf.io,cwisecarver/osf.io,njantrania/osf.io,TomHeatwole/osf.io,chrisseto/osf.io,felliott/osf.io,erinspace/osf.io,TomBaxter/osf.io,abought/osf.io,jnayak1/osf.io,rdhyee/osf.io,aaxelb/osf.io,billyhunt/osf.io,jnayak1/osf.io,binoculars/osf.io,emetsger/osf.io,wearpants/osf.io,saradbowman/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,chrisseto/osf.io,caseyrollins/osf.io,binoculars/osf.io,Nesiehr/osf.io,mfraezz/osf.io,SSJohns/osf.io,hmoco/osf.io,DanielSBrown/osf.io,ticklemepierce/osf.io,KAsante95/osf.io,GageGaskins/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,doublebits/osf.io,cslzchen/osf.io,caseyrygt/osf.io,kch8qx/osf.io,emetsger/osf.io,amyshi188/osf.io,danielneis/osf.io,RomanZWang/osf.io,RomanZWang/osf.io,icereval/osf.io,mattclark/osf.io,DanielSBrown/osf.io,amyshi188/osf.io,jnayak1/osf.io,billyhunt/osf.io,haoyuchen1992/osf.io,GageGaskins/osf.io,amyshi188/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,mluke93/osf.io,wearpants/osf.io,alexschiller/osf.io,mluo613/osf.io,kch8qx/osf.io,haoyuchen1992/osf.io,caneruguz/osf.io,icereval/osf.io,brandonPurvis/osf.io,samanehsan/osf.io,chrisseto/osf.io,abought/osf.io,cwisecarver/osf.io,KAsante95/osf.io,chennan47/osf.io,GageGaskins/osf.io,mfraezz/osf.io,KAsante95/osf.io,leb2dg/osf.io,ticklemepierce/osf.io,cosenal/osf.io,adlius/osf.io,chennan47/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,aaxelb/osf.io,caneruguz/osf.io,haoyuchen1992/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,acshi/osf.io,Ghalko/osf.io,baylee-d/osf.io,mattclark/osf.io,ZobairAlijan/osf.io,amyshi188/osf.io,Ghalko/osf.io,cosenal/osf.io,billyhunt/osf.io,GageGaskins/osf.io,brianjgeiger/osf.io,felliott/osf.io,acshi/osf.io,cslzchen/osf.io,Nesiehr/osf.io,wearpants/osf.io,zachjanicki/osf.io,mluo613/osf.io,Ghalko/osf.io,sloria/osf.io,cslzc
hen/osf.io,brianjgeiger/osf.io,doublebits/osf.io,mluke93/osf.io,zamattiac/osf.io,acshi/osf.io,TomBaxter/osf.io,alexschiller/osf.io,caneruguz/osf.io,crcresearch/osf.io,pattisdr/osf.io,billyhunt/osf.io,samanehsan/osf.io,monikagrabowska/osf.io,aaxelb/osf.io,monikagrabowska/osf.io,hmoco/osf.io,asanfilippo7/osf.io,Ghalko/osf.io,acshi/osf.io,HalcyonChimera/osf.io,adlius/osf.io,crcresearch/osf.io,njantrania/osf.io,caseyrollins/osf.io,zamattiac/osf.io,caseyrygt/osf.io,emetsger/osf.io,cwisecarver/osf.io,laurenrevere/osf.io,hmoco/osf.io,kwierman/osf.io,erinspace/osf.io,GageGaskins/osf.io,CenterForOpenScience/osf.io,caseyrygt/osf.io,RomanZWang/osf.io,danielneis/osf.io,sloria/osf.io,billyhunt/osf.io,rdhyee/osf.io,binoculars/osf.io,HalcyonChimera/osf.io,mluo613/osf.io,TomBaxter/osf.io,TomHeatwole/osf.io,ZobairAlijan/osf.io,erinspace/osf.io,doublebits/osf.io,leb2dg/osf.io,TomHeatwole/osf.io,felliott/osf.io,Johnetordoff/osf.io,monikagrabowska/osf.io,alexschiller/osf.io,zachjanicki/osf.io,mluo613/osf.io,samchrisinger/osf.io,brandonPurvis/osf.io,caseyrollins/osf.io,samanehsan/osf.io,kwierman/osf.io,rdhyee/osf.io,danielneis/osf.io,samanehsan/osf.io,alexschiller/osf.io,mattclark/osf.io,saradbowman/osf.io,DanielSBrown/osf.io,laurenrevere/osf.io,samchrisinger/osf.io,crcresearch/osf.io,njantrania/osf.io,zamattiac/osf.io,abought/osf.io,rdhyee/osf.io,jnayak1/osf.io,pattisdr/osf.io,cosenal/osf.io,asanfilippo7/osf.io,samchrisinger/osf.io,brianjgeiger/osf.io,doublebits/osf.io,baylee-d/osf.io,aaxelb/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,SSJohns/osf.io,mluke93/osf.io,ticklemepierce/osf.io,brandonPurvis/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,mluo613/osf.io,caseyrygt/osf.io,doublebits/osf.io,icereval/osf.io,hmoco/osf.io,ZobairAlijan/osf.io,cosenal/osf.io,asanfilippo7/osf.io,adlius/osf.io,SSJohns/osf.io,cslzchen/osf.io,cwisecarver/osf.io,danielneis/osf.io,SSJohns/osf.io,kwierman/osf.io,zamattiac/osf.io,emetsger/osf.io,baylee-d/osf.io,Nesiehr/osf.io,adlius/osf.io,sloria/osf.io,leb2dg/osf.i
o,mluke93/osf.io,njantrania/osf.io,chrisseto/osf.io,kch8qx/osf.io,laurenrevere/osf.io,TomHeatwole/osf.io,brandonPurvis/osf.io,kch8qx/osf.io,DanielSBrown/osf.io,KAsante95/osf.io,RomanZWang/osf.io,RomanZWang/osf.io,pattisdr/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,brandonPurvis/osf.io,samchrisinger/osf.io,abought/osf.io,CenterForOpenScience/osf.io,zachjanicki/osf.io,asanfilippo7/osf.io,felliott/osf.io,KAsante95/osf.io | scripts/tests/test_box_migrate_to_external_account.py | scripts/tests/test_box_migrate_to_external_account.py | from nose.tools import *
from scripts.box.migrate_to_external_account import do_migration, get_targets
from framework.auth import Auth
from tests.base import OsfTestCase
from tests.factories import ProjectFactory, UserFactory
from website.addons.box.model import BoxUserSettings
from website.addons.box.tests.factories import BoxOAuthSettingsFactory
class TestBoxMigration(OsfTestCase):
    """Tests for the Box migration from per-addon OAuth settings to
    shared ExternalAccount objects."""
    # Note: BoxUserSettings.user_settings has to be changed to foreign_user_settings (model and mongo). See migration instructions

    def test_migration_no_project(self):
        # A user addon with oauth_settings but no project should still be
        # migrated to a single external account.
        user = UserFactory()
        user.add_addon('box')
        user_addon = user.get_addon('box')
        user_addon.oauth_settings = BoxOAuthSettingsFactory()
        user_addon.save()
        do_migration([user_addon])
        user_addon.reload()
        # Old-style settings cleared, replaced by one external account.
        assert_is_none(user_addon.oauth_settings)
        assert_equal(len(user.external_accounts), 1)
        account = user.external_accounts[0]
        assert_equal(account.provider, 'box')
        # 'abcdef1' comes from BoxOAuthSettingsFactory's default token.
        assert_equal(account.oauth_key, 'abcdef1')

    def test_migration_removes_targets(self):
        # Start from a clean collection so get_targets() counts only ours.
        BoxUserSettings.remove()
        user = UserFactory()
        project = ProjectFactory(creator=user)
        user.add_addon('box', auth=Auth(user))
        user_addon = user.get_addon('box')
        user_addon.oauth_settings = BoxOAuthSettingsFactory()
        user_addon.save()
        project.add_addon('box', auth=Auth(user))
        node_addon = project.get_addon('box')
        node_addon.foreign_user_settings = user_addon
        node_addon.save()
        assert_equal(get_targets().count(), 1)
        do_migration([user_addon])
        user_addon.reload()
        # After migration the addon no longer qualifies as a target.
        assert_equal(get_targets().count(), 0)

    def test_migration_multiple_users(self):
        # Two users sharing one oauth_settings must end up sharing one
        # external account, not get duplicates.
        user1 = UserFactory()
        user2 = UserFactory()
        oauth_settings = BoxOAuthSettingsFactory()
        user1.add_addon('box')
        user1_addon = user1.get_addon('box')
        user1_addon.oauth_settings = oauth_settings
        user1_addon.save()
        user2.add_addon('box')
        user2_addon = user2.get_addon('box')
        user2_addon.oauth_settings = oauth_settings
        user2_addon.save()
        do_migration([user1_addon, user2_addon])
        user1_addon.reload()
        user2_addon.reload()
        assert_equal(
            user1.external_accounts[0],
            user2.external_accounts[0],
        )

    def test_get_targets(self):
        # Only addons that still carry oauth_settings are migration targets.
        BoxUserSettings.remove()
        addons = [
            BoxUserSettings(),
            BoxUserSettings(oauth_settings=BoxOAuthSettingsFactory()),
        ]
        for addon in addons:
            addon.save()
        targets = get_targets()
        assert_equal(targets.count(), 1)
        assert_equal(targets[0]._id, addons[-1]._id)
| apache-2.0 | Python | |
7056f00934c0956bfe1a6aed7558cb3b9fd1de57 | add ability to retrieve sorted profiler statistics | Aloomaio/tornado-profile,makearl/tornado-profile | tornado_profile.py | tornado_profile.py | """Profile a Tornado application via REST."""
from operator import itemgetter
import tornado.web
import yappi
__author__ = "Megan Kearl Patten <megkearl@gmail.com>"
def start_profiling():
    """Start the yappi profiler.

    Builtins and non-main threads are excluded from profiling.
    """
    # POST /profiler
    yappi.start(builtins=False, profile_threads=False)
def is_profiler_running():
    """Return True if the profiler is currently running."""
    # GET /profiler
    # Bug fix: the original called yappi.is_running() but never returned
    # the result, so callers always received None.
    return yappi.is_running()
def stop_profiling():
    """Stop the yappi profiler; collected stats are kept until cleared."""
    # DELETE /profiler
    yappi.stop()
def clear_stats():
    """Discard all statistics collected by the profiler so far."""
    # DELETE /profiler/stats
    yappi.clear_stats()
def get_statistics(sort="cumulative", count=20):
    """Print profiler statistics to stdout.

    Generalized: the pstats sort key and the number of printed entries
    were hard-coded to "cumulative" and 20; they are now parameters with
    the same defaults, matching the query parameters in the route comment.

    Args:
        sort (str): pstats sort key, e.g. "cumulative" or "tottime".
        count (int): number of entries to print.
    """
    # GET /profiler/stats?sort=cumulative&total=20
    y_func_stats = yappi.get_func_stats()
    pstats = yappi.convert2pstats(y_func_stats)
    pstats.strip_dirs()
    pstats.sort_stats(sort).print_stats(count)
def get_profiler_statistics(sort="cum_time", count=20):
    """Return profiler statistics as a list of JSON-serializable dicts.

    :param str sort: dictionary key to sort by
    :param int|None count: the number of results to return, None returns
        all results (a ``[:None]`` slice keeps the whole list).
    """
    json_stats = []
    pstats = yappi.convert2pstats(yappi.get_func_stats())
    pstats.strip_dirs()
    # Fix: .items() instead of the Python-2-only .iteritems(); behaves
    # the same on Python 2 and keeps the module importable on Python 3.
    for func, func_stat in pstats.stats.items():
        path, line, func_name = func
        cc, num_calls, total_time, cum_time, callers = func_stat
        json_stats.append({
            "path": path,
            "line": line,
            "func_name": func_name,
            "num_calls": num_calls,
            # Guard against division by zero for never-called entries.
            "total_time": total_time,
            "total_time_per_call": total_time/num_calls if total_time else 0,
            "cum_time": cum_time,
            "cum_time_per_call": cum_time/num_calls if cum_time else 0
        })
    # NOTE(review): ascending sort returns the *cheapest* entries first;
    # confirm whether reverse=True was intended for a "top N" listing.
    return sorted(json_stats, key=itemgetter(sort))[:count]
class TornadoProfiler(object):
    """Stub for mounting the profiler endpoints onto a Tornado app.

    Both ``prefix`` and ``handler_base_class`` are accepted but unused so
    far; route construction is still to be implemented.
    """

    def __init__(self, prefix="", handler_base_class=None):
        # class UpdatedClass(cls, handler_base_class): pass
        pass

    def get_routes(self):
        # Placeholder: no handler routes are registered yet.
        return []
def main(port=8888):
    """Run as sample test server.

    Starts a bare Tornado app containing only the profiler routes (which
    are currently empty) and blocks on the IOLoop.
    """
    # Imported here so importing this module does not require an IOLoop.
    import tornado.ioloop
    routes = [] + TornadoProfiler().get_routes()
    app = tornado.web.Application(routes)
    app.listen(port)
    tornado.ioloop.IOLoop.current().start()
if __name__ == "__main__":
main(port=8888)
| """Profile a Tornado application via REST."""
import tornado.web
import yappi
__author__ = "Megan Kearl Patten <megkearl@gmail.com>"
def start_profiling():
"""Start profiler."""
# POST /profiler
yappi.start(builtins=False, profile_threads=False)
def is_profiler_running():
"""Return True if the profiler is running."""
# GET /profiler
yappi.is_running()
def stop_profiling():
"""Stop the profiler."""
# DELETE /profiler
yappi.stop()
def clear_stats():
"""Clear profiler statistics."""
# DELETE /profiler/stats
yappi.clear_stats()
def get_statistics():
"""Get profiler statistics."""
# GET /profiler/stats?sort=cumulative&total=20
y_func_stats = yappi.get_func_stats()
pstats = yappi.convert2pstats(y_func_stats)
pstats.strip_dirs()
pstats.sort_stats("cumulative").print_stats(20)
class TornadoProfiler(object):
def __init__(self, prefix="", handler_base_class=None):
# class UpdatedClass(cls, handler_base_class): pass
pass
def get_routes(self):
return []
def main(port=8888):
"""Run as sample test server."""
import tornado.ioloop
routes = [] + TornadoProfiler().get_routes()
app = tornado.web.Application(routes)
app.listen(port)
tornado.ioloop.IOLoop.current().start()
if __name__ == "__main__":
main(port=8888)
| mit | Python |
97d62cd3cb08c8d43a804eb7989b03df3626f0ab | Create music.py | harryparkdotio/dabdabrevolution,harryparkdotio/dabdabrevolution,harryparkdotio/dabdabrevolution | music.py | music.py | from microbit import *
import music
import random
# Main loop: restart the background tune each pass and react to a dab.
while True:
    # wait=False so the melody plays in the background while we poll.
    music.play(music.NYAN, loop=True, wait=False)
    # NOTE(review): getValidDab is not defined or imported anywhere in
    # this file — this loop raises NameError as written; confirm where
    # the dab-detection helper is supposed to come from.
    if getValidDab():
        music.play(music.POWER_UP)
    else:
        music.play(music.POWER_DOWN)
| mit | Python | |
9c26b042c38963bf95cc6456b0f9082c1c0827f3 | Add ttype API tests | jalr/privacyidea,XCage15/privacyidea,wheldom01/privacyidea,jh23453/privacyidea,jh23453/privacyidea,privacyidea/privacyidea,woddx/privacyidea,wheldom01/privacyidea,jh23453/privacyidea,XCage15/privacyidea,wheldom01/privacyidea,jalr/privacyidea,XCage15/privacyidea,jh23453/privacyidea,jh23453/privacyidea,privacyidea/privacyidea,jalr/privacyidea,XCage15/privacyidea,woddx/privacyidea,privacyidea/privacyidea,wheldom01/privacyidea,woddx/privacyidea,woddx/privacyidea,jalr/privacyidea,XCage15/privacyidea,privacyidea/privacyidea,wheldom01/privacyidea,jh23453/privacyidea,privacyidea/privacyidea,XCage15/privacyidea,woddx/privacyidea,jalr/privacyidea,jalr/privacyidea,wheldom01/privacyidea,woddx/privacyidea,privacyidea/privacyidea | tests/test_api_ttype.py | tests/test_api_ttype.py | from urllib import urlencode
import json
from .base import MyTestCase
from privacyidea.lib.user import (User)
from privacyidea.lib.tokens.totptoken import HotpTokenClass
from privacyidea.models import (Token)
from privacyidea.lib.config import (set_privacyidea_config, get_token_types,
get_inc_fail_count_on_false_pin,
delete_privacyidea_config)
from privacyidea.lib.token import (get_tokens, init_token, remove_token,
reset_token)
from privacyidea.lib.error import (ParameterError, UserError)
PWFILE = "tests/testdata/passwords"
class TtypeAPITestCase(MyTestCase):
    """
    test the api.ttype endpoints
    """
    def test_00_create_realms(self):
        # Runs first (name ordering) so later tests can resolve realm1.
        self.setUp_user_realms()

    def test_01_tiqr(self):
        # Enroll a TiQR token, then hit the unauthenticated /ttype/tiqr
        # endpoint the phone app would call and check the metadata payload.
        init_token({"serial": "TIQR1",
                    "type": "tiqr",
                    "user": "cornelius",
                    "realm": self.realm1})
        with self.app.test_request_context('/ttype/tiqr',
                                           method='POST',
                                           data={"action": "metadata",
                                                 "serial": "TIQR1",
                                                 "session": "12345"}):
            res = self.app.full_dispatch_request()
            data = json.loads(res.data)
            identity = data.get("identity")
            service = data.get("service")
            # displayName is "<givenname> <surname>"; surname is empty here.
            self.assertEqual(identity.get("displayName"), "Cornelius ")
            self.assertEqual(service.get("displayName"), "privacyIDEA")
| agpl-3.0 | Python | |
86cae13f7dde04f7031ae111e596f2d8c03d5420 | Add tests of CSVFile and StdOut recorders | jstutters/Plumbium | tests/test_recorders.py | tests/test_recorders.py | import pytest
from plumbium.processresult import record, pipeline, call
from plumbium.recorders import CSVFile, StdOut
from collections import OrderedDict
@pytest.fixture
def simple_pipeline():
    """Pytest fixture: a minimal plumbium pipeline whose single recorded
    step echoes '6.35', giving the recorder tests a known value to read
    back from the captured output."""
    @record()
    def recorded_function():
        call(['echo', '6.35'])

    def a_pipeline():
        recorded_function()

    return a_pipeline
def test_csvfile(simple_pipeline, tmpdir):
    """CSVFile recorder writes a header row plus one row of extracted values."""
    # Run inside tmpdir so test.csv does not pollute the working directory.
    with tmpdir.as_cwd():
        # OrderedDict keeps the column order deterministic: id, then data.
        recorder = CSVFile(
            'test.csv',
            OrderedDict([
                ('id', lambda x: x['metadata']['id']),
                ('data', lambda x: x['processes'][0]['printed_output'].strip())
            ])
        )
        pipeline.run(
            'test',
            simple_pipeline,
            str(tmpdir),
            metadata={'id': 1},
            recorder=recorder
        )
        with open('test.csv') as f:
            assert f.readline().strip() == 'id,data'
            # '6.35' comes from the echo in the simple_pipeline fixture.
            assert f.readline().strip() == '1,6.35'
def test_stdout(simple_pipeline, tmpdir, capsys):
    """StdOut recorder prints one 'key: value' line per configured column."""
    with tmpdir.as_cwd():
        recorder = StdOut(
            OrderedDict([
                ('id', lambda x: x['metadata']['id']),
                ('data', lambda x: x['processes'][0]['printed_output'].strip())
            ])
        )
        pipeline.run(
            'test',
            simple_pipeline,
            str(tmpdir),
            metadata={'id': 1},
            recorder=recorder
        )
        # capsys captures everything the recorder wrote to stdout.
        out, err = capsys.readouterr()
        assert out == 'id: 1\ndata: 6.35\n'
| mit | Python | |
c850cb4832e6273c8239eeb7d457d8e16bb472d6 | Add graph factory | googleinterns/data-dependency-graph-analysis | graph_generation/graph_template.py | graph_generation/graph_template.py | """
This module implements factory for creating a graph.
Current version supports proto and networkx graphs.
"""
from proto_graph import ProtoGraph
from nx_graph import NxGraph
class GraphTemplate:
    """Factory that hands out graph-builder instances.

    Two backends are supported: a protobuf-backed graph (``ProtoGraph``)
    and a networkx-backed graph (``NxGraph``).  Each accessor constructs
    a fresh instance on every call.
    """

    @staticmethod
    def get_proto_graph():
        """Create and return a new ProtoGraph."""
        return ProtoGraph()

    @staticmethod
    def get_nx_graph():
        """Create and return a new NxGraph."""
        return NxGraph()
| apache-2.0 | Python | |
4bd9e4db4af430ae34ed87f695d72ae99ba5bb70 | Set up first test level, started to create constraints | joeYeager/BlockDudeSolver | solver.py | solver.py | from constraint import *
# Empty space is 0
# Brick is a 1
# Block is a 2
# West facing player - 3
# East facing player - 4
# Door - 5
# Level grid, row-major top to bottom.  Cell legend (from the header
# comments): 0 empty, 1 brick, 2 block, 3 west-facing player,
# 4 east-facing player, 5 door.
level = [[1,1,1,1,1,1,1,1,1,1],
         [1,0,0,0,0,0,0,0,0,1],
         [1,0,0,0,0,0,0,0,0,1],
         [1,0,0,0,0,0,0,0,0,1],
         [1,0,0,0,0,0,0,0,0,1],
         [1,0,0,0,0,0,0,0,0,1],
         [1,0,0,0,0,0,0,0,0,1],
         [1,0,0,0,0,0,0,0,0,1],
         [1,5,0,0,0,0,0,3,0,1],
         [1,1,1,1,1,1,1,1,1,1]]
problem = Problem()
# moves
# move east - e
# move west - w
# move northwest - nw
# move northeast - ne
# pickup block - p
# drop block - d
# fall - f
# Each move variable's domain lists [facing-before, facing-after] pairs
# (or a placeholder 0 for moves not yet modelled).
problem.addVariable("e", [[4,0],[3,0],[3,1],[3,2]] )
problem.addVariable("w", [[0,3],[0,4], [1,4],[2,4]])
problem.addVariable("nw", [0])
problem.addVariable("ne", [0])
problem.addVariable("p", [0])
problem.addVariable("d", [0])
# NOTE(review): no constraints have been added yet, so getSolutions()
# enumerates the full cross-product of the domains above.
solutions = problem.getSolutions()
print(solutions)
| mit | Python | |
c92e0350527e7715b6b625c33a79c993aeae66fd | Add gui.py | dsdshcym/Y86-Pipe-Simulator,dsdshcym/Y86-Pipe-Simulator | gui.py | gui.py | #!/usr/bin/python
import sys
from PyQt5.QtWidgets import QMainWindow, QDesktopWidget, QApplication
class MainWindow(QMainWindow):
    """Main application window: fixed 800x800, centered on the screen."""

    def __init__(self):
        super(MainWindow, self).__init__()
        self.init_UI()

    def init_UI(self):
        """Size the window, center it, set the title, and show it."""
        WINDOW_WIDTH = 800
        WINDOW_HEIGHT = 800
        self.resize(WINDOW_WIDTH, WINDOW_HEIGHT)
        self.center()
        self.setWindowTitle('Test GUI')
        self.show()

    def center(self):
        """Move the window to the center of the primary screen."""
        screen = QDesktopWidget().screenGeometry()
        size = self.geometry()
        # Bug fix: QWidget.move() requires int coordinates; plain '/'
        # yields a float on Python 3 and makes PyQt5 raise TypeError,
        # so use floor division.
        self.move((screen.width() - size.width()) // 2,
                  (screen.height() - size.height()) // 2)
# if __name__ == '__main__':
# NOTE(review): the main-guard above is commented out, so the app is
# created and exec'd at import time; confirm whether that is intentional.
app = QApplication(sys.argv)
main_window = MainWindow()
sys.exit(app.exec_())
| mit | Python | |
2fb5557aed14d047d1ae120f0ff91c0e355d779f | Add simple perf measuring tool | cemeyer/xkcd-skein-brute,cemeyer/xkcd-skein-brute,cemeyer/xkcd-skein-brute | ref.py | ref.py | #!/usr/bin/env python2
import sys
import subprocess
"""
Usage:
./ref.py ./main -B 1000000 -t 3 -T 31
"""
system = subprocess.check_output
githash = system("git rev-parse HEAD", shell=True).strip()
date = system("date -Ihours", shell=True).strip()
filename = "reference.%s.%s" % (githash, date)
benchargs = sys.argv[1:]
with open(filename, "wb") as fh:
fh.write(" ".join(benchargs) + "\n")
system(benchargs) # warm up
results = system(benchargs)
fh.write(results)
print "Wrote", filename
| mit | Python | |
3bb3a1f1babab9e6516f635290baa4d4e9762b8d | add pressure box device | jminardi/mecode,alexvalentine/mecode,razeh/mecode,travisbusbee/mecode | mecode/devices/efd_pressure_box.py | mecode/devices/efd_pressure_box.py | import serial
STX = '\x02' #Packet Start
ETX = '\x03' #Packet End
ACK = '\x06' #Acknowledge
NAK = '\x15' #Not Acknowledge
ENQ = '\x05' #Enquiry
EOT = '\x04' #End Of Transmission
class EFDPressureBox(object):
    """Driver for an EFD dispensing pressure box over a serial link."""

    def __init__(self, comport='COM4'):
        self.comport = comport
        self.connect()

    def connect(self):
        """Open the serial port at 115200 baud, 8N1."""
        self.s = serial.Serial(self.comport, baudrate=115200,
                               parity='N', stopbits=1, bytesize=8)

    def disconnect(self):
        """Close the serial port."""
        self.s.close()

    def send(self, command):
        """Frame ``command`` per the EFD protocol and transmit it.

        Frame layout: ENQ STX <command> <checksum> ETX EOT.
        """
        checksum = self._calculate_checksum(command)
        msg = ENQ + STX + command + checksum + ETX + EOT
        self.s.write(msg)
        # Drain any response bytes so they do not accumulate in the buffer.
        self.s.read(self.s.inWaiting())

    def set_pressure(self, pressure):
        """Set the output pressure; sent in tenths, zero-padded to 4 digits."""
        command = '08PS {}'.format(str(int(pressure * 10)).zfill(4))
        self.send(command)

    def toggle_pressure(self):
        """Toggle dispensing on/off (the 'DI' command)."""
        command = '04DI '
        self.send(command)

    def _calculate_checksum(self, string):
        """Return the checksum of ``string`` as two uppercase hex digits.

        The checksum is the low byte of the negated sum of the command
        characters.  Bug fix: the original used ``hex(checksum)[2:]``,
        which yields a single character for values below 0x10 (e.g. 'D'
        instead of '0D'), corrupting the fixed-width protocol frame.
        """
        checksum = 0
        for char in string:
            checksum -= ord(char)
        checksum %= 256
        return '{:02X}'.format(checksum)
7927fd0c13f14b348faa63c08683c6f80bdc7a0f | Create 5.3_nextsmallbig.py | HeyIamJames/CodingInterviewPractice,HeyIamJames/CodingInterviewPractice | 5.3_nextsmallbig.py | 5.3_nextsmallbig.py | """
given a positive integer, return the next smallest and largest
number with the same number of 1s in the binary representation
"""
| mit | Python | |
c9e111804974f21dbe297855ab217e964526baa2 | Add search_hints option. | gunan/tensorflow,renyi533/tensorflow,petewarden/tensorflow,jbedorf/tensorflow,arborh/tensorflow,ppwwyyxx/tensorflow,jbedorf/tensorflow,theflofly/tensorflow,Bismarrck/tensorflow,tensorflow/tensorflow,ghchinoy/tensorflow,alsrgv/tensorflow,xzturn/tensorflow,freedomtan/tensorflow,DavidNorman/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aam-at/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,petewarden/tensorflow,renyi533/tensorflow,xzturn/tensorflow,sarvex/tensorflow,adit-chandra/tensorflow,DavidNorman/tensorflow,Bismarrck/tensorflow,arborh/tensorflow,asimshankar/tensorflow,renyi533/tensorflow,paolodedios/tensorflow,asimshankar/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,alsrgv/tensorflow,ppwwyyxx/tensorflow,Intel-Corporation/tensorflow,davidzchen/tensorflow,annarev/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aldian/tensorflow,DavidNorman/tensorflow,Intel-tensorflow/tensorflow,jendap/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,alsrgv/tensorflow,theflofly/tensorflow,kevin-coder/tensorflow-fork,aam-at/tensorflow,apark263/tensorflow,tensorflow/tensorflow-pywrap_saved_model,davidzchen/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,Bismarrck/tensorflow,aam-at/tensorflow,Bismarrck/tensorflow,sarvex/tensorflow,petewarden/tensorflow,xzturn/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-pywrap_saved_model,adit-chandra/tensorflow,DavidNorman/tensorflow,kevin-coder/tensorflow-fork,karllessard/tensorflow,yongtang/tensorflow,theflofly/tensorflow,arborh/tensorflow,renyi533/tensorflow,karllessard/tensorflow,adit-chandra/tensorflow,asimshankar/tensorflow,xzturn/tensorflow,ageron/tensorflow,DavidNorman/tensorflow,petewarden/tensorflow,frreiss/tensorflow-fred,davidzchen/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,s
arvex/tensorflow,chemelnucfin/tensorflow,paolodedios/tensorflow,kevin-coder/tensorflow-fork,petewarden/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,freedomtan/tensorflow,apark263/tensorflow,jendap/tensorflow,ghchinoy/tensorflow,hfp/tensorflow-xsmm,frreiss/tensorflow-fred,kevin-coder/tensorflow-fork,ppwwyyxx/tensorflow,tensorflow/tensorflow-pywrap_saved_model,apark263/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,renyi533/tensorflow,ghchinoy/tensorflow,tensorflow/tensorflow-pywrap_saved_model,annarev/tensorflow,jbedorf/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,ghchinoy/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,asimshankar/tensorflow,hfp/tensorflow-xsmm,xzturn/tensorflow,adit-chandra/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,freedomtan/tensorflow,ghchinoy/tensorflow,petewarden/tensorflow,jhseu/tensorflow,sarvex/tensorflow,ageron/tensorflow,ageron/tensorflow,jbedorf/tensorflow,aam-at/tensorflow,freedomtan/tensorflow,chemelnucfin/tensorflow,sarvex/tensorflow,asimshankar/tensorflow,jbedorf/tensorflow,gautam1858/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,ghchinoy/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,ghchinoy/tensorflow,gautam1858/tensorflow,kevin-coder/tensorflow-fork,xzturn/tensorflow,hfp/tensorflow-xsmm,chemelnucfin/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,ageron/tensorflow,freedomtan/tensorflow,chemelnucfin/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,theflofly/tensorflow,Bismarrck/tensorflow,apark263/tensorflow,aam-at/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,ageron/tensorflow,jhseu/tensorflow,annarev/tensorflow,paolodedios/tensorflow,DavidNorman/tensorflow,jendap/tensorflow,gunan/tensorf
low,aldian/tensorflow,tensorflow/tensorflow-pywrap_saved_model,freedomtan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,alsrgv/tensorflow,adit-chandra/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred,renyi533/tensorflow,karllessard/tensorflow,ppwwyyxx/tensorflow,jhseu/tensorflow,jendap/tensorflow,theflofly/tensorflow,hfp/tensorflow-xsmm,cxxgtxy/tensorflow,theflofly/tensorflow,frreiss/tensorflow-fred,jbedorf/tensorflow,ghchinoy/tensorflow,davidzchen/tensorflow,jendap/tensorflow,Bismarrck/tensorflow,Bismarrck/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,arborh/tensorflow,chemelnucfin/tensorflow,tensorflow/tensorflow,ageron/tensorflow,karllessard/tensorflow,hfp/tensorflow-xsmm,paolodedios/tensorflow,arborh/tensorflow,paolodedios/tensorflow,annarev/tensorflow,adit-chandra/tensorflow,karllessard/tensorflow,gunan/tensorflow,aam-at/tensorflow,renyi533/tensorflow,sarvex/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,xzturn/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,chemelnucfin/tensorflow,ageron/tensorflow,jhseu/tensorflow,alsrgv/tensorflow,ghchinoy/tensorflow,davidzchen/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,Intel-tensorflow/tensorflow,petewarden/tensorflow,petewarden/tensorflow,gunan/tensorflow,chemelnucfin/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,apark263/tensorflow,cxxgtxy/tensorflow,gautam1858/tensorflow,ageron/tensorflow,arborh/tensorflow,aam-at/tensorflow,yongtang/tensorflow,davidzchen/tensorflow,DavidNorman/tensorflow,yongtang/tensorflow,DavidNorman/tensorflow,arborh/tensorflow,Bismarrck/tensorflow,davidzchen/tensorflow,Intel-Corporation/tensorflow,kevin-coder/tensorflow-fork,theflofly/tensorflow,freedomtan/tensorflow,gunan/tensorflow,kevin-coder/tensorflow-fork,hfp/tensorflow-xsmm,theflofly/tensorflow,tensorflow/tensorflow,chemelnucfin/tensorflow,annarev/tensorflow,aldian/tensorflow,Bismarrc
k/tensorflow,jbedorf/tensorflow,Intel-tensorflow/tensorflow,arborh/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,hfp/tensorflow-xsmm,aldian/tensorflow,gunan/tensorflow,karllessard/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,ppwwyyxx/tensorflow,chemelnucfin/tensorflow,hfp/tensorflow-xsmm,paolodedios/tensorflow,aam-at/tensorflow,jhseu/tensorflow,ageron/tensorflow,cxxgtxy/tensorflow,xzturn/tensorflow,adit-chandra/tensorflow,gautam1858/tensorflow,ghchinoy/tensorflow,cxxgtxy/tensorflow,yongtang/tensorflow,davidzchen/tensorflow,arborh/tensorflow,adit-chandra/tensorflow,annarev/tensorflow,xzturn/tensorflow,gunan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,apark263/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,Bismarrck/tensorflow,tensorflow/tensorflow,DavidNorman/tensorflow,petewarden/tensorflow,jbedorf/tensorflow,tensorflow/tensorflow,davidzchen/tensorflow,petewarden/tensorflow,Intel-Corporation/tensorflow,gautam1858/tensorflow,annarev/tensorflow,arborh/tensorflow,gunan/tensorflow,annarev/tensorflow,jbedorf/tensorflow,kevin-coder/tensorflow-fork,tensorflow/tensorflow-pywrap_tf_optimizer,xzturn/tensorflow,gunan/tensorflow,theflofly/tensorflow,tensorflow/tensorflow,Bismarrck/tensorflow,kevin-coder/tensorflow-fork,freedomtan/tensorflow,apark263/tensorflow,aldian/tensorflow,chemelnucfin/tensorflow,adit-chandra/tensorflow,ppwwyyxx/tensorflow,Intel-tensorflow/tensorflow,alsrgv/tensorflow,renyi533/tensorflow,apark263/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,aam-at/tensorflow,yongtang/tensorflow,ppwwyyxx/tensorflow,hfp/tensorflow-xsmm,gunan/tensorflow,jendap/tensorflow,hfp/tensorflow-xsmm,freedomtan/tensorflow,ghchinoy/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-pywrap_saved_model,davidzchen/tensorflow,jhseu/tensorflow,freedomtan/tensorflow,gautam1858/tensorflow,arborh/tensorflow,gunan/tensorflow,jhseu/tensorflow,chemelnucfin/tensorflow,alsrgv/tensorflow,tens
orflow/tensorflow-pywrap_tf_optimizer,renyi533/tensorflow,theflofly/tensorflow,xzturn/tensorflow,adit-chandra/tensorflow,hfp/tensorflow-xsmm,jbedorf/tensorflow,jhseu/tensorflow,aam-at/tensorflow,sarvex/tensorflow,davidzchen/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,adit-chandra/tensorflow,kevin-coder/tensorflow-fork,cxxgtxy/tensorflow,kevin-coder/tensorflow-fork,apark263/tensorflow,asimshankar/tensorflow,karllessard/tensorflow,ghchinoy/tensorflow,jbedorf/tensorflow,asimshankar/tensorflow,aldian/tensorflow,asimshankar/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,ageron/tensorflow,frreiss/tensorflow-fred,Intel-Corporation/tensorflow,aam-at/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,chemelnucfin/tensorflow,annarev/tensorflow,xzturn/tensorflow,renyi533/tensorflow,alsrgv/tensorflow,DavidNorman/tensorflow,asimshankar/tensorflow,alsrgv/tensorflow,ppwwyyxx/tensorflow,jhseu/tensorflow,aam-at/tensorflow,adit-chandra/tensorflow,yongtang/tensorflow,cxxgtxy/tensorflow,asimshankar/tensorflow,gautam1858/tensorflow,karllessard/tensorflow,theflofly/tensorflow,frreiss/tensorflow-fred,petewarden/tensorflow,Intel-tensorflow/tensorflow,gunan/tensorflow,jbedorf/tensorflow,aldian/tensorflow,alsrgv/tensorflow,theflofly/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,DavidNorman/tensorflow,jendap/tensorflow,DavidNorman/tensorflow,Intel-tensorflow/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,cxxgtxy/tensorflow,jendap/tensorflow,gautam1858/tensorflow,yongtang/tensorflow,jendap/tensorflow,cxxgtxy/tensorflow,tensorflow/tensorflow,apark263/tensorflow,jendap/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,apark263/tensorflow,petewarden/tensorflow,ageron/tensorflow,ageron/tensorflow,jendap/tensorflow,asimshankar/tensorflow,aldian/tensorflow,renyi533/tensorflow,gautam1858/tensorflow,jhseu/tensorflo
w,alsrgv/tensorflow,sarvex/tensorflow,alsrgv/tensorflow,arborh/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once | tensorflow/tools/docs/generate2.py | tensorflow/tools/docs/generate2.py | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""A tool to generate api_docs for TensorFlow2.
```
python generate2.py --output_dir=/tmp/out
```
Requires a local installation of:
https://github.com/tensorflow/docs/tree/master/tools
tf-nightly-2.0-preview
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from os import path
from absl import app
from absl import flags
import tensorflow as tf
from tensorflow_docs.api_generator import generate_lib
FLAGS = flags.FLAGS
flags.DEFINE_string(
"code_url_prefix",
"/code/stable/tensorflow/",
"A url to prepend to code paths when creating links to defining code")
flags.DEFINE_string(
"output_dir", "/tmp/out",
"A directory, where the docs will be output to.")
flags.DEFINE_bool("search_hints", True,
"Include meta-data search hints at the top of each file.")
def build_docs(output_dir, code_url_prefix, search_hints=True):
  """Build api docs for tensorflow v2.
  Args:
    output_dir: A string path, where to put the files.
    code_url_prefix: prefix for "Defined in" links.
    search_hints: Bool. Include meta-data search hints at the top of each file.
  """
  # Docs are generated from the tensorflow package installed in this
  # environment, rooted at its actual install location.
  base_dir = path.dirname(tf.__file__)
  doc_generator = generate_lib.DocGenerator(
      root_title="TensorFlow 2.0 Preview",
      py_modules=[("tf", tf)],
      base_dir=base_dir,
      search_hints=search_hints,
      code_url_prefix=code_url_prefix,
      site_path="api_docs/")
  doc_generator.build(output_dir)
def main(argv):
  """absl.app entry point; forwards the CLI flag values to build_docs."""
  del argv  # Unused positional arguments.
  build_docs(output_dir=FLAGS.output_dir,
             code_url_prefix=FLAGS.code_url_prefix,
             search_hints=FLAGS.search_hints)
if __name__ == "__main__":
app.run(main)
| # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""A tool to generate api_docs for TensorFlow2.
```
python generate2.py --output_dir=/tmp/out
```
Requires a local installation of:
https://github.com/tensorflow/docs/tree/master/tools
tf-nightly-2.0-preview
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from os import path
from absl import app
from absl import flags
import tensorflow as tf
from tensorflow_docs.api_generator import generate_lib
FLAGS = flags.FLAGS
flags.DEFINE_string(
"code_url_prefix",
"/code/stable/tensorflow/",
"A url to prepend to code paths when creating links to defining code")
flags.DEFINE_string(
"output_dir", "/tmp/out",
"A directory, where the docs will be output to.")
def build_docs(output_dir, code_url_prefix):
  """Build api docs for tensorflow v2.
  Args:
    output_dir: A string path, where to put the files.
    code_url_prefix: prefix for "Defined in" links.
  """
  # Docs are generated from the tensorflow package installed in this
  # environment, rooted at its actual install location.
  base_dir = path.dirname(tf.__file__)
  doc_generator = generate_lib.DocGenerator(
      root_title="TensorFlow 2.0 Preview",
      py_modules=[("tf", tf)],
      base_dir=base_dir,
      search_hints=True,
      code_url_prefix=code_url_prefix,
      site_path="api_docs/")
  doc_generator.build(output_dir)
def main(argv):
  """absl.app entry point; forwards the CLI flag values to build_docs."""
  del argv  # Unused positional arguments.
  build_docs(output_dir=FLAGS.output_dir,
             code_url_prefix=FLAGS.code_url_prefix)
if __name__ == "__main__":
app.run(main)
| apache-2.0 | Python |
1e82d6110bee6953b78ee357ed5e0b94710b1357 | fix urls | aristotle-mdr/user-documentation,aristotle-mdr/user-documentation,aristotle-mdr/user-documentation | heroku/urls.py | heroku/urls.py | from django.conf.urls import include, url
# Route table for the Heroku deployment.  Django tries the entries in
# order; several share the r'^' prefix and fall through to the next
# include when their sub-URLconf does not match.
urlpatterns = [
    url(r'^fafl', include('fafl.urls')),
    url(r'^', include('aristotle_cloud.urls')),
    url(r'^publish/', include('aristotle_mdr.contrib.self_publish.urls', app_name="aristotle_self_publish", namespace="aristotle_self_publish")),
    url(r'^', include('aristotle_mdr.contrib.links.urls', app_name="aristotle_mdr_links", namespace="aristotle_links")),
    url(r'^', include('aristotle_mdr.contrib.slots.urls', app_name="aristotle_slots", namespace="aristotle_slots")),
    url(r'^', include('aristotle_mdr.contrib.identifiers.urls', app_name="aristotle_mdr_identifiers", namespace="aristotle_identifiers")),
]
| mit | Python | |
6f9ced48a8c423e505e21cfa9a0b0d05b4c86f5c | Add lava context processor. | Linaro/lava-server,OSSystems/lava-server,Linaro/lava-server,OSSystems/lava-server,Linaro/lava-server,OSSystems/lava-server,Linaro/lava-server | lava_server/context_processors.py | lava_server/context_processors.py | # Copyright (C) 2010, 2011 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of LAVA Server.
#
# LAVA Server is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# LAVA Server is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with LAVA Server. If not, see <http://www.gnu.org/licenses/>.
from lava_server.extension import loader
def lava(request):
    """Template context processor exposing the loaded LAVA extensions.

    Templates can reach the extension list as ``lava.extensions``.
    """
    extension_info = {'extensions': loader.extensions}
    return {'lava': extension_info}
| agpl-3.0 | Python | |
54f6b9e5d8769ba608fe0d3f14eda2746319d6d2 | Add class DepthSerializerMixin | krescruz/depth-serializer-mixin | mixins.py | mixins.py | class DepthSerializerMixin(object):
"""Custom method 'get_serializer_class', set attribute 'depth' based on query parameter in the url"""
    def get_serializer_class(self):
        """Return the serializer class with ``Meta.depth`` taken from the
        ``__depth`` query parameter (0 when absent or non-numeric).

        NOTE(review): this mutates ``Meta.depth`` on the serializer *class*
        object, so the setting is shared across concurrent requests —
        confirm that is acceptable.
        """
        serializer_class = self.serializer_class
        query_params = self.request.QUERY_PARAMS
        depth = query_params.get('__depth', None)
        # Only digit strings are honoured; anything else falls back to 0.
        serializer_class.Meta.depth = int(depth) if(depth != None and depth.isdigit()) else 0
        return serializer_class
| mit | Python | |
935375fdc785adbbf74c8f943d319988ef4240f5 | Create execute.py | krishnasumanthm/Quora_Answer_Classifier | execute.py | execute.py | import extractor as ex
from sklearn.svm import LinearSVC
from sklearn import linear_model
from sklearn.naive_bayes import MultinomialNB
from sklearn.naive_bayes import GaussianNB
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import AdaBoostClassifier
from sklearn.qda import QDA
from sklearn.lda import LDA
from sklearn.svm import SVC
"""
L1- Based Feature Selection
"""
def extract_linear_features_indexes(features, labels):
    """
    Perform L1-based linear (LinearSVC) feature selection.

    Returns the column indexes whose fitted coefficient magnitude exceeds
    1e-6, i.e. the features the sparse model kept.
    """
    clf = LinearSVC(C=0.01, penalty="l1", dual=False)
    clf.fit(features, labels)
    # coef_[0]: binary classification yields a single coefficient row.
    return [i for i, e in enumerate(clf.coef_[0]) if e != 0 and abs(e) > 1e-6]
def extract_lasso_features_indexes(features, labels):
    """
    Perform Lasso feature selection.

    Returns the column indexes with non-negligible (>1e-6) Lasso weights.
    """
    clf = linear_model.Lasso(alpha=0.022, fit_intercept=False,
                             max_iter=2000,normalize=False, positive=False,
                             tol=0.001, warm_start=True)
    clf.fit(features, labels)
    return [i for i, e in enumerate(clf.coef_) if e != 0 and abs(e) > 1e-6]
def extract_features(included_index, features, labels):
    """
    Keep only the feature columns listed in ``included_index``.

    Returns the column-filtered feature matrix together with the
    unmodified labels.
    """
    selected = features[:, included_index]
    return selected, labels
def scaled_features(features, labels):
    """
    Scale every feature column by dividing by its column maximum.

    All-zero columns get a divisor of 1 to avoid division by zero.
    """
    divisors = features.max(axis=0)
    divisors = divisors + (divisors == 0)
    return features / divisors, labels
def main():
input_filename = 'data/input00.txt'
output_filename = 'data/output00.txt'
(train_features, train_labels,test_features, test_labels) = ex.extract(input_filename, output_filename)
classifiers = {
"NB Multinomial" : MultinomialNB(),
"NB Gaussian": GaussianNB(),
"Logistic Regression" : LogisticRegression(C=1e5, tol=0.001, fit_intercept=True),
"Decision Tree" : DecisionTreeClassifier(min_samples_split=1, random_state=0),
"KNN" : KNeighborsClassifier(n_neighbors=3),
"SVM" : SVC(gamma=2, C=1),
"LDA" : LDA(),
"QDA" : QDA(reg_param=0.5),
"Random Forest" : RandomForestClassifier(n_estimators=200),
"AdaBoost" : AdaBoostClassifier(n_estimators=200),
}
print "-"*80, "\n", "Raw Dataset", "\n", "-"*80
for name, classifier in classifiers.iteritems():
clf = classifier.fit(train_features,train_labels)
print name, clf.score(test_features,test_labels)
print "-"*80, "\n", "Scaled Feature Dataset", "\n", "-"*80
for name, classifier in classifiers.iteritems():
(new_features,new_lables) = scaled_features(train_features, train_labels)
clf = classifier.fit(new_features,new_lables)
(new_test_features,new_test_lables) = scaled_features(train_features, train_labels)
print name, clf.score(new_test_features,new_test_lables)
print "-"*80, "\n", "Lasso Feature Selection", "\n", "-"*80
for name, classifier in classifiers.iteritems():
(new_features,new_lables) = extract_features(extract_lasso_features_indexes(train_features, train_labels),train_features, train_labels)
clf = classifier.fit(new_features,new_lables)
(new_test_features,new_test_lables) = extract_features(extract_lasso_features_indexes(train_features, train_labels),test_features,test_labels)
print name, clf.score(new_test_features,new_test_lables)
print "-"*80, "\n", "Linear Feature Selection", "\n", "-"*80
for name, classifier in classifiers.iteritems():
(new_features,new_lables) = extract_features(extract_linear_features_indexes(train_features, train_labels),train_features, train_labels)
clf = classifier.fit(new_features,new_lables)
(new_test_features,new_test_lables) = extract_features(extract_linear_features_indexes(train_features, train_labels),test_features,test_labels)
print name, clf.score(new_test_features,new_test_lables)
if __name__ == '__main__':
main()
| mit | Python | |
2b9df0394285f601c80de2b6e7c5c39006caa3ed | add deployment script for OMS PDS, see ./doc/INSTALL.rst for more details on the deployment | patcon/openPDS,HumanDynamics/openPDS,patcon/openPDS,eschloss/FluFuture,HumanDynamics/openPDS,eschloss/FluFuture,HumanDynamics/openPDS,patcon/openPDS,eschloss/FluFuture | fabfile.py | fabfile.py | '''
Install PDS
-----------
details at oms-deploy! https://github.com/IDCubed/oms-deploy
'''
from oms_fabric.webapp import Webapp
PDS = Webapp()
PDS.repo_url = 'https://github.com/IDCubed/OMS-PDS'
PDS.repo_name = 'OMS-PDS'
def deploy_project(instance='pds',
                   branch='master',
                   config='./conf/deploy.ini'):
    '''
    Deploy the OMS-PDS webapp for *instance* from *branch* using *config*.

    direct pull from fabfile.py in resource server, not tested and needs more
    work, just exemplifying the idea.
    '''
    # Record the branch on the shared Webapp object before delegating.
    PDS.branch = branch
    PDS.deploy_project(instance, config, branch)
def create_config(outfile='./conf/deploy.ini'):
    '''
    ask the user some questions, create a configuration, and write to outfile

    NOTE(review): ``user_config`` is neither defined nor imported in this
    file, so running this task raises NameError — confirm the intended
    helper (presumably from oms_fabric) and import it.
    '''
    user_config(outfile)
| mit | Python | |
1214755d5023331a432a2cea3224a6d117622393 | Create odrive.py | finoradin/moma-utils,finoradin/moma-utils | odrive.py | odrive.py | #!/usr/bin/env python
import glob
import os
import re
'''
# tool for MoMA O drive migration
# will create "artist level" folders and move object level folders inside
Pseudo code
1. crawl folders 2 levels deep
2. for folder:
if folder matches *---*---*---* pattern:
a. if artist level folder already exists
aa. move object level folder inside
bb. rename object level folder to folder[2]---folder[1]
b. else
aa. create new folder using first part of name
bb. move object level folder inside
cc. rename object level folder to folder[2]---folder[1]
'''
# pattern for object level folders
re1='.*'
re2='---'
pattern = re.compile(re1+re2+re1+re2+re1+re2+re1,re.IGNORECASE|re.DOTALL)
def walklevel(some_dir, level=1):
    """Like os.walk, but descend at most *level* directory levels below
    *some_dir* (level=1 yields the directory itself plus its children)."""
    some_dir = some_dir.rstrip(os.path.sep)
    assert os.path.isdir(some_dir)
    base_depth = some_dir.count(os.path.sep)
    for root, dirs, files in os.walk(some_dir):
        yield root, dirs, files
        # Emptying dirs in place stops os.walk from descending further.
        if root.count(os.path.sep) >= base_depth + level:
            del dirs[:]
path = '/Volumes/Dept/CONSERV/All Conservation/0000 Media/1 Artists'
walker = walklevel(path)
for folder in walker:
objectlevel_dirs = []
artist_dirs = []
print '\n\n'+"Listing "+ folder[0]
for subdir in folder[1]:
# print subdir
if pattern.match(subdir):
# print subdir
objectlevel_dirs.append(subdir)
# print subdir.split('---')
else:
artist_dirs.append(subdir)
for directory in objectlevel_dirs:
artistname = directory.split('---')
print artistname[0]
if artistname in artist_dirs:
print "match!!!"
print artist_dirs
# print objectlevel_dirs
| mit | Python | |
10bd939290f4a9195ff582addb17beed5ba08f67 | Add some examples of situations the leak detector detects. | Nextdoor/nose-leak-detector | examples.py | examples.py | """ Some examples of what nose leak detector can detect. """
# Py3 stdlib mock, with fallback to the standalone "mock" package on Py2.
try:
    from unittest import mock
except ImportError:
    from mock import mock
# NOTE: the "leaks" below are intentional — these tests exist to be
# flagged by the nose leak detector, not to pass cleanly.
new_global_mock = None
def test_with_leaked_new_global_mock():
    # Leak: creates a Mock in module scope that outlives the test.
    global new_global_mock
    new_global_mock = mock.Mock()
called_global_mock = mock.Mock()
def test_with_leaked_called_global_mock():
    # Leak: records a call on a module-level Mock without resetting it.
    global called_global_mock
    called_global_mock()
| bsd-2-clause | Python | |
cc6322683b0f98f666f0bef130fab4e7c45e07a6 | Add auxiliary python file | cpmech/gosl,cpmech/gosl,cpmech/gosl,cpmech/gosl,cpmech/gosl | la/oblas/data/auxiliary.py | la/oblas/data/auxiliary.py | # Copyright 2016 The Gosl Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
def vprint(name, v):
    """Print vector *v* as a Go []float64 literal named *name*.

    Each entry is formatted as %23.15e so values round-trip exactly.
    """
    body = ','.join('%23.15e' % value for value in v)
    # Parenthesised print works under both Python 2 and Python 3
    # (the original `print l` statement is a SyntaxError on Py3).
    print('%s := []float64{%s}' % (name, body))
def mprint(name, m):
    """Print matrix *m* as a Go [][]float64 literal named *name*.

    One row per line, entries formatted as %23.15e.
    """
    rows = []
    for row in m:
        rows.append('    {' + ','.join('%23.15e' % value for value in row) + '},\n')
    # Parenthesised print works under both Python 2 and Python 3
    # (the original `print l` statement is a SyntaxError on Py3).
    print('%s := [][]float64{\n%s}' % (name, ''.join(rows)))
| bsd-3-clause | Python | |
e39ffc74a2386d58d62e1302ea0b0f2e8550cf84 | Create genetic algorithm engine: genetic.py | felipemaion/genetic-algorithms | genetic.py | genetic.py |
import random
import statistics
import sys
import time
def _generate_parent(length, geneSet, get_fitness):
    """Build a random Chromosome of *length* genes drawn from *geneSet*.

    Genes are drawn without replacement in chunks of at most
    len(geneSet), so repeats only occur across chunks.
    """
    pieces = []
    remaining = length
    while remaining > 0:
        take = min(remaining, len(geneSet))
        pieces.extend(random.sample(geneSet, take))
        remaining -= take
    candidate = ''.join(pieces)
    return Chromosome(candidate, get_fitness(candidate))
def _mutate(parent, geneSet, get_fitness):
    """Return a copy of *parent* with one randomly chosen gene replaced.

    Two candidate genes are drawn; if the first equals the gene being
    replaced the alternate is used, so the child always differs.
    """
    position = random.randrange(0, len(parent.Genes))
    child = list(parent.Genes)
    replacement, fallback = random.sample(geneSet, 2)
    if replacement == child[position]:
        child[position] = fallback
    else:
        child[position] = replacement
    mutated = ''.join(child)
    return Chromosome(mutated, get_fitness(mutated))
def get_best(get_fitness, targetLen, optimalFitness, geneSet, display):
    """Hill-climbing driver: mutate the best chromosome until its fitness
    reaches *optimalFitness*, reporting each improvement via *display*.
    """
    random.seed()
    bestParent = _generate_parent(targetLen, geneSet, get_fitness)
    display(bestParent)
    if bestParent.Fitness >= optimalFitness:
        return bestParent
    while True:
        child = _mutate(bestParent, geneSet, get_fitness)
        # Discard mutations that do not strictly improve fitness.
        if bestParent.Fitness >= child.Fitness:
            continue
        display(child)
        if child.Fitness >= optimalFitness:
            return child
        bestParent = child
class Chromosome:
    """Value object pairing a gene string with its fitness score."""
    def __init__(self, genes, fitness):
        self.Genes = genes
        self.Fitness = fitness
class Benchmark:
    """Utility to time a function repeatedly and report running statistics."""
    @staticmethod
    def run(function):
        # Runs *function* 100 times, printing the running mean and standard
        # deviation (seconds) for the first 10 runs and every 10th run after.
        timings = []
        stdout = sys.stdout
        for i in range(100):
            # Silence output produced by *function*: CPython's print()
            # silently discards output while sys.stdout is None.
            sys.stdout = None
            startTime = time.time()
            function()
            seconds = time.time() - startTime
            sys.stdout = stdout
            timings.append(seconds)
            mean = statistics.mean(timings)
            if i < 10 or i % 10 == 9:
                print("{} {:3.2f} {:3.2f}".format(
                    1 + i, mean,
                    statistics.stdev(timings, mean) if i > 1 else 0))
| mit | Python | |
8588624502c88b89426619640acee0077332906d | Create getImgs.py | xiepeiliang/WebCrawler | getImgs.py | getImgs.py | #在猫扑网上爬取一些好看的~图片
#-*-coding:utf-8-*-
import urllib2, re, urllib
from bs4 import BeautifulSoup
def getHtml(url):
    """Fetch *url* with a desktop-Firefox User-Agent and return the raw body."""
    header = {'User-Agent':'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:55.0) Gecko/20100101 Firefox/55.0'}
    request = urllib2.Request(url, headers=header)
    response = urllib2.urlopen(request)
    html = response.read()
    return html
def getImg(html):
    """Extract image links from <p class="tc mb10"> blocks in *html* and
    download them one by one to a hard-coded local directory."""
    x = 1
    imgUrls = []
    html = BeautifulSoup(html, 'html.parser')
    imgs = html.find_all('p', class_="tc mb10")
    for i in imgs:
        temp = i.find('img')
        link = temp.get('src')
        # The page uses protocol-relative URLs; prepend the scheme.
        imgUrls.append('http:'+link)
    for i in imgUrls:
        urllib.urlretrieve(i, '/home/xpl/图片/%d.jpg'%x)
        x += 1
if __name__ == '__main__':
    # Scrape one hard-coded article page and download its images.
    url = 'http://tt.mop.com/a/170905093348712403573.html'
    html = getHtml(url)
    getImg(html)
| mit | Python | |
a6eab0de0a7b681d13462528edd0fec212452341 | Create pset1.py | gaurav61/MIT6.00x | pset1.py | pset1.py | # PROBLEM 1 : Counting Vowels
# Counts the vowels in the string ``s`` (``s`` is supplied by the course
# grader environment, not defined in this file).
count=0
for char in s:
    if char=='a' or char=='e' or char=='i' or char=='o' or char=='u':
        count+=1
print count
# PROBLEM 2 : Counting Bobs
# Counts (possibly overlapping) occurrences of "bob" in ``s``.
count=0
for i in range(0,len(s)-2):
    if s[i]=='b' and s[i+1]=='o' and s[i+2]=='b':
        count+=1
print count
# PROBLEM 3 : Counting and Grouping
def item_order(order):
    """Return how many times each menu item appears in *order*, formatted
    as 'salad:<n> hamburger:<n> water:<n>'."""
    salads = order.count('salad')
    burgers = order.count('hamburger')
    waters = order.count('water')
    return 'salad:{0} hamburger:{1} water:{2}'.format(salads, burgers, waters)
| mit | Python | |
7107149104424959f989b9bfaef48dd09391d7cc | Add lc0819_most_common_word.py | bowen0701/algorithms_data_structures | lc0819_most_common_word.py | lc0819_most_common_word.py | """Leetcode 819. Most Common Word
Easy
URL: https://leetcode.com/problems/most-common-word/
Given a paragraph and a list of banned words, return the most frequent word that
is not in the list of banned words.
It is guaranteed there is at least one word that isn't banned,
and that the answer is unique.
Words in the list of banned words are given in lowercase, and free of punctuation.
Words in the paragraph are not case sensitive. The answer is in lowercase.
Example:
Input:
paragraph = "Bob hit a ball, the hit BALL flew far after it was hit."
banned = ["hit"]
Output: "ball"
Explanation:
"hit" occurs 3 times, but it is a banned word.
"ball" occurs twice (and no other word does), so it is the
most frequent non-banned word in the paragraph.
Note that words in the paragraph are not case sensitive,
that punctuation is ignored (even if adjacent to words, such as "ball,"),
and that "hit" isn't the answer even though it occurs more because it is banned.
Note:
- 1 <= paragraph.length <= 1000.
- 0 <= banned.length <= 100.
- 1 <= banned[i].length <= 10.
- The answer is unique, and written in lowercase (even if its occurrences in
paragraph may have uppercase symbols, and even if it is a proper noun.)
- paragraph only consists of letters, spaces, or the punctuation symbols !?',;.
- There are no hyphens or hyphenated words.
- Words only consist of letters, never apostrophes or other punctuation symbols.
"""
class Solution(object):
    def mostCommonWord(self, paragraph, banned):
        """Return the most frequent non-banned word in *paragraph*.

        Words are compared case-insensitively and punctuation is ignored;
        the answer is returned in lowercase (see the module docstring).

        :type paragraph: str
        :type banned: List[str]
        :rtype: str
        """
        import re
        from collections import Counter
        banned_set = set(banned)
        # Lowercase first, then keep only alphabetic runs as words.
        words = re.findall(r'[a-z]+', paragraph.lower())
        counts = Counter(w for w in words if w not in banned_set)
        # The problem guarantees at least one non-banned word exists.
        return counts.most_common(1)[0][0]
def main():
    # Placeholder driver; intentionally empty.
    pass
if __name__ == '__main__':
    main()
| bsd-2-clause | Python | |
034aaea2e8e56d24d709669f9992b0806b638621 | create a new python file test_cut_milestone.py | WheatonCS/Lexos,WheatonCS/Lexos,WheatonCS/Lexos | test/unit_test/test_cut_milestone2.py | test/unit_test/test_cut_milestone2.py | from lexos.processors.prepare.cutter import cut_by_milestone
class TestMileStone:
    """Unit tests for cutter.cut_by_milestone (milestone-based text cutting)."""
    def test_milestone_regular(self):
        # The milestone itself is removed; text is split around it.
        text_content = "The bobcat slept all day.."
        milestone = "bobcat"
        assert cut_by_milestone(text_content, milestone) == ["The ",
                                                            " slept all day.."
                                                            ]
    def test_milestone_no_milestone_in_text(self):
        # No occurrence: the whole text comes back as a single chunk.
        text_content = "The bobcat slept all day."
        milestone = "am"
        assert cut_by_milestone(text_content, milestone) == [
            "The bobcat slept all day."]
    def test_milestone_longer_than_text(self):
        # A milestone longer than the text can never match.
        text_content = "The bobcat slept all day."
        milestone = "The cute bobcat slept all day."
        assert cut_by_milestone(text_content, milestone) == [
            "The bobcat slept all day."]
    def test_milestone_len_zero(self):
        # An empty milestone is treated as "no cutting".
        text_content = "The bobcat slept all day."
        milestone = ""
        assert cut_by_milestone(text_content, milestone) == [
            "The bobcat slept all day."]
    def test_milestone_empty_text(self):
        # Empty input yields no chunks at all.
        text_content = ""
        milestone = "bobcat"
        assert cut_by_milestone(text_content, milestone) == []
    def test_milestone_check_case_sensative(self):
        # Matching is case-sensitive: "BOBCAT" does not match "bobcat".
        text_content = "The bobcat slept all day."
        milestone = "BOBCAT"
        assert cut_by_milestone(text_content, milestone) == ["The bobcat "
                                                            "slept all day."]
6b9a0fe7181a3d80a4a88d32e895dda51923a96b | fix bug 984562 - track b2g 1.3 builds | linearregression/socorro,adngdb/socorro,pcabido/socorro,yglazko/socorro,spthaolt/socorro,luser/socorro,Tchanders/socorro,AdrianGaudebert/socorro,bsmedberg/socorro,mozilla/socorro,AdrianGaudebert/socorro,KaiRo-at/socorro,Tchanders/socorro,twobraids/socorro,twobraids/socorro,spthaolt/socorro,pcabido/socorro,adngdb/socorro,m8ttyB/socorro,yglazko/socorro,rhelmer/socorro,Tchanders/socorro,spthaolt/socorro,rhelmer/socorro,KaiRo-at/socorro,luser/socorro,cliqz/socorro,Tayamarn/socorro,m8ttyB/socorro,linearregression/socorro,m8ttyB/socorro,linearregression/socorro,Tchanders/socorro,luser/socorro,Serg09/socorro,KaiRo-at/socorro,pcabido/socorro,rhelmer/socorro,Serg09/socorro,lonnen/socorro,Tchanders/socorro,KaiRo-at/socorro,yglazko/socorro,spthaolt/socorro,adngdb/socorro,bsmedberg/socorro,linearregression/socorro,lonnen/socorro,rhelmer/socorro,AdrianGaudebert/socorro,m8ttyB/socorro,KaiRo-at/socorro,Tayamarn/socorro,bsmedberg/socorro,Tayamarn/socorro,mozilla/socorro,Serg09/socorro,spthaolt/socorro,linearregression/socorro,pcabido/socorro,yglazko/socorro,twobraids/socorro,AdrianGaudebert/socorro,m8ttyB/socorro,cliqz/socorro,luser/socorro,adngdb/socorro,Tayamarn/socorro,Serg09/socorro,mozilla/socorro,cliqz/socorro,cliqz/socorro,AdrianGaudebert/socorro,mozilla/socorro,bsmedberg/socorro,adngdb/socorro,Tayamarn/socorro,adngdb/socorro,pcabido/socorro,Tayamarn/socorro,Tchanders/socorro,m8ttyB/socorro,luser/socorro,spthaolt/socorro,pcabido/socorro,twobraids/socorro,linearregression/socorro,luser/socorro,twobraids/socorro,mozilla/socorro,Serg09/socorro,Serg09/socorro,twobraids/socorro,KaiRo-at/socorro,yglazko/socorro,lonnen/socorro,rhelmer/socorro,cliqz/socorro,mozilla/socorro,yglazko/socorro,cliqz/socorro,bsmedberg/socorro,lonnen/socorro,rhelmer/socorro,AdrianGaudebert/socorro | alembic/versions/224f0fda6ecb_bug_984562-track-b2g-1_3.py | alembic/versions/224f0fda6ecb_bug_984562-track-b2g-1_3.py | """bug 
984562 - track b2g 1.3
Revision ID: 224f0fda6ecb
Revises: 4c279bec76d8
Create Date: 2014-03-28 10:54:59.521434
"""
# revision identifiers, used by Alembic.
revision = '224f0fda6ecb'
down_revision = '4c279bec76d8'
from alembic import op
from socorro.lib import citexttype, jsontype, buildtype
from socorro.lib.migrations import fix_permissions, load_stored_proc
import sqlalchemy as sa
from sqlalchemy import types
from sqlalchemy.dialects import postgresql
from sqlalchemy.sql import table, column
def upgrade():
op.execute("""
INSERT INTO update_channel_map
(update_channel, productid, version_field, rewrite)
VALUES
('hamachi/1.3.0/nightly',
'{3c2e2abc-06d4-11e1-ac3b-374f68613e61}',
'B2G_OS_Version',
'{"Android_Manufacturer": "unknown",
"Android_Model": "msm7627a",
"Android_Version": "15(REL)",
"B2G_OS_Version": "1.3.0.0-prerelease",
"BuildID":
["20140317004001"],
"ProductName": "B2G",
"ReleaseChannel": "hamachi/1.3.0/nightly",
"Version": "28.0",
"rewrite_update_channel_to": "release-buri",
"rewrite_build_type_to": "release"}'
)
""")
op.execute("""
INSERT INTO update_channel_map
(update_channel, productid, version_field, rewrite)
VALUES
('default',
'{3c2e2abc-06d4-11e1-ac3b-374f68613e61}',
'B2G_OS_Version',
'{"Android_Manufacturer": "Spreadtrum",
"Android_Model": "sp6821a",
"Android_Version": "15(AOSP)",
"B2G_OS_Version": "1.3.0.0-prerelease",
"BuildID":
["20140317060055"],
"ProductName": "B2G",
"ReleaseChannel": "default",
"Version": "28.0",
"rewrite_update_channel_to": "release-tarako",
"rewrite_build_type_to": "release"}'
)
""")
def downgrade():
op.execute("""
DELETE FROM update_channel_map
WHERE rewrite->>'rewrite_update_channel_to' = 'release-buri'
OR rewrite->>'rewrite_update_channel_to' = 'release-tarako'
""")
| mpl-2.0 | Python | |
e0786c5798b35b911193de1b4e3694b7ad8cad76 | Add unit test for generate_admin_metadata helper function | JIC-CSB/dtoolcore | tests/test_generate_admin_metadata.py | tests/test_generate_admin_metadata.py | """Test the generate_admin_metadata helper function."""
def test_generate_admin_metadata():
import dtoolcore
from dtoolcore import generate_admin_metadata
admin_metadata = generate_admin_metadata("ds-name", "creator-name")
assert len(admin_metadata["uuid"]) == 36
assert admin_metadata["dtoolcore_version"] == dtoolcore.__version__
assert admin_metadata["name"] == "ds-name"
assert admin_metadata["type"] == "protodataset"
assert admin_metadata["creator_username"] == "creator-name"
| mit | Python | |
1db200b51d05e799b1016cdf1ed04726a3377635 | Add basic structure of rules. | sievetech/rgc | rules.py | rules.py | # -*- coding: utf-8 -*-
class RuleDoesNotExistError(Exception):
def __init__(self, rulename):
self.value = rulename
def __str__(self):
return 'rule "{0}" is not implemented'.format(self.value)
def __unicode__(self):
return str(self).decode('utf-8')
class RuleSet(object):
def __init__(self, *args, **kwargs):
self.rules = kwargs
def apply(self, obj):
"""
obj: A file inside a container (cloudfiles.Object)
"""
ret_val = False
for rulename, arg in self.rules.iteritems():
try:
rule = getattr(self, rulename+'_rule')
except AttributeError:
raise RuleDoesNotExistError(rulename)
else:
ret_val &= rule(obj, arg)
return ret_val
| bsd-3-clause | Python | |
724dc6ff77e9494e9519cb507cf43644034d5ca6 | Integrate switch 10 and introduce couplings. | wglas85/pytrain,wglas85/pytrain,wglas85/pytrain,wglas85/pytrain | run_2.py | run_2.py | #!/usr/bin/python3
#
# start pytrain
#
import os
import sys
MYDIR = os.path.dirname(sys.argv[0])
os.system(MYDIR+"/run.sh")
if len(sys.argv)==1:
sys.argv.append("10.0.0.6")
os.system("chromium http://%s:8000/index.html"%(sys.argv[1]))
| apache-2.0 | Python | |
73cb99538aa48e43cfc3b2833ecf0ececee1dc42 | Add timers example | asvetlov/europython2015,hguemar/europython2015,ifosch/europython2015 | timers.py | timers.py | import asyncio
@asyncio.coroutine
def coro(loop):
yield from asyncio.sleep(0.5, loop=loop)
print("Called coro")
loop = asyncio.get_event_loop()
def cb(arg):
print("Called", arg)
loop.create_task(coro(loop))
loop.call_soon(cb, 1)
loop.call_later(0.4, cb, 2)
loop.call_at(loop.time() + 0.6, cb, 3)
loop.call_later(0.7, loop.stop)
loop.run_forever()
print("DONE")
| apache-2.0 | Python | |
17af81742f5bf6473155837c655506f4509a4273 | Add buildlet/__init__.py | tkf/buildlet | buildlet/__init__.py | buildlet/__init__.py | # [[[cog import cog; cog.outl('"""\n%s\n"""' % file('../README.rst').read())]]]
"""
Buildlet - build tool like functionality as a Python module
===========================================================
"""
# [[[end]]]
__author__ = "Takafumi Arakaki"
__version__ = "0.0.1.dev0"
__license__ = 'BSD License'
| bsd-3-clause | Python | |
1ed0bc1eb37b42845f42659ae00ccfbe444b0bfe | add first game | Devoxx4KidsDE/workshop-minecraft-modding-raspberry-pi,Devoxx4KidsDE/workshop-minecraft-modding-raspberry-pi | game-timeout.py | game-timeout.py | # -*- coding: utf-8 -*-
from mcpi import minecraft, block
import time
def buildGame(mc, y):
mc.postToChat("Begin")
mc.setting("world_immutable", True)
putDestination(mc, -5,y-1,5)
putPoint(mc, -5, y, 5)
putPoint(mc, -5, y, -5)
putPoint(mc, 5, y, 5)
putPoint(mc, 5, y, -5)
def putPoint(mc, x,y,z):
mc.setBlock(x,y,z,block.GOLD_ORE)
def putDestination(mc, x, y, z):
mc.setBlock(x,y,z,block.GLOWING_OBSIDIAN)
def startGame(mc, y):
mc.player.setTilePos(0,y,0)
points = 0
timer = 5
while True:
blockEvents = mc.events.pollBlockHits()
for event in blockEvents:
hitBlock = mc.getBlock(event.pos.x, event.pos.y, event.pos.z)
if hitBlock == block.GOLD_ORE.id:
points += 1
mc.setBlock(event.pos.x , event.pos.y , event.pos.z , block.AIR.id)
timer += 3
if hitBlock == block.GLOWING_OBSIDIAN.id:
points += 3
mc.setBlock(event.pos.x , event.pos.y , event.pos.z , block.GRASS.id)
timer += 5
if points == 7:
mc.postToChat("Du hast gewonnen in " + str(timer) + " Sekunden")
break
timer -= 1
print timer
time.sleep(1)
if timer == 0:
mc.postToChat("deine Zeit ist um, du hast verloren :(")
break
mc = minecraft.Minecraft.create()
y = mc.getHeight(0,0)
buildGame(mc, y)
startGame(mc, y)
| mit | Python | |
9c939cca65e3fbf0318e34f94021f3fa7ddb4f2d | add setup script for downloading dependencies | DucAnhPhi/LinguisticAnalysis | setup.py | setup.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Aug 30 16:09:27 2017
Setup script for downloading all dependencies
@author: duc
"""
import pip
import nltk
dependencies = [
'certifi==2017.7.27.1',
'chardet==3.0.4',
'cycler==0.10.0',
'idna==2.6',
'matplotlib==2.0.2',
'nltk==3.2.4',
'numpy==1.13.1',
'oauthlib==2.0.2',
'pyparsing==2.2.0',
'python-dateutil==2.6.1',
'pytz==2017.2',
'requests==2.18.4',
'requests-oauthlib==0.8.0',
'six==1.10.0',
'tweepy==3.3.0',
'urllib3==1.22'
]
nltk_data = [
'cmudict',
'stopwords',
'twitter_samples',
'punkt'
]
def install():
for d in dependencies:
pip.main(['install', d])
for data in nltk_data:
nltk.download(data)
if __name__ == '__main__':
install() | mit | Python | |
4f024a5da95d7a55e055fcde89981cefcc48a9b4 | Add setup script | django-blog-zinnia/zinnia-spam-checker-mollom | setup.py | setup.py | """Setup script of zinnia-spam-checker-mollom"""
from setuptools import setup
from setuptools import find_packages
setup(
name='zinnia-spam-checker-akismet',
version='1.0.dev',
description='Anti-spam protections for django-blog-zinnia with Mollom',
long_description=open('README.rst').read(),
keywords='django, zinnia, spam, mollom',
author='Fantomas42',
author_email='fantomas42@gmail.com',
url='https://github.com/Fantomas42/zinnia-spam-checker-mollom',
packages=find_packages(),
classifiers=[
'Framework :: Django',
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Programming Language :: Python',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'License :: OSI Approved :: BSD License',
'Topic :: Software Development :: Libraries :: Python Modules'],
license='BSD License',
include_package_data=True,
zip_safe=False,
install_requires=['PyMollom>=0.1']
)
| bsd-3-clause | Python | |
6c4bbb599900644055c200cfdd3a6fd2cd02a295 | add setup | MinnSoe/orcid | setup.py | setup.py | from setuptools import setup, Command
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import sys,subprocess
errno = subprocess.call([sys.executable, 'runtests.py'])
raise SystemExit(errno)
_parameters = {
'name': 'orcid',
'version': '0.1.1',
'description': 'ORCID API Client.',
'author': 'Minn Soe',
'maintainer': 'Minn Soe',
'maintainer_email': 'contributions@minn.so',
'license': 'BSD',
'packages': ['orcid'],
'classifiers': [
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2.7'
],
'install_requires': ['rauth'],
'cmdclass': {'test': PyTest}
}
setup(**_parameters) | bsd-2-clause | Python | |
9d406dc943a3a135ad8386255bd312b0412e9fe6 | Add files via upload | QuinDiesel/CommitSudoku-Project-Game | Definitief/timer.py | Definitief/timer.py | import pygame
screen = pygame.display.set_mode((800, 600))
clock = pygame.time.Clock()
counter, text = 10, '10'.rjust(3)
pygame.time.set_timer(pygame.USEREVENT, 1000)
font = pygame.font.SysFont('timer', 30)
event = pygame.USEREVENT
while True:
for e in pygame.event.get():
if e.type == pygame.event:
counter -= 1
text = str(counter).rjust(3) if counter > 0 else 'time\s up'
if e.type == pygame.QUIT:
quit()
else:
screen.fill((255, 255, 255))
screen.blit(font.render(text, True, (255, 255, 255)), (700, 30))
pygame.display.flip()
clock.tick(60)
continue
quit() | mit | Python | |
61b007c4e79edd98f076ed2873d4cdca601b6202 | Add setup.py | lamenezes/simple-model | setup.py | setup.py | # Copyright (c) 2017 Luiz Menezes
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
from setuptools import setup, find_packages
setup(
name='simplemodel',
version='0.0.1',
description='Simple Models for Python',
url='https://github.com/lamenzes/simple-model',
author='Luiz Menezes',
author_email='luiz.menezesf@gmail.com',
long_description=open('README.rst').read(),
packages=find_packages(exclude=['tests*']),
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.5',
],
)
| mit | Python | |
83fd46732d473132cf51f551608689e2ac4fc4c9 | Add poodle main file | mpgn/poodle-PoC,mpgn/poodle-exploit,mpgn/poodle-PoC | poodle.py | poodle.py | import http.server
import http.client
import socketserver
import ssl
import argparse
import socket
import sys
import threading
sys.path.append('tests/')
from testClient import open_ssl
from pprint import pprint
CRLF = "\r\n\r\n"
class MyTCPHandler(socketserver.BaseRequestHandler):
"""
The RequestHandler class for our server.
It is instantiated once per connection to the server, and must
override the handle() method to implement communication to the
client.
"""
def handle(self):
httpd.socket = ssl.wrap_socket (httpd.socket,ssl_version=ssl.PROTOCOL_SSLv3, certfile='cert/localhost.pem', server_side=True, cert_reqs=ssl.CERT_NONE)
while True:
try:
data = httpd.socket.recv(1024)
if data == '':
break
httpd.socket.send(b'200')
except ssl.SSLError as e:
print("Error SSL")
break
return
class Client():
def connection(host, port):
ssl_socket = socket.create_connection((host,port))
ssl_socket= ssl.wrap_socket(ssl_socket, server_side=False, ssl_version=ssl.PROTOCOL_SSLv3, cert_reqs=ssl.CERT_NONE)
print('Client is ready on host {!r} and port {}\n'.format(host, port))
return ssl_socket
def request(ssl_sock, path=0):
print('Cliend send request...')
ssl_sock.sendall(b"HEAD / HTTP/1.0\r\nHost: localhost\r\n\r\n")
pprint.pprint(conn.recv(1024).split(b"\r\n"))
def closeSession(client):
print('Client close the connection')
client.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Connection with SSLv3')
parser.add_argument('host', help='hostname or IP address')
parser.add_argument('port', type=int, help='TCP port number')
parser.add_argument('-v', help='debug mode', action="store_true")
args = parser.parse_args()
httpd = socketserver.TCPServer((args.host, args.port), MyTCPHandler)
server = threading.Thread(target=httpd.serve_forever)
server.start()
print('Server is serving HTTPS on {!r} port {}'.format(args.host, args.port))
client = Client.connection(args.host, args.port)
Client.request(client)
Client.request(client)
Client.closeSession(client)
init_server.shutdown()
| mit | Python | |
063bf860b8a6b043e982d9db0f6c5cb524590752 | Create gate-puzzles.py | Pouf/CodingCompetition,Pouf/CodingCompetition | CiO/gate-puzzles.py | CiO/gate-puzzles.py | from string import punctuation
def find_word(message):
msg = ''.join(l for l in message.lower() if not l in punctuation).split()
lMessage = len(msg)
scores = msg[:]
for i, word in enumerate(msg):
lWord = len(word)
likeness = 0
for each in msg:
likeness += word[0] == each[0]
likeness += word[-1] == each[-1]
lEach = len(each)
likeness += 3 * min(lWord, lEach) / max(lWord, lEach)
common = set(word) & set(each)
unique = set(word) | set(each)
likeness += 5 * len(common) / len(unique)
scores[i] = [likeness, word]
winner = sorted(scores, key=lambda i:i[0])[-1][1]
return winner
| mit | Python | |
7e5e310f0a4bd4aa3fa4313624136f52146c9bbd | add setup.py | byteweaver/django-tickets,byteweaver/django-tickets,Christophe31/django-tickets,Christophe31/django-tickets | setup.py | setup.py | import os
from setuptools import setup, find_packages
import tickets
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name='django-tickets',
version=tickets.__version__,
description='Reusable django application providing a generic support ticket system',
long_description=read('README.md'),
license=read('LICENSE'),
author='akuryou',
author_email='contact@byteweaver.net',
url='https://github.com/byteweaver/django-tickets',
packages=find_packages(),
install_requires=[
'Django',
],
tests_require=[
'django-nose',
'coverage',
'django-coverage',
'factory_boy',
],
)
| bsd-3-clause | Python | |
6953c04104eb4cc3eb908026f2420e3978371616 | Move working view.cwl script to doc folder | curoverse/l7g,curoverse/l7g,curoverse/l7g,curoverse/l7g,curoverse/l7g,curoverse/l7g,curoverse/l7g | doc/viewcwl-json.py | doc/viewcwl-json.py | #!/usr/bin/env python
import fnmatch
import requests
import time
import os
import glob
# You can alternatively define these in travis.yml as env vars or arguments
BASE_URL = 'https://view.commonwl.org/workflows'
#get the cwl in l7g/cwl-version
matches = []
for root, dirnames, filenames in os.walk('cwl-version'):
for filename in fnmatch.filter(filenames, '*.cwl'):
matches.append(os.path.join(root, filename))
print matches
REPO_SLUG = 'curoverse/l7g/blob/master/'
# Headers
HEADERS = {
'user-agent': 'my-app/0.0.1',
'accept': 'application/json'
}
#Testing WORKFLOW_PATH
#WORKFLOW_PATH = 'cwl-version/clean/cwl/tiling_clean_gvcf.cwl'
#This will loop through matches, need to indent everything after to make work
for WORKFLOW_PATH in matches:
# Whole workflow URL on github
workflowURL = 'https://github.com/' + REPO_SLUG + WORKFLOW_PATH
print '\n',workflowURL,'\n'
# Add new workflow with the specific commit ID of this build
addResponse = requests.post(BASE_URL,
data={'url': workflowURL},
headers = HEADERS)
print BASE_URL,'\n',workflowURL,'\n\n'
print(addResponse)
print(addResponse.encoding)
print(addResponse.content)
print(addResponse.url)
print(addResponse.request)
print(addResponse.raw)
print(addResponse.headers)
print('\n\n End Sarah\'s code \n\n')
print('Sleep 1 second\n\n')
time.sleep(1)
| agpl-3.0 | Python | |
c43a2af36172faca15d24c858fdb27c88ba6e76a | Add the main setup config file | vu3jej/scrapy-corenlp | setup.py | setup.py | from setuptools import setup
setup(
name='scrapy-corenlp',
version='0.1',
description='Scrapy spider middleware :: Stanford CoreNLP Named Entity Recognition',
url='https://github.com/vu3jej/scrapy-corenlp',
author='Jithesh E J',
author_email='mail@jithesh.net',
license='BSD',
packages=['scrapy_corenlp'],
classifiers=[
'Development Status :: Alpha',
'Programming Language :: Python :: 3.4',
'Topic :: Natural Language Processing :: Named Entity Recognition',
]
)
| bsd-2-clause | Python | |
d177935aab000e0c909f85a6162d64438f009301 | add setup script | kwarunek/file2py,kAlmAcetA/file2py | setup.py | setup.py | #!/usr/bin/env python
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name="file2py",
packages=['file2py'],
version="0.2.0",
author="Krzysztof Warunek",
author_email="kalmaceta@gmail.com",
description="Allows to include/manage binary files in python source file.",
license="MIT",
keywords="binary, converter, pyside, qt, pyqt, file2py",
url="https://github.com/kAlmAcetA/file2py",
long_description='Allows to include/manage binary files in python source file.',
scripts=['scripts/file2py', 'scripts/image2py'],
classifiers=[
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 2',
'Operating System :: POSIX',
'Development Status :: 3 - Alpha'
]
)
| mit | Python | |
df00bcd0fbf94b35162e02a2776ee7d089a4193c | update version string | ContinuumIO/pycosat,sandervandorsten/pycosat,sandervandorsten/pycosat,ContinuumIO/pycosat | setup.py | setup.py | import sys
from distutils.core import setup, Extension
version = '0.6.0.dev'
ext_kwds = dict(
name = "pycosat",
sources = ["pycosat.c"],
define_macros = []
)
if sys.platform != 'win32':
ext_kwds['define_macros'].append(('PYCOSAT_VERSION', '"%s"' % version))
if '--inplace' in sys.argv:
ext_kwds['define_macros'].append(('DONT_INCLUDE_PICOSAT', 1))
ext_kwds['library_dirs'] = ['.']
ext_kwds['libraries'] = ['picosat']
setup(
name = "pycosat",
version = version,
author = "Ilan Schnell",
author_email = "ilan@continuum.io",
url = "https://github.com/ContinuumIO/pycosat",
license = "MIT",
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: C",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Utilities",
],
ext_modules = [Extension(**ext_kwds)],
py_modules = ['test_pycosat'],
description = "bindings to picosat (a SAT solver)",
long_description = open('README.rst').read(),
)
| import sys
from distutils.core import setup, Extension
version = '0.6.0'
ext_kwds = dict(
name = "pycosat",
sources = ["pycosat.c"],
define_macros = []
)
if sys.platform != 'win32':
ext_kwds['define_macros'].append(('PYCOSAT_VERSION', '"%s"' % version))
if '--inplace' in sys.argv:
ext_kwds['define_macros'].append(('DONT_INCLUDE_PICOSAT', 1))
ext_kwds['library_dirs'] = ['.']
ext_kwds['libraries'] = ['picosat']
setup(
name = "pycosat",
version = version,
author = "Ilan Schnell",
author_email = "ilan@continuum.io",
url = "https://github.com/ContinuumIO/pycosat",
license = "MIT",
classifiers = [
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: C",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.5",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Topic :: Utilities",
],
ext_modules = [Extension(**ext_kwds)],
py_modules = ['test_pycosat'],
description = "bindings to picosat (a SAT solver)",
long_description = open('README.rst').read(),
)
| mit | Python |
39fd3d75dd6dc60fb2f4fdc1cf5cf4096eade93d | Create setup.py | allanliebold/data-structures,allanliebold/data-structures | setup.py | setup.py | """."""
from setuptools import setup
setup(
name="data-structures",
description="Implementations of various data structures in Python",
version=0.1,
author="Matt Favoino, Allan Liebold",
licence="MIT",
py_modules=['linked-list'],
package_dir={'': 'src'},
install_requires=[],
extras_require={
'testing': ['pytest', 'pytest-cov', 'tox'],
'development': ['ipython']
},
entry_points={}
)
| mit | Python | |
91a851dd6516bcdd451ba187c72fe2d3eef6a3ce | make fileoperate module | helloTC/ATT,BNUCNL/ATT | utilfunc/fileoperate.py | utilfunc/fileoperate.py | # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode:nil -*-
# vi: set ft=python sts=4 sw=4 et:
import os
import shutil
verbose = True
class DeleteFile(object):
"""
Delete file/directory
--------------------------------
Parameters:
path: path for deleting
>>> m = DeleteFile()
>>> m.exexute(path)
"""
def __init__(self):
pass
def execute(self, path):
if verbose:
print('Deleting {}'.format(path))
if '.' in path:
os.remove(path)
else:
shutil.rmtree(self._path)
def undo(self):
pass
class CopyFile(object):
"""
Copy file/directory from source to destination
------------------------------------------------
Parameters:
path_src: source path
path_dst: destination path
>>> m = CopyFile()
>>> m.execute(src, dst)
"""
def __init__(self):
pass
def execute(self, src, dst):
if verbose:
print('Copying {0} to {1}'.format(src, dst))
if '.' in src:
shutil.copyfile(src, os.path.join(dst, [x for x in src.split('/') if x][-1]))
else:
shutil.copytree(src, os.path.join(dst, [x for x in src.split('/') if x][-1]))
def undo(self):
pass
class RenameFile(object):
"""
Rename file or directory
---------------------------
Parameters:
src: source file
dst: destination file
>>> m = RenameFile()
>>> m.execute(src, dst)
"""
def __init__(self):
pass
def execute(self, src, dst):
if verbose:
print('Renaming {0} to {1}'.format(src, dst))
os.rename(src, dst)
def undo(self):
pass
class CreateFile(object):
"""
Create file/directory
---------------------------
Parameters:
path: data path
if it's a directory, create directory,
else create a file
text: by default a text
>>> a = CreateFile()
>>> a.execute(path, text)
"""
def __init__(self):
pass
def execute(self, path, text = 'Hello, world\n'):
if verbose:
print('Creating {}'.format(path))
if '.' in path:
with open(path, 'w') as out_file:
out_file.write(text)
else:
os.mkdir(path)
def undo(self):
pass
| mit | Python | |
4f6900033dc8bbba5f85369565ce17aa850c230c | Add an egg-ified setup | emgee/formal,emgee/formal,emgee/formal | setup.py | setup.py | try:
from setuptools import setup
except:
from distutils.core import setup
import forms
setup(
name='forms',
version=forms.version,
description='HTML forms framework for Nevow',
author='Matt Goodall',
author_email='matt@pollenation.net',
packages=['forms', 'forms.test'],
)
| mit | Python | |
5e153bf16e25f3ab5039531be61c3d7ee09137bb | add test (#35568) | luotao1/Paddle,PaddlePaddle/Paddle,PaddlePaddle/Paddle,luotao1/Paddle,PaddlePaddle/Paddle,luotao1/Paddle,PaddlePaddle/Paddle,PaddlePaddle/Paddle,luotao1/Paddle,PaddlePaddle/Paddle,luotao1/Paddle,luotao1/Paddle,luotao1/Paddle,PaddlePaddle/Paddle | python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_tile.py | python/paddle/fluid/tests/unittests/ir/inference/test_trt_convert_tile.py | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from trt_layer_auto_scan_test import TrtLayerAutoScanTest, SkipReasons
from program_config import TensorConfig, ProgramConfig
import numpy as np
import paddle.inference as paddle_infer
from functools import partial
from typing import Optional, List, Callable, Dict, Any, Set
import unittest
class TrtConvertTileTest(TrtLayerAutoScanTest):
def is_program_valid(self, program_config: ProgramConfig) -> bool:
inputs = program_config.inputs
attrs = [
program_config.ops[i].attrs
for i in range(len(program_config.ops))
]
for x in attrs[0]['repeat_times']:
if x <= 0:
return False
return True
def sample_program_configs(self):
def generate_input1(attrs: List[Dict[str, Any]]):
return np.ones([1, 2, 3, 4]).astype(np.float32)
for repeat_times in [[100], [1, 2], [0, 3], [1, 2, 100]]:
dics = [{"repeat_times": repeat_times}]
ops_config = [{
"op_type": "tile",
"op_inputs": {
"X": ["input_data"]
},
"op_outputs": {
"Out": ["tile_output_data"]
},
"op_attrs": dics[0]
}]
ops = self.generate_op_config(ops_config)
program_config = ProgramConfig(
ops=ops,
weights={},
inputs={
"input_data": TensorConfig(data_gen=partial(generate_input1,
dics))
},
outputs=["tile_output_data"])
yield program_config
def sample_predictor_configs(
self, program_config) -> (paddle_infer.Config, List[int], float):
def generate_dynamic_shape(attrs):
self.dynamic_shape.min_input_shape = {"input_data": [1, 3, 32, 32]}
self.dynamic_shape.max_input_shape = {"input_data": [4, 3, 64, 64]}
self.dynamic_shape.opt_input_shape = {"input_data": [1, 3, 64, 64]}
def clear_dynamic_shape():
self.dynamic_shape.min_input_shape = {}
self.dynamic_shape.max_input_shape = {}
self.dynamic_shape.opt_input_shape = {}
def generate_trt_nodes_num(attrs, dynamic_shape):
if dynamic_shape == True:
return 0, 3
else:
return 1, 2
attrs = [
program_config.ops[i].attrs
for i in range(len(program_config.ops))
]
# for static_shape
clear_dynamic_shape()
self.trt_param.precision = paddle_infer.PrecisionType.Float32
yield self.create_inference_config(), generate_trt_nodes_num(
attrs, False), 1e-5
self.trt_param.precision = paddle_infer.PrecisionType.Half
yield self.create_inference_config(), generate_trt_nodes_num(
attrs, False), 1e-4
# for dynamic_shape
generate_dynamic_shape(attrs)
self.trt_param.precision = paddle_infer.PrecisionType.Float32
yield self.create_inference_config(), generate_trt_nodes_num(attrs,
True), 1e-5
self.trt_param.precision = paddle_infer.PrecisionType.Half
yield self.create_inference_config(), generate_trt_nodes_num(attrs,
True), 1e-4
def test(self):
self.run_test()
if __name__ == "__main__":
unittest.main()
| apache-2.0 | Python | |
aee2363f6c6995a124b3c0ad358e83dc815ea808 | Remove redundant subscription fields from user model. | Code4SA/pmg-cms-2,Code4SA/pmg-cms-2,Code4SA/pmg-cms-2 | alembic/versions/3fc4c97dc6bd_remove_redundant_user_subscription_.py | alembic/versions/3fc4c97dc6bd_remove_redundant_user_subscription_.py | """remove redundant user subscription fields
Revision ID: 3fc4c97dc6bd
Revises: 3d723944025f
Create Date: 2015-01-27 18:11:15.822193
"""
# revision identifiers, used by Alembic.
revision = '3fc4c97dc6bd'
down_revision = '3d723944025f'
branch_labels = None
depends_on = None
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('user', u'subscribe_bill')
op.drop_column('user', u'subscribe_call_for_comment')
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('user', sa.Column(u'subscribe_call_for_comment', sa.BOOLEAN(), autoincrement=False, nullable=True))
op.add_column('user', sa.Column(u'subscribe_bill', sa.BOOLEAN(), autoincrement=False, nullable=True))
### end Alembic commands ###
| apache-2.0 | Python | |
a3e70bdd8146d4e70dc8f34396f7c537dbd4784c | add setup.py | comynli/m | setup.py | setup.py | from distutils.core import setup
setup(name='m',
version='0.1.0',
packages=['m', 'm.security'],
install_requires=['WebOb>=1.6.1'],
author = "comyn",
author_email = "me@xueming.li",
description = "This is a very light web framework",
license = "Apache License 2.0",
) | apache-2.0 | Python | |
76d2386bfa9e61ac17bca396384772ae70fb4563 | Add one liner to add ability to print a normal distribution with mean zero and varience one | ianorlin/pyrandtoys | gauss.py | gauss.py | #!/usr/bin/env python3
#Copyright 2015 BRendan Perrine
import random
random.seed()
print (random.gauss(0,1), "Is a normal distribution with mean zero and standard deviation and varience of one")
| mit | Python | |
5653330769630c8f4f8ed88753b3886d063f9e3d | Add web example | TonyPythoneer/seria-bot-py | web.py | web.py | # -*- coding: utf-8 -*-
import asyncio
import os
from flask import Flask
flask_app = Flask(__name__)
@flask_app.route('/')
def hello():
return 'Hello World!'
async def main():
port = int(os.environ.get('PORT', 5000))
future_tasks = [
asyncio.ensure_future(flask_app.run(host='0.0.0.0', port=port)),
]
return await asyncio.gather(*future_tasks)
if __name__ == '__main__':
print('Application server gets started!')
event_loop = asyncio.get_event_loop()
event_loop.run_until_complete(main())
| apache-2.0 | Python | |
4a89b6e085f363a6e848a13e857872165a781d61 | add a base setup.py | armstrong/armstrong.apps.content,armstrong/armstrong.apps.content | setup.py | setup.py | from distutils.core import setup
import os
# Borrowed and modified from django-registration
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir:
os.chdir(root_dir)
def build_package(dirpath, dirnames, filenames):
    """Record *dirpath* as a dotted package name and prune hidden dirs.

    Invoked once per directory yielded by ``os.walk``; because the walk is
    top-down and ``dirnames`` is mutated in place, pruned subdirectories
    are skipped by the remainder of the traversal.
    """
    # Slice-assign instead of `del` inside enumerate(): deleting while
    # iterating shifts the indices and skips adjacent dot-directories.
    dirnames[:] = [d for d in dirnames if not d.startswith('.')]
    pkg = dirpath.replace(os.path.sep, '.')
    if os.path.altsep:
        pkg = pkg.replace(os.path.altsep, '.')
    packages.append(pkg)

# Plain loop instead of a side-effect-only list comprehension.
for dirpath, dirnames, filenames in os.walk('armstrong'):
    build_package(dirpath, dirnames, filenames)
# Package metadata; `packages` is the dotted-module list collected by the
# os.walk()/build_package() pass above.
setup(
    name='armstrong.apps.content',
    version='0.0.1a',
    description='Provides a concrete implementation of ContentBase',
    author='Bay Citizen & Texas Tribune',
    author_email='info@armstrongcms.org',
    url='http://github.com/armstrongcms/armstrong.apps.content/',
    packages=packages,
    install_requires=[
        'armstrong.core.arm_content',
    ],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
    ],
)
| apache-2.0 | Python | |
9fb6d0ea74aacc77f06d36805760270854e53eba | Add missing django_libs test requirement | claudep/django-calendarium,claudep/django-calendarium,bitmazk/django-calendarium,bitmazk/django-calendarium,claudep/django-calendarium,bitmazk/django-calendarium,claudep/django-calendarium | setup.py | setup.py | import os
from setuptools import setup, find_packages
import calendarium
def read(fname):
    """Return the contents of *fname*, located next to this file.

    Missing or unreadable files yield the empty string, so optional
    metadata files (DESCRIPTION, README.rst) do not break setup.
    """
    path = os.path.join(os.path.dirname(__file__), fname)
    try:
        return open(path).read()
    except IOError:
        return ''
setup(
name="django-calendarium",
version=calendarium.__version__,
description=read('DESCRIPTION'),
long_description=read('README.rst'),
license='The MIT License',
platforms=['OS Independent'],
keywords='django, calendar, app, widget, events, schedule',
author='Daniel Kaufhold',
author_email='daniel.kaufhold@bitmazk.com',
url="https://github.com/bitmazk/django-calendarium",
packages=find_packages(),
include_package_data=True,
tests_require=[
'fabric',
'factory_boy<2.0.0',
'django_libs',
'django-nose',
'coverage',
'django-coverage',
'mock',
],
test_suite='calendarium.tests.runtests.runtests',
)
| import os
from setuptools import setup, find_packages
import calendarium
def read(fname):
    """Return the text of *fname* (relative to this file), or '' on IOError."""
    try:
        return open(os.path.join(os.path.dirname(__file__), fname)).read()
    except IOError:
        return ''
setup(
name="django-calendarium",
version=calendarium.__version__,
description=read('DESCRIPTION'),
long_description=read('README.rst'),
license='The MIT License',
platforms=['OS Independent'],
keywords='django, calendar, app, widget, events, schedule',
author='Daniel Kaufhold',
author_email='daniel.kaufhold@bitmazk.com',
url="https://github.com/bitmazk/django-calendarium",
packages=find_packages(),
include_package_data=True,
tests_require=[
'fabric',
'factory_boy<2.0.0',
'django-nose',
'coverage',
'django-coverage',
'mock',
],
test_suite='calendarium.tests.runtests.runtests',
)
| mit | Python |
ff000972d386b001d75cb36b161acd59a1626917 | Correct Path to PyPi Readme | rjschwei/azure-sdk-for-python,leihu0724/azure-sdk-for-python,huguesv/azure-sdk-for-python,oaastest/azure-sdk-for-python,Azure/azure-sdk-for-python,aarsan/azure-sdk-for-python,Azure/azure-sdk-for-python,bonethrown/azure-sdk-for-python,mariotristan/azure-sdk-for-python,AutorestCI/azure-sdk-for-python,aarsan/azure-sdk-for-python,huguesv/azure-sdk-for-python,ParallaxIT/azure-sdk-for-python,dominoFire/azure-sdk-for-python,crwilcox/azure-sdk-for-python,lmazuel/azure-sdk-for-python,crwilcox/azure-sdk-for-python,v-iam/azure-sdk-for-python,SUSE/azure-sdk-for-python,Azure/azure-sdk-for-python,mariotristan/azure-sdk-for-python,leihu0724/azure-sdk-for-python,bonethrown/azure-sdk-for-python,oaastest/azure-sdk-for-python,Azure/azure-sdk-for-python,dominoFire/azure-sdk-for-python,ParallaxIT/azure-sdk-for-python | setup.py | setup.py | #!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
# To build:
# python setup.py sdist
#
# To install:
# python setup.py install
#
# To register (only needed once):
# python setup.py register
#
# To upload:
# python setup.py sdist upload
setup(name='azure',
version='0.9.0',
description='Microsoft Azure client APIs',
long_description=open('README.rst', 'r').read(),
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://github.com/WindowsAzure/azure-sdk-for-python',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'License :: OSI Approved :: Apache Software License'],
packages=['azure',
'azure.http',
'azure.servicebus',
'azure.storage',
'azure.servicemanagement'],
install_requires=['python-dateutil',
'pyopenssl',
'futures']
)
| #!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#--------------------------------------------------------------------------
from distutils.core import setup
# To build:
# python setup.py sdist
#
# To install:
# python setup.py install
#
# To register (only needed once):
# python setup.py register
#
# To upload:
# python setup.py sdist upload
setup(name='azure',
version='0.9.0',
description='Microsoft Azure client APIs',
long_description=open('PyPi-README.rst', 'r').read(),
license='Apache License 2.0',
author='Microsoft Corporation',
author_email='ptvshelp@microsoft.com',
url='https://github.com/WindowsAzure/azure-sdk-for-python',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'License :: OSI Approved :: Apache Software License'],
packages=['azure',
'azure.http',
'azure.servicebus',
'azure.storage',
'azure.servicemanagement'],
install_requires=['python-dateutil',
'pyopenssl',
'futures']
)
| mit | Python |
e611dda17f64b0d1355fbe85589b2c790db40158 | Create hello.py | MehtaShivam/cs3240-labdemo | hello.py | hello.py | def main():
print("hello")
if __name__=='__main__':
main()
| mit | Python | |
bd0f6328bcb0aeae69568fab203f97e155fe406f | add the lexicon preprocess script demo | huajianjiu/Bernoulli-CBOFP,huajianjiu/Bernoulli-CBOFP,huajianjiu/Bernoulli-CBOFP,huajianjiu/Bernoulli-CBOFP | PPDB.py | PPDB.py | import numpy as np
import os.path
import cPickle as pickle
import sys
class PPDB_2(object):
    """Filter PPDB 2.0 paraphrase pairs down to a fixed vocabulary.

    Reads a vocabulary file (one entry per line; only the first
    whitespace-separated token is kept) and a PPDB "|||"-delimited dump,
    keeps only pairs whose source and target both occur in the vocabulary,
    and records them according to their entailment label.
    """
    def __init__(self, vocab="vocab.txt", ppdb="ppdb-2.0-tldr", output="ppdb2.txt"):
        # vocab:  path to the vocabulary file (one word per line)
        # ppdb:   path to the PPDB dump ("|||"-separated fields)
        # output: path written by save_ppdb()
        self.vocab = vocab
        # NOTE(review): the local alias `ppdb_paraphrases` is never used
        # again; only the attribute matters.
        self.ppdb_paraphrases = ppdb_paraphrases = {}
        self.word_hash = {}
        self.output = output
        with open(vocab, "r") as f_vocab:
            words = f_vocab.readlines()
            # Keep only the first token of each vocabulary line; word_hash
            # maps word -> its line index for O(1) membership checks.
            self.words = words = [x.split()[0] for x in words]
            for n,w in enumerate(words):
                self.word_hash[w]=n
        with open(ppdb, "r") as ppdb_f:
            lines = ppdb_f.readlines()
            print "Total lines: " + str(len(lines))
            n = 0
            for line in lines:
                # Fields: [1] = source phrase, [2] = target phrase,
                # [-1] = entailment label.
                # NOTE(review): the `> 0` test also rejects the word at
                # vocabulary index 0 (search_hash returns the index) --
                # confirm whether `>= 0` was intended.
                if (self.search_hash(line.split("|||")[1].strip()) > 0) and (self.search_hash(line.split("|||")[2].strip()) > 0):
                    baseword = line.split("|||")[1].strip()
                    ppword = line.split("|||")[2].strip()
                    if (line.split("|||")[-1].strip() == "Equivalence"):
                        # Equivalence is symmetric: record both directions.
                        self.add_paraphrases(baseword, ppword)
                        self.add_paraphrases(ppword, baseword)
                    elif (line.split("|||")[-1].strip() == "ForwardEntailment"):
                        self.add_paraphrases(baseword, ppword)
                    elif (line.split("|||")[-1].strip() == "ReverseEntailment"):
                        self.add_paraphrases(ppword, baseword)
                n += 1
                if n%10000 == 0:
                    print str(n) + " lines processed."
            print "Finish. Totally "+str(n)+" lines processed."
    def search_hash(self, word):
        """Return the vocabulary index of *word*, or -1 if it is absent."""
        try:
            return self.word_hash[word]
        except KeyError:
            return -1
        except:
            # Any unexpected lookup failure is reported but treated as
            # "not in vocabulary".
            print "Unexpected error:", sys.exc_info()[0]
            return -1
    def search_baseword(self, inputword):
        """Return True if *inputword* already has recorded paraphrases."""
        return inputword in self.ppdb_paraphrases.keys()
    def add_paraphrases(self, baseword, ppword):
        """Append *ppword* to the paraphrase list of *baseword*."""
        if self.search_baseword(baseword):
            self.ppdb_paraphrases[baseword].append(ppword)
        else:
            self.ppdb_paraphrases[baseword] = [ppword]
    def save_ppdb(self):
        """Write one output line per vocabulary word: the word, its
        paraphrases (if any), then an "</s>" terminator; the special word
        "UNK" is emitted as "</s> </s>"."""
        print "Writing to ouput file."
        with open(self.output, "w") as f_save:
            n = 0
            for word in self.words:
                if word == "UNK":
                    write_line = "</s> </s>\n"
                elif word in self.ppdb_paraphrases.keys():
                    write_line = str(word) + " "
                    for ppword in self.ppdb_paraphrases[word]:
                        write_line += ppword + " "
                    write_line += "</s>\n"
                else:
                    write_line = str(word) + " </s>\n"
                f_save.write(write_line)
                n += 1
                if n%1000 == 0:
                    print "."
            f_save.flush()
# Build the paraphrase table from CLI-supplied paths (or the defaults) and
# write it out.
# NOTE(review): only `len(sys.argv) > 1` is checked, yet argv[1..3] are all
# indexed -- fewer than three arguments would raise IndexError.
if __name__ == "__main__":
    if len(sys.argv)>1:
        #python PPDB.py vocab ppdb output
        ppdb_s_corpus = PPDB_2(sys.argv[1], sys.argv[2], sys.argv[3])
    else:
        ppdb_s_corpus = PPDB_2()
    ppdb_s_corpus.save_ppdb()
| apache-2.0 | Python | |
23ccab0a6d4107de250fb5e609538bb86a3aef24 | add main script runner | osxi/selenium-drupal-demo | runner.py | runner.py | import json
import urllib2
from selenium import webdriver
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
drupal_site = "http://url.toyourdrupalsite.com"
data_path = "/api/v1/resource" # returns array of objects containing nids
driver = webdriver.Firefox()
def parse_nids_json(record):
    """Extract the node id from one API record, coerced to a string."""
    nid = record["nid"]
    return str(nid)
def login():
    """Log the shared Selenium driver into the Drupal /user form.

    NOTE(review): the credentials are hard-coded placeholders; replace them
    (or load them from configuration) before running.
    """
    driver.get(drupal_site + "/user")
    driver.find_element_by_id("edit-name").send_keys("your_username")
    driver.find_element_by_id("edit-pass").send_keys("your_password")
    return driver.find_element_by_id("edit-submit").submit()
# Fetch the resource listing from the JSON API and pull out the node ids
# as strings.
request = json.load(urllib2.urlopen(drupal_site + data_path))
nids = map(parse_nids_json, request)
# Authenticate the shared browser session once, then visit each node's
# edit form in turn.
login()
for nid in nids:
    url = drupal_site + "/node/" + nid + "/edit"
    driver.get(url)
    # Do stuff here like change fields and click the Save button
driver.quit()
| mit | Python | |
4e503a060023da75153438d73902d19a07e90be3 | Add the source file | cromod/PyBF | PyBF.py | PyBF.py | # -*- coding: utf-8 -*-
import sys
array = [0] # array of bytes
ptr = 0 # data pointer
def readNoLoop(char):
    """Execute a single non-loop Brainfuck command.

    Mutates the module-level globals ``array`` (the tape) and ``ptr``
    (the data pointer).  Unknown characters are silently ignored.
    Raises ValueError if a cell would go negative or the pointer would
    move left of the tape.
    """
    global ptr
    # Increment/Decrement the byte at the data pointer
    if char=='+':
        array[ptr] += 1
    elif char=='-':
        array[ptr] -= 1
        if array[ptr] < 0:
            raise ValueError("Negative value in array")
    # Increment/Decrement the data pointer
    elif char=='>':
        ptr += 1
        # Grow the tape on demand; this loop leaves at least one spare
        # zeroed cell past ptr.
        while(ptr>=len(array)-1):
            array.append(0)
    elif char=='<':
        ptr -= 1
        if ptr < 0:
            raise ValueError("Negative value of pointer")
    # Output the byte at the data pointer
    elif char=='.':
        sys.stdout.write(chr(array[ptr]))
    # Store one byte of input in the byte at the data pointer
    elif char==',':
        array[ptr] = ord(sys.stdin.read(1))
    #print ptr, array
def interpret(charChain):
    """Interpret a Brainfuck program string.

    Commands inside a loop execute once while the scan passes over them;
    when the matching ']' is reached, the bracketed slice is re-executed
    recursively until the cell under ``ptr`` is zero.  Relies on the
    module-level globals ``array`` and ``ptr``.
    """
    it = 0
    loopBegin = []  # stack of indices of currently open '[' brackets
    while(it<len(charChain)):
        if charChain[it]=='[':
            loopBegin.append(it)
        elif charChain[it]==']':
            # Slice between the innermost '[' and this ']' (exclusive).
            subChain = charChain[loopBegin[-1]+1:it]
            while(array[ptr]>0):
                interpret(subChain)
            loopBegin.pop()
        else:
            readNoLoop(charChain[it])
        it+=1
if __name__ == "__main__":
cmd = '++++++++[>++++[>++>+++>+++>+<<<<-]>+>+>->>+[<]<-]>>.>---.+++++++..+++.>>.<-.<.+++.------.--------.>>+.>++.'
#cmd = '[,.]'
try:
interpret(cmd)
except:
raise | mit | Python | |
7d3a1eb991e6678e8227ea7778688a0458ad5843 | Add package distribution files | mvy/google_code_jam_framework | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name='GCJCore',
version='1.0',
description='AppState reader for python scripts',
author='Yves Stadler',
author_email='yves.stadler@gmail.com',
url='',
packages=['gcjcore'],
)
| mit | Python | |
5d5806ed7b490ab79a882e8b3f966d5309db648b | Create setup.py | david-shu/lxml-mate | setup.py | setup.py |
import sys
try:
from setuptools import setup
except ImportError:
if sys.version_info > (3,):
raise RuntimeError("python3 support requires setuptools")
from distutils.core import setup
info = {}
src = open("lxmlmate.py")
lines = []
for ln in src:
lines.append(ln)
if "__version__" in ln:
for ln in src:
if "__version__" not in ln:
break
lines.append(ln)
break
exec("".join(lines),info)
with open( 'README.rst' ) as f:
info['__doc__'] = f.read()
NAME = "lxmlmate"
VERSION = info["__version__"]
DESCRIPTION = "The simplest Object-XML mapper for Python. Lite for lxml."
LONG_DESC = info["__doc__"]
AUTHOR = "David Shu"
AUTHOR_EMAIL = "david.shu@126.com"
URL=""#http://packages.python.org/lxml-lite"
LICENSE = "MIT"
KEYWORDS = "xml lxml"
CLASSIFIERS = [
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"License :: OSI Approved",
"License :: OSI Approved :: MIT License",
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Text Processing",
"Topic :: Text Processing :: Markup",
"Topic :: Text Processing :: Markup :: XML",
]
setup(name=NAME,
version=VERSION,
author=AUTHOR,
author_email=AUTHOR_EMAIL,
url=URL,
description=DESCRIPTION,
long_description=LONG_DESC,
license=LICENSE,
keywords=KEYWORDS,
#packages=["lxml-lite"],
classifiers=CLASSIFIERS,
py_modules=['lxmlmate','example'],
requires=['lxml'],
data_files=['README.rst']
)
| mit | Python | |
addc7e79920afbef5a936b0df536bb8d5f71af99 | Add a setup script. | mit-athena/python-discuss | setup.py | setup.py | #!/usr/bin/python
from distutils.core import setup
setup(name='discuss',
version='1.0',
description='Python client for Project Athena forum system',
author='Victor Vasiliev',
packages=['discuss']
)
| mit | Python | |
06eea20e9db69879bec2657e73c95d452774acf9 | Create blynk_ctrl.py | okhiroyuki/blynk-library,csicar/blynk-library,CedricFinance/blynk-library,csicar/blynk-library,blynkkk/blynk-library,flashvnn/blynk-library,CedricFinance/blynk-library,al1271/blynk-library,al1271/blynk-library,flashvnn/blynk-library,flashvnn/blynk-library,al1271/blynk-library,johan--/blynk-library,ivankravets/blynk-library,blynkkk/blynk-library,csicar/blynk-library,CedricFinance/blynk-library,sstocker46/blynk-library,blynkkk/blynk-library,okhiroyuki/blynk-library,radut/blynk-library,CedricFinance/blynk-library,blynkkk/blynk-library,ivankravets/blynk-library,radut/blynk-library,johan--/blynk-library,sstocker46/blynk-library,okhiroyuki/blynk-library,csicar/blynk-library,ivankravets/blynk-library,radut/blynk-library,johan--/blynk-library,blynkkk/blynk-library,okhiroyuki/blynk-library,ivankravets/blynk-library,flashvnn/blynk-library,sstocker46/blynk-library,sstocker46/blynk-library,al1271/blynk-library,johan--/blynk-library,radut/blynk-library,ivankravets/blynk-library | scripts/blynk_ctrl.py | scripts/blynk_ctrl.py | #!/usr/bin/env python
'''
This script uses Bridge feature to control another device from the command line.
Examples:
python blynk_ctrl.py --token=b168ccc8c8734fad98323247afbc1113 write D0 1
python blynk_ctrl.py --token=b168ccc8c8734fad98323247afbc1113 write A0 123
python blynk_ctrl.py --token=b168ccc8c8734fad98323247afbc1113 write V0 "some value"
Note: read is not supported yet
Author: Volodymyr Shymanskyy
License: The MIT license
'''
import select, socket, struct
import os, sys, time, getopt # TODO: use optparse
# Parse command line options
try:
opts, args = getopt.getopt(sys.argv[1:],
"hs:p:t:",
["help", "server=", "port=", "token=", "dump", "target="])
except getopt.GetoptError:
print >>sys.stderr, __doc__
sys.exit(2)
# Default options
TOKEN = "YourAuthToken"
SERVER = "cloud.blynk.cc"
PORT = 8442
# Expert options
DUMP = 0
NODELAY = 1 # TCP_NODELAY
BRIDGE_PIN = 64
TARGET = None
for o, v in opts:
if o in ("-h", "--help"):
print __doc__
sys.exit()
elif o in ("-s", "--server"):
SERVER = v
elif o in ("-p", "--port"):
PORT = int(v)
elif o in ("--dump",):
DUMP = 1
elif o in ("-t", "--token"):
TOKEN = v
TARGET = v
elif o in ("--target",):
TARGET = v
else:
print __doc__
sys.exit(1)
if not TARGET or len(args) < 2:
print __doc__
sys.exit(1)
# Blynk protocol helpers
hdr = struct.Struct("!BHH")
class MsgType:
RSP = 0
LOGIN = 2
PING = 6
BRIDGE = 15
HW = 20
class MsgStatus:
OK = 200
def bridge(*args):
    """Serialize *args* into one Blynk BRIDGE command (header + payload).

    Echoes the command through dump() when the --dump flag is active.
    """
    # Convert params to string and join using \0
    data = "\0".join(map(str, args))
    dump("< " + " ".join(map(str, args)))
    # Prepend BRIDGE command header
    return hdr.pack(MsgType.BRIDGE, genMsgId(), len(data)) + data
static_msg_id = 1  # last message id handed out; shared module-wide
def genMsgId():
    """Return the next outgoing Blynk message id (first call returns 2)."""
    global static_msg_id
    static_msg_id += 1
    return static_msg_id
# Other utilities
start_time = time.time()  # reference point for the relative timestamps in log()
def log(msg):
    """Print *msg* prefixed with seconds elapsed since script start."""
    print "[{:7.3f}] {:}".format(float(time.time() - start_time), msg)
def dump(msg):
    """Verbose logging: forward *msg* to log() only when --dump was given."""
    if DUMP:
        log(msg)
def receive(sock, length):
    """Read exactly *length* bytes from *sock*.

    Socket timeouts are retried; if the peer closes the connection before
    *length* bytes arrive, the empty string is returned instead.
    """
    chunks = []
    remaining = length
    while remaining > 0:
        try:
            chunk = sock.recv(remaining)
        except socket.timeout:
            continue
        if not chunk:
            return ''
        chunks.append(chunk)
        remaining -= len(chunk)
    return ''.join(chunks)
# Main code
try:
conn = socket.create_connection((SERVER, PORT), 3)
except:
log("Can't connect to %s:%d" % (SERVER, PORT))
sys.exit(1)
if NODELAY != 0:
conn.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
# Authenticate
conn.sendall(hdr.pack(MsgType.LOGIN, 1, len(TOKEN)))
conn.sendall(TOKEN)
data = receive(conn, hdr.size)
if not data:
log("Login timeout")
sys.exit(1)
msg_type, msg_id, status = hdr.unpack(data)
if MsgType.RSP != 0 or msg_id != 1 or status != MsgStatus.OK:
log("Login failed: {0}, {1}, {2}".format(msg_type, msg_id, status))
sys.exit(1)
conn.sendall(bridge(BRIDGE_PIN, "i", TARGET));
op = args[0]
pin = args[1]
if op == 'write' or op == 'set':
val = args[2]
if pin[0] == 'D' or pin[0] == 'd':
conn.sendall(bridge(BRIDGE_PIN, "dw", pin[1:], val));
elif pin[0] == 'A' or pin[0] == 'a':
conn.sendall(bridge(BRIDGE_PIN, "aw", 'A'+pin[1:], val));
elif pin[0] == 'V' or pin[0] == 'v':
conn.sendall(bridge(BRIDGE_PIN, "vw", pin[1:], val));
else:
log("Invalid pin format")
elif op == 'read' or op == 'get':
if pin[0] == 'D' or pin[0] == 'd':
conn.sendall(bridge(BRIDGE_PIN, "dr", pin[1:]));
elif pin[0] == 'A' or pin[0] == 'a':
conn.sendall(bridge(BRIDGE_PIN, "ar", 'A'+pin[1:]));
elif pin[0] == 'V' or pin[0] == 'v':
conn.sendall(bridge(BRIDGE_PIN, "vr", pin[1:]));
else:
log("Invalid pin format")
while True:
data = receive(conn, hdr.size)
if not data:
log("Data read timeout")
sys.exit(1)
msg_type, msg_id, status = hdr.unpack(data)
dump("Got: {0}, {1}, {2}".format(msg_type, msg_id, status))
conn.close()
| mit | Python | |
3edc58673e21c7bbbfa5db07d7c6a92c76470dfc | add setup script | yamahigashi/sphinx-git-lowdown | setup.py | setup.py | # -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='sphinx-git-lowdown',
version='0.0.1',
url='https://github.com/yamahigashi/sphinx-git-lowdown',
# download_url='http://pypi.python.org/pypi/sphinx-git-lowdown',
license='Apache',
author='yamahigashi',
author_email='yamahigashi@gmail.com',
description='Sphinx extension to wrap git changelog',
long_description="",
zip_safe=True,
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Documentation',
'Topic :: Utilities',
],
platforms='any',
include_package_data=True,
install_requires=['Sphinx>=1.1', 'GitPython', 'lowdown'],
# namespace_packages=['sphinx_git_lowdown'],
packages=['sphinx_git_lowdown'],
)
| apache-2.0 | Python | |
0741cd665592dc18a7880622fad83aec92093907 | add setup.py | PRIArobotics/STM32Flasher,PRIArobotics/STM32Flasher | setup.py | setup.py | from setuptools import setup, find_packages
import hedgehog_light
setup(
name="stm32flasher",
description="STM32 USART Flasher",
version=hedgehog_light.__version__,
license="AGPLv3",
url="https://github.com/PRIArobotics/STM32Flasher",
author="Clemens Koza",
author_email="koza@pria.at",
packages=['hedgehog_light'],
install_requires=['pyserial>=2.7'],
)
| agpl-3.0 | Python | |
845660721ddcbefa4b52fe4a5f3fd3bba75c10bc | Add setup.py | stivalet/C-Sharp-Vuln-test-suite-gen,stivalet/C-Sharp-Vuln-test-suite-gen | setup.py | setup.py | import os
from setuptools import setup
# Utility function to read the README file.
# Used for the long_description. It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...
def read(fname):
"""Utility function to read the README file.
Used for the long_description. It's nice, because now 1) we have a top level
README file and 2) it's easier to type in the README file than to put a raw string in below ..."""
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="C# test suite generator",
version="0.1",
packages=['c_sharp_vuln_test_suite_gen'],
scripts=['main.py'],
# Project uses reStructuredText, so ensure that the docutils get
# installed or upgraded on the target machine
install_requires=['jinja2'],
package_data={
# If any package contains *.txt or *.xml files, include them:
'c_sharp_vuln_test_suite_gen': ['*.txt', '*.xml'],
},
# metadata for upload to PyPI
author="Bertrand Stivalet",
author_email="bertrand.stivalet@gmail.com",
description="Collection of vulnerable and fixed C# synthetic test cases expressing specific flaws.",
license="MIT",
keywords="C# flaws vulnerability generator",
long_description=read('README.md'),
# could also include long_description, download_url, classifiers, etc.
)
| mit | Python | |
d98c66ee6a0e27485980fd336ef2ee7a2f08462c | Add setup.py for Read the Docs | coffeestats/coffeestats-django,coffeestats/coffeestats-django,coffeestats/coffeestats-django,coffeestats/coffeestats-django | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name='coffeestats',
version='0.2.1',
description='Coffeestats.org',
author='Jan Dittberner',
author_email='jan@dittberner.info',
url='https://github.com/coffeestats/coffeestats-django',
package_dir = {'': 'coffeestats'},
)
| mit | Python | |
2c11575e5414cca19698cc500844e7553dcc9aa8 | Add setup.py | bremac/s3viewport | setup.py | setup.py | from distutils.core import setup
setup(name='s3viewport',
version='20120930',
description='A FUSE filesystem for viewing S3 buckets',
author='Brendan MacDonell',
author_email='brendan@macdonell.net',
url='https://github.com/bremac/s3viewport',
packages=['s3viewport'],
package_dir={'s3viewport': 's3viewport'},
scripts=['mount.s3viewport'],
requires=[
'fusepy (>=2.0.1)',
'boto (>=2.6.0)',
'iso8601 (>=0.1.4)',
'PyYAML (>=3.10)',
],
provides=['s3viewport'])
| isc | Python | |
02f984f7efe9481dbaa2517cfc11ec826421925f | update the version of jinja needed, 2.6 is out | getpelican/pelican,eevee/pelican,lazycoder-ru/pelican,jo-tham/pelican,kennethlyn/pelican,janaurka/git-debug-presentiation,HyperGroups/pelican,ionelmc/pelican,abrahamvarricatt/pelican,0xMF/pelican,ls2uper/pelican,deved69/pelican-1,kernc/pelican,Rogdham/pelican,karlcow/pelican,iKevinY/pelican,lazycoder-ru/pelican,arty-name/pelican,sunzhongwei/pelican,avaris/pelican,Polyconseil/pelican,catdog2/pelican,ingwinlu/pelican,levanhien8/pelican,ls2uper/pelican,simonjj/pelican,fbs/pelican,deved69/pelican-1,TC01/pelican,11craft/pelican,florianjacob/pelican,Rogdham/pelican,janaurka/git-debug-presentiation,koobs/pelican,rbarraud/pelican,GiovanniMoretti/pelican,alexras/pelican,UdeskDeveloper/pelican,avaris/pelican,btnpushnmunky/pelican,liyonghelpme/myBlog,treyhunner/pelican,lucasplus/pelican,deanishe/pelican,crmackay/pelican,deved69/pelican-1,TC01/pelican,garbas/pelican,jvehent/pelican,lucasplus/pelican,Scheirle/pelican,sunzhongwei/pelican,catdog2/pelican,crmackay/pelican,karlcow/pelican,talha131/pelican,abrahamvarricatt/pelican,Rogdham/pelican,douglaskastle/pelican,lazycoder-ru/pelican,jvehent/pelican,zackw/pelican,rbarraud/pelican,iurisilvio/pelican,treyhunner/pelican,talha131/pelican,koobs/pelican,justinmayer/pelican,TC01/pelican,51itclub/pelican,UdeskDeveloper/pelican,simonjj/pelican,HyperGroups/pelican,jimperio/pelican,joetboole/pelican,HyperGroups/pelican,abrahamvarricatt/pelican,treyhunner/pelican,farseerfc/pelican,lucasplus/pelican,catdog2/pelican,GiovanniMoretti/pelican,farseerfc/pelican,deanishe/pelican,sunzhongwei/pelican,rbarraud/pelican,jo-tham/pelican,alexras/pelican,liyonghelpme/myBlog,btnpushnmunky/pelican,number5/pelican,joetboole/pelican,number5/pelican,alexras/pelican,kernc/pelican,liyonghelpme/myBlog,Summonee/pelican,jvehent/pelican,karlcow/pelican,Summonee/pelican,douglaskastle/pelican,koobs/pelican,liyonghelpme/myBlog,gymglish/pelican,zackw/pelican,liyonghelpme/myBlog,k
ernc/pelican,UdeskDeveloper/pelican,levanhien8/pelican,Scheirle/pelican,levanhien8/pelican,garbas/pelican,number5/pelican,JeremyMorgan/pelican,eevee/pelican,jimperio/pelican,crmackay/pelican,goerz/pelican,goerz/pelican,janaurka/git-debug-presentiation,kennethlyn/pelican,JeremyMorgan/pelican,GiovanniMoretti/pelican,jimperio/pelican,11craft/pelican,btnpushnmunky/pelican,iurisilvio/pelican,11craft/pelican,iKevinY/pelican,kennethlyn/pelican,goerz/pelican,ingwinlu/pelican,eevee/pelican,51itclub/pelican,deanishe/pelican,gymglish/pelican,51itclub/pelican,joetboole/pelican,simonjj/pelican,garbas/pelican,sunzhongwei/pelican,ls2uper/pelican,ehashman/pelican,Polyconseil/pelican,zackw/pelican,douglaskastle/pelican,JeremyMorgan/pelican,gymglish/pelican,ehashman/pelican,Summonee/pelican,florianjacob/pelican,florianjacob/pelican,iurisilvio/pelican,Scheirle/pelican,getpelican/pelican,ehashman/pelican | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
requires = ['feedgenerator', 'jinja2 >= 2.6', 'pygments', 'docutils', 'pytz',
'blinker', 'unidecode']
try:
import argparse # NOQA
except ImportError:
requires.append('argparse')
entry_points = {
'console_scripts': [
'pelican = pelican:main',
'pelican-import = pelican.tools.pelican_import:main',
'pelican-quickstart = pelican.tools.pelican_quickstart:main',
'pelican-themes = pelican.tools.pelican_themes:main'
]
}
README = open('README.rst').read()
CHANGELOG = open('docs/changelog.rst').read()
setup(
name="pelican",
version="3.2",
url='http://getpelican.com/',
author='Alexis Metaireau',
author_email='authors@getpelican.com',
description="A tool to generate a static blog from reStructuredText or "
"Markdown input files.",
long_description=README + '\n' + CHANGELOG,
packages=['pelican', 'pelican.tools', 'pelican.plugins'],
include_package_data=True,
install_requires=requires,
entry_points=entry_points,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
test_suite='tests',
)
| #!/usr/bin/env python
from setuptools import setup
requires = ['feedgenerator', 'jinja2 >= 2.4', 'pygments', 'docutils', 'pytz',
'blinker', 'unidecode']
try:
import argparse # NOQA
except ImportError:
requires.append('argparse')
entry_points = {
'console_scripts': [
'pelican = pelican:main',
'pelican-import = pelican.tools.pelican_import:main',
'pelican-quickstart = pelican.tools.pelican_quickstart:main',
'pelican-themes = pelican.tools.pelican_themes:main'
]
}
README = open('README.rst').read()
CHANGELOG = open('docs/changelog.rst').read()
setup(
name="pelican",
version="3.2",
url='http://getpelican.com/',
author='Alexis Metaireau',
author_email='authors@getpelican.com',
description="A tool to generate a static blog from reStructuredText or "
"Markdown input files.",
long_description=README + '\n' + CHANGELOG,
packages=['pelican', 'pelican.tools', 'pelican.plugins'],
include_package_data=True,
install_requires=requires,
entry_points=entry_points,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Console',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Software Development :: Libraries :: Python Modules',
],
test_suite='tests',
)
| agpl-3.0 | Python |
2e2ae8f42c46a09224fdd4d39ab317f23c96d465 | Create setup.py | Colin-b/pyconfigparser | setup.py | setup.py | import os
from setuptools import setup, find_packages
this_dir = os.path.abspath(os.path.dirname(__file__))
with open(os.path.join(this_dir, 'README.md'), 'r') as f:
long_description = f.read()
setup(name='boa',
version='0.1',
author='Bounouar Colin',
maintainer='Bounouar Colin',
url='https://github.com/Colin-b/pyconfigparser',
description='Helper to parse configuration files.',
long_description=long_description,
download_url='https://github.com/Colin-b/pyconfigparser',
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers"
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Operating System :: Microsoft :: Windows :: Windows 7"
],
keywords=[
'configuration'
],
packages=find_packages(),
install_requires=[
],
platforms=[
'Windows'
]
)
| mit | Python | |
67c508d132f1ec40768a7488bc1e08e62d2208fe | Add DatabaselessTestRunner class | ZeroCater/zc_common,ZeroCater/zc_common | zc_common/databaseless_test_runner.py | zc_common/databaseless_test_runner.py | from django.test.runner import DiscoverRunner
class DatabaselessTestRunner(DiscoverRunner):
    """A test suite runner that does not set up and tear down a database."""
    def setup_databases(self):
        """No-op override of DiscoverRunner.setup_databases (skip DB creation)."""
        pass
    def teardown_databases(self, *args):
        """No-op override of DiscoverRunner.teardown_databases.

        NOTE(review): newer Django versions also pass keyword arguments to
        this hook -- confirm ``*args`` suffices for the targeted version.
        """
        pass
| mit | Python | |
15112cbf19478ff966e2946977b25a2cb0042cb6 | Add D2 PvE stats model | jgayfer/spirit | cogs/models/pve_stats.py | cogs/models/pve_stats.py | class PvEStats:
"""This class represents the general PvE stats for a Destiny 2 character (or set of characters)
Args:
pve_stats: The 'response' portion of the JSON returned from the D2 'GetHistoricalStats' endpoint
with modes 7,4,16,17,18,46,47 (given as a dictionary).
"""
def __init__(self, pve_stats):
# General PvE stats
if len(pve_stats['allPvE']):
self.time_played = pve_stats['allPvE']['allTime']['totalActivityDurationSeconds']['basic']['displayValue']
self.best_weapon = pve_stats['allPvE']['allTime']['weaponBestType']['basic']['displayValue']
self.kills = pve_stats['allPvE']['allTime']['kills']['basic']['displayValue']
self.assists = pve_stats['allPvE']['allTime']['assists']['basic']['displayValue']
self.deaths = pve_stats['allPvE']['allTime']['deaths']['basic']['displayValue']
self.event_count = pve_stats['allPvE']['allTime']['publicEventsCompleted']['basic']['displayValue']
self.heroic_event_count = pve_stats['allPvE']['allTime']['heroicPublicEventsCompleted']['basic']['displayValue']
else:
self.time_played = '-'
self.best_weapon = '-'
self.kills = 0
self.assists = 0
self.deaths = 0
self.event_count = 0
self.heroic_event_count = 0
# Strike stats
if len(pve_stats['allStrikes']):
self.strike_count = pve_stats['allStrikes']['allTime']['activitiesCleared']['basic']['displayValue']
else:
self.strike_count = 0
# Raid stats
if len(pve_stats['raid']):
self.raid_count = pve_stats['raid']['allTime']['activitiesCleared']['basic']['displayValue']
self.raid_time = pve_stats['raid']['allTime']['totalActivityDurationSeconds']['basic']['displayValue']
else:
self.raid_count = 0
self.raid_time = '-'
# Nightfall stats
self.nightfall_count = self._sum_nightfalls(pve_stats)
self.fastest_nightfall = self._find_fastest_nightfall(pve_stats)
def _find_fastest_nightfall(self, pve_stats):
times = {}
if len(pve_stats['nightfall']):
times['nightfall'] = pve_stats['nightfall']['allTime']['fastestCompletionMs']['basic']['value']
if len(pve_stats['heroicNightfall']):
times['heroicNightfall'] = pve_stats['heroicNightfall']['allTime']['fastestCompletionMs']['basic']['value']
if len(pve_stats['scored_nightfall']):
times['scored_nightfall'] = pve_stats['scored_nightfall']['allTime']['fastestCompletionMs']['basic']['value']
if len(pve_stats['scored_heroicNightfall']):
times['scored_heroicNightfall'] = pve_stats['scored_heroicNightfall']['allTime']['fastestCompletionMs']['basic']['value']
return pve_stats[min(times, key=times.get)]['allTime']['fastestCompletionMs']['basic']['displayValue']
def _sum_nightfalls(self, pve_stats):
count = 0
if len(pve_stats['nightfall']):
count += pve_stats['nightfall']['allTime']['activitiesCleared']['basic']['value']
if len(pve_stats['heroicNightfall']):
count += pve_stats['heroicNightfall']['allTime']['activitiesCleared']['basic']['value']
if len(pve_stats['scored_nightfall']):
count += pve_stats['scored_nightfall']['allTime']['activitiesCleared']['basic']['value']
if len(pve_stats['scored_heroicNightfall']):
count += pve_stats['scored_heroicNightfall']['allTime']['activitiesCleared']['basic']['value']
return int(count)
| mit | Python | |
28fd3e9305872593a9d8167afa11ce3190b43903 | add uncollectible object sample code | baishancloud/pykit,baishancloud/pykit,sejust/pykit,sejust/pykit | profiling/snippet/uncollectible.py | profiling/snippet/uncollectible.py | from __future__ import print_function
import gc
'''
This snippet shows how to create a uncollectible object:
It is an object in a cycle reference chain, in which there is an object
with __del__ defined.
The simpliest is an object that refers to itself and with a __del__ defined.
'''
def dd(*mes):
    """Print every argument on one line with no separator, then a newline."""
    print(*mes, sep='', end='\n')
class One(object):
    """Test object that can optionally trap itself in a reference cycle.

    When collectible is False the instance references itself; since the class
    also defines __del__, that cycle is what makes it uncollectible.
    """

    def __init__(self, collectible):
        self.typ = 'collectible' if collectible else 'uncollectible'
        if not collectible:
            # Self-reference forms a reference cycle; together with __del__
            # below this makes the instance uncollectible.
            self.me = self

    def __del__(self):
        dd('*** __del__ called')
def test_it(collectible):
    """Create one instance of One, delete it, and report what the garbage
    collector could (or could not) reclaim afterwards."""
    label = 'collectible' if collectible else 'uncollectible'
    dd()
    dd('======= ', label, ' object =======')
    dd()
    gc.collect()
    dd('*** init, nr of referrers: ', len(gc.get_referrers(One)))
    dd(' garbage: ', gc.garbage)
    victim = One(collectible)
    dd(' created: ', victim.typ, ': ', victim)
    dd(' nr of referrers: ', len(gc.get_referrers(One)))
    dd(' delete:')
    del victim
    gc.collect()
    dd('*** after gc, nr of referrers: ', len(gc.get_referrers(One)))
    dd(' garbage: ', gc.garbage)
if __name__ == "__main__":
test_it(collectible=True)
test_it(collectible=False)
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.