commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
804b7aa609ce1fde97f81f839776501352a59a89 | Create card-flipping-game.py | tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode | Python/card-flipping-game.py | Python/card-flipping-game.py | # Time: O(n)
# Space: O(n)
# On a table are N cards, with a positive integer printed on the front
# and back of each card (possibly different).
#
# We flip any number of cards, and after we choose one card.
#
# If the number X on the back of the chosen card is not on the front of
# any card, then this number X is good.
#
# What is the smallest number that is good? If no number is good, output 0.
#
# Here, fronts[i] and backs[i] represent the number on the front and back of
# card i.
#
# A flip swaps the front and back numbers, so the value on the front is
# now on the back and vice versa.
#
# Example:
#
# Input: fronts = [1,2,4,4,7], backs = [1,3,4,1,3]
# Output: 2
# Explanation: If we flip the second card, the fronts are [1,3,4,4,7] and
# the backs are [1,2,4,1,3].
# We choose the second card, which has number 2 on the back,
# and it isn't on the front of any card, so 2 is good.
#
# Note:
# - 1 <= fronts.length == backs.length <= 1000.
# - 1 <= fronts[i] <= 2000.
# - 1 <= backs[i] <= 2000.
import itertools
class Solution(object):
def flipgame(self, fronts, backs):
"""
:type fronts: List[int]
:type backs: List[int]
:rtype: int
"""
same = {n for i, n in enumerate(fronts) if n == backs[i]}
result = float("inf")
for n in itertools.chain(fronts, backs):
if n not in same:
result = min(result, n)
return result if result < float("inf") else 0
| mit | Python | |
a0bbb3b7befbac0ca13a9fc34df63a409cbe930c | Create plot_WRFtimeseries.py | Peter9192/MAQ_PhD,Peter9192/MAQ_PhD | Python/plot_WRFtimeseries.py | Python/plot_WRFtimeseries.py | #plot_WRFtimeseries.py
"""plot time series of WRF output;
NOTE: we assume variables to have dimension [time,y,x] or [time,z,y,x]
If this is not the case, adapt the dimensions where variable is read
Author: Ingrid Super
Last revisions: 2-6-2016"""
import netCDF4 as nc
import numpy as np
import matplotlib.pyplot as plt
from maptools import *
from numpy import ma
import datetime as dtm
from matplotlib.colors import LogNorm
##############################################################################################
"""specify the following:"""
"""directory of the WRF output and grid resolution [m] for each domain:"""
wrfout_path='/Storage/WRF/super004/WRF/run_paper2/output'
"""x- and y-location of the location you want to plot, the WRF domain, WRF vertical level and variable of interest"""
xloc=22
yloc=28
domain=3 #1 being outer domain
lev=0 #0 being surface level
var='U10'
##############################################################################################
"""read in variable of interest"""
vars=[]
timers=[]
wrfout_files=[os.path.join(wrfout_path,filename) for filename in os.listdir(wrfout_path) if filename.startswith('wrfout_d%02d'%domain)]
for each_file in wrfout_files:
mf=nc.Dataset(each_file)
dum=mf.variables[var][:]
wrftime=mf.variables['Times'][:]
for j in range(len(wrftime)):
year=int(''.join(wrftime[j][0:4]))
month=int(''.join(wrftime[j][5:7]))
day=int(''.join(wrftime[j][8:10]))
hour=int(''.join(wrftime[j][11:13]))
dat=dtm.datetime(year,month,day,hour,0)
timers.append(dat)
if len(dum.shape)==3:
dum2=dum[:,yloc,xloc]
elif len(dum.shape)==4:
dum2=dum[:,lev,yloc,xloc]
vars.extend(dum2)
"""make plot and lay-out"""
xvar=np.arange(1,len(timers)+1)
plot(xvar,vars,label='time series',color='k',linestyle='-',linewidth=2)
legend(loc='upper right')
xlabel('label') #please specify
ylabel('label') #please specify
title('title') #please specify
skp=len(timers)/4
ticks=[xvar[0],xvar[skp],xvar[2*skp],xvar[3*skp],xvar[4*skp]]
"""the following statement allows you to change the labels on the x-axis and can be adapted to your specific needs (e.g. including year)"""
labels=[timers[0].strftime("%d. %B"),timers[skp].strftime("%d. %B"),timers[2*skp].strftime("%d. %B"),timers[3*skp].strftime("%d. %B"),timers[4*skp].strftime("%d. %B")]
plt.xticks(ticks,labels)
plt.show()
| mit | Python | |
4db53b62f616edb61ce469ffb2f4cd642f925784 | Add patch migration for missing constraints (#6276) | pypa/warehouse,pypa/warehouse,pypa/warehouse,pypa/warehouse | warehouse/migrations/versions/48def930fcfd_webauthn_and_macaroon_constraints.py | warehouse/migrations/versions/48def930fcfd_webauthn_and_macaroon_constraints.py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
WebAuthn and Macaroon constraints
Revision ID: 48def930fcfd
Revises: 5ea52744d154
Create Date: 2019-07-26 17:55:41.802528
"""
from alembic import op
revision = "48def930fcfd"
down_revision = "5ea52744d154"
def upgrade():
op.create_unique_constraint(
"_user_macaroons_description_uc", "macaroons", ["description", "user_id"]
)
op.drop_index("user_security_keys_label_key", table_name="user_security_keys")
op.create_unique_constraint(
"user_security_keys_label_key", "user_security_keys", ["label"]
)
def downgrade():
op.drop_constraint(
"user_security_keys_label_key", "user_security_keys", type_="unique"
)
op.create_index(
"user_security_keys_label_key", "user_security_keys", ["user_id"], unique=False
)
op.drop_constraint("_user_macaroons_description_uc", "macaroons", type_="unique")
| apache-2.0 | Python | |
2c6141656e0a0885dbcfecad5f227a950dafc3c3 | Create predict-the-winner.py | githubutilities/LeetCode,githubutilities/LeetCode,githubutilities/LeetCode,githubutilities/LeetCode,githubutilities/LeetCode | Python/predict-the-winner.py | Python/predict-the-winner.py | # Time: O(n^2)
# Space: O(n)
# Given an array of scores that are non-negative integers.
# Player 1 picks one of the numbers from either end of the array
# followed by the player 2 and then player 1 and so on.
# Each time a player picks a number, that number will not be available for the next player.
# This continues until all the scores have been chosen. The player with the maximum score wins.
#
# Given an array of scores, predict whether player 1 is the winner.
# You can assume each player plays to maximize his score.
#
# Example 1:
# Input: [1, 5, 2]
# Output: False
# Explanation: Initially, player 1 can choose between 1 and 2.
# If he chooses 2 (or 1), then player 2 can choose from 1 (or 2) and 5.
# If player 2 chooses 5, then player 1 will be left with 1 (or 2).
# So, final score of player 1 is 1 + 2 = 3, and player 2 is 5.
# Hence, player 1 will never be the winner and you need to return False.
# Example 2:
# Input: [1, 5, 233, 7]
# Output: True
# Explanation: Player 1 first chooses 1. Then player 2 have to choose between 5 and 7.
# No matter which number player 2 choose, player 1 can choose 233.
# Finally, player 1 has more score (234) than player 2 (12), so you need to return True representing player1 can win.
# Note:
# 1 <= length of the array <= 20.
# Any scores in the given array are non-negative integers and will not exceed 10,000,000.
# If the scores of both players are equal, then player 1 is still the winner.
class Solution(object):
def PredictTheWinner(self, nums):
"""
:type nums: List[int]
:rtype: bool
"""
if len(nums) % 2 == 0 or len(nums) == 1:
return True
dp = [0] * len(nums);
for i in reversed(xrange(len(nums))):
dp[i] = nums[i]
for j in xrange(i+1, len(nums)):
dp[j] = max(nums[i] - dp[j], nums[j] - dp[j - 1])
return dp[-1] >= 0
| mit | Python | |
5de560e8458375a2e03200128ff046927cb3dbb5 | Add first solution to LCq003 | lemming52/white_pawn,lemming52/white_pawn | leetcode/q003/solution.py | leetcode/q003/solution.py |
MAX_SUBSTRING_LENGTH = 26
class Solution:
def lengthOfLongestSubstring(self, s: str) -> int:
"""
for a given string, at each starting position within the string,
check how far you can go without repeating a character
"""
max_length = 0
total_length = len(s)
for i in range(0, total_length):
if max_length > (total_length - i):
return max_length
length = findUniqueLength(s[i:])
if length > max_length:
max_length = length
if max_length == MAX_SUBSTRING_LENGTH:
return max_length
return max_length
def findUniqueLength(s: str) -> int:
chars = {}
for char in s:
if char in chars:
return len(chars)
chars[char] = True
return len(chars)
| mit | Python | |
a53d19fccc8656f5190385e028261e4bcff84531 | add rottest.py | tomba/kmsxx,tomba/kmsxx,tomba/kmsxx,tomba/kmsxx | py/tests/rottest.py | py/tests/rottest.py | #!/usr/bin/python3
import pykms
from enum import Enum
import termios, sys, os, tty
card = pykms.OmapCard()
res = pykms.ResourceManager(card)
conn = res.reserve_connector()
crtc = res.reserve_crtc(conn)
mode = conn.get_default_mode()
modeb = mode.to_blob(card)
rootplane = res.reserve_primary_plane(crtc, pykms.PixelFormat.XRGB8888)
plane = res.reserve_overlay_plane(crtc, pykms.PixelFormat.NV12)
card.disable_planes()
req = pykms.AtomicReq(card)
req.add(conn, "CRTC_ID", crtc.id)
req.add(crtc, {"ACTIVE": 1,
"MODE_ID": modeb.id})
# This enables the root plane
#rootfb = pykms.OmapFramebuffer(card, mode.hdisplay, mode.vdisplay, "XR24");
#pykms.draw_test_pattern(rootfb);
#
#req.add(rootplane, {"FB_ID": rootfb.id,
# "CRTC_ID": crtc.id,
# "SRC_X": 0 << 16,
# "SRC_Y": 0 << 16,
# "SRC_W": mode.hdisplay << 16,
# "SRC_H": mode.vdisplay << 16,
# "CRTC_X": 0,
# "CRTC_Y": 0,
# "CRTC_W": mode.hdisplay,
# "CRTC_H": mode.vdisplay,
# "zorder": 0})
req.commit_sync(allow_modeset = True)
class Rotation(int, Enum):
ROTATE_0 = 1 << 0
ROTATE_90 = 1 << 1
ROTATE_180 = 1 << 2
ROTATE_270 = 1 << 3
ROTATE_MASK = ROTATE_0 | ROTATE_90 | ROTATE_180 | ROTATE_270
REFLECT_X = 1 << 4
REFLECT_Y = 1 << 5
REFLECT_MASK = REFLECT_X | REFLECT_Y
def show_rot_plane(crtc, plane, fb, rot, x_scale, y_scale):
crtc_w = int(fb_w * x_scale)
crtc_h = int(fb_h * y_scale)
if (rot & Rotation.ROTATE_90) or (rot & Rotation.ROTATE_270):
tmp = crtc_w
crtc_w = crtc_h
crtc_h = tmp
crtc_x = int(mode.hdisplay / 2 - crtc_w / 2)
crtc_y = int(mode.vdisplay / 2 - crtc_h / 2)
req = pykms.AtomicReq(card)
src_x = 0
src_y = 0
src_w = fb_w - src_x
src_h = fb_h - src_y
print("SRC {},{}-{}x{} DST {},{}-{}x{}".format(
src_x, src_y, src_w, src_h,
crtc_x, crtc_y, crtc_w, crtc_h))
angle_str = Rotation(rot & Rotation.ROTATE_MASK).name
reflect_x_str = "REFLECT_X" if rot & Rotation.REFLECT_X else ""
reflect_y_str = "REFLECT_Y" if rot & Rotation.REFLECT_Y else ""
print("{} {} {}".format(angle_str, reflect_x_str, reflect_y_str))
sys.stdout.flush()
req.add(plane, {"FB_ID": fb.id,
"CRTC_ID": crtc.id,
"SRC_X": src_x << 16,
"SRC_Y": src_y << 16,
"SRC_W": src_w << 16,
"SRC_H": src_h << 16,
"CRTC_X": crtc_x,
"CRTC_Y": crtc_y,
"CRTC_W": crtc_w,
"CRTC_H": crtc_h,
"rotation": rot,
"zorder": 2})
req.commit_sync(allow_modeset = True)
fb_w = 480
fb_h = 150
x_scale = 1
y_scale = 1
fb = pykms.OmapFramebuffer(card, fb_w, fb_h, "NV12", tiled = True);
#fb = pykms.DumbFramebuffer(card, fb_w, fb_h, "NV12")
pykms.draw_test_pattern(fb);
def even(i):
return i & ~1
pykms.draw_text(fb, even((fb_w // 2) - (8 * 3) // 2), 4, "TOP", pykms.white)
pykms.draw_text(fb, even((fb_w // 2) - (8 * 6) // 2), fb_h - 8 - 4, "BOTTOM", pykms.white)
pykms.draw_text(fb, 4, even(((fb_h // 2) - 4)), "L", pykms.white)
pykms.draw_text(fb, fb_w - 8 - 4, even(((fb_h // 2) - 4)), "R", pykms.white)
rots = [ Rotation.ROTATE_0, Rotation.ROTATE_90, Rotation.ROTATE_180, Rotation.ROTATE_270 ]
cursors = [ "A", "D", "B", "C" ]
print("Use the cursor keys, x and y to change rotation. Press q to quit.")
fd = sys.stdin.fileno()
oldterm = termios.tcgetattr(fd)
tty.setcbreak(fd)
try:
esc_seq = 0
current_rot = Rotation.ROTATE_0
show_rot_plane(crtc, plane, fb, current_rot, x_scale, y_scale)
while True:
c = sys.stdin.read(1)
#print("Got character {}".format(repr(c)))
changed = False
handled = False
if esc_seq == 0:
if c == "\x1b":
esc_seq = 1
handled = True
elif esc_seq == 1:
if c == "[":
esc_seq = 2
handled = True
else:
esc_seq = 0
elif esc_seq == 2:
esc_seq = 0
if c in cursors:
handled = True
rot = rots[cursors.index(c)]
current_rot &= ~Rotation.ROTATE_MASK
current_rot |= rot
changed = True
if not handled:
if c == "q":
break
elif c == "x":
current_rot ^= Rotation.REFLECT_X
changed = True
elif c == "y":
current_rot ^= Rotation.REFLECT_Y
changed = True
if changed:
show_rot_plane(crtc, plane, fb, current_rot, x_scale, y_scale)
finally:
termios.tcsetattr(fd, termios.TCSAFLUSH, oldterm)
| mpl-2.0 | Python | |
c80f789da36ad0c23a6cbbf20fd97808d62c3619 | Create a simple Spinner. | hmleal/py-clui | py_clui/__init__.py | py_clui/__init__.py | #!/usr/bin/env python
# https://en.wikipedia.org/wiki/ANSI_escape_code#Colors
# http://aurelio.net/shell/canivete/
# https://stackoverflow.com/questions/27265322/how-to-print-to-console-in-color
import sys
import time
#def colored(text, color):
# return COLORS[color] + text + COLORS['white']
#def Gauge(value, max_value, width, danger_zone, suffix=None):
# if max_value == 0:
# return '[]'
#
# length = math.ceil(value / max_value * width)
#
# if length > width:
# length = width
#
# bar_color = 'green'
# if value > danger_zone:
# bar_color = 'red'
#
# return '[' + colored('|' * length, bar_color) + '-' * (width + 1 - length) + '] ' + colored(suffix, 'grey')
#COLORS = {
# 'white': '\033[0m', # White (normal)
# 'red': '\033[31m', # Red
# 'green': '\033[32m', # Green
# 'orange': '\033[33m', # Orange
# 'blue': '\033[34m', # Blue
# 'purple': '\033[35m', # Purple
# 'grey': '\033[30;1m', # Grey
#}
class Spinner:
def __init__(self, message, style=None):
self.message = message
self.style = ['|','/', '-', '\\']
self._number = 0
#self.style = ['◜', '◠', '◝', '◞', '◡', '◟']
self.style = ['⣾', '⣽', '⣻', '⢿', '⡿', '⣟', '⣯', '⣷']
def run(self):
self._draw()
def update_msg(self, message):
self.message = message
def _draw(self):
frames = [' \u001b[96m{0} '.format(el) for el in self.style]
msg = '\u001b[0G{0}\u001b[90m{1}\u001b[0m'
self._number += 1
print(msg.format(frames[self._number % len(self.style)], self.message), end='\r', file=sys.stdout, flush=True)
| mit | Python | |
1f043dd959fa1e1d243a3278abeb66838a2f9305 | Remove the IR target type in migration. | justineaster/interop,auvsi-suas/interop,justineaster/interop,justineaster/interop,auvsi-suas/interop,justineaster/interop,auvsi-suas/interop,auvsi-suas/interop,justineaster/interop | server/auvsi_suas/migrations/0013_remove_ir_as_target_type.py | server/auvsi_suas/migrations/0013_remove_ir_as_target_type.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [('auvsi_suas', '0012_missionclockevent'), ]
operations = [
migrations.AlterField(
model_name='target',
name='target_type',
field=models.IntegerField(choices=[(1, b'standard'), (2, b'qrc'), (
3, b'off_axis'), (4, b'emergent')]), ),
]
| apache-2.0 | Python | |
e869920c2334e4d0bcd95d2805c8126952c97b51 | Add tests for `cms.debug`. | danielsamuels/cms,jamesfoley/cms,jamesfoley/cms,jamesfoley/cms,lewiscollard/cms,jamesfoley/cms,dan-gamble/cms,danielsamuels/cms,dan-gamble/cms,danielsamuels/cms,lewiscollard/cms,dan-gamble/cms,lewiscollard/cms | cms/tests/test_debug.py | cms/tests/test_debug.py | from django.test import TestCase
from ..debug import print_exc, print_current_exc
import os
from StringIO import StringIO
import sys
class TestDebug(TestCase):
def test_print_exc(self):
def func():
return True
def func_raise():
raise Exception
self.assertEqual(print_exc(func), func)
self.orig_stderr = sys.stderr
sys.stderr = open(os.devnull, 'w')
with self.settings(DEBUG=True):
self.assertTrue(print_exc(func)())
with self.assertRaises(Exception):
print_exc(func_raise)()
sys.stderr = self.orig_stderr
def test_print_current_exc(self):
# Redirect STDOUT so we can capture the `print`.
orig_stderr = sys.stderr
stderr = StringIO()
sys.stderr = stderr
print_current_exc()
with self.settings(DEBUG=True):
print_current_exc()
self.assertEqual(stderr.getvalue().strip(), 'None')
sys.stderr = orig_stderr
| bsd-3-clause | Python | |
f584c26b05ce47f993f02801256370d6dde475ac | create the Spider for Switzerland of McDonalds | iandees/all-the-places,iandees/all-the-places,iandees/all-the-places | locations/spiders/mcdonalds_ch.py | locations/spiders/mcdonalds_ch.py | # -*- coding: utf-8 -*-
import scrapy
import re
import json
import datetime
from locations.items import GeojsonPointItem
class McDonalsCHSpider(scrapy.Spider):
name = "mcdonalds_ch"
allowed_domains = ["www.mcdonalds.ch"]
start_urls = (
'https://www.mcdonalds.ch/api/v1/restaurants/?lon=7.458354699999973&lat=46.786249&range=1000',
'https://www.mcdonalds.ch/api/v1/restaurants/?lon=9.022665800000027&lat=46.9342272&range=1000'
)
def normalize_time(self, time_str):
time = datetime.datetime.fromtimestamp(time_str).strftime('%H:%M %Z%z')
return time
def store_hours(self, data):
day_groups = []
this_day_group = {}
day_hours = data["dayofweekservice"]
for day_hour in day_hours:
if not day_hour['isOpen']:
continue
hours = ''
day, start, end = day_hour['dayOfWeek'], day_hour['startTime'], day_hour['endTime']
start = self.normalize_time(start)
end = self.normalize_time(end)
short_day = day[:2]
hours = '{}:{}-{}:{}'.format(start[:2], start[3:], end[:2], end[3:])
if not this_day_group:
this_day_group = {
'from_day': short_day,
'to_day': short_day,
'hours': hours,
}
elif hours == this_day_group['hours']:
this_day_group['to_day'] = short_day
elif hours != this_day_group['hours']:
day_groups.append(this_day_group)
this_day_group = {
'from_day': short_day,
'to_day': short_day,
'hours': hours,
}
day_groups.append(this_day_group)
if not day_groups:
return None
if not day_groups[0]:
return None
opening_hours = ''
if len(day_groups) == 1 and day_groups[0]['hours'] in ('00:00-23:59', '00:00-00:00'):
opening_hours = '24/7'
else:
for day_group in day_groups:
if day_group['from_day'] == day_group['to_day']:
opening_hours += '{from_day} {hours}; '.format(**day_group)
else:
opening_hours += '{from_day}-{to_day} {hours}; '.format(**day_group)
opening_hours = opening_hours [:-2]
return opening_hours
def parse(self, response):
results = json.loads(response.body_as_unicode())
for data in results:
properties = {
'city': data['address']['cityTown'],
'ref': data['id'],
'addr_full': data['address']['addressLine1'],
'phone': data['storeNumbers']['phonenumber'][0]['number'],
'state': data['address']['country'],
'postcode': data['address']['postalZip'],
'lat': data['address']['location']['lat'],
'lon': data['address']['location']['lon'],
'name': data['publicName']
}
opening_hours = self.store_hours(data['storeServices'])
if opening_hours:
properties['opening_hours'] = opening_hours
yield GeojsonPointItem(**properties)
| mit | Python | |
f301dd2366f53a6cf4b0949942b8520502f54351 | Fix import error when [jwt] not installed. | Tusky/box-python-sdk,sanketdjain/box-python-sdk,sanketdjain/box-python-sdk,Frencil/box-python-sdk,samkuehn/box-python-sdk,lkabongoVC/box-python-sdk,Frencil/box-python-sdk,box/box-python-sdk,lkabongoVC/box-python-sdk,samkuehn/box-python-sdk | boxsdk/__init__.py | boxsdk/__init__.py | # coding: utf-8
from __future__ import unicode_literals
try:
from .auth.jwt_auth import JWTAuth
except ImportError:
JWTAuth = None # If extras are not installed, JWTAuth won't be available.
from .auth.oauth2 import OAuth2
from .client import Client
from .object import * # pylint:disable=wildcard-import,redefined-builtin
| # coding: utf-8
from __future__ import unicode_literals
from .auth.jwt_auth import JWTAuth
from .auth.oauth2 import OAuth2
from .client import Client
from .object import * # pylint:disable=wildcard-import,redefined-builtin
| apache-2.0 | Python |
334961054d875641d150eec4d6938f6f824ea655 | Add initializer for top-level '_gcloud_vendor' package. | optimizely/gcloud-python,jgeewax/gcloud-python,lucemia/gcloud-python,tseaver/google-cloud-python,dhermes/google-cloud-python,elibixby/gcloud-python,tseaver/gcloud-python,daspecster/google-cloud-python,GrimDerp/gcloud-python,blowmage/gcloud-python,jonparrott/google-cloud-python,tseaver/google-cloud-python,GoogleCloudPlatform/gcloud-python,calpeyser/google-cloud-python,waprin/gcloud-python,VitalLabs/gcloud-python,daspecster/google-cloud-python,elibixby/gcloud-python,CyrusBiotechnology/gcloud-python,dhermes/gcloud-python,thesandlord/gcloud-python,blowmage/gcloud-python,waprin/google-cloud-python,lucemia/gcloud-python,tswast/google-cloud-python,VitalLabs/gcloud-python,thesandlord/gcloud-python,jonparrott/google-cloud-python,EugenePig/gcloud-python,tseaver/gcloud-python,tswast/google-cloud-python,googleapis/google-cloud-python,tartavull/google-cloud-python,jbuberel/gcloud-python,waprin/gcloud-python,dhermes/google-cloud-python,jbuberel/gcloud-python,Fkawala/gcloud-python,quom/google-cloud-python,jonparrott/gcloud-python,dhermes/google-cloud-python,dhermes/gcloud-python,EugenePig/gcloud-python,quom/google-cloud-python,waprin/google-cloud-python,GrimDerp/gcloud-python,Fkawala/gcloud-python,tseaver/google-cloud-python,vj-ug/gcloud-python,jonparrott/gcloud-python,jgeewax/gcloud-python,calpeyser/google-cloud-python,googleapis/google-cloud-python,CyrusBiotechnology/gcloud-python,optimizely/gcloud-python,tswast/google-cloud-python,optimizely/gcloud-python,GoogleCloudPlatform/gcloud-python,tartavull/google-cloud-python,vj-ug/gcloud-python | _gcloud_vendor/__init__.py | _gcloud_vendor/__init__.py | """Dependencies "vendored in", due to dependencies, Python versions, etc.
Current set
-----------
``apitools`` (pending release to PyPI, plus acceptable Python version
support for its dependencies). Review before M2.
"""
| apache-2.0 | Python | |
906898f6b8139c364994b50e0d6c60da6e4ec549 | Add train module, simplify the code in and modules | minhvvu/writing_ass | train.py | train.py | from nltk.corpus import PlaintextCorpusReader
import networkx as nx
from collections import Counter
from itertools import chain
from itertools import tee
import graph_utils as gutil
def pairwise(itr):
a, b = tee(itr) # two version of itr
next(b, None) # b goes ahead one step
return zip(a, b) # return iterator
def doc_to_sentences(root, ext):
return PlaintextCorpusReader(root, ext).sents()
def doc_to_pairs(root, ext):
return chain.from_iterable([pairwise(s)
for s in doc_to_sentences(root, ext)])
def pairs_to_graph(pairs):
G = nx.DiGraph()
G.add_weighted_edges_from([(n1, n2, count)
for ((n1, n2), count) in Counter(pairs).items()])
return G
def sum_path(G, p):
return sum([G[n1][n2]['weight'] for (n1, n2) in pairwise(p)])
def all_path(G, src, dest, cutoff=4):
return sorted([path
for path in nx.all_simple_paths(G, src, dest, cutoff)],
key=lambda path: -1 * sum_path(G, path))
def train_graph(model_name, data_root, file_ext=r'.*\.txt'):
pairs = doc_to_pairs(data_root, file_ext)
G = pairs_to_graph(pairs)
gutil.write_edges(G, model_name)
def example():
pairs = [('a', 'b'), ('b', 'c'), ('c', 'd'), ('a', 'b'), ('c', 'd')]
G = pairs_to_graph(pairs)
gutil.print_graph(G)
gutil.draw_graph(G, 'example.png')
gutil.write_edges(G, 'example.edge')
res = all_path(G, 'a', 'd')
print(res)
if __name__ == '__main__':
data_root = '/home/minhvu/Data/dummy/'
model_name = 'dummy_v1.edge'
train_graph(model_name, data_root)
| mit | Python | |
2e9e1f801653d4df60f5103d70ae91a7d307b200 | add the management bare minimum to auto-create site. | 1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow | oneflow/base/management/__init__.py | oneflow/base/management/__init__.py | # -*- coding: utf-8 -*-
from django.contrib.sites import models
from django.db.models import signals
from django.conf import settings
def create_site(app, created_models, verbosity, **kwargs):
""" Create the default site when when we install the sites framework. """
if not models.Site in created_models:
return
try:
models.Site.objects.get(pk=settings.SITE_ID)
except models.Site.DoesNotExist:
models.Site.objects.create(pk=settings.SITE_ID,
name=settings.SITE_NAME,
domain=settings.SITE_DOMAIN).save()
signals.post_syncdb.connect(create_site, sender=models)
| agpl-3.0 | Python | |
0f9f4f1ee325d72d09625850ba6a153ae5616ab0 | Update test to load plugin | ptthiem/nose2,little-dude/nose2,little-dude/nose2,leth/nose2,ojengwa/nose2,ezigman/nose2,ojengwa/nose2,ezigman/nose2,leth/nose2,ptthiem/nose2 | nose2/tests/functional/test_collect_plugin.py | nose2/tests/functional/test_collect_plugin.py | import re
from nose2.tests._common import FunctionalTestCase
class CollectOnlyFunctionalTest(FunctionalTestCase):
def test_collect_tests_in_package(self):
self.assertTestRunOutputMatches(
self.runIn('scenario/tests_in_package', '-v', '--collect-only',
'--plugin=nose2.plugins.collect'),
stderr=EXPECT_LAYOUT1)
# expectations
EXPECT_LAYOUT1 = re.compile("""\
Ran 25 tests in \d.\d+s
OK""")
| import re
from nose2.tests._common import FunctionalTestCase
class CollectOnlyFunctionalTest(FunctionalTestCase):
def test_collect_tests_in_package(self):
self.assertTestRunOutputMatches(
self.runIn('scenario/tests_in_package', '-v', '--collect-only'),
stderr=EXPECT_LAYOUT1)
# expectations
EXPECT_LAYOUT1 = re.compile("""\
Ran 25 tests in \d.\d+s
OK""")
| bsd-2-clause | Python |
8eddb8eafede9fa5aa9761ae53fa0d7081a4ba8f | Add setup.py | InsightSoftwareConsortium/ITKAnisotropicDiffusionLBR,InsightSoftwareConsortium/ITKAnisotropicDiffusionLBR,InsightSoftwareConsortium/ITKAnisotropicDiffusionLBR | setup.py | setup.py | from __future__ import print_function
from os import sys
try:
from skbuild import setup
except ImportError:
print('scikit-build is required to build from source.', file=sys.stderr)
print('Please run:', file=sys.stderr)
print('', file=sys.stderr)
print(' python -m pip install scikit-build')
sys.exit(1)
setup(
name='itk-anisotropicdiffusionlbr',
version='0.1.0',
author='Insight Software Consortium',
author_email='community@itk.org',
packages=['itk'],
package_dir={'itk': 'itk'},
download_url=r'https://github.com/InsightSoftwareConsortium/ITKAnisotropicDiffusionLBR',
description=r'Smooth images while preserving edges or coherent structures.',
long_description='ITK is an open-source, cross-platform library that '
'provides developers with an extensive suite of software '
'tools for image analysis. This package implements anisotropic diffusion '
'using Lattice Basis Reduction. For more information, see '
'Mirebeau J., Fehrenbach J., Risser L., Tobji S. '
'"Anisotropic Diffusion in ITK" '
'http://insight-journal.org/browse/publication/953 '
'http://hdl.handle.net/10380/3505 ',
classifiers=[
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: C++",
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"Intended Audience :: Education",
"Intended Audience :: Healthcare Industry",
"Intended Audience :: Science/Research",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Medical Science Apps.",
"Topic :: Scientific/Engineering :: Information Analysis",
"Topic :: Software Development :: Libraries",
"Operating System :: Android",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX",
"Operating System :: Unix",
"Operating System :: MacOS"
],
license='Apache',
keywords='ITK InsightToolkit',
url=r'https://itk.org/',
install_requires=[
r'itk'
]
)
| apache-2.0 | Python | |
a577c19e1296df413100f6858ded39ab71e757a8 | Add setup.py | gadomski/sbd | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name="sbd",
version="0.1",
description="Iridium Short Burst Data DirectIP handling",
author="Pete Gadomski",
author_email="pete.gadomski@gmail.com",
url="https://github.com/gadomski/sbd",
)
| mit | Python | |
b3c13747dcb72d751dbc2322d240499cb0fa2764 | Add shebang line to setup.py | nkgilley/home-assistant,ct-23/home-assistant,morphis/home-assistant,srcLurker/home-assistant,nnic/home-assistant,aequitas/home-assistant,Zyell/home-assistant,srcLurker/home-assistant,sffjunkie/home-assistant,home-assistant/home-assistant,turbokongen/home-assistant,justyns/home-assistant,deisi/home-assistant,jnewland/home-assistant,deisi/home-assistant,caiuspb/home-assistant,miniconfig/home-assistant,mKeRix/home-assistant,bdfoster/blumate,oandrew/home-assistant,MungoRae/home-assistant,sffjunkie/home-assistant,lukas-hetzenecker/home-assistant,ct-23/home-assistant,srcLurker/home-assistant,xifle/home-assistant,Zac-HD/home-assistant,srcLurker/home-assistant,Zyell/home-assistant,MungoRae/home-assistant,partofthething/home-assistant,leoc/home-assistant,instantchow/home-assistant,dmeulen/home-assistant,varunr047/homefile,dmeulen/home-assistant,mikaelboman/home-assistant,MungoRae/home-assistant,philipbl/home-assistant,MungoRae/home-assistant,open-homeautomation/home-assistant,bdfoster/blumate,mKeRix/home-assistant,robjohnson189/home-assistant,florianholzapfel/home-assistant,fbradyirl/home-assistant,robjohnson189/home-assistant,caiuspb/home-assistant,varunr047/homefile,ma314smith/home-assistant,robjohnson189/home-assistant,tboyce1/home-assistant,Duoxilian/home-assistant,rohitranjan1991/home-assistant,w1ll1am23/home-assistant,jabesq/home-assistant,nevercast/home-assistant,open-homeautomation/home-assistant,soldag/home-assistant,robbiet480/home-assistant,alexmogavero/home-assistant,philipbl/home-assistant,sfam/home-assistant,Theb-1/home-assistant,rohitranjan1991/home-assistant,lukas-hetzenecker/home-assistant,Cinntax/home-assistant,sffjunkie/home-assistant,jnewland/home-assistant,MartinHjelmare/home-assistant,xifle/home-assistant,emilhetty/home-assistant,mKeRix/home-assistant,Danielhiversen/home-assistant,nnic/home-assistant,PetePriority/home-assistant,aequitas/home-assistant,kyvinh/home-assistant,stefan-jonasson/home-assistant,mKeRix/home-assistant,ct-23/home-assistant,tboyce021/home-assistant,eagleamon/home-assistant,tboyce1/home-assistant,balloob/home-assistant,sfam/home-assistant,leppa/home-assistant,MartinHjelmare/home-assistant,aoakeson/home-assistant,eagleamon/home-assistant,nugget/home-assistant,jamespcole/home-assistant,HydrelioxGitHub/home-assistant,jaharkes/home-assistant,Teagan42/home-assistant,kennedyshead/home-assistant,qedi-r/home-assistant,Smart-Torvy/torvy-home-assistant,emilhetty/home-assistant,adrienbrault/home-assistant,MungoRae/home-assistant,mezz64/home-assistant,hmronline/home-assistant,florianholzapfel/home-assistant,fbradyirl/home-assistant,emilhetty/home-assistant,philipbl/home-assistant,Julian/home-assistant,jawilson/home-assistant,robjohnson189/home-assistant,tinloaf/home-assistant,devdelay/home-assistant,Zyell/home-assistant,hexxter/home-assistant,adrienbrault/home-assistant,miniconfig/home-assistant,Julian/home-assistant,Danielhiversen/home-assistant,keerts/home-assistant,aronsky/home-assistant,instantchow/home-assistant,dmeulen/home-assistant,emilhetty/home-assistant,hmronline/home-assistant,tchellomello/home-assistant,aronsky/home-assistant,hmronline/home-assistant,oandrew/home-assistant,happyleavesaoc/home-assistant,JshWright/home-assistant,DavidLP/home-assistant,HydrelioxGitHub/home-assistant,kyvinh/home-assistant,balloob/home-assistant,sander76/home-assistant,tboyce021/home-assistant,devdelay/home-assistant,morphis/home-assistant,nnic/home-assistant,dmeulen/home-assistant,leoc/home-assistant,molobrakos/home-assistant,jnewland/home-assistant,postlund/home-assistant,home-assistant/home-assistant,Smart-Torvy/torvy-home-assistant,mikaelboman/home-assistant,aequitas/home-assistant,sfam/home-assistant,xifle/home-assistant,LinuxChristian/home-assistant,Zac-HD/home-assistant,ct-23/home-assistant,betrisey/home-assistant,alexmogavero/home-assistant,Duoxilian/home-assistant,eagleamon/home-assistant,kyvinh/home-assistant,Theb-1/home-assistant,shaftoe/home-assistant,sffjunkie/home-assistant,Julian/home-assistant,persandstrom/home-assistant,auduny/home-assistant,Smart-Torvy/torvy-home-assistant,ewandor/home-assistant,toddeye/home-assistant,pschmitt/home-assistant,deisi/home-assistant,stefan-jonasson/home-assistant,DavidLP/home-assistant,sander76/home-assistant,shaftoe/home-assistant,Zac-HD/home-assistant,jabesq/home-assistant,molobrakos/home-assistant,Duoxilian/home-assistant,ct-23/home-assistant,hmronline/home-assistant,hexxter/home-assistant,coteyr/home-assistant,ma314smith/home-assistant,alexmogavero/home-assistant,luxus/home-assistant,ma314smith/home-assistant,florianholzapfel/home-assistant,pschmitt/home-assistant,oandrew/home-assistant,HydrelioxGitHub/home-assistant,stefan-jonasson/home-assistant,titilambert/home-assistant,varunr047/homefile,balloob/home-assistant,titilambert/home-assistant,happyleavesaoc/home-assistant,hexxter/home-assistant,jaharkes/home-assistant,JshWright/home-assistant,sdague/home-assistant,happyleavesaoc/home-assistant,happyleavesaoc/home-assistant,auduny/home-assistant,hmronline/home-assistant,jamespcole/home-assistant,nugget/home-assistant,JshWright/home-assistant,shaftoe/home-assistant,devdelay/home-assistant,toddeye/home-assistant,GenericStudent/home-assistant,Teagan42/home-assistant,aoakeson/home-assistant,betrisey/home-assistant,nevercast/home-assistant,LinuxChristian/home-assistant,stefan-jonasson/home-assistant,keerts/home-assistant,joopert/home-assistant,tboyce1/home-assistant,PetePriority/home-assistant,tchellomello/home-assistant,w1ll1am23/home-assistant,Duoxilian/home-assistant,partofthething/home-assistant,keerts/home-assistant,Smart-Torvy/torvy-home-assistant,keerts/home-assistant,coteyr/home-assistant,nevercast/home-assistant,jaharkes/home-assistant,open-homeautomation/home-assistant,Zac-HD/home-assistant,justyns/home-assistant,MartinHjelmare/home-assistant,turbokongen/home-assistant,leoc/home-assistant,aoakeson/home-assistant,mikaelboman/home-assistant,PetePriority/home-assistant,deisi/home-assistant,kennedyshead/home-assistant,LinuxChristian/home-assistant,morphis/home-assistant,morphis/home-assistant,jabesq/home-assistant,leppa/home-assistant,bdfoster/blumate,oandrew/home-assistant,bdfoster/blumate,tinloaf/home-assistant,persandstrom/home-assistant,philipbl/home-assistant,florianholzapfel/home-assistant,miniconfig/home-assistant,mikaelboman/home-assistant,mikaelboman/home-assistant,LinuxChristian/home-assistant,deisi/home-assistant,betrisey/home-assistant,devdelay/home-assistant,betrisey/home-assistant,bdfoster/blumate,persandstrom/home-assistant,mezz64/home-assistant,FreekingDean/home-assistant,tboyce1/home-assistant,soldag/home-assistant,Cinntax/home-assistant,caiuspb/home-assistant,Julian/home-assistant,shaftoe/home-assistant,robbiet480/home-assistant,varunr047/homefile,nugget/home-assistant,auduny/home-assistant,coteyr/home-assistant,justyns/home-assistant,eagleamon/home-assistant,LinuxChristian/home-assistant,jawilson/home-assistant,nkgilley/home-assistant,xifle/home-assistant,FreekingDean/home-assistant,rohitranjan1991/home-assistant,DavidLP/home-assistant,tinloaf/home-assistant,varunr047/homefile,molobrakos/home-assistant,GenericStudent/home-assistant,alexmogavero/home-assistant,qedi-r/home-assistant,postlund/home-assistant,leoc/home-assistant,luxus/home-assistant,ewandor/home-assistant,instantchow/home-assistant,emilhetty/home-assistant,Theb-1/home-assistant,ma314smith/home-assistant,luxus/home-assistant,JshWright/home-assistant,sffjunkie/home-assistant,sdague/home-assistant,open-homeautomation/home-assistant,miniconfig/home-assistant,jamespcole/home-assistant,hexxter/home-assistant,jaharkes/home-assistant,ewandor/home-assistant,joopert/home-assistant,fbradyirl/home-assistant,kyvinh/home-assistant | setup.py | setup.py | #!/usr/bin/env python3
import os
from setuptools import setup, find_packages
from homeassistant.const import __version__
PACKAGE_NAME = 'homeassistant'
HERE = os.path.abspath(os.path.dirname(__file__))
DOWNLOAD_URL = ('https://github.com/balloob/home-assistant/archive/'
'{}.zip'.format(__version__))
PACKAGES = find_packages(exclude=['tests', 'tests.*'])
REQUIRES = [
'requests>=2,<3',
'pyyaml>=3.11,<4',
'pytz>=2015.4',
'pip>=7.0.0',
'vincenty==0.1.3',
'jinja2>=2.8'
]
setup(
name=PACKAGE_NAME,
version=__version__,
license='MIT License',
url='https://home-assistant.io/',
download_url=DOWNLOAD_URL,
author='Paulus Schoutsen',
author_email='paulus@paulusschoutsen.nl',
description='Open-source home automation platform running on Python 3.',
packages=PACKAGES,
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=REQUIRES,
keywords=['home', 'automation'],
entry_points={
'console_scripts': [
'hass = homeassistant.__main__:main'
]
},
classifiers=[
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.4',
'Topic :: Home Automation'
]
)
| import os
from setuptools import setup, find_packages
from homeassistant.const import __version__
PACKAGE_NAME = 'homeassistant'
HERE = os.path.abspath(os.path.dirname(__file__))
DOWNLOAD_URL = ('https://github.com/balloob/home-assistant/archive/'
'{}.zip'.format(__version__))
PACKAGES = find_packages(exclude=['tests', 'tests.*'])
REQUIRES = [
'requests>=2,<3',
'pyyaml>=3.11,<4',
'pytz>=2015.4',
'pip>=7.0.0',
'vincenty==0.1.3',
'jinja2>=2.8'
]
setup(
name=PACKAGE_NAME,
version=__version__,
license='MIT License',
url='https://home-assistant.io/',
download_url=DOWNLOAD_URL,
author='Paulus Schoutsen',
author_email='paulus@paulusschoutsen.nl',
description='Open-source home automation platform running on Python 3.',
packages=PACKAGES,
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=REQUIRES,
keywords=['home', 'automation'],
entry_points={
'console_scripts': [
'hass = homeassistant.__main__:main'
]
},
classifiers=[
'Intended Audience :: End Users/Desktop',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.4',
'Topic :: Home Automation'
]
)
| apache-2.0 | Python |
40183d3ef5b49626e62e2bfd830ad54cfa138df9 | Add setup.py | bennuttall/energenie,RPi-Distro/python-energenie,rjw57/energenie | setup.py | setup.py | import os
from setuptools import setup
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = "energenie",
version = "0.1.0",
author = "Ben Nuttall",
author_email = "ben@raspberrypi.org",
description = "Python module to control the Energenie add-on board for the Raspberry Pi used for remotely turning power sockets on and off.",
license = "BSD",
keywords = [
"energenie",
"raspberry pi",
]
url = "https://github.com/bennuttall/energenie",
packages = [
"energenie",
],
long_description = read('README.md'),
classifiers = [
"Development Status :: 4 - Beta",
"Topic :: Home Automation",
"License :: OSI Approved :: BSD License",
],
)
| bsd-3-clause | Python | |
02a982614b26dae21c18308c6fc94998805484fb | Add ball collector component | 3299/2017 | components/collector.py | components/collector.py | """
Runs the wheels on the front of the robot to pick up balls.
"""
class BallCollector(object):
def __init__(self, motor):
self.motor = motor
def run(self, trigger):
if (trigger == True):
self.motor.set(1)
else:
self.motor.set(0)
| mit | Python | |
dc3ee951363116b235ec96bef34b06a661fc4795 | Add a test that fails if using an old version of chromedriver | mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase | examples/fail_if_old_driver_test.py | examples/fail_if_old_driver_test.py | from seleniumbase import BaseCase
class ChromedriverTests(BaseCase):
def test_fail_if_using_an_old_chromedriver(self):
if self.browser != "chrome":
print("\n This test is only for Chrome!")
print(" (Run with: '--browser=chrome')")
self.skip("This test is only for Chrome!")
chrome_version = self.get_chrome_version()
major_chrome_version = chrome_version.split('.')[0]
chromedriver_version = self.get_chromedriver_version()
major_chromedriver_version = chromedriver_version.split('.')[0]
install_sb = "sbase install chromedriver %s" % major_chrome_version
if (
int(major_chromedriver_version) < 73
and int(major_chrome_version) >= 73
):
message = (
'Your version of chromedriver: "%s"\n '
'is too old for your version of Chrome: "%s"\n'
'You should upgrade chromedriver '
"to receive important bug fixes!\n"
'Run this command to upgrade: "%s"'
% (chromedriver_version, chrome_version, install_sb)
)
raise Exception(message) # chromedriver is out-of-date
| mit | Python | |
2b1cc5fb7465ec0d14d01d2c5cdec9f3bd222016 | Update import_cluster.py | Tendrl/node-agent,r0h4n/node-agent,Tendrl/node_agent,Tendrl/node-agent,Tendrl/node_agent,r0h4n/node-agent,r0h4n/node-agent,Tendrl/node-agent | tendrl/node_agent/gluster_integration/flows/import_cluster.py | tendrl/node_agent/gluster_integration/flows/import_cluster.py | import json
import uuid
from tendrl.node_agent.flows.flow import Flow
class ImportCluster(Flow):
def run(self):
node_list = self.parameters['Nodes[]']
if len(node_list) > 1:
# This is the master node for this flow
for node in node_list:
if self.node_id != node:
new_params = self.parameters.copy()
new_params['Nodes[]'] = [node]
# create same flow for each node in node list except $this
job = {
"cluster_id": self.parameters['cluster_id'],
"node_id": node,
"run": self.name,
"status": "new",
"parameters": new_params,
"type": "node"
}
self.etcd_client.write("/queue/%s" % uuid.uuid4(),
json.dumps(job))
super(ImportCluster, self).run()
| import json
import uuid
from tendrl.node_agent.flows.flow import Flow
class ImportCluster(Flow):
def run(self):
node_list = self.parameters['Nodes[]']
if len(node_list) > 1:
# This is the master node for this flow
for node in node_list:
if self.node_id != node:
new_params = self.parameters.copy()
new_params['Nodes[]'] = [node]
# create same flow for each node in node list except $this
job = {
"cluster_id": self.parameters['cluster_id'],
"node_id": node,
"run": self.name,
"status": "new",
"parameters": new_params
}
self.etcd_client.write("/queue/%s" % uuid.uuid4(),
json.dumps(job))
super(ImportCluster, self).run()
| lgpl-2.1 | Python |
b426522a82d9ba9caa98731d91b985933466aaf9 | Create setup.py | TheLartians/Expresso,TheLartians/Expresso | setup.py | setup.py | from setuptools import setup, Extension, find_packages
from glob import glob
setup(
name='expresso',
version='0.2',
description='A symbolic expression manipulation library.',
author='Lars Melchior',
author_email='thelartians@gmail.com',
url='https://github.com/TheLartians/Expresso',
#include_package_data=True,
packages=find_packages(exclude=['tests*']),
extras_require={
'pycas':['numpy','mpmath']
},
zip_safe=False,
classifiers=[
'Programming Language :: Python :: 2.7'
],
ext_modules=[
Extension('_expresso',
sources = glob('source/expresso/*.cpp') + ['libs/sha256/sha256.cpp','source/python.cpp'],
include_dirs=['libs'], # assuming your project include files are there
libraries=['boost_python'], # those are the linked libs
library_dirs=['/'],
extra_compile_args=['-g','-std=c++11','-Wno-unknown-pragmas','-O3'] # some other compile args
),
]
)
| mit | Python | |
ff61d42f2cc60713437277c69521041c36ee425c | Create Startup.py | alrik11es/pyagent | plugins/events/Startup.py | plugins/events/Startup.py | print 1
| mit | Python | |
dfb5a1146b44979f98dc8aa236f52f0393eeabab | Add a basic setup.py | texastribune/the-dp,texastribune/the-dp,texastribune/the-dp,texastribune/the-dp | setup.py | setup.py | from distutils.core import setup
import os.path
def is_package(path):
return (
os.path.isdir(path) and
os.path.isfile(os.path.join(path, '__init__.py'))
)
def find_packages(path, base=""):
""" Find all packages in path """
packages = {}
for item in os.listdir(path):
dir = os.path.join(path, item)
if is_package(dir):
if base:
module_name = "%(base)s.%(item)s" % vars()
else:
module_name = item
packages[module_name] = dir
packages.update(find_packages(dir, module_name))
return packages
setup(
name='tx_highered',
version='0.1.0alpha',
description='Django app for Texas higher education data',
author='Texas Tribune',
author_email='tech@texastribune.org',
url='http://github.com/texastribune/tx_highered/',
license='Apache Software License',
install_requires=[
],
packages=find_packages('./tx_highered', 'tx_highered').keys(),
package_data={
'tx_highered': [
'tx_highered/fixtures/*.json',
]
},
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Other/NonlistedTopic'
],
)
| apache-2.0 | Python | |
38d7a3671afa0e2c76682c8c409959288b547f82 | Prepare for tagging v4.0b3 | yasserglez/pymdptoolbox,sawcordwell/pymdptoolbox,silgon/pymdptoolbox,McCabeJM/pymdptoolbox,silgon/pymdptoolbox,McCabeJM/pymdptoolbox,sawcordwell/pymdptoolbox,yasserglez/pymdptoolbox | setup.py | setup.py | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="pymdptoolbox",
version="4.0-b3",
author="Steven A. W. Cordwell",
author_email="steven.cordwell@uqconnect.edu.au",
url="https://github.com/sawcordwell/pymdptoolbox",
description="Markov Decision Process (MDP) Toolbox",
long_description="The MDP toolbox provides classes and functions for "
"the resolution of descrete-time Markov Decision Processes. The list of "
"algorithms that have been implemented includes backwards induction, "
"linear programming, policy iteration, q-learning and value iteration "
"along with several variations.",
download_url="https://pypi.python.org/pypi/pymdptoolbox",
classifiers=[
"Development Status :: 4 - Beta",
"Environment :: Console",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Mathematics",
"Topic :: Software Development :: Libraries :: Python Modules"],
platforms=["Any"],
license="New BSD",
packages=find_packages("src", exclude=["tests"]),
package_dir={"": "src"},
install_requires=["numpy", "scipy"],
extras_require={"LP": "cvxopt"})
| # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
setup(name="pymdptoolbox",
version="0",
author="Steven A. W. Cordwell",
author_email="steven.cordwell@uqconnect.edu.au",
url="https://github.com/sawcordwell/pymdptoolbox",
description="Markov Decision Process (MDP) Toolbox",
long_description="The MDP toolbox provides classes and functions for "
"the resolution of descrete-time Markov Decision Processes. The list of "
"algorithms that have been implemented includes backwards induction, "
"linear programming, policy iteration, q-learning and value iteration "
"along with several variations.",
download_url="https://pypi.python.org/pypi/pymdptoolbox",
classifiers=[
"Development Status :: 2 - Pre-Alpha",
"Environment :: Console",
"Intended Audience :: Education",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Mathematics",
"Topic :: Software Development :: Libraries :: Python Modules"],
platforms=["Any"],
license="New BSD",
packages=find_packages("src", exclude=["tests"]),
package_dir={"": "src"},
install_requires=["numpy", "scipy"],
extras_require={"LP": "cvxopt"})
| bsd-3-clause | Python |
12267f2a6ac574924ec991a8719d1d1cbf42f85c | Add installation script | rolurq/flask-gulp | setup.py | setup.py | from setuptools import setup
setup(name='Flask-Static', license='MIT', author='Rolando Urquiza',
author_email='rolurquiza@gmail.com',
description='Task executioner similar to gulp for Python',
packages=['flask_static'], platforms='any',
install_requires=['werkzeug'],
classifiers=['Environment :: Web Environment',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python'])
| mit | Python | |
470617e0fad9381cf5fadf64d1a9ba18dbf5e101 | Fix copy ec scenario files failed. | swiftstack/ssbench,charz/ssbench,charz/ssbench,swiftstack/ssbench | setup.py | setup.py | #!/usr/bin/python
# Copyright (c) 2013 SwiftStack, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from glob import glob
from setuptools import setup, find_packages
thispath = os.path.dirname(__file__)
sys.path.insert(0, thispath)
import ssbench
with open(os.path.join(thispath, 'requirements.txt'), 'r') as f:
requires = [x.strip() for x in f if x.strip()]
with open(os.path.join(thispath, 'test-requirements.txt'), 'r') as f:
test_requires = [x.strip() for x in f if x.strip()]
with open(os.path.join(thispath, 'README.rst'), 'r') as f:
readme = f.read()
setup(
name='ssbench',
version=ssbench.version,
description='SwiftStack Swift Benchmarking Suite',
long_description=readme,
license='Apache License (2.0)',
author='SwiftStack, Inc.',
author_email='darrell@swiftstack.com',
url='http://github.com/SwiftStack/ssbench',
packages=find_packages(exclude=['ssbench.tests']),
test_suite='nose.collector',
tests_require=test_requires,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'Intended Audience :: Telecommunications Industry',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Testing :: Traffic Generation',
'Topic :: System :: Benchmark',
'Topic :: Utilities',
],
keywords='openstack swift object storage benchmark',
install_requires=requires,
scripts=[
'bin/ssbench-master',
'bin/ssbench-worker',
],
data_files=[('share/ssbench/scenarios', glob('scenarios/*.scenario')),
('share/ssbench/scenarios/ec_test_scenarios', glob('scenarios/ec_test_scenarios/*.scenario')),
('share/ssbench', ['CHANGELOG', 'AUTHORS', 'LICENSE'])],
)
| #!/usr/bin/python
# Copyright (c) 2013 SwiftStack, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from glob import glob
from setuptools import setup, find_packages
thispath = os.path.dirname(__file__)
sys.path.insert(0, thispath)
import ssbench
with open(os.path.join(thispath, 'requirements.txt'), 'r') as f:
requires = [x.strip() for x in f if x.strip()]
with open(os.path.join(thispath, 'test-requirements.txt'), 'r') as f:
test_requires = [x.strip() for x in f if x.strip()]
with open(os.path.join(thispath, 'README.rst'), 'r') as f:
readme = f.read()
setup(
name='ssbench',
version=ssbench.version,
description='SwiftStack Swift Benchmarking Suite',
long_description=readme,
license='Apache License (2.0)',
author='SwiftStack, Inc.',
author_email='darrell@swiftstack.com',
url='http://github.com/SwiftStack/ssbench',
packages=find_packages(exclude=['ssbench.tests']),
test_suite='nose.collector',
tests_require=test_requires,
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Intended Audience :: System Administrators',
'Intended Audience :: Telecommunications Industry',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Testing :: Traffic Generation',
'Topic :: System :: Benchmark',
'Topic :: Utilities',
],
keywords='openstack swift object storage benchmark',
install_requires=requires,
scripts=[
'bin/ssbench-master',
'bin/ssbench-worker',
],
data_files=[('share/ssbench/scenarios', glob('scenarios/*')),
('share/ssbench', ['CHANGELOG', 'AUTHORS', 'LICENSE'])],
)
| apache-2.0 | Python |
30f704c3e767462fefb5086bbf6b5f190cec7c1b | Add fibonacci search in python | ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms | search/fibonacci_search/python/fibonacci_search.py | search/fibonacci_search/python/fibonacci_search.py | #Fibonacci search for sorted algorithm
def fibSearch(arr,x):
#fibonacci numbers initialization
fib2 = 0
fib1 = 1
fib = fib2 + fib1
n = len(arr)
#find the smallest fibonacci greater than or equal to array length
while (fib < n):
fib2 = fib1
fib1 = fib
fib = fib2 + fib1
#the leftout list for the array
offset = -1
while (fib > 1):
#check if fib2 is a valid index
i = min(offset+fib2, n-1)
#if x is greater than the value at index fib2,
#cut the array from offset to i
if (arr[i] < x):
fib = fib1
fib1 = fib2
fib2 = fib - fib1
offset = i
#if x is smaller than the value at index fib2,
#cut the array after i+1
elif (arr[i] > x):
fib = fib2
fib1 = fib1 - fib2
fib2 = fib - fib1
#return i when element is found
else :
return i
#compare last element with x
if (fib1 and arr[offset+1] == x):
return offset+1
#when element not found return -1
return -1
arr = [10, 22, 35, 40, 45, 50, 80, 82, 85, 90, 100]
x = 85
print ("Found at index: ", fibSearch(arr, x))
| cc0-1.0 | Python | |
215ab7e04495f829caf3583237fbe6c41ec8b40e | add logout API | cboling/xos,cboling/xos,zdw/xos,opencord/xos,zdw/xos,opencord/xos,cboling/xos,open-cloud/xos,cboling/xos,open-cloud/xos,zdw/xos,opencord/xos,zdw/xos,open-cloud/xos,cboling/xos | xos/core/xoslib/methods/loginview.py | xos/core/xoslib/methods/loginview.py | from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework import serializers
from rest_framework import generics
from rest_framework.views import APIView
from core.models import *
from services.hpc.models import *
from services.requestrouter.models import *
from django.forms import widgets
from django.core.exceptions import PermissionDenied
from django.contrib.contenttypes.models import ContentType
import json
import socket
import time
import django.middleware.csrf
from xos.exceptions import *
from django.contrib.sessions.backends.db import SessionStore
from django.contrib.sessions.models import Session
class LoginView(APIView):
method_kind = "list"
method_name = "login"
def do_login(self, request, username, password):
if not username:
raise XOSMissingField("No username specified")
if not password:
raise XOSMissingField("No password specified")
u = User.objects.filter(email=username)
if not u:
raise XOSNotFound("User %s does not exist" % username)
u=u[0]
if not u.check_password(password):
raise PermissionDenied("Incorrect password")
auth = {"username": username, "password": password}
request.session["auth"] = auth
request.session.save()
return Response({"xoscsrftoken": django.middleware.csrf.get_token(request),
"xossessionid": request.session.session_key})
def get(self, request, format=None):
username = request.GET.get("username", None)
password = request.GET.get("password", None)
return self.do_login(request, username, password)
def post(self, request, format=None):
username = request.DATA.get("username", None)
password = request.DATA.get("password", None)
return self.do_login(request, username, password)
class LogoutView(APIView):
method_kind = "list"
method_name = "logout"
def do_logout(self, request, sessionid):
if not sessionid:
raise XOSMissingField("No xossessionid specified")
# Make sure the session exists. This prevents us from accidentally
# creating empty sessions with SessionStore()
session = Session.objects.filter(session_key=sessionid)
if not session:
# session doesn't exist
raise PermissionDenied("Session does not exist")
session = SessionStore(session_key=sessionid)
if "auth" in session:
del session["auth"]
session.save()
return Response("Logged Out")
def get(self, request, format=None):
sessionid = request.GET.get("xossessionid", None)
return self.do_logout(request, sessionid)
def post(self, request, format=None):
sessionid = request.DATA.get("xossessionid", None)
return self.do_logout(request, sessionid)
| from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework import serializers
from rest_framework import generics
from rest_framework.views import APIView
from core.models import *
from services.hpc.models import *
from services.requestrouter.models import *
from django.forms import widgets
from django.core.exceptions import PermissionDenied
from django.contrib.contenttypes.models import ContentType
import json
import socket
import time
import django.middleware.csrf
from xos.exceptions import *
class LoginView(APIView):
method_kind = "list"
method_name = "login"
def do_login(self, request, username, password):
if not username:
raise XOSMissingField("No username specified")
if not password:
raise XOSMissingField("No password specified")
u = User.objects.filter(email=username)
if not u:
raise XOSNotFound("User %s does not exist" % username)
u=u[0]
if not u.check_password(password):
raise PermissionDenied("Incorrect password")
auth = {"username": username, "password": password}
request.session["auth"] = auth
request.session.save()
return Response({"xoscsrftoken": django.middleware.csrf.get_token(request),
"xossessionid": request.session.session_key})
def get(self, request, format=None):
username = request.GET.get("username", None)
password = request.GET.get("password", None)
return self.do_login(request, username, password)
def post(self, request, format=None):
username = request.DATA.get("username", None)
password = request.DATA.get("password", None)
return self.do_login(request, username, password)
| apache-2.0 | Python |
f872501586dfe05c5b96402a3d8ea5194a9ff6b3 | add the pseudocode for the reservation random generator | cloudmesh/reservation,cloudmesh/reservation,cloudmesh/reservation | reservation/generate.py | reservation/generate.py | """
Usage:
generate SERVERS RESERVATIONS DURATION
Arguments:
SERVERS Number of servers for which we generate reservations
RESERVATIONS Number of reservations per server
DURATION The maximum duration of a reservation (determined randomly)
"""
def generate(arguments):
print arguments
servers = arguments["SERVERS"]
reservations = arguments["RESERVATIONS"]
duration = arguments["DURATION"]
for s in range(0,servers):
t_start[s] = []
t_start[s][0] = random (0,duration)
t_end[s][0] = t_stat[s][0] + random (0,duration)
for s in range(0,servers):
for n in range(1,reservations):
t_start[s][n] = t_end_[r,n-1] + random (0,duration)
t_end[[s[n] = t_start[r][n] + random (0,duration)
for s in range(0,servers):
for n in range(0,reservations):
print s, n, t_start[s][n], t_end[s][n]
if __name__ == '__main__':
print(sys.argv)
arguments = docopt(__doc__)
generate(arguments)
| apache-2.0 | Python | |
68a7b8b87caca9e80cc827854fa362e8a2911a2a | use Qt4 instead of Tkinter. This will use less code and provide a better appearance | edsoncudjoe/CatDVText2XlsxGui | setup.py | setup.py | """
This is a setup.py script generated by py2applet
Usage:
python setup.py py2app
"""
from setuptools import setup
APP = ['app.py']
DATA_FILES = []
OPTIONS = {'argv_emulation': True, 'includes': ['sip', 'PyQt4',
'PyQt4.QtCore', 'PyQt4.QtGui']}
setup(
name='CatDV to XLSX',
version='2.0',
description='Convert CatDV .txt output to .xlsx',
date='3-Dec-2015',
url='https://github.com/edsoncudjoe/CatDVText2XlsxGui',
author='Edson Cudjoe',
author_email='bashpythonstuff@hotmail.co.uk',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Software Development :: Media',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
keywords='catdv text xlsx',
app=APP,
data_files=DATA_FILES,
options={'py2app': OPTIONS},
setup_requires=['py2app'],
)
| mit | Python | |
0e02dc4390c8376940100bef2bb23abb4e9c1642 | Create merge_fasta.py | vrmarcelino/Shape-4-Qiime,vrmarcelino/Shape-4-Qiime,vrmarcelino/Shape-4-Qiime | merge_fasta.py | merge_fasta.py | # -*- coding: utf-8 -*-
""" Concatenate different fasta files and add barcodes.
Run this script after separate fasta and qual files (see onvert_fastaqual_fastq.py from qiime)
Created on Thu Jul 31 15:49:39 2014
@author: VanessaRM
"""
from Bio import SeqIO
import sys
#input files:
input_files = []
for n in sys.argv[1:]:
input_files.append(str(n))
#barcodes sequences:
S501N701= "TAGATCGCTAAGGCGA" #PHV882-M1b
S502N701= "CTCTCTATTAAGGCGA" #VRM130d - M1b
S503N701= "TATCCTCTTAAGGCGA" #VRM135 - M1b
S504N701= "AGAGTAGATAAGGCGA" #PHV570-M3a
S505N701= "GTAAGGAGTAAGGCGA" #PHV237-M3a
S506N701= "ACTGCATATAAGGCGA" #PHV882-M3a
S507N701= "AAGGAGTATAAGGCGA" #PHV207-M3a
S508N701= "CTAAGCCTTAAGGCGA" #VRM135-M3a
S501N702= "TAGATCGCCGTACTAG" #VRM190-M3a
S502N702= "CTCTCTATCGTACTAG" #PHV237-M1a
S503N702= "TATCCTCTCGTACTAG" #PHV237-kit
S504N702= "AGAGTAGACGTACTAG" #VRM036e1_M1a
S505N702= "GTAAGGAGCGTACTAG" #VRM036e1_kit
S508N702= "CTAAGCCTCGTACTAG" #VRM028e1-kit
S501N703= "TAGATCGCAGGCAGAA" #VRM091e-kit1
S504N703= "AGAGTAGAAGGCAGAA" #VRM032_e1
S506N703= "ACTGCATAAGGCAGAA" #VRM032_c
S507N703= "AAGGAGTAAGGCAGAA" #VRM0060
S508N703= "CTAAGCCTAGGCAGAA" #VRM066
S501N704= "TAGATCGCTCCTGAGC" #VRM067
S502N704= "CTCTCTATTCCTGAGC" #VRM0081
S503N704= "TATCCTCTTCCTGAGC" #VRM0086
S504N704= "AGAGTAGATCCTGAGC" #VRM0087
S505N704= "GTAAGGAGTCCTGAGC" #VRM90_e1
S506N704= "ACTGCATATCCTGAGC" #VRM096
S508N704= "CTAAGCCTTCCTGAGC" #VRM098
S501N705= "TAGATCGCGGACTCCT" #VRM099
S502N705= "CTCTCTATGGACTCCT" #VRM100
S505N705= "GTAAGGAGGGACTCCT" #VRM106
S506N705= "ACTGCATAGGACTCCT" #VRM115
S507N705= "AAGGAGTAGGACTCCT" #VRM120
S508N705= "CTAAGCCTGGACTCCT" #VRM121
S501N706= "TAGATCGCTAGGCATG" #VRM122
S502N706= "CTCTCTATTAGGCATG" #VRM123
S503N706= "TATCCTCTTAGGCATG" #VRM124
S504N706= "AGAGTAGATAGGCATG" #C4
S505N706= "GTAAGGAGTAGGCATG" #C9
S506N706= "ACTGCATATAGGCATG" #C10_1
S507N706= "AAGGAGTATAGGCATG" #blank (PCR)
S501N709= "TAGATCGCGCTACGCT" #VRM091e-kit1
S502N709= "CTCTCTATGCTACGCT" #VRM0086
S503N709= "TATCCTCTGCTACGCT" #VRM0087
S504N709= "AGAGTAGAGCTACGCT" #VRM90_e1
S505N709= "GTAAGGAGGCTACGCT" #VRM096
S506N709= "ACTGCATAGCTACGCT" #VRM121
S507N709= "AAGGAGTAGCTACGCT" #PHV237-M3a
S508N709= "CTAAGCCTGCTACGCT" #blank (PCR)
barcodes = (S501N701,S502N701,S503N701,S504N701,S505N701,S506N701,S507N701,S508N701,S501N702,S502N702,
S503N702,S504N702, S505N702,S508N702,S501N703,S504N703,S506N703,S507N703,S508N703,S501N704,
S502N704,S503N704,S504N704,S505N704,S506N704,S508N704,S501N705,S502N705,S505N705,S506N705,
S507N705, S508N705,S501N706,S502N706,S503N706,S504N706,S505N706,S506N706,S507N706,S501N709,S502N709,
S503N709,S504N709,S505N709,S506N709,S507N709,S508N709)
#Store the files
all_records = []
#function for adding barcode sequences
def add_barcode(records, barcode):
for seq_record in records:
seq_record.seq = (barcode + seq_record.seq)
all_records.append (seq_record)
#iterate over input files
counter = 0
for file in input_files:
original_reads = SeqIO.parse(file, "fasta")
barcode_seq = barcodes[counter]
print""
print "Adding the barcode %s to the %s file" %(barcode_seq, file)
do_it = add_barcode(original_reads, barcode_seq)
counter +=1
SeqIO.write(all_records, "all_records.fna", "fasta")
print""
print "Done!"
| mit | Python | |
f1907672f276ee7cf19dd9f14824e56361c8128d | Add tests | mpkato/dmr | tests/lda_test.py | tests/lda_test.py | # -*- coding:utf-8 -*-
import unittest
import nose
import dmr
class LDATestCase(unittest.TestCase):
def test___init__(self):
'''
__init__ test
'''
corpus = dmr.Corpus.read("./doc.txt")
voca = dmr.Vocabulary()
docs = voca.read_corpus(corpus)
if __name__ == '__main__':
nose.main(argv=['nose', '-v'])
| mit | Python | |
4ee28dbcdf631e3f328dcda5f901b0d4bf20ef3b | Create setup.py | mjvakili/supermean | setup.py | setup.py | from distutils.core import setup
from Cython.Distutils import Extension
from Cython.Distutils import build_ext
import os
import numpy
ext_modules = []
ext_modules.append(Extension("terminator",
["terminator.pyx"],
libraries=["m"],
extra_compile_args=['-fopenmp'],
extra_link_args=['-fopenmp'],
)
)
setup(include_dirs=[numpy.get_include()],
cmdclass={'build_ext': build_ext},
ext_modules=ext_modules,
)
| mit | Python | |
10a2fb7d52ce9affcc6157eb7731191b940a7480 | Write some tests | Perlence/porcupy | tests/test_for.py | tests/test_for.py | import pytest
from pyegs.compiler import compile as compile_
@pytest.mark.skip('Not implemented yet')
def test_for():
assert (compile_('items = [11, 22, 33]\nfor item in items: x = item') ==
'p1z 11 p2z 22 p3z 33 p4z 1 '
'p5z p4z+0 p5z p^5z p6z p5z '
'p5z p4z+1 p5z p^5z p6z p5z '
'p5z p4z+2 p5z p^5z p6z p5z')
assert (compile_('for x in range(5): y = x') ==
'p1z 0 p2z p1z '
'p1z 1 p2z p1z '
'p1z 2 p2z p1z '
'p1z 3 p2z p1z '
'p1z 4 p2z p1z')
assert (compile_('items = [11, 22, 33]\n'
'for i, item in enumerate(items):\n'
' x = i'
' y = item') ==
'p1z 11 p2z 22 p3z 33 p4z 1 '
'p5z 0 p6z p4z+p5z p7z p5z p8z p6z '
'p5z 1 p6z p4z+p5z p7z p5z p8z p6z '
'p5z 2 p6z p4z+p5z p7z p5z p8z p6z')
| bsd-3-clause | Python | |
3b38f7638d1d2e3903cc388cfb3103b1e9c11bc2 | Add setup.py | tristanbrown/whaler | setup.py | setup.py | from setuptools import setup, find_packages
setup(name='whaler',
version='0.0.1',
author = "Tristan R. Brown",
author_email = "brown.tristan.r@gmail.com",
description = ("Analytical package for computational chemistry software, "
"ORCA."),
url = 'https://github.com/tristanbrown/whaler',
license = "MIT",
packages = find_packages(),
install_requires = [''],
entry_points = {
'console_scripts': [
'whaler = my_project.__main__:main'
]
},
) | mit | Python | |
54eca489024d3d8f354a44d161797edb8e916600 | Add tests - now very simple and primitive | diNard/Saw | tests/test_saw.py | tests/test_saw.py | import unittest
from saw.saw import Saw
class Test_Saw(unittest.TestCase):
def setUp(self):
text = "Starting right this second, it's way easier to merge Pull Requests! \
We usually merge them from the comfortable glow of our computers, but with the\
new mobile site we're comfortable merging smaller Pull Requests while sitting\
on the hyperloop (or while on the bus, I guess)."
self.obj = Saw().load(text)
def test_saw(self):
self.assertEqual(self.obj.paragraphs[0].sentences[0].blocks, self.obj.blocks)
if __name__ == "__main__":
unittest.main() | mit | Python | |
1bd21c7b35a100e0f72f03bd9e0d783dc136c41e | Check for multiple outcome codes occurring today | ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend | cla_backend/apps/cla_butler/management/commands/monitor_multiple_outcome_codes.py | cla_backend/apps/cla_butler/management/commands/monitor_multiple_outcome_codes.py | # coding=utf-8
import logging
from django.core.management.base import BaseCommand
from django.db.models import Count, Max, Min
from django.utils.timezone import now
from cla_butler.stack import is_first_instance, InstanceNotInAsgException, StackException
from cla_eventlog.models import Log
logger = logging.getLogger(__name__)
class Command(BaseCommand):
help = 'LGA-294 specific monitoring command. Alert when multiple outcome codes ' \
'that should only occur once are found for today (since 00:00)'
def handle(self, *args, **options):
if self.should_run_housekeeping(**options):
self.check_for_multiple_outcome_codes()
else:
logger.debug('LGA-294 Skip check_for_multiple_outcome_codes: running on secondary instance')
@staticmethod
def check_for_multiple_outcome_codes():
# Outcome codes defined to appear only once on a case:
# https://docs.google.com/spreadsheets/d/1hN64bA_H2a_0eC_5-k-0IY2-RKbCor2VGflp1ykQGa0/
start_of_today = now().replace(hour=0, minute=0, second=0, microsecond=0)
once_only_codes = ['PCB', 'COPE', 'DUPL', 'MRNB', 'NCOE', 'DESP', 'DECL', 'MRCC', 'NRES', 'CPTA',
'COSPF', 'SPFM', 'SPFN', 'DREFER', 'COI', 'CLSP', 'MANALC', 'MANREF', 'MIS',
'MIS-MEANS', 'MIS-OOS', 'REF-EXT', 'REF-INT', 'REFSP', 'REOPEN', 'SPOR', 'WROF']
once_only_events_today = Log.objects.filter(created__gte=start_of_today, code__in=once_only_codes)
once_only_codes_today = once_only_events_today.only('case__reference', 'code', 'created')
once_only_codes_today_counts = once_only_codes_today.values('case__reference', 'code') \
.annotate(total=Count('code'), earliest=Min('created'), latest=Max('created'))
multiple_codes_today = once_only_codes_today_counts.filter(total__gt=1).order_by('-total')
if multiple_codes_today.exists():
for i in multiple_codes_today:
logger.warning('LGA-294 investigation. Multiple outcome codes today for case: {}'.format(i))
else:
logger.info('LGA-294 No multiple outcome codes found for today')
@staticmethod
def should_run_housekeeping(**options):
if options.get('force', False):
return True
try:
return is_first_instance()
except InstanceNotInAsgException:
logger.info('EC2 instance not in an ASG')
return True
except StackException:
logger.info('Not running on EC2 instance')
return True
| mit | Python | |
81418934093eb1eb20862e7fb9f97d6bfbc3cf10 | add setup.py | hephs/dispatk | setup.py | setup.py | #!/usr/bin/python
"""Multiple dispatcher on arguments values."""
from setuptools import setup
long_description = """
# dispatk
## Description
This function is inspired by singledispatch of Python 3.4+ (PEP 443),
but the dispatch happens on the key extracted fro the arguments values.
```
from dispatk import dispatk
@dispatk(lambda n: int(n))
def fib(n):
return fib(n-1) + fib(n-2)
@fib.register(0)
def _(n):
return 0
@fib.register(1, 2)
def _(n):
return 1
@fib.register(41)
def _(n):
return 165580141
```
*register* accepts one or more keys, so
```
@fib.register(1, 2)
def _(n):
return 1
```
is equivalent to
```
@fib.register(1)
@fib.register(2)
def _(n):
return 1
```
"""
setup(
name='dispatk',
version='0.1',
author='hephaestus',
description=__doc__,
long_description=long_description,
url='https://github.com/hephs/dispatk',
keywords='multiple dispatch generic functions genericfunctions decorator',
platforms=('any',),
license='MIT',
py_modules=('dispatk',),
zip_safe=True,
install_requires=(),
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Libraries',
'Topic :: Software Development :: Libraries :: Python Modules',
),
)
| mit | Python | |
0350dcce8a95df0c04748529b82c1a486d2d4148 | Add setup script, to automatically produce distributable packages | tbentropy/tilecutter,tbentropy/tilecutter,tbentropy/tilecutter,tbentropy/tilecutter | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
import sys, os, os.path
version = "0.5.1"
### this manifest enables the standard Windows XP-looking theme
##manifest = """
##<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
##<assembly xmlns="urn:schemas-microsoft-com:asm.v1"
##manifestVersion="1.0">
##<assemblyIdentity
## version="0.64.1.0"
## processorArchitecture="x86"
## name="Controls"
## type="win32"
##/>
##<description>Picalo</description>
##<dependency>
## <dependentAssembly>
## <assemblyIdentity
## type="win32"
## name="Microsoft.Windows.Common-Controls"
## version="6.0.0.0"
## processorArchitecture="X86"
## publicKeyToken="6595b64144ccf1df"
## language="*"
## />
## </dependentAssembly>
##</dependency>
##</assembly>
##"""
##
# returns a list of all the files in a directory tree
def walk_dir(dirname):
files = []
ret = [ (dirname, files) ]
for name in os.listdir(dirname):
fullname = os.path.join(dirname, name)
if os.path.isdir(fullname) and os.path.split(fullname)[1] != ".svn":
ret.extend(walk_dir(fullname))
else:
if os.path.split(fullname)[1] != ".svn":
files.append(fullname)
return ret
# Generic options
options = {
'name': 'TileCutter',
'version': version,
'description': 'Simutrans Building Editor',
'long_description': '',
'author': 'Timothy Baldock',
'author_email': 'tb@entropy.me.uk',
'url': 'http://entropy.me.uk/tilecutter',
"zipfile": "python\\library.zip",
## "packages": ["tcui",],
## "scripts": ["TileCutter5.pyw",],
## "package_data": {},
"data_files": ["../dist/msvcp71.dll", "tc.config", "test.png"] + walk_dir("languages")
}
# windows specific
if len(sys.argv) >= 2 and sys.argv[1] == "py2exe":
try:
import py2exe
except ImportError:
print 'Could not import py2exe. Windows exe could not be built.'
sys.exit(0)
# windows-specific options
options['windows'] = [
{
'script':'TileCutter5.pyw',
'windows':'TileCutter5.pyw',
'icon_resources': [
( 1, 'tilecutter.ico' ),
],
## 'other_resources': [
## ( 24, 1, manifest ),
## ],
},
]
options["options"] = {
# Bundling of .dlls into the zip results in massively bigger package?!
# Option 1 creates corrupt zip, option 2 adds dlls and makes them uncompressible
"py2exe": {"dist_dir": "../dist/win_dist_%s" % version, "bundle_files": 3,},
}
print str(options)
# mac specific
##if len(sys.argv) >= 2 and sys.argv[1] == 'py2app':
## try:
## import py2app
## except ImportError:
## print 'Could not import py2app. Mac bundle could not be built.'
## sys.exit(0)
## # mac-specific options
## options['app'] = ['rur_start.py']
## options['options'] = {
## 'py2app': {
## 'argv_emulation': True,
## 'iconfile': 'rur_images/icon_mac.icns',
## 'packages': [],
## }
## }
# run the setup
setup(**options) | bsd-3-clause | Python | |
af76038a7cbaa076b3fe0b57145a2fc72bc50b7b | Add basic setup script | alco/numspell,alco/numspell | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
setup(name='Numspell',
version='0.9',
description='A Python module for spelling numbers',
author='Alexei Sholik',
author_email='alcosholik@gmail.com',
url='https://github.com/alco/numspell',
license="MIT",
packages=['numspell'],
data_files=[('/usr/bin', ['spellnum'])],
)
| mit | Python | |
b05ede554c403bb3d03dbe65b02baa2a879f3310 | Add pretty-printers for qi::Buffer | aldebaran/libqi,aldebaran/libqi,bsautron/libqi,vbarbaresi/libqi,aldebaran/libqi | tools/printers.py | tools/printers.py | """Pretty printers for libqi.
Add python execfile("/path/to/this/file") in your ~/.gdbinit or /etc/gdb/gdbinit
Your gdb need to be compile with python and version > 7.0
"""
import gdb
import gdb.types
class QiBufferPrinter:
def __init__(self, val):
self.val = val
def to_string(self):
buf = self.val['_p']['px'].dereference()
return "qi::Buffer of length %i, capacity %i, sub-buffers %s" % (buf["used"], buf["available"], buf['_subBuffers'])
def lookup_type(val):
type = str(gdb.types.get_basic_type(val.type))
if type == 'qi::Buffer':
return QiBufferPrinter(val)
return None
if __name__ == "__main__":
gdb.pretty_printers.append(lookup_type)
| bsd-3-clause | Python | |
9df8506f86c1f6e95b6c9d0bda2dd8001dc0459b | Disable maxAudioWithBaselineShift test (works locally but not on travis) | linuxipho/mycroft-core,forslund/mycroft-core,forslund/mycroft-core,Dark5ide/mycroft-core,linuxipho/mycroft-core,aatchison/mycroft-core,MycroftAI/mycroft-core,Dark5ide/mycroft-core,MycroftAI/mycroft-core,aatchison/mycroft-core | test/client/dynamic_energy_test.py | test/client/dynamic_energy_test.py | import unittest
import audioop
from speech_recognition import AudioSource
from mycroft.client.speech.mic import ResponsiveRecognizer
__author__ = 'seanfitz'
class MockStream(object):
def __init__(self):
self.chunks = []
def inject(self, chunk):
self.chunks.append(chunk)
def read(self, chunk_size):
result = self.chunks[0]
if len(self.chunks) > 1:
self.chunks = self.chunks[1:]
return result
class MockSource(AudioSource):
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
def __init__(self, stream=None):
self.stream = stream if stream else MockStream()
self.CHUNK = 1024
self.SAMPLE_RATE = 16000
self.SAMPLE_WIDTH = 2
class DynamicEnergytest(unittest.TestCase):
def setUp(self):
pass
@unittest.skip('Disabled while unittests are brought upto date')
def testMaxAudioWithBaselineShift(self):
low_base = b"".join(["\x10\x00\x01\x00"] * 100)
higher_base = b"".join(["\x01\x00\x00\x01"] * 100)
source = MockSource()
for i in xrange(100):
source.stream.inject(low_base)
source.stream.inject(higher_base)
recognizer = ResponsiveRecognizer(None)
sec_per_buffer = float(source.CHUNK) / (source.SAMPLE_RATE *
source.SAMPLE_WIDTH)
test_seconds = 30.0
while test_seconds > 0:
test_seconds -= sec_per_buffer
data = source.stream.read(source.CHUNK)
energy = recognizer.calc_energy(data, source.SAMPLE_WIDTH)
recognizer.adjust_threshold(energy, sec_per_buffer)
higher_base_energy = audioop.rms(higher_base, source.SAMPLE_WIDTH)
# after recalibration (because of max audio length) new threshold
# should be >= 1.5 * higher_base_energy
delta_below_threshold = (
recognizer.energy_threshold - higher_base_energy)
min_delta = higher_base_energy * .5
assert abs(delta_below_threshold - min_delta) < 1
| import unittest
import audioop
from speech_recognition import AudioSource
from mycroft.client.speech.mic import ResponsiveRecognizer
__author__ = 'seanfitz'
class MockStream(object):
def __init__(self):
self.chunks = []
def inject(self, chunk):
self.chunks.append(chunk)
def read(self, chunk_size):
result = self.chunks[0]
if len(self.chunks) > 1:
self.chunks = self.chunks[1:]
return result
class MockSource(AudioSource):
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
pass
def __init__(self, stream=None):
self.stream = stream if stream else MockStream()
self.CHUNK = 1024
self.SAMPLE_RATE = 16000
self.SAMPLE_WIDTH = 2
class DynamicEnergytest(unittest.TestCase):
def setUp(self):
pass
def testMaxAudioWithBaselineShift(self):
low_base = b"".join(["\x10\x00\x01\x00"] * 100)
higher_base = b"".join(["\x01\x00\x00\x01"] * 100)
source = MockSource()
for i in xrange(100):
source.stream.inject(low_base)
source.stream.inject(higher_base)
recognizer = ResponsiveRecognizer(None)
sec_per_buffer = float(source.CHUNK) / (source.SAMPLE_RATE *
source.SAMPLE_WIDTH)
test_seconds = 30.0
while test_seconds > 0:
test_seconds -= sec_per_buffer
data = source.stream.read(source.CHUNK)
energy = recognizer.calc_energy(data, source.SAMPLE_WIDTH)
recognizer.adjust_threshold(energy, sec_per_buffer)
higher_base_energy = audioop.rms(higher_base, source.SAMPLE_WIDTH)
# after recalibration (because of max audio length) new threshold
# should be >= 1.5 * higher_base_energy
delta_below_threshold = (
recognizer.energy_threshold - higher_base_energy)
min_delta = higher_base_energy * .5
assert abs(delta_below_threshold - min_delta) < 1
| apache-2.0 | Python |
8cdf49b093e6fe95570fe4f8c9c488bc8755d83f | Create dust_gp2y1010.py | jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi | apps/dust/dust_gp2y1010.py | apps/dust/dust_gp2y1010.py | import time
import sys
#import adc_mcp3008
sys.path.append("../adc")
sys.path.append("../../../../thingsweb/weblib/recv")
sys.path.append("../../../log_lib")
from adc_mcp3008 import *
from lastvalue import *
from raspi_log import *
import requests, json
import fcntl, socket, struct
import RPi.GPIO as GPIO
# Please check pin number of MCP3008
# 19 - CLK, 23 - MISO, 24 - MOSI, 25 - CS
# if you want to use, modify ../adc/adc_mcp3008
#def readadc(adcnum, clockpin, mosipin, misopin, cspin):
# return adcout
um = 0
sensorname = "dust.ws"
url = "http://125.7.128.53:4242/api/put"
# HW setup, GPIO
# GIO PIN5
GPIO.setup(5, GPIO.OUT)
GPIO.output(5, False) # off
adc_port = 0
def getHwAddr(ifname):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
info = fcntl.ioctl(s.fileno(), 0x8927, struct.pack('256s', ifname[:15]))
return ':'.join(['%02x' %ord(char) for char in info[18:24]])
macAddr = getHwAddr('eth0')
macAddr = macAddr.replace(':','.')
print "starting ...."
loggerinit(sensorname)
while True :
# setup GPIO 5
# set high GPIO 5
GPIO.output(5, True) # on
time.sleep(0.000175) # 0.000280
r = read(adc_port)
time.sleep(0.000000) # 0.000040
# set low GPIO 5
GPIO.output(5, False) # off
time.sleep(0.009550) # 0.00968
if r > 187 :
um = (500.0/2.9)*(3.3/1024)*r-103.44
else :
um = 0.1
if r > 10 :
print "r :",r
def sendit():
data = {
"metric": "rc1.dust.um",
"timestamp": time.time(),
"value": um,
"tags": {
"eth0": macAddr,
"stalk": "VOLOSSH" ,
"sensor" : "dust.gp2y",
"name" : sensorname,
"floor_room": "10fl_min_room",
"building": "woosung",
"owner": "kang",
"country": "kor" }
#tags should be less than 9, 8 is alright, 9 returns http error
}
try :
ret = requests.post(url, data=json.dumps(data))
print "http return : %s" %ret
logadd(r)
logadd(um)
except requests.exceptions.Timeout :
logerror("time out")
except requests.exceptions.ConnectionError :
logerror("connection error")
if r > 0:
print "read : ", um
if __name__ == "__main__" :
print "main
| bsd-2-clause | Python | |
84ae11ed02eabcc9c00167c710d9eb4e8462e7b0 | Create router.py | Hojalab/sarafu,pesaply/sarafu,Hojalab/sarafu,pesaply/sarafu | router.py | router.py | import jnettool.tool.elements.NetworkElements
import jnettool.tool.Routing
import jnettool.tool.RouteInspector
ne = jnettool.tools.elements.NetworkElements('ip')
try
routing_tableb = ne.getRoutingTable()
except jnettool.tools.elements.MissingVar:
logging.exception('No routing_tableb found')
ne.cleanup('rollback')
else
num_routes =routing_table.getSize()
for RToffset in range (num_routes):
route = routing_table.getRouteByIndex(RToffset)
name = route.getName()
ipaddr = route.get.IPAddr()
print "$15s -> %s" % (name ipaddr)
finally
ne.cleanup ('commit')
ne.dissconnect()
##########################################################
from nettools import NetworkElements
with NetworkElements ('IP') as ne:
for route in ne.routing_table:
print "$15s -> %s" % (route.name route.ipaddr)
class NetworkElements (Exception):
pass
class NetworkElements (object)"
def __init__(self , ipaddr):
self.oldne == jnettool.tools.NetworkElements ()
class
| mit | Python | |
5662e4277c77dc3be308934a1e389cf24259037a | Create remap.py | AndrejIT/map_unexplore | remap.py | remap.py | #!/usr/bin/env python
#Licence LGPL v2.1
#Creates copy of map db, leaving only specified(filtered) blocks.
#Can also be used for map backup, may-be even online backup.
import sqlite3
import mt_block_parser
import re
def getIntegerAsBlock(i):
x = unsignedToSigned(i % 4096, 2048)
i = int((i - x) / 4096)
y = unsignedToSigned(i % 4096, 2048)
i = int((i - y) / 4096)
z = unsignedToSigned(i % 4096, 2048)
return x,y,z
def unsignedToSigned(i, max_positive):
if i < max_positive:
return i
else:
return i - 2*max_positive
source = r'<Put your path to world folder here>/map.sqlite'
target = r'<Put your path to world folder here>/map.sqlite.backup'
#use compiled regular expression to filter blocks by block content. it is faster that checking "in array".
useful_block_evidence = re.compile(
"default:cobble|"+
"protector:protect|default:chest_locked|doors:door_steel|"+
"default:chest|default:torch|default:stonebrick|default:glass|default:obsidian_glass|"+
"default:ladder|default:rail|default:fence_wood|"+
"bones:bones"
)
sourceconn = sqlite3.connect(source)
targetconn = sqlite3.connect(target)
sourcecursor = sourceconn.cursor()
targetcursor = targetconn.cursor()
targetcursor.execute("CREATE TABLE IF NOT EXISTS `blocks` (`pos` INT NOT NULL PRIMARY KEY, `data` BLOB);")
for row in sourcecursor.execute("SELECT `pos`, `data` "+" FROM `blocks`;"):
pos=getIntegerAsBlock(row[0])
if pos[0]**2 + pos[2]**2 < (160/16)**2 and pos[1]>(-60/16): #160 nodes radius and 60 nodes deep
targetcursor.execute("INSERT OR IGNORE INTO `blocks` VALUES (?, ?);", (row[0], row[1]))
else:
try:
temp = mt_block_parser.MtBlockParser(row[1])
if useful_block_evidence.search(temp.nameIdMappingsRead)!=None:
targetcursor.execute("INSERT OR IGNORE INTO `blocks` VALUES (?, ?);", (row[0], row[1]))
except:
print "Block parse error:", pos[0], pos[1], pos[2]
targetconn.commit()
sourceconn.close()
targetconn.close()
| lgpl-2.1 | Python | |
a049ecd3c3d46f42d145d6d02a9f4f100ab3f4d9 | Create rules_cc.bzl for TSL | tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-experimental_link_static_libraries_once,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow | tensorflow/tsl/platform/rules_cc.bzl | tensorflow/tsl/platform/rules_cc.bzl | """Provides an indirection layer to bazel cc_rules"""
load(
"//tensorflow/tsl/platform/default:rules_cc.bzl",
_cc_binary = "cc_binary",
_cc_import = "cc_import",
_cc_library = "cc_library",
_cc_shared_library = "cc_shared_library",
_cc_test = "cc_test",
)
cc_binary = _cc_binary
cc_import = _cc_import
cc_library = _cc_library
cc_shared_library = _cc_shared_library
cc_test = _cc_test
| apache-2.0 | Python | |
6d5f1afdfe963a927e510a9a2e044fbd4796184f | add tests for cliff, arete, ridge, valley | mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource | test/601-cliff-arete-ridge-valley.py | test/601-cliff-arete-ridge-valley.py | #cliff in Yosemite
assert_has_feature(
13, 1374, 3166, "earth",
{"kind": "cliff", "id": 291684864,
"sort_key": 227})
#arete in Yosemite
assert_has_feature(
13, 1379, 3164, "earth",
{"kind": "arete", "id": 375271242,
"sort_key": 228})
#ridge with name in Santa Cruz Mountains, California
assert_has_feature(
13, 1317, 3182, "earth",
{"kind": "ridge", "id": 115675159,
"name": "Castle Rock Ridge", "label_placement": 'yes'})
#valley with name in Yosemite
assert_has_feature(
13, 1381, 3164, "earth",
{"kind": "valley", "id": 407467016,
"name": "Lyell Canyon", "label_placement": 'yes'}) | mit | Python | |
918723bb1cbaea66358bd0701728bfb89fa6f00c | add servo_SG90.py | wkentaro/keyopener,wkentaro/keyopener | code/servo_SG90.py | code/servo_SG90.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# servo_SG90.py
# author: Kentaro Wada <www.kentaro.wada@gmail.com>
import time
from RPi import GPIO
def move_pos(pin, deg, speed):
"""
(int) deg: 0 - 60 [deg]
(float) speed: -1 - 1
"""
t_start = time.time()
duration = 0.1 * deg / 60
while time.time() - t_start < duration:
high_duration = 0.0015 + speed * 0.0005
GPIO.output(pin, GPIO.HIGH)
time.sleep(high_duration)
GPIO.output(pin, GPIO.LOW)
time.sleep(0.02 - high_duration)
if __name__ == '__main__':
PIN_CTRL = 21
GPIO.setmode(GPIO.BCM)
GPIO.setup(PIN_CTRL, GPIO.OUT)
move_pos(PIN_CTRL, deg=40, speed=1)
GPIO.cleanup() | mit | Python | |
d31ee1207292e1b6c9f874b500837f91e477728c | Create sender.py | AkshayJoshi/Trial | sender.py | sender.py | """IMAGE_ID="" #Paste image ID here
FLAVOR_ID="" #Paste Flavor_ID here
SSH_KEY="" #Paste key name here - Assuming key already exists"""
from wsgiref.simple_server import make_server
import json
def createvm(ip,instance_name,image_id,flavor_id,ssh_key=None):
"""Creates an instance remotely"""
if ssh_key==None:
print "SSH key not found"
print "Create a new key and try again"
else:
global cmd
cmd= "nova boot "+instance_name+" --image \""+image_id+"\" --flavor "+flavor_id+" --key-name "+ssh_key
send(ip)
""" Now, send cmd to the ip specified in the parameters through a server.
Receive at the other end and execute as a command"""
def deletevm(ip,instance_name):
global cmd
cmd = "nova delete "+instance_name
send(ip)
def application(environ, start_response):
global cmd
#result = get_data
response_body = json.dumps(cmd)
status = '200 OK'
response_headers = [('Content-Type', 'application/json'),
('Content-Length', str(len(response_body)))]
start_response(status, response_headers)
print response_body
return [response_body]
def send(ip_addr):
httpd = make_server(ip_addr,8051,application)
httpd.handle_request()
| unlicense | Python | |
ff8c866675d8eece3c527a245ec17520f357203e | test the `fastfood` command | samstav/fastfood,martinb3/fastfood,martinb3/fastfood,samstav/fastfood,martinb3/fastfood,samstav/fastfood,rackerlabs/fastfood,rackerlabs/fastfood,samstav/fastfood,martinb3/fastfood,rackerlabs/fastfood | tests/functional/test_entry_point.py | tests/functional/test_entry_point.py | """Functional tests for command line use."""
import subprocess
import unittest
class TestFastfoodCLI(unittest.TestCase):
def test_fastfood_command_is_there(self):
cmd = ['fastfood', '--help']
try:
subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except (subprocess.CalledProcessError, OSError) as err:
msg = 'Error while running `%s`' % subprocess.list2cmdline(cmd)
self.fail(msg='%s --> %r' % (msg, err))
def test_help_output(self):
cmd = ['fastfood', '--help']
try:
output = subprocess.check_output(cmd, stderr=subprocess.STDOUT)
except (subprocess.CalledProcessError, OSError) as err:
msg = 'Error while running `%s`' % subprocess.list2cmdline(cmd)
self.fail(msg='%s --> %r' % (msg, err))
self.assertIn('usage', output.lower())
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python | |
ff151c8ea04268d2060cf8d281294a0d500ecbba | Test that data volumes now survive a crash when recreating | marcusmartins/compose,alexandrev/compose,alunduil/fig,sanscontext/docker.github.io,dopry/compose,aanand/fig,tangkun75/compose,danix800/docker.github.io,cclauss/compose,tiry/compose,joaofnfernandes/docker.github.io,Yelp/docker-compose,BSWANG/denverdino.github.io,twitherspoon/compose,alunduil/fig,rillig/docker.github.io,mosquito/docker-compose,sanscontext/docker.github.io,dnephin/compose,twitherspoon/compose,ZJaffee/compose,zhangspook/compose,cgvarela/compose,docker-zh/docker.github.io,mindaugasrukas/compose,shubheksha/docker.github.io,charleswhchan/compose,mark-adams/compose,mnowster/compose,ph-One/compose,rillig/docker.github.io,Chouser/compose,ChrisChinchilla/compose,johnstep/docker.github.io,MSakamaki/compose,browning/compose,aanand/fig,alexisbellido/docker.github.io,j-fuentes/compose,artemkaint/compose,jorgeLuizChaves/compose,lukemarsden/compose,phiroict/docker,rgbkrk/compose,menglingwei/denverdino.github.io,Katlean/fig,anweiss/docker.github.io,feelobot/compose,vlajos/compose,denverdino/denverdino.github.io,menglingwei/denverdino.github.io,joaofnfernandes/docker.github.io,mohitsoni/compose,runcom/compose,londoncalling/docker.github.io,alexisbellido/docker.github.io,gdevillele/docker.github.io,bfirsh/fig,LuisBosquez/docker.github.io,screwgoth/compose,anweiss/docker.github.io,ggtools/compose,denverdino/denverdino.github.io,swoopla/compose,Yelp/docker-compose,josephpage/compose,philwrenn/compose,michael-k/docker-compose,docker/docker.github.io,KalleDK/compose,genki/compose,vdemeester/compose,rillig/docker.github.io,gdevillele/docker.github.io,mbailey/compose,JimGalasyn/docker.github.io,mnuessler/compose,joeuo/docker.github.io,lmesz/compose,RobertNorthard/compose,danix800/docker.github.io,docker/docker.github.io,thaJeztah/docker.github.io,shin-/docker.github.io,denverdino/docker.github.io,bdwill/docker.github.io,danix800/docker.github.io,denverdino/docker.github.io,LuisBosquez/docker.github.io,d2bit/compose,xydinesh/compose,hoogenm/compose,amitsaha/compose,sdurrheimer/compose,DoubleMalt/compose,iamluc/compose,ekristen/compose,shubheksha/docker.github.io,dnephin/compose,mark-adams/compose,shin-/docker.github.io,josephpage/compose,joeuo/docker.github.io,alexandrev/compose,jonaseck2/compose,d2bit/compose,kikkomep/compose,jonaseck2/compose,shin-/compose,benhamill/compose,jzwlqx/denverdino.github.io,londoncalling/docker.github.io,joeuo/docker.github.io,denverdino/denverdino.github.io,albers/compose,talolard/compose,menglingwei/denverdino.github.io,sdurrheimer/compose,schmunk42/compose,funkyfuture/docker-compose,johnstep/docker.github.io,troy0820/docker.github.io,ChrisChinchilla/compose,sanscontext/docker.github.io,mnuessler/compose,DoubleMalt/compose,simonista/compose,BSWANG/denverdino.github.io,johnstep/docker.github.io,prologic/compose,bdwill/docker.github.io,mdaue/compose,dbdd4us/compose,joeuo/docker.github.io,benhamill/compose,feelobot/compose,jzwlqx/denverdino.github.io,bobphill/compose,johnstep/docker.github.io,kojiromike/compose,noironetworks/compose,Chouser/compose,andrewgee/compose,JimGalasyn/docker.github.io,phiroict/docker,xydinesh/compose,jrabbit/compose,TomasTomecek/compose,saada/compose,VinceBarresi/compose,JimGalasyn/docker.github.io,mohitsoni/compose,gdevillele/docker.github.io,zhangspook/compose,vlajos/compose,lmesz/compose,iamluc/compose,viranch/compose,nhumrich/compose,mrfuxi/compose,rillig/docker.github.io,runcom/compose,artemkaint/compose,mrfuxi/compose,dockerhn/compose,gdevillele/docker.github.io,brunocascio/compose,ionrock/compose,docker-zh/docker.github.io,RobertNorthard/compose,bfirsh/fig,Katlean/fig,joaofnfernandes/docker.github.io,jgrowl/compose,tiry/compose,rstacruz/compose,thaJeztah/compose,shubheksha/docker.github.io,londoncalling/docker.github.io,dbdd4us/compose,sanscontext/docker.github.io,mchasal/compose,mbailey/compose,sanscontext/docker.github.io,brunocascio/compose,philwrenn/compose,TomasTomecek/compose,kojiromike/compose,londoncalling/docker.github.io,alexisbellido/docker.github.io,JimGalasyn/docker.github.io,aduermael/docker.github.io,bobphill/compose,denverdino/docker.github.io,aduermael/docker.github.io,thaJeztah/docker.github.io,anweiss/docker.github.io,jzwlqx/denverdino.github.io,moxiegirl/compose,uvgroovy/compose,bbirand/compose,troy0820/docker.github.io,troy0820/docker.github.io,docker-zh/docker.github.io,jzwlqx/denverdino.github.io,michael-k/docker-compose,KevinGreene/compose,docker-zh/docker.github.io,goloveychuk/compose,qzio/compose,ralphtheninja/compose,ralphtheninja/compose,denverdino/docker.github.io,tangkun75/compose,KalleDK/compose,shubheksha/docker.github.io,thaJeztah/compose,cclauss/compose,danix800/docker.github.io,joeuo/docker.github.io,menglingwei/denverdino.github.io,ph-One/compose,GM-Alex/compose,docker/docker.github.io,browning/compose,denverdino/compose,anweiss/docker.github.io,unodba/compose,bdwill/docker.github.io,unodba/compose,albers/compose,anweiss/docker.github.io,aduermael/docker.github.io,BSWANG/denverdino.github.io,nhumrich/compose,simonista/compose,dockerhn/compose,LuisBosquez/docker.github.io,genki/compose,uvgroovy/compose,shubheksha/docker.github.io,jorgeLuizChaves/compose,BSWANG/denverdino.github.io,jrabbit/compose,rstacruz/compose,pspierce/compose,ekristen/compose,goloveychuk/compose,phiroict/docker,docker-zh/docker.github.io,alexisbellido/docker.github.io,phiroict/docker,Dakno/compose,denverdino/compose,denverdino/denverdino.github.io,swoopla/compose,MSakamaki/compose,au-phiware/compose,dopry/compose,JimGalasyn/docker.github.io,andrewgee/compose,KevinGreene/compose,BSWANG/denverdino.github.io,ggtools/compose,ionrock/compose,kikkomep/compose,ZJaffee/compose,bbirand/compose,alexisbellido/docker.github.io,screwgoth/compose,bsmr-docker/compose,amitsaha/compose,joaofnfernandes/docker.github.io,LuisBosquez/docker.github.io,mosquito/docker-compose,qzio/compose,denverdino/docker.github.io,TheDataShed/compose,troy0820/docker.github.io,talolard/compose,schmunk42/compose,jeanpralo/compose,shin-/compose,j-fuentes/compose,TheDataShed/compose,cgvarela/compose,jiekechoo/compose,pspierce/compose,moxiegirl/compose,au-phiware/compose,dilgerma/compose,VinceBarresi/compose,shin-/docker.github.io,denverdino/denverdino.github.io,gtrdotmcs/compose,thaJeztah/docker.github.io,rgbkrk/compose,noironetworks/compose,thaJeztah/docker.github.io,aduermael/docker.github.io,mindaugasrukas/compose,jessekl/compose,joaofnfernandes/docker.github.io,menglingwei/denverdino.github.io,gdevillele/docker.github.io,bdwill/docker.github.io,londoncalling/docker.github.io,bdwill/docker.github.io,mdaue/compose,saada/compose,mchasal/compose,funkyfuture/docker-compose,docker/docker.github.io,shin-/docker.github.io,lukemarsden/compose,hoogenm/compose,prologic/compose,Dakno/compose,jzwlqx/denverdino.github.io,jgrowl/compose,charleswhchan/compose,dilgerma/compose,jiekechoo/compose,viranch/compose,johnstep/docker.github.io,mnowster/compose,vdemeester/compose,gtrdotmcs/compose,docker/docker.github.io,phiroict/docker,thaJeztah/docker.github.io,shin-/docker.github.io,LuisBosquez/docker.github.io,jeanpralo/compose,jessekl/compose,marcusmartins/compose,GM-Alex/compose,bsmr-docker/compose | tests/integration/resilience_test.py | tests/integration/resilience_test.py | from __future__ import unicode_literals
from __future__ import absolute_import
import mock
from compose.project import Project
from .testcases import DockerClientTestCase
class ResilienceTest(DockerClientTestCase):
def test_recreate_fails(self):
db = self.create_service('db', volumes=['/var/db'], command='top')
project = Project('composetest', [db], self.client)
container = db.create_container()
db.start_container(container)
host_path = container.get('Volumes')['/var/db']
project.up()
container = db.containers()[0]
self.assertEqual(container.get('Volumes')['/var/db'], host_path)
with mock.patch('compose.service.Service.create_container', crash):
with self.assertRaises(Crash):
project.up()
project.up()
container = db.containers()[0]
self.assertEqual(container.get('Volumes')['/var/db'], host_path)
class Crash(Exception):
pass
def crash(*args, **kwargs):
raise Crash()
| apache-2.0 | Python | |
588d2627dee336efdf44862ab31bbbd896aad912 | Add basic tests for several permission types | songyi199111/sentry,boneyao/sentry,kevinlondon/sentry,gg7/sentry,gencer/sentry,korealerts1/sentry,looker/sentry,jean/sentry,wujuguang/sentry,kevinastone/sentry,imankulov/sentry,imankulov/sentry,looker/sentry,ewdurbin/sentry,gg7/sentry,llonchj/sentry,boneyao/sentry,Kryz/sentry,hongliang5623/sentry,felixbuenemann/sentry,BuildingLink/sentry,ifduyue/sentry,looker/sentry,zenefits/sentry,wujuguang/sentry,Kryz/sentry,fuziontech/sentry,vperron/sentry,korealerts1/sentry,JTCunning/sentry,wong2/sentry,beeftornado/sentry,drcapulet/sentry,kevinastone/sentry,fotinakis/sentry,zenefits/sentry,JamesMura/sentry,mvaled/sentry,jean/sentry,TedaLIEz/sentry,kevinlondon/sentry,daevaorn/sentry,JamesMura/sentry,boneyao/sentry,felixbuenemann/sentry,llonchj/sentry,wujuguang/sentry,ifduyue/sentry,hongliang5623/sentry,Natim/sentry,argonemyth/sentry,JackDanger/sentry,1tush/sentry,Natim/sentry,TedaLIEz/sentry,Kryz/sentry,vperron/sentry,1tush/sentry,drcapulet/sentry,TedaLIEz/sentry,gencer/sentry,jokey2k/sentry,nicholasserra/sentry,ifduyue/sentry,wong2/sentry,JTCunning/sentry,alexm92/sentry,BuildingLink/sentry,camilonova/sentry,JamesMura/sentry,looker/sentry,gencer/sentry,zenefits/sentry,mvaled/sentry,mvaled/sentry,mvaled/sentry,mitsuhiko/sentry,llonchj/sentry,JackDanger/sentry,jean/sentry,felixbuenemann/sentry,daevaorn/sentry,zenefits/sentry,pauloschilling/sentry,argonemyth/sentry,daevaorn/sentry,BayanGroup/sentry,BuildingLink/sentry,fuziontech/sentry,hongliang5623/sentry,1tush/sentry,zenefits/sentry,camilonova/sentry,fotinakis/sentry,BuildingLink/sentry,jokey2k/sentry,JamesMura/sentry,JTCunning/sentry,beeftornado/sentry,mitsuhiko/sentry,drcapulet/sentry,mvaled/sentry,kevinastone/sentry,jokey2k/sentry,ifduyue/sentry,mvaled/sentry,alexm92/sentry,ewdurbin/sentry,kevinlondon/sentry,camilonova/sentry,pauloschilling/sentry,JackDanger/sentry,BayanGroup/sentry,alexm92/sentry,JamesMura/sentry,ngonzalvez/sentry,wong2/sentry,fotinakis/sentry,looker/sentry,vperron/sentry,jean/sentry,BuildingLink/sentry,gencer/sentry,jean/sentry,korealerts1/sentry,Natim/sentry,ewdurbin/sentry,ngonzalvez/sentry,BayanGroup/sentry,beeftornado/sentry,argonemyth/sentry,gencer/sentry,nicholasserra/sentry,songyi199111/sentry,ifduyue/sentry,imankulov/sentry,fotinakis/sentry,gg7/sentry,ngonzalvez/sentry,nicholasserra/sentry,daevaorn/sentry,pauloschilling/sentry,songyi199111/sentry,fuziontech/sentry | tests/sentry/api/test_permissions.py | tests/sentry/api/test_permissions.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from sentry.api.permissions import has_perm
from sentry.constants import MEMBER_USER, MEMBER_ADMIN
from sentry.testutils import TestCase
class TeamPermissionTest(TestCase):
def test_basic_user(self):
user = self.create_user(is_superuser=False, email='bar@example.com')
owner = self.create_user(email='foo@example.com')
team = self.create_team(owner=owner)
assert not has_perm(team, user, MEMBER_USER)
assert not has_perm(team, user, MEMBER_ADMIN)
def test_owner(self):
owner = self.create_user(email='foo@example.com')
team = self.create_team(owner=owner)
assert has_perm(team, owner, MEMBER_USER)
assert has_perm(team, owner, MEMBER_ADMIN)
def test_team_member(self):
user = self.create_user(is_superuser=False, email='bar@example.com')
owner = self.create_user(email='foo@example.com')
team = self.create_team(owner=owner)
team.member_set.create(user=user, type=MEMBER_USER)
assert has_perm(team, user, MEMBER_USER)
assert not has_perm(team, user, MEMBER_ADMIN)
def test_team_admin(self):
user = self.create_user(is_superuser=False, email='bar@example.com')
owner = self.create_user(email='foo@example.com')
team = self.create_team(owner=owner)
team.member_set.create(user=user, type=MEMBER_ADMIN)
assert has_perm(team, user, MEMBER_USER)
assert has_perm(team, user, MEMBER_ADMIN)
class ProjectPermissionTest(TestCase):
def test_basic_user(self):
user = self.create_user(is_superuser=False, email='bar@example.com')
owner = self.create_user(email='foo@example.com')
team = self.create_team(owner=owner)
project = self.create_project(team=team)
assert not has_perm(project, user, MEMBER_USER)
assert not has_perm(project, user, MEMBER_ADMIN)
def test_owner(self):
owner = self.create_user(email='foo@example.com')
team = self.create_team(owner=owner)
project = self.create_project(team=team)
assert has_perm(project, owner, MEMBER_USER)
assert has_perm(project, owner, MEMBER_ADMIN)
def test_team_member(self):
user = self.create_user(is_superuser=False, email='bar@example.com')
owner = self.create_user(email='foo@example.com')
team = self.create_team(owner=owner)
project = self.create_project(team=team)
team.member_set.create(user=user, type=MEMBER_USER)
assert has_perm(project, user, MEMBER_USER)
assert not has_perm(project, user, MEMBER_ADMIN)
def test_team_admin(self):
user = self.create_user(is_superuser=False, email='bar@example.com')
owner = self.create_user(email='foo@example.com')
team = self.create_team(owner=owner)
project = self.create_project(team=team)
team.member_set.create(user=user, type=MEMBER_ADMIN)
assert has_perm(project, user, MEMBER_USER)
assert has_perm(project, user, MEMBER_ADMIN)
class GroupPermissionTest(TestCase):
def test_basic_user(self):
user = self.create_user(is_superuser=False, email='bar@example.com')
owner = self.create_user(email='foo@example.com')
team = self.create_team(owner=owner)
project = self.create_project(team=team)
group = self.create_group(project=project)
assert not has_perm(group, user, MEMBER_USER)
assert not has_perm(group, user, MEMBER_ADMIN)
def test_owner(self):
owner = self.create_user(email='foo@example.com')
team = self.create_team(owner=owner)
project = self.create_project(team=team)
group = self.create_group(project=project)
assert has_perm(group, owner, MEMBER_USER)
assert has_perm(group, owner, MEMBER_ADMIN)
def test_team_member(self):
user = self.create_user(is_superuser=False, email='bar@example.com')
owner = self.create_user(email='foo@example.com')
team = self.create_team(owner=owner)
project = self.create_project(team=team)
team.member_set.create(user=user, type=MEMBER_USER)
group = self.create_group(project=project)
assert has_perm(group, user, MEMBER_USER)
assert not has_perm(group, user, MEMBER_ADMIN)
def test_team_admin(self):
user = self.create_user(is_superuser=False, email='bar@example.com')
owner = self.create_user(email='foo@example.com')
team = self.create_team(owner=owner)
project = self.create_project(team=team)
team.member_set.create(user=user, type=MEMBER_ADMIN)
group = self.create_group(project=project)
assert has_perm(group, user, MEMBER_USER)
assert has_perm(group, user, MEMBER_ADMIN)
| bsd-3-clause | Python | |
bb48a8a9a763e594efa06b397d7731f25cedd65d | add parallel perceptron | sfu-natlang/glm-parser,sfu-natlang/glm-parser,sfu-natlang/glm-parser,sfu-natlang/glm-parser | src/learn/spark_perceptron.py | src/learn/spark_perceptron.py | from __future__ import division
import logging
import multiprocessing
from hvector._mycollections import mydefaultdict
from hvector.mydouble import mydouble
from weight.weight_vector import *
# Time accounting and control
import debug.debug
import time
import sys
from pyspark import SparkContext
logging.basicConfig(filename='glm_parser.log',
level=logging.DEBUG,
format='%(asctime)s %(levelname)s: %(message)s',
datefmt='%m/%d/%Y %I:%M:%S %p')
class AveragePerceptronLearner():
def __init__(self, w_vector, max_iter=1):
"""
:param w_vector: A global weight vector instance that stores
the weight value (float)
:param max_iter: Maximum iterations for training the weight vector
Could be overridden by parameter max_iter in the method
:return: None
"""
logging.debug("Initialize AveragePerceptronLearner ... ")
self.w_vector = w_vector
return
def parallel_learn(self, f_argmax, data_pool=None, max_iter=-1, d_filename=None, dump_freq=1):
# sigma_s
self.weight_sum_dict.clear()
self.last_change_dict.clear()
self.c = 1
data_list = []
sentence = 0
while data_pool.has_next_data():
sentence_count+=1
data = dp.get_next_data()
data_list.append(data)
sc = SparkContext(appName="iterParameterMixing")
train_data = sc.parallelize(data_list).cache()
def avg_perc_train(train_data,w_vector,f_argmax):
while train_data.has_next_data():
data_instance = data_pool.get_next_data()
gold_global_vector = data_instance.gold_global_vector
current_global_vector = f_argmax(data_instance)
# otherwise, the gold_global_vector will change because of the change in weights
w_vector.data_dict.iadd(gold_global_vector.feature_dict)
w_vector.data_dict.iaddc(current_global_vector.feature_dict, -1)
return weight_vector.items()
for round in range(0,epochs):
fv = {}
weight_vector = self.w_vector
feat_vec_list = train_data.mapPartitions(lambda t: avg_perc_train(t, weight_vector))
feat_vec_list = feat_vec_list.combineByKey((lambda x: (x,1)),
(lambda x, y: (x[0] + y, x[1] + 1)),
(lambda x, y: (x[0] + y[0], x[1] + y[1]))).collect()
self.w_vector.data_dict.clear()
for (feat, (a,b)) in feat_vec_list:
fv[feat] = float(a)/float(b)
self.w_vector.data_dict.iadd(fv)
sc.stop()
| mit | Python | |
33a0b48ec475ec02f60ed27e572709136515763a | Create maxdiff.py | paperparrot/maxdiff | maxdiff.py | maxdiff.py | __author__ = 'sebastiengenty'
import numpy as np
import pandas as pd
# This program is made to take the utilities from a MaxDiff estimation and compute the relative importances for
# the attributes tested. Input for now is .csv though also working on a .xlsx solution.
def avg_imp(utilities_file, filter_var='none', weight='none'):
""" Actual function doing all the heavy lifting. Takes in the utility scores and filters and computes the average
importances. Has the option of adding weights if need be.
:param utilities_file: CSV file containing the utility scores. Top row should be labels.
:param filter_var: CSV file containing the filter values. Each filter group should be its own variable, no overlaps
:param weight:
:return:
"""
raw = pd.read_csv(utilities_file, index_col='session')
rescaled = np.exp(raw)
rescaled = rescaled.divide(rescaled.sum(axis=0),axis=0)
if weight is 'none':
rescaled = rescaled
else:
rescaled = rescaled*weight
if filter_var is 'none':
output = rescaled.means()
else:
filts = pd.read_csv(filter_var, index_col='session')
data = pd.concat(rescaled, filts)
output = data.groupby()
| apache-2.0 | Python | |
3d5787cd860b1b3baaa041e49efe6d4af09396ac | Add migrations package to setup.py | rauleb/django-auditlog,kbussell/django-auditlog,Zmeylol/auditlog,robmagee/django-auditlog,chris-griffin/django-auditlog,johnrtipton/django-auditlog,jjkester/django-auditlog | setup.py | setup.py | from distutils.core import setup
setup(
name='django-auditlog',
version='0.2.1',
packages=['auditlog', 'auditlog.migrations'],
package_dir={'': 'src'},
url='https://github.com/jjkester/django-auditlog',
license='MIT',
author='Jan-Jelle Kester',
author_email='janjelle@jjkester.nl',
description='Audit log app for Django',
install_requires=[
'Django>=1.5'
]
)
| from distutils.core import setup
setup(
name='django-auditlog',
version='0.2.1',
packages=['auditlog',],
package_dir={'': 'src'},
url='https://github.com/jjkester/django-auditlog',
license='MIT',
author='Jan-Jelle Kester',
author_email='janjelle@jjkester.nl',
description='Audit log app for Django',
install_requires=[
'Django>=1.5'
]
)
| mit | Python |
5515fc7207404013ea9ba655b29bc414320971d9 | add setup.py | experimentengine/optimizely-client-python,wlowry88/optimizely-client-python,optimizely/optimizely-client-python | setup.py | setup.py | from setuptools import setup
setup(name='optimizely',
version='0.1',
description='An interface to Optimizely\'s REST API.',
url='https://github.com/optimizely/optimizely-client-python',
author='Optimizely',
packages=['optimizely'],
install_requires=[
'requests',
],
zip_safe=False) | mit | Python | |
3ca6447572f165a2de59d8420d7b853158b0ed66 | Add test cases for segment. | xrloong/Xie | tests/test/xie/graphics/segment.py | tests/test/xie/graphics/segment.py | import unittest
import copy
from xie.graphics.segment import BeelineSegment
from xie.graphics.segment import QCurveSegment
from xie.graphics.segment import StrokePath
from xie.graphics.segment import SegmentFactory
class SegmentTestCase(unittest.TestCase):
def setUp(self):
self.segmentFactory = SegmentFactory()
self.generateTestData()
def tearDown(self):
pass
def generateTestData(self):
self.beeline_0=BeelineSegment((0, 0))
self.beeline_1=BeelineSegment((9, 118))
self.beeline_2=BeelineSegment((-114, 103))
self.beeline_3=BeelineSegment((123, -24))
self.beeline_4=BeelineSegment((-11, -27))
self.qcurve_1=QCurveSegment((33, 21), (57, 97))
self.qcurve_2=QCurveSegment((80, 66), (-30, 16))
self.qcurve_3=QCurveSegment((-5, -51), (65, -113))
self.qcurve_4=QCurveSegment((-123, 71), (-37, -17))
self.qcurve_5=QCurveSegment((-42, -74), (-25, 5))
self.stroke_path_1=StrokePath([self.beeline_1])
self.stroke_path_2=StrokePath([self.beeline_2])
self.stroke_path_3=StrokePath([self.qcurve_1])
self.stroke_path_4=StrokePath([self.qcurve_2])
self.stroke_path_5=StrokePath([self.beeline_1, self.qcurve_1])
self.stroke_path_6=StrokePath([self.beeline_1, self.qcurve_2])
self.stroke_path_7=StrokePath([self.beeline_2, self.qcurve_1])
def testBeelineEquality(self):
self.assertEqual(self.beeline_1, self.beeline_1)
self.assertEqual(self.beeline_1, copy.deepcopy(self.beeline_1))
self.assertNotEqual(self.beeline_1, self.beeline_2)
def testQCurveEquality(self):
self.assertEqual(self.qcurve_1, self.qcurve_1)
self.assertEqual(self.qcurve_1, copy.deepcopy(self.qcurve_1))
self.assertNotEqual(self.qcurve_1, self.qcurve_2)
def testStrokePathEquality(self):
self.assertEqual(self.stroke_path_1, self.stroke_path_1)
self.assertEqual(self.stroke_path_1, copy.deepcopy(self.stroke_path_1))
self.assertNotEqual(self.stroke_path_1, self.stroke_path_2)
self.assertEqual(self.stroke_path_3, self.stroke_path_3)
self.assertEqual(self.stroke_path_3, copy.deepcopy(self.stroke_path_3))
self.assertNotEqual(self.stroke_path_3, self.stroke_path_4)
self.assertEqual(self.stroke_path_5, self.stroke_path_5)
self.assertEqual(self.stroke_path_5, copy.deepcopy(self.stroke_path_5))
self.assertNotEqual(self.stroke_path_5, StrokePath(self.stroke_path_6))
self.assertNotEqual(self.stroke_path_5, StrokePath(self.stroke_path_7))
self.assertEqual(StrokePath([]), StrokePath([]))
def testBeeline(self):
self.assertEqual(self.beeline_1.getEndPoint(), (9, 118))
self.assertEqual(self.beeline_2.getEndPoint(), (-114, 103))
self.assertEqual(self.beeline_3.getEndPoint(), (123, -24))
self.assertEqual(self.beeline_4.getEndPoint(), (-11, -27))
# specific
beeline=self.beeline_0
self.assertEqual(self.beeline_0.getEndPoint(), (0, 0))
def testBeelineBoundary(self):
self.assertEqual(self.beeline_1.computeBoundary(), (0, 0, 9, 118))
self.assertEqual(self.beeline_2.computeBoundary(), (-114, 0, 0, 103))
self.assertEqual(self.beeline_3.computeBoundary(), (0, -24, 123, 0))
self.assertEqual(self.beeline_4.computeBoundary(), (-11, -27, 0, 0))
self.assertEqual(self.beeline_0.computeBoundary(), (0, 0, 0, 0))
def testQCurve(self):
self.assertEqual(self.qcurve_1.getControlPoint(), (33, 21))
self.assertEqual(self.qcurve_1.getEndPoint(), (57, 97))
def testQCurveBoundary(self):
self.assertEqual(self.qcurve_1.computeBoundary(), (0, 0, 57, 97))
self.assertEqual(self.qcurve_2.computeBoundary(), (-30, 0, 34, 38))
self.assertEqual(self.qcurve_3.computeBoundary(), (-1, -113, 65, 0))
self.assertEqual(self.qcurve_4.computeBoundary(), (-73, -17, 0, 32))
self.assertEqual(self.qcurve_5.computeBoundary(), (-30, -36, 0, 5))
| apache-2.0 | Python | |
6f7abe8ea3e08ad15adab85c947ad4667e3dd3ab | Add simplejson requirement to setup | Bogh/django-oscar,anentropic/django-oscar,rocopartners/django-oscar,jinnykoo/wuyisj.com,jlmadurga/django-oscar,anentropic/django-oscar,bschuon/django-oscar,bschuon/django-oscar,dongguangming/django-oscar,rocopartners/django-oscar,michaelkuty/django-oscar,django-oscar/django-oscar,saadatqadri/django-oscar,monikasulik/django-oscar,ka7eh/django-oscar,faratro/django-oscar,adamend/django-oscar,itbabu/django-oscar,okfish/django-oscar,ademuk/django-oscar,nfletton/django-oscar,ahmetdaglarbas/e-commerce,machtfit/django-oscar,binarydud/django-oscar,itbabu/django-oscar,taedori81/django-oscar,lijoantony/django-oscar,sonofatailor/django-oscar,WillisXChen/django-oscar,jinnykoo/wuyisj.com,Idematica/django-oscar,spartonia/django-oscar,WadeYuChen/django-oscar,WillisXChen/django-oscar,faratro/django-oscar,rocopartners/django-oscar,QLGu/django-oscar,Jannes123/django-oscar,kapari/django-oscar,WadeYuChen/django-oscar,django-oscar/django-oscar,nickpack/django-oscar,manevant/django-oscar,jmt4/django-oscar,sonofatailor/django-oscar,jlmadurga/django-oscar,okfish/django-oscar,vovanbo/django-oscar,sonofatailor/django-oscar,thechampanurag/django-oscar,jlmadurga/django-oscar,MatthewWilkes/django-oscar,binarydud/django-oscar,lijoantony/django-oscar,pdonadeo/django-oscar,amirrpp/django-oscar,vovanbo/django-oscar,michaelkuty/django-oscar,DrOctogon/unwash_ecom,bnprk/django-oscar,django-oscar/django-oscar,pdonadeo/django-oscar,vovanbo/django-oscar,nickpack/django-oscar,spartonia/django-oscar,ademuk/django-oscar,makielab/django-oscar,mexeniz/django-oscar,jinnykoo/christmas,solarissmoke/django-oscar,jinnykoo/christmas,jinnykoo/wuyisj.com,WillisXChen/django-oscar,marcoantoniooliveira/labweb,saadatqadri/django-oscar,WadeYuChen/django-oscar,eddiep1101/django-oscar,DrOctogon/unwash_ecom,kapari/django-oscar,amirrpp/django-oscar,nfletton/django-oscar,eddiep1101/django-oscar,ka7eh/django-oscar,makielab/django-oscar,pasqualguerrero/django-oscar,taedori81/django-oscar,machtfit/django-oscar,monikasulik/django-oscar,ademuk/django-oscar,Jannes123/django-oscar,taedori81/django-oscar,makielab/django-oscar,itbabu/django-oscar,pdonadeo/django-oscar,saadatqadri/django-oscar,django-oscar/django-oscar,mexeniz/django-oscar,josesanch/django-oscar,mexeniz/django-oscar,amirrpp/django-oscar,jinnykoo/wuyisj,makielab/django-oscar,jlmadurga/django-oscar,Idematica/django-oscar,nfletton/django-oscar,WillisXChen/django-oscar,MatthewWilkes/django-oscar,kapt/django-oscar,Bogh/django-oscar,Bogh/django-oscar,mexeniz/django-oscar,kapt/django-oscar,binarydud/django-oscar,john-parton/django-oscar,pasqualguerrero/django-oscar,adamend/django-oscar,thechampanurag/django-oscar,josesanch/django-oscar,dongguangming/django-oscar,jinnykoo/christmas,eddiep1101/django-oscar,anentropic/django-oscar,itbabu/django-oscar,WadeYuChen/django-oscar,spartonia/django-oscar,bnprk/django-oscar,MatthewWilkes/django-oscar,Jannes123/django-oscar,anentropic/django-oscar,solarissmoke/django-oscar,taedori81/django-oscar,elliotthill/django-oscar,jmt4/django-oscar,QLGu/django-oscar,WillisXChen/django-oscar,thechampanurag/django-oscar,faratro/django-oscar,dongguangming/django-oscar,jinnykoo/wuyisj,ahmetdaglarbas/e-commerce,sasha0/django-oscar,ka7eh/django-oscar,adamend/django-oscar,Bogh/django-oscar,ahmetdaglarbas/e-commerce,sasha0/django-oscar,okfish/django-oscar,michaelkuty/django-oscar,vovanbo/django-oscar,sonofatailor/django-oscar,elliotthill/django-oscar,josesanch/django-oscar,sasha0/django-oscar,manevant/django-oscar,dongguangming/django-oscar,ahmetdaglarbas/e-commerce,kapt/django-oscar,manevant/django-oscar,lijoantony/django-oscar,jmt4/django-oscar,binarydud/django-oscar,machtfit/django-oscar,pdonadeo/django-oscar,nickpack/django-oscar,amirrpp/django-oscar,marcoantoniooliveira/labweb,kapari/django-oscar,bnprk/django-oscar,QLGu/django-oscar,john-parton/django-oscar,spartonia/django-oscar,jinnykoo/wuyisj,pasqualguerrero/django-oscar,nickpack/django-oscar,pasqualguerrero/django-oscar,bschuon/django-oscar,saadatqadri/django-oscar,sasha0/django-oscar,okfish/django-oscar,jinnykoo/wuyisj,eddiep1101/django-oscar,DrOctogon/unwash_ecom,faratro/django-oscar,john-parton/django-oscar,lijoantony/django-oscar,WillisXChen/django-oscar,MatthewWilkes/django-oscar,rocopartners/django-oscar,ka7eh/django-oscar,jinnykoo/wuyisj.com,adamend/django-oscar,thechampanurag/django-oscar,kapari/django-oscar,solarissmoke/django-oscar,bschuon/django-oscar,elliotthill/django-oscar,nfletton/django-oscar,marcoantoniooliveira/labweb,ademuk/django-oscar,Jannes123/django-oscar,michaelkuty/django-oscar,monikasulik/django-oscar,john-parton/django-oscar,solarissmoke/django-oscar,marcoantoniooliveira/labweb,jmt4/django-oscar,monikasulik/django-oscar,QLGu/django-oscar,Idematica/django-oscar,manevant/django-oscar,bnprk/django-oscar | setup.py | setup.py | #!/usr/bin/env python
"""
Installation script:
To release a new version to PyPi:
- Ensure the version is correctly set in oscar.__init__.py
- Run: python setup.py sdist upload
"""
from setuptools import setup, find_packages
from oscar import get_version
setup(name='django-oscar',
version=get_version().replace(' ', '-'),
url='https://github.com/tangentlabs/django-oscar',
author="David Winterbottom",
author_email="david.winterbottom@tangentlabs.co.uk",
description="A domain-driven e-commerce framework for Django 1.3+",
long_description=open('README.rst').read(),
keywords="E-commerce, Django, domain-driven",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'django==1.4',
'PIL==1.1.7',
'South==0.7.3',
'django-extra-views==0.2.0',
'django-haystack==1.2.7',
'django-treebeard==1.61',
'sorl-thumbnail==11.12',
'python-memcached==1.48',
'django-sorting==0.1',
'simplejson==2.5.2',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python']
)
| #!/usr/bin/env python
"""
Installation script:
To release a new version to PyPi:
- Ensure the version is correctly set in oscar.__init__.py
- Run: python setup.py sdist upload
"""
from setuptools import setup, find_packages
from oscar import get_version
setup(name='django-oscar',
version=get_version().replace(' ', '-'),
url='https://github.com/tangentlabs/django-oscar',
author="David Winterbottom",
author_email="david.winterbottom@tangentlabs.co.uk",
description="A domain-driven e-commerce framework for Django 1.3+",
long_description=open('README.rst').read(),
keywords="E-commerce, Django, domain-driven",
license='BSD',
platforms=['linux'],
packages=find_packages(exclude=["sandbox*", "tests*"]),
include_package_data=True,
install_requires=[
'django==1.4',
'PIL==1.1.7',
'South==0.7.3',
'django-extra-views==0.2.0',
'django-haystack==1.2.7',
'django-treebeard==1.61',
'sorl-thumbnail==11.12',
'python-memcached==1.48',
'django-sorting==0.1',
],
# See http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=['Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: Unix',
'Programming Language :: Python']
)
| bsd-3-clause | Python |
b16f666af5e7dd1e2201e8e57c0590933b5b73e9 | Add setup script | INM-6/python-gymz | setup.py | setup.py | # -*- coding: utf-8 -*-
from setuptools import setup
setup(
name='gymz',
version='0.0.1',
author='Jakob Jordan, Philipp Weidel',
author_email='j.jordan@fz-juelich.de',
description=('A light-weight ZMQ wrapper for the OpenAI Gym.'),
license='MIT',
keywords='openai-gym reinforcement-learning zmq',
url='https://github.com/INM-6/python-gymz',
packages=['gymz', 'examples'],
scripts=['gymz-controller'],
data_files=['DefaultConfig.json'],
long_description=open('README.md').read(),
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
'Topic :: Scientific/Engineering',
],
)
| mit | Python | |
713e715fb3657a274f60c687eec60bad0d9c7da9 | Update synth.py yaml location (#6480) | tseaver/google-cloud-python,GoogleCloudPlatform/gcloud-python,googleapis/google-cloud-python,tswast/google-cloud-python,tseaver/google-cloud-python,tswast/google-cloud-python,tswast/google-cloud-python,dhermes/google-cloud-python,dhermes/gcloud-python,googleapis/google-cloud-python,dhermes/google-cloud-python,dhermes/google-cloud-python,jonparrott/google-cloud-python,GoogleCloudPlatform/gcloud-python,tseaver/google-cloud-python,jonparrott/google-cloud-python,dhermes/gcloud-python,jonparrott/gcloud-python,jonparrott/gcloud-python | container/synth.py | container/synth.py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
from synthtool import gcp
gapic = gcp.GAPICGenerator()
#----------------------------------------------------------------------------
# Generate container client
#----------------------------------------------------------------------------
library = gapic.py_library(
'container',
'v1',
config_path='/google/container/artman_container_v1.yaml',
artman_output_name='container-v1')
s.move(library / 'google/cloud/container_v1')
# Issues exist where python files should define the source encoding
# https://github.com/googleapis/gapic-generator/issues/2097
s.replace(
'google/**/proto/*_pb2.py',
r"(^.*$\n)*",
r"# -*- coding: utf-8 -*-\n\g<0>")
| # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
from synthtool import gcp
gapic = gcp.GAPICGenerator()
#----------------------------------------------------------------------------
# Generate container client
#----------------------------------------------------------------------------
library = gapic.py_library(
'container',
'v1',
config_path='/google/container/artman_container.yaml',
artman_output_name='container-v1')
s.move(library / 'google/cloud/container_v1')
# Issues exist where python files should define the source encoding
# https://github.com/googleapis/gapic-generator/issues/2097
s.replace(
'google/**/proto/*_pb2.py',
r"(^.*$\n)*",
r"# -*- coding: utf-8 -*-\n\g<0>")
| apache-2.0 | Python |
8e175782c3b79e64d543fb478b146d308d2a2ad8 | Add small python script which calculates how much disk space we save by using CAS | scitran/api,scitran/core,scitran/core,scitran/core,scitran/core,scitran/api | bin/oneoffs/cas_statistic.py | bin/oneoffs/cas_statistic.py | import os
import pymongo
from collections import Counter
db_uri = os.getenv('SCITRAN_PERSISTENT_DB_URI', 'localhost:9001')
db = pymongo.MongoClient(db_uri).get_database('scitran')
COLLECTIONS = ['projects', 'acquisitions', 'analyses']
COLLECTIONS_WITH_EMBEDDED = [('sessions', 'subject')]
def files_of_collection(collection, embedded_doc=None):
hash_size_pairs = []
cursor = db.get_collection(collection).find({})
for document in cursor:
hash_size_pairs += files_of_document(document)
if embedded_doc:
hash_size_pairs += files_of_document(document.get(embedded_doc, {}))
return hash_size_pairs
def files_of_document(document):
hash_size_pairs = []
files = document.get('files', [])
for f in files:
hash_size_pairs.append((f['hash'], f['size']))
return hash_size_pairs
def main():
hash_size_pairs = []
for collection in COLLECTIONS:
hash_size_pairs += files_of_collection(collection)
for collection, embedded_doc in COLLECTIONS_WITH_EMBEDDED:
hash_size_pairs += files_of_collection(collection, embedded_doc)
counter = Counter(hash_size_pairs)
size_with_cas = 0
size_wo_cas = 0
file_count_cas = len(counter)
file_count_wo_cas = 0
for hash_size_pair in counter:
size_with_cas += hash_size_pair[1]
size_wo_cas += hash_size_pair[1] * counter[hash_size_pair]
file_count_wo_cas += counter[hash_size_pair]
saved_disk_space = size_wo_cas - size_with_cas
print('Total size (CAS): %s Bytes' % size_with_cas)
print('Total size (wo CAS): %s Bytes' % size_wo_cas)
print('Number of files (CAS): %s' % file_count_cas)
print('Number of files (wo CAS): %s' % file_count_wo_cas)
print('Saved disk space: %s Bytes (%s%%)' % (
saved_disk_space, round(saved_disk_space / float(size_wo_cas) * 100, 2)))
if __name__ == '__main__':
main()
| mit | Python | |
04a24befbaba975720acf5533bc97e7b3d601672 | Install script. | seomoz/url-py,pombredanne/url-py,smeinecke/url-py,seomoz/url-py,masayuko/url-py | setup.py | setup.py | #!/usr/bin/env python
# Copyright (c) 2012 SEOmoz
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
setup(
name = 'url',
version = '0.1.0',
description = 'URL Parsing',
long_description = '''
Some helper functions for parsing URLs, sanitizing them, normalizing them.
This includes support for escaping, unescaping, punycoding, unpunycoding,
cleaning parameter and query strings, and a little more sanitization.
''',
author = 'Dan Lecocq',
author_email = 'dan@seomoz.org',
url = 'http://github.com/seomoz/url-py',
py_modules = ['url'],
license = 'MIT',
platforms = 'Posix; MacOS X',
test_suite = 'tests.testReppy',
classifiers = [
'License :: OSI Approved :: MIT License',
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'Topic :: Internet :: WWW/HTTP'],
) | mit | Python | |
9afb9954f5d58fe051f60bd336a39d794699d405 | add a demo that plots a comparison of threshold types | rgommers/pywt,rgommers/pywt,rgommers/pywt,rgommers/pywt,grlee77/pywt,PyWavelets/pywt,grlee77/pywt,PyWavelets/pywt | demo/plot_thresholds.py | demo/plot_thresholds.py | import numpy as np
import matplotlib.pyplot as plt
import pywt
s = np.linspace(-4, 4, 1000)
s_soft = pywt.threshold(s, value=0.5, mode='soft')
s_hard = pywt.threshold(s, value=0.5, mode='hard')
s_garotte = pywt.threshold(s, value=0.5, mode='garotte')
s_firm1 = pywt.threshold_firm(s, value_low=0.5, value_high=1)
s_firm2 = pywt.threshold_firm(s, value_low=0.5, value_high=2)
s_firm3 = pywt.threshold_firm(s, value_low=0.5, value_high=4)
fig, ax = plt.subplots(1, 2, figsize=(10, 4))
ax[0].plot(s, s_soft)
ax[0].plot(s, s_hard)
ax[0].plot(s, s_garotte)
ax[0].legend(['soft (0.5)', 'hard (0.5)', 'non-neg. garotte (0.5)'])
ax[0].set_xlabel('input value')
ax[0].set_ylabel('thresholded value')
ax[1].plot(s, s_soft)
ax[1].plot(s, s_hard)
ax[1].plot(s, s_firm1)
ax[1].plot(s, s_firm2)
ax[1].plot(s, s_firm3)
ax[1].legend(['soft (0.5)', 'hard (0.5)', 'firm(0.5, 1)', 'firm(0.5, 2)',
'firm(0.5, 4)'])
ax[1].set_xlabel('input value')
ax[1].set_ylabel('thresholded value')
plt.show()
| mit | Python | |
e1b32cdd95c8a11cb492d27d9ac11a46c4037f2e | Add setup.py | mlafeldt/rdd.py,mlafeldt/rdd.py,mlafeldt/rdd.py | setup.py | setup.py | # -*- coding: utf-8 -*-
from setuptools import setup, find_packages
import rdd
setup(name='rdd',
version=rdd.__version__,
author='Mathias Lafeldt',
author_email='mathias.lafeldt@gmail.com',
url='https://github.com/mlafeldt/rdd.py',
license='MIT',
description='Python implementation of the Readability Shortener API',
long_description=open('README.md').read() + '\n\n' +
open('HISTORY.rst').read(),
classifiers=['Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python'],
packages=find_packages(),
zip_safe=False,
setup_requires=[],
install_requires=['requests>=0.7.0'],
entry_points="""
# -*- Entry points: -*-
[console_scripts]
rdd=rdd.cli:main
""")
| mit | Python | |
d78e3ba37cb2c88300d1e96010e79c6e4329f720 | bump version to 0.3 | kalessin/python-hubstorage,scrapinghub/python-hubstorage,torymur/python-hubstorage | setup.py | setup.py | from setuptools import setup, find_packages
setup(name='hubstorage',
version='0.3',
license='BSD',
description='Client interface for Scrapinghub HubStorage',
author='Scrapinghub',
author_email='info@scrapinghub.com',
url='http://scrapinghub.com',
platforms = ['Any'],
packages = find_packages(),
install_requires = ['requests'],
classifiers = [ 'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python']
)
| from setuptools import setup, find_packages
setup(name='hubstorage',
version='0.2',
license='BSD',
description='Client interface for Scrapinghub HubStorage',
author='Scrapinghub',
author_email='info@scrapinghub.com',
url='http://scrapinghub.com',
platforms = ['Any'],
packages = find_packages(),
install_requires = ['requests'],
classifiers = [ 'Development Status :: 4 - Beta',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python']
)
| bsd-3-clause | Python |
9238e229c63b32e35b46508959450e97901b46ab | Add package setup. | ohsu-qin/qipipe | setup.py | setup.py | import glob
from distutils.core import setup
requires = ['pydicom']
setup(
name = 'qipipe',
version = '1.1.1',
author = 'Fred Loney',
author_email = 'loneyf@ohsu.edu',
packages = find_packages('lib'),
package_dir = {'':'lib'}
scripts = glob.glob('bin/*'),
url = 'http://quip1.ohsu.edu/git/qipipe',
license = 'Proprietary',
description = '.',
long_description = __doc__,
classifiers = [
"Development Status :: 3 - Alpha",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: Other/Proprietary License",
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
],
install_requires = requires
)
| bsd-2-clause | Python | |
cd596bd419352d95fb31aab7116a1dddb061828c | add a setup.py for the trait_documenter package | itziakos/trait-documenter | setup.py | setup.py | #----------------------------------------------------------------------------
#
# Copyright (c) 2014, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in /LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
#
#----------------------------------------------------------------------------
from setuptools import setup, find_packages
setup(
name='trait_documenter',
version='1.0.0',
author='Enthought, Inc',
author_email='info@enthought.com',
url='https://github.com/enthought/trait-documenter',
description='Autodoc extention for documenting traits',
long_description=open('README.rst').read(),
packages=find_packages())
| bsd-3-clause | Python | |
d611af5a613e444c0c31c269aa382f0996c375d9 | Add setup.py from h3's fork | madteckhead/django-jquery-file-upload,madteckhead/django-jquery-file-upload | setup.py | setup.py | """
django-jquery-file-upload
"""
from setuptools import setup, find_packages
setup(
name = "django-jquery-file-upload",
version = "0.0.1",
url = "",
license = "The MIT License (MIT)",
description = "A minimal django project containing a minimal app with a working jquery file upload form based on the work by Sebastian Tschan: http://aquantum-demo.appspot.com/file-upload",
author = 'Sebastian Tschan / Sigurd Gartmann',
packages = find_packages(),
package_dir = {'': '.'},
install_requires = [],
)
| mit | Python | |
5ce21100b42dad13815d21cd3fae9a2710756314 | add setup.py | LeeBergstrand/pygenprop | setup.py | setup.py | #!/usr/bin/env python
"""
Created by: Lee Bergstrand (2018)
Description: Setup for installing pygenprop.
"""
from setuptools import setup
setup(name='pygenprop',
version='0.1',
description='A python library for programmatic usage of EBI InterPro Genome Properties.',
url='https://github.com/Micromeda/pygenprop',
author='Lee Bergstrand',
author_email='flyingcircus@example.com',
license='Apache License 2.0',
packages=['pygenprop'],
install_requires=[
'Cython==0.29',
'pandas==0.23.4',
],
zip_safe=False)
| apache-2.0 | Python | |
b9a23638d4cf6cc692a2dce3d42268ca73343493 | Add setup.py | caleb531/alfred-workflow-packager | setup.py | setup.py | #!/usr/bin/env python
# coding=utf-8
from setuptools import setup
setup(
name='alfred-workflow-packager',
version='0.8b',
description='A CLI utility for packaging and exporting Alfred workflows',
url='https://github.com/caleb531/alfred-workflow-packager',
author='Caleb Evans',
author_email='caleb@calebevans.me',
license='MIT',
keywords='alfred workflow package export',
packages=['awp'],
install_requires=[
'biplist >= 1, < 2',
'jsonschema >= 2, < 3'
],
entry_points={
'console_scripts': [
'alfred-workflow-packager=awp.packager:main',
'workflow-packager=awp.packager:main'
]
}
)
| mit | Python | |
8124ad701f11aad4e127e26fb34e850b80e03bdf | Add compare_to_sim | bryanwweber/UConnRCMPy | compare_to_sim.py | compare_to_sim.py | # -*- coding: utf-8 -*-
"""
Created on Thu May 21 17:55:00 2015
@author: weber
"""
import numpy as np
import matplotlib.pyplot as plt
from glob import glob
from pressure_traces import copy
simdata = np.genfromtxt('export.csv', delimiter=',', skip_header=1)
simtime = simdata[:, 0]
simvolume = simdata[:, 1]
simtemperature = simdata[:, 2]
simpressure = simdata[:, 3]
flist = glob('*pressure.txt')
expdata = np.genfromtxt(flist[0])
exptime = expdata[:, 0]
exppressure = expdata[:, 1]
fig = plt.figure(2)
ax = fig.add_subplot(1, 1, 1)
ax.plot(exptime, exppressure)
ax.plot(simtime, simpressure)
m = plt.get_current_fig_manager()
m.window.showMaximized()
maxT = np.amax(simtemperature)
print('{:.0f}'.format(maxT))
copy(str(maxT))
| bsd-3-clause | Python | |
d952b1b42ad82d40b0eb741f189d2525b73e2d19 | add basic configuration | paked/distance-matrix | setup.py | setup.py | from distutils.core import setup
setup(name='distance_matrix',
author='Harrison Shoebridge',
author_email='harrison@theshoebridges.com',
description='A small wrapper around the google distance matrix api',
version='0.1',
py_modules=['distance_matrix'])
| mit | Python | |
39b27532e79f27612bbae0b748ef91d89a429b84 | Exclude buggy setuptools-scm version | untitaker/vdirsyncer,untitaker/vdirsyncer,hobarrera/vdirsyncer,hobarrera/vdirsyncer,untitaker/vdirsyncer | setup.py | setup.py | # -*- coding: utf-8 -*-
'''
Vdirsyncer synchronizes calendars and contacts.
Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
how to package vdirsyncer.
'''
from setuptools import Command, find_packages, setup
requirements = [
# https://github.com/mitsuhiko/click/issues/200
'click>=5.0',
'click-log>=0.1.3',
# https://github.com/pimutils/vdirsyncer/issues/478
'click-threading>=0.2',
# !=2.9.0: https://github.com/kennethreitz/requests/issues/2930
#
# >=2.4.1: https://github.com/shazow/urllib3/pull/444
# Without the above pull request, `verify=False` also disables fingerprint
# validation. This is *not* what we want, and it's not possible to
# replicate vdirsyncer's current behavior (verifying fingerprints without
# verifying against CAs) with older versions of urllib3.
'requests >=2.4.1, !=2.9.0',
# https://github.com/sigmavirus24/requests-toolbelt/pull/28
# And https://github.com/sigmavirus24/requests-toolbelt/issues/54
'requests_toolbelt >=0.4.0',
# https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d # noqa
'atomicwrites>=0.1.7'
]
class PrintRequirements(Command):
description = 'Prints minimal requirements'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
for requirement in requirements:
print(requirement.replace(">", "=").replace(" ", ""))
setup(
# General metadata
name='vdirsyncer',
author='Markus Unterwaditzer',
author_email='markus@unterwaditzer.net',
url='https://github.com/pimutils/vdirsyncer',
description='Synchronize calendars and contacts',
license='MIT',
long_description=open('README.rst').read(),
# Runtime dependencies
install_requires=requirements,
# Optional dependencies
extras_require={
'remotestorage': ['requests-oauthlib'],
'google': ['requests-oauthlib'],
},
# Build dependencies
setup_requires=['setuptools_scm != 1.12.0'],
# Other
packages=find_packages(exclude=['tests.*', 'tests']),
include_package_data=True,
cmdclass={
'minimal_requirements': PrintRequirements
},
use_scm_version={
'write_to': 'vdirsyncer/version.py'
},
entry_points={
'console_scripts': ['vdirsyncer = vdirsyncer.cli:main']
},
)
| # -*- coding: utf-8 -*-
'''
Vdirsyncer synchronizes calendars and contacts.
Please refer to https://vdirsyncer.pimutils.org/en/stable/packaging.html for
how to package vdirsyncer.
'''
from setuptools import Command, find_packages, setup
requirements = [
# https://github.com/mitsuhiko/click/issues/200
'click>=5.0',
'click-log>=0.1.3',
# https://github.com/pimutils/vdirsyncer/issues/478
'click-threading>=0.2',
# !=2.9.0: https://github.com/kennethreitz/requests/issues/2930
#
# >=2.4.1: https://github.com/shazow/urllib3/pull/444
# Without the above pull request, `verify=False` also disables fingerprint
# validation. This is *not* what we want, and it's not possible to
# replicate vdirsyncer's current behavior (verifying fingerprints without
# verifying against CAs) with older versions of urllib3.
'requests >=2.4.1, !=2.9.0',
# https://github.com/sigmavirus24/requests-toolbelt/pull/28
# And https://github.com/sigmavirus24/requests-toolbelt/issues/54
'requests_toolbelt >=0.4.0',
# https://github.com/untitaker/python-atomicwrites/commit/4d12f23227b6a944ab1d99c507a69fdbc7c9ed6d # noqa
'atomicwrites>=0.1.7'
]
class PrintRequirements(Command):
description = 'Prints minimal requirements'
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
for requirement in requirements:
print(requirement.replace(">", "=").replace(" ", ""))
setup(
# General metadata
name='vdirsyncer',
author='Markus Unterwaditzer',
author_email='markus@unterwaditzer.net',
url='https://github.com/pimutils/vdirsyncer',
description='Synchronize calendars and contacts',
license='MIT',
long_description=open('README.rst').read(),
# Runtime dependencies
install_requires=requirements,
# Optional dependencies
extras_require={
'remotestorage': ['requests-oauthlib'],
'google': ['requests-oauthlib'],
},
# Build dependencies
setup_requires=['setuptools_scm'],
# Other
packages=find_packages(exclude=['tests.*', 'tests']),
include_package_data=True,
cmdclass={
'minimal_requirements': PrintRequirements
},
use_scm_version={
'write_to': 'vdirsyncer/version.py'
},
entry_points={
'console_scripts': ['vdirsyncer = vdirsyncer.cli:main']
},
)
| mit | Python |
1805e9cc2e2aeb3770b03ebcb05ac189566fa165 | add setup.py | sotetsuk/memozo | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='memozo',
version="0.1.0",
description='Memotize objects to desk easily',
author='sotetsuk',
url='https://github.com/sotetsuk/memozo',
author_email='sotetsu.koyamada@gmail.com',
license='MIT',
install_requires=[],
packages=find_packages(),
classifiers=[
"Programming Language :: Python :: 3.5",
"License :: OSI Approved :: MIT License"
]
) | mit | Python | |
37dcd46678ca8f22640aa314b174fda78be5392d | Bump version number | snahor/wtforms-mongoengine,rozza/flask-mongoengine,snahor/wtforms-mongoengine,rochacbruno/flask-mongoengine,losintikfos/flask-mongoengine,gerasim13/flask-mongoengine-1,gerasim13/flask-mongoengine-1,rozza/flask-mongoengine,quokkaproject/flask-mongoengine,quokkaproject/flask-mongoengine,rochacbruno/flask-mongoengine,losintikfos/flask-mongoengine | setup.py | setup.py | """
Flask-MongoEngine
--------------
Flask support for MongoDB using MongoEngine.
Includes `WTForms`_ support.
Links
`````
* `development version
<https://github.com/sbook/flask-mongoengine/raw/master#egg=Flask-MongoEngine-dev>`_
"""
from setuptools import setup
setup(
name='Flask-MongoEngine',
version='0.1.3-dev',
url='https://github.com/sbook/flask-mongoengine',
license='BSD',
author='Ross Lawley',
author_email='ross.lawley@streetlife.com',
description='Flask support for MongoDB and with WTF model forms',
long_description=__doc__,
packages=['flaskext',
'flaskext.mongoengine',
'flaskext.mongoengine.wtf'],
namespace_packages=['flaskext'],
test_suite='nose.collector',
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'mongoengine',
'flask-wtf'
],
include_package_data=True,
tests_require=[
'nose',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| """
Flask-MongoEngine
--------------
Flask support for MongoDB using MongoEngine.
Includes `WTForms`_ support.
Links
`````
* `development version
<https://github.com/sbook/flask-mongoengine/raw/master#egg=Flask-MongoEngine-dev>`_
"""
from setuptools import setup
setup(
name='Flask-MongoEngine',
version='0.1.2-dev',
url='https://github.com/sbook/flask-mongoengine',
license='BSD',
author='Ross Lawley',
author_email='ross.lawley@streetlife.com',
description='Flask support for MongoDB and with WTF model forms',
long_description=__doc__,
packages=['flaskext',
'flaskext.mongoengine',
'flaskext.mongoengine.wtf'],
namespace_packages=['flaskext'],
test_suite='nose.collector',
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'mongoengine',
'flask-wtf'
],
include_package_data=True,
tests_require=[
'nose',
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| bsd-3-clause | Python |
dbe16470f596ee53ec18975afca4aa0605b9b1bc | Add setup.py. | elektito/pybtracker | setup.py | setup.py | #!/usr/bin/env python3
try:
from setuptools.core import setup
except ImportError:
from distutils.core import setup
from pip.req import parse_requirements
from pip.download import PipSession
import os
def get_file_path(name):
return os.path.abspath(os.path.join(
os.path.dirname(__file__),
name))
with open(get_file_path('version.py')) as f:
exec(f.read())
# read requirements from requirements.txt
requirements = parse_requirements(get_file_path('requirements.txt'),
session=PipSession())
requirements = [str(r.req) for r in requirements]
setup(
name = 'pybtracker',
py_modules = ['version'],
packages = ['pybtracker'],
install_requires = requirements,
version = __version__,
description = 'Simple asyncio-based UDP BitTorrent tracker, '
'with a simple client.',
author = 'Mostafa Razavi',
license = 'MIT',
author_email = 'mostafa@sepent.com',
url = 'https://github.com/elektito/pybtracker',
download_url = 'https://github.com/elektito/pybtracker/tarball/' + __version__,
keywords = ['bittorrent', 'torrent', 'tracker', 'asyncio', 'udp'],
classifiers = [
'Programming Language :: Python :: 3'
],
entry_points = {
'console_scripts': [
'pybtracker=pybtracker.server:main',
'pybtracker-client=pybtracker.client:main',
],
},
)
| mit | Python | |
d8dd594c2ce0defa430f5a7e873c87491c7aab47 | Allow hyperframe 5 | vladmunteanu/hyper-h2,vladmunteanu/hyper-h2,python-hyper/hyper-h2,python-hyper/hyper-h2,Kriechi/hyper-h2,Kriechi/hyper-h2 | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import codecs
import os
import re
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Get the version
version_regex = r'__version__ = ["\']([^"\']*)["\']'
with open('h2/__init__.py', 'r') as f:
text = f.read()
match = re.search(version_regex, text)
if match:
version = match.group(1)
else:
raise RuntimeError("No version number found!")
# Stealing this from Kenneth Reitz
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
packages = [
'h2',
]
readme = codecs.open('README.rst', encoding='utf-8').read()
history = codecs.open('HISTORY.rst', encoding='utf-8').read()
setup(
name='h2',
version=version,
description='HTTP/2 State-Machine based protocol implementation',
long_description=u'\n\n'.join([readme, history]),
author='Cory Benfield',
author_email='cory@lukasa.co.uk',
url='http://hyper.rtfd.org',
packages=packages,
package_data={'': ['LICENSE', 'README.rst', 'CONTRIBUTORS.rst', 'HISTORY.rst', 'NOTICES']},
package_dir={'h2': 'h2'},
include_package_data=True,
license='MIT License',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
install_requires=[
'hyperframe>=3.1, <6, !=4.0.0',
'hpack>=2.2, <3',
],
extras_require={
':python_version == "2.7" or python_version == "3.3"': ['enum34>=1.0.4, <2'],
}
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import codecs
import os
import re
import sys
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
# Get the version
version_regex = r'__version__ = ["\']([^"\']*)["\']'
with open('h2/__init__.py', 'r') as f:
text = f.read()
match = re.search(version_regex, text)
if match:
version = match.group(1)
else:
raise RuntimeError("No version number found!")
# Stealing this from Kenneth Reitz
if sys.argv[-1] == 'publish':
os.system('python setup.py sdist upload')
sys.exit()
packages = [
'h2',
]
readme = codecs.open('README.rst', encoding='utf-8').read()
history = codecs.open('HISTORY.rst', encoding='utf-8').read()
setup(
name='h2',
version=version,
description='HTTP/2 State-Machine based protocol implementation',
long_description=u'\n\n'.join([readme, history]),
author='Cory Benfield',
author_email='cory@lukasa.co.uk',
url='http://hyper.rtfd.org',
packages=packages,
package_data={'': ['LICENSE', 'README.rst', 'CONTRIBUTORS.rst', 'HISTORY.rst', 'NOTICES']},
package_dir={'h2': 'h2'},
include_package_data=True,
license='MIT License',
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython',
'Programming Language :: Python :: Implementation :: PyPy',
],
install_requires=[
'hyperframe>=3.1, <5, !=4.0.0',
'hpack>=2.2, <3',
],
extras_require={
':python_version == "2.7" or python_version == "3.3"': ['enum34>=1.0.4, <2'],
}
)
| mit | Python |
5e98fe8b6ac368e29d739a2dfa690fdea01382ca | Bring back setup.py | mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea,mozilla/ichnaea | setup.py | setup.py | from codecs import open
import os
import os.path
from setuptools import find_packages, setup
here = os.path.relpath(os.path.abspath(os.path.dirname(__file__)))
with open(os.path.join(here, 'README.rst'), encoding='utf-8') as fd:
long_description = fd.read()
__version__ = '2.2.1'
setup(
name='ichnaea',
version=__version__,
description='Mozilla Location Service - Ichnaea',
long_description=long_description,
url='https://github.com/mozilla/ichnaea',
author='Mozilla',
license="Apache 2.0",
classifiers=[
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: Implementation :: CPython",
"Framework :: Pyramid",
"Topic :: Internet :: WWW/HTTP",
"Topic :: Internet :: WWW/HTTP :: WSGI :: Application"
],
keywords="web services geo location",
packages=find_packages(),
include_package_data=True,
zip_safe=False,
entry_points={
'console_scripts': [
'location_dump=ichnaea.scripts.dump:console_entry',
'location_map=ichnaea.scripts.datamap:console_entry',
'location_region_json=ichnaea.scripts.region_json:console_entry',
],
},
)
| apache-2.0 | Python | |
95c8e291fe1f89e1d880deff34c6e0aa98f6fdf2 | Package description for setuptools. | shrubberysoft/homophony | setup.py | setup.py | # Copyright (c) 2009 Shrubbery Software
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from setuptools import setup, find_packages
setup(name='homophony',
version='0.1',
description='Django and zc.testbrowser integration',
long_description=open('README').read(),
author='Shrubbery Software',
author_email='team@shrubberysoft.com',
url='http://github.com/shrubberysoft/homophony',
packages=find_packages('src'),
package_dir={'' : 'src'},
install_requires=['wsgi_intercept==0.4', 'zc.testbrowser']
)
| mit | Python | |
62439e5c8f174752cd5774c3f240b7cc8fa158ad | add packaging boilerplate | alexvalentine/mecode,jminardi/mecode,razeh/mecode,travisbusbee/mecode | setup.py | setup.py | from setuptools import setup, find_packages
INFO = {'name': 'mecode',
'version': '0.0.1',
}
setup(
name=INFO['name'],
version=INFO['version'],
author='Jack Minardi',
packages=find_packages(),
zip_safe=False,
maintainer='Jack Minardi',
maintainer_email='jack@minardi.org',
)
| mit | Python | |
f6b9a0c75a3b5b37b74856226c8210717def606f | Implement astroid trajectory | bit0001/trajectory_tracking,bit0001/trajectory_tracking | src/trajectory/astroid_trajectory.py | src/trajectory/astroid_trajectory.py | #!/usr/bin/env python
from math import cos, pi, sin
from .trajectory import Trajectory
class AstroidTrajectory(object, Trajectory):
def __init__(self, radius, period):
Trajectory.__init__(self)
self.radius = radius
self.period = period
def get_position_at(self, t):
super(AstroidTrajectory, self).get_position_at(t)
self.position.x = self.radius * cos(2 * pi * t / self.period) ** 3
self.position.y = self.radius * sin(2 * pi * t / self.period) ** 3
return self.position
| mit | Python | |
a9091a140ff158b79b05da5c20e18a0f1368d3d9 | add missing package init file | gonicus/gosa,gonicus/gosa,gonicus/gosa,gonicus/gosa | backend/src/gosa/backend/plugins/two_factor/filter/__init__.py | backend/src/gosa/backend/plugins/two_factor/filter/__init__.py | # This file is part of the GOsa project.
#
# http://gosa-project.org
#
# Copyright:
# (C) 2016 GONICUS GmbH, Germany, http://www.gonicus.de
#
# See the LICENSE file in the project's top-level directory for details.
__import__('pkg_resources').declare_namespace(__name__)
| lgpl-2.1 | Python | |
846ec085ff7bc1f5be250ac1ac2df87534bbc5c9 | add gunicorn.conf.py | linventifatelier/cartoterra,linventifatelier/cartoterra,linventifatelier/cartoterra | deploy/gunicorn.conf.py | deploy/gunicorn.conf.py | import os
def numCPUs():
if not hasattr(os, "sysconf"):
raise RuntimeError("No sysconf detected.")
return os.sysconf("SC_NPROCESSORS_ONLN")
workers = numCPUs() * 2 + 1
bind = "127.0.0.1:8000"
pidfile = "/tmp/gunicorn-demo.pid"
backlog = 2048
logfile = "/home/linventifatelier/earthbuilding/log/gunicorn_demo.log"
loglevel = "info"
timeout=60
| agpl-3.0 | Python | |
5574ddd6aaf6ab041bdb186b1455792bff803731 | Add range to ConditionFactory | WillisXChen/django-oscar,michaelkuty/django-oscar,WillisXChen/django-oscar,michaelkuty/django-oscar,spartonia/django-oscar,sonofatailor/django-oscar,solarissmoke/django-oscar,michaelkuty/django-oscar,django-oscar/django-oscar,spartonia/django-oscar,WillisXChen/django-oscar,django-oscar/django-oscar,sasha0/django-oscar,ka7eh/django-oscar,anentropic/django-oscar,john-parton/django-oscar,john-parton/django-oscar,ka7eh/django-oscar,django-oscar/django-oscar,anentropic/django-oscar,solarissmoke/django-oscar,WillisXChen/django-oscar,sasha0/django-oscar,okfish/django-oscar,WillisXChen/django-oscar,okfish/django-oscar,okfish/django-oscar,okfish/django-oscar,john-parton/django-oscar,ka7eh/django-oscar,michaelkuty/django-oscar,anentropic/django-oscar,anentropic/django-oscar,WillisXChen/django-oscar,sonofatailor/django-oscar,solarissmoke/django-oscar,sonofatailor/django-oscar,spartonia/django-oscar,sasha0/django-oscar,spartonia/django-oscar,solarissmoke/django-oscar,john-parton/django-oscar,ka7eh/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,sasha0/django-oscar | src/oscar/test/factories/offer.py | src/oscar/test/factories/offer.py | import factory
from oscar.core.loading import get_model
__all__ = [
'RangeFactory', 'ConditionFactory', 'BenefitFactory',
'ConditionalOfferFactory',
]
class RangeFactory(factory.DjangoModelFactory):
name = factory.Sequence(lambda n: 'Range %d' % n)
slug = factory.Sequence(lambda n: 'range-%d' % n)
class Meta:
model = get_model('offer', 'Range')
@factory.post_generation
def products(self, create, extracted, **kwargs):
if not create or not extracted:
return
RangeProduct = get_model('offer', 'RangeProduct')
for product in extracted:
RangeProduct.objects.create(product=product, range=self)
class BenefitFactory(factory.DjangoModelFactory):
type = get_model('offer', 'Benefit').PERCENTAGE
value = 10
max_affected_items = None
range = factory.SubFactory(RangeFactory)
class Meta:
model = get_model('offer', 'Benefit')
class ConditionFactory(factory.DjangoModelFactory):
type = get_model('offer', 'Condition').COUNT
value = 10
range = factory.SubFactory(RangeFactory)
class Meta:
model = get_model('offer', 'Condition')
class ConditionalOfferFactory(factory.DjangoModelFactory):
name = 'Test offer'
benefit = factory.SubFactory(BenefitFactory)
condition = factory.SubFactory(ConditionFactory)
class Meta:
model = get_model('offer', 'ConditionalOffer')
| import factory
from oscar.core.loading import get_model
__all__ = [
'RangeFactory', 'ConditionFactory', 'BenefitFactory',
'ConditionalOfferFactory',
]
class RangeFactory(factory.DjangoModelFactory):
name = factory.Sequence(lambda n: 'Range %d' % n)
slug = factory.Sequence(lambda n: 'range-%d' % n)
class Meta:
model = get_model('offer', 'Range')
@factory.post_generation
def products(self, create, extracted, **kwargs):
if not create or not extracted:
return
RangeProduct = get_model('offer', 'RangeProduct')
for product in extracted:
RangeProduct.objects.create(product=product, range=self)
class BenefitFactory(factory.DjangoModelFactory):
type = get_model('offer', 'Benefit').PERCENTAGE
value = 10
max_affected_items = None
range = factory.SubFactory(RangeFactory)
class Meta:
model = get_model('offer', 'Benefit')
class ConditionFactory(factory.DjangoModelFactory):
type = get_model('offer', 'Condition').COUNT
value = 10
class Meta:
model = get_model('offer', 'Condition')
class ConditionalOfferFactory(factory.DjangoModelFactory):
name = 'Test offer'
benefit = factory.SubFactory(BenefitFactory)
condition = factory.SubFactory(ConditionFactory)
class Meta:
model = get_model('offer', 'ConditionalOffer')
| bsd-3-clause | Python |
710150538efcb56f2b8e968c58707ce2bbe8431b | add script to fill in ltr termini | glennhickey/teHmm,glennhickey/teHmm | bin/fillTermini.py | bin/fillTermini.py | #!/usr/bin/env python
#Copyright (C) 2014 by Glenn Hickey
#
#Released under the MIT license, see LICENSE.txt
import sys
import os
import argparse
import copy
from pybedtools import BedTool, Interval
"""
Stick a bed interval between pairs of lastz termini. Script written to be used
in conjunction with tsdFinder.py:
lastz termini -> fill termini -> bed input (which gets merged up automatically)
for tsdFinder.py. Example:
scaffold_1 141 225 1+ 43 +
scaffold_1 4479 4563 1+ 43 +
becomes
scaffold_1 141 225 1+ 43 +
scaffold_1 225 4479 1+ 43 +
scaffold_1 4479 4563 1+ 43 +
"""
def main(argv=None):
if argv is None:
argv = sys.argv
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description="Add interval between pairs of candidate termini. Input "
"bed must have pairs of termini (left first) in contiguous rows.")
parser.add_argument("inBed", help="bed with ltr results to process")
parser.add_argument("outBed", help="bed to write output to.")
args = parser.parse_args()
assert os.path.exists(args.inBed)
outFile = open(args.outBed, "w")
prevInterval = None
for interval in BedTool(args.inBed):
# Right termini
if prevInterval is not None:
if interval.name != prevInterval.name:
raise RuntimeError("Consecutive intervals dont have same id"
"\n%s%s" % (prevInterval, interval))
# make the new interval, dont bother giving a new name for now
fillInterval = copy.deepcopy(prevInterval)
fillInterval.start = prevInterval.end
fillInterval.end = interval.start
outFile.write(str(prevInterval))
outFile.write(str(fillInterval))
outFile.write(str(interval))
prevInterval = None
# Left termini
else:
prevInterval = interval
outFile.close()
if __name__ == "__main__":
sys.exit(main())
| mit | Python | |
eb8354c67e20aca10e0118efcbd8afb98bbb7dde | add missing kde.py | BIOS-IMASL/bomeba0,bomeba/bomeba0 | bomeba0/visualization/kde.py | bomeba0/visualization/kde.py | """One-dimensional kernel density estimate plots."""
import numpy as np
import matplotlib.pyplot as plt
from scipy.signal import gaussian, convolve, convolve2d # pylint: disable=no-name-in-module
from scipy.sparse import coo_matrix
from scipy.stats import entropy
def plot_kde(values, values2=None, contour=True, ax=None):
"""2D KDE plot taking into account boundary conditions.
The code was adapted from arviz library
Parameters
----------
values : array-like
Values to plot
values2 : array-like, optional
Values to plot. If present, a 2D KDE will be estimated
contour : bool
If True plot the 2D KDE using contours, otherwise plot a smooth 2D KDE. Defaults to True.
ax : matplotlib axes
Returns
-------
ax : matplotlib axes
"""
if ax is None:
ax = plt.gca()
else:
gridsize = (128, 128) if contour else (256, 256)
density, xmin, xmax, ymin, ymax = _fast_kde_2d(values, values2, gridsize=gridsize)
g_s = complex(gridsize[0])
x_x, y_y = np.mgrid[xmin:xmax:g_s, ymin:ymax:g_s]
ax.grid(False)
if contour:
qcfs = ax.contourf(x_x, y_y, density, antialiased=True)
qcfs.collections[0].set_alpha(0)
else:
ax.pcolormesh(x_x, y_y, density)
return ax
def _fast_kde_2d(x, y, gridsize=(128, 128), circular=True):
"""
2D fft-based Gaussian kernel density estimate (KDE).
The code was adapted from https://github.com/mfouesneau/faststats
Parameters
----------
x : Numpy array or list
y : Numpy array or list
gridsize : tuple
Number of points used to discretize data. Use powers of 2 for fft optimization
circular: bool
If True, use circular boundaries. Defaults to False
Returns
-------
grid: A gridded 2D KDE of the input points (x, y)
xmin: minimum value of x
xmax: maximum value of x
ymin: minimum value of y
ymax: maximum value of y
"""
x = np.asarray(x, dtype=float)
x = x[np.isfinite(x)]
y = np.asarray(y, dtype=float)
y = y[np.isfinite(y)]
xmin, xmax = x.min(), x.max()
ymin, ymax = y.min(), y.max()
len_x = len(x)
weights = np.ones(len_x)
n_x, n_y = gridsize
d_x = (xmax - xmin) / (n_x - 1)
d_y = (ymax - ymin) / (n_y - 1)
xyi = np.vstack((x, y)).T
xyi -= [xmin, ymin]
xyi /= [d_x, d_y]
xyi = np.floor(xyi, xyi).T
scotts_factor = len_x ** (-1 / 6)
cov = np.cov(xyi)
std_devs = np.diag(cov ** 0.5)
kern_nx, kern_ny = np.round(scotts_factor * 2 * np.pi * std_devs)
inv_cov = np.linalg.inv(cov * scotts_factor ** 2)
x_x = np.arange(kern_nx) - kern_nx / 2
y_y = np.arange(kern_ny) - kern_ny / 2
x_x, y_y = np.meshgrid(x_x, y_y)
kernel = np.vstack((x_x.flatten(), y_y.flatten()))
kernel = np.dot(inv_cov, kernel) * kernel
kernel = np.exp(-kernel.sum(axis=0) / 2)
kernel = kernel.reshape((int(kern_ny), int(kern_nx)))
boundary = "wrap" if circular else "symm"
grid = coo_matrix((weights, xyi), shape=(n_x, n_y)).toarray()
grid = convolve2d(grid, kernel, mode="same", boundary=boundary)
norm_factor = np.linalg.det(2 * np.pi * cov * scotts_factor ** 2)
norm_factor = len_x * d_x * d_y * norm_factor ** 0.5
grid /= norm_factor
return grid, xmin, xmax, ymin, ymax
| apache-2.0 | Python | |
418a2104ec20a0b8f651320c1cbea10a533dc44d | Add tools/sys-tools/gdb/cmd_example.py | benquike/cheatsheets,benquike/cheatsheets,benquike/cheatsheets,benquike/cheatsheets,benquike/cheatsheets,benquike/cheatsheets | tools/sys-tools/gdb/cmd_example.py | tools/sys-tools/gdb/cmd_example.py | import gdb
class SavePrefixCommand (gdb.Command):
'''
Save the current breakpoints to a file.
This command takes a single argument, a file name.
The breakpoints can be restored using the 'source' command.
'''
def __init__(self):
super(SavePrefixCommand, self).__init__ ("save breakpoints",
gdb.COMMAND_SUPPORT,
gdb.COMPLETE_FILENAME)
def invoke (self, arg, from_tty):
with open (arg, 'w') as f:
for bp in gdb.get_breakpoints ():
print >> f, "break", bp.get_location (),
if bp.get_thread () is not None:
print >> f, " thread", bp.get_thread (),
if bp.get_condition () is not None:
print >> f, " if", bp.get_condition (),
print >> f
if not bp.is_enabled ():
print >> f, "disable $bpnum"
# Note: we don't save the ignore count; there doesn't
# seem to be much point.
commands = bp.get_commands ()
if commands is not None:
print >> f, "commands"
# Note that COMMANDS has a trailing newline.
print >> f, commands,
print >> f, "end"
print >> f
SavePrefixCommand()
| cc0-1.0 | Python | |
61677566ce685379456e7853c69a78ea32353422 | Add auto fixture to make sure that output dir does not exists when tests are run | liumengjun/django-static-precompiler,liumengjun/django-static-precompiler,paera/django-static-precompiler,jaheba/django-static-precompiler,paera/django-static-precompiler,jaheba/django-static-precompiler,paera/django-static-precompiler,jaheba/django-static-precompiler,paera/django-static-precompiler,jaheba/django-static-precompiler,liumengjun/django-static-precompiler,liumengjun/django-static-precompiler,liumengjun/django-static-precompiler | static_precompiler/tests/conftest.py | static_precompiler/tests/conftest.py | from static_precompiler.settings import ROOT, OUTPUT_DIR
import shutil
import os
import pytest
@pytest.fixture(autouse=True)
def _no_output_dir(request):
""" Make sure that output dir does not exists. """
path = os.path.join(ROOT, OUTPUT_DIR)
if os.path.exists(path):
shutil.rmtree(path)
def fin():
if os.path.exists(path):
shutil.rmtree(path)
request.addfinalizer(fin)
| mit | Python | |
6a6a76ff5274b01ae8570ae7b1a4153b8705100f | move worker func | adrn/StreamMorphology,adrn/StreamMorphology,adrn/StreamMorphology | streammorphology/freqmap/mpi_util.py | streammorphology/freqmap/mpi_util.py | # coding: utf-8
""" Utilities for running frequency mapping with MPI (map) """
from __future__ import division, print_function
__author__ = "adrn <adrn@astro.columbia.edu>"
# Standard library
import os
from collections import OrderedDict
# Third-party
import numpy as np
from astropy.utils import isiterable
# Project
# ...
| mit | Python | |
422b9458d26866b9f6692ddb0ccf2305c3ac6ea7 | Add an extra file to the surrogates experiments. | negrinho/deep_architect,negrinho/deep_architect | dev/surrogates/plots.py | dev/surrogates/plots.py | import darch.search_logging as sl
import darch.visualization as vi
import numpy as np
import seaborn as sns; sns.set()
# checking these across time.
log_lst = sl.read_search_folder('./logs/cifar10_medium/run-0')
xkey = 'epoch_number'
ykey = 'validation_accuracy'
num_lines = 8
time_plotter = vi.LinePlot(xlabel='time_in_minutes', ylabel=ykey)
epoch_plotter = vi.LinePlot(xlabel=xkey, ylabel=ykey)
for lg in sorted(log_lst, key=lambda x: x['results']['sequences'][ykey][-1], reverse=True)[:num_lines]:
r = lg['results']['sequences']
time_plotter.add_line(np.linspace(0.0, 120.0, len(r[xkey]) + 1)[1:], r[ykey])
epoch_plotter.add_line(r[xkey], r[ykey])
time_plotter.plot()
epoch_plotter.plot()
| mit | Python | |
2bcf71638dba9a252378c251bbd32e6f72f74028 | Add integration test | resmo/cloudstack,jcshen007/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,resmo/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,mufaddalq/cloudstack-datera-driver,resmo/cloudstack,resmo/cloudstack,jcshen007/cloudstack,jcshen007/cloudstack,wido/cloudstack,mufaddalq/cloudstack-datera-driver,DaanHoogland/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,wido/cloudstack,GabrielBrascher/cloudstack,DaanHoogland/cloudstack,mufaddalq/cloudstack-datera-driver,DaanHoogland/cloudstack,mufaddalq/cloudstack-datera-driver,jcshen007/cloudstack,jcshen007/cloudstack,jcshen007/cloudstack,GabrielBrascher/cloudstack,mufaddalq/cloudstack-datera-driver,resmo/cloudstack,resmo/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack,wido/cloudstack,wido/cloudstack,wido/cloudstack,DaanHoogland/cloudstack,GabrielBrascher/cloudstack | test/integration/smoke/test_pvlan.py | test/integration/smoke/test_pvlan.py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
""" test for private vlan isolation
"""
#Import Local Modules
import marvin
from marvin.cloudstackTestCase import *
from marvin.cloudstackAPI import *
from marvin import remoteSSHClient
from marvin.integration.lib.utils import *
from marvin.integration.lib.base import *
from marvin.integration.lib.common import *
from nose.plugins.attrib import attr
import telnetlib
#Import System modules
import time
_multiprocess_shared_ = True
class TestPVLAN(cloudstackTestCase):
zoneId = 1
networkOfferingId = 7
vlan = 1234
isolatedpvlan = 567
def setUp(self):
self.apiClient = self.testClient.getApiClient()
def test_create_pvlan_network(self):
self.debug("Test create pvlan network")
createNetworkCmd = createNetwork.createNetworkCmd()
createNetworkCmd.name = "pvlan network"
createNetworkCmd.displaytext = "pvlan network"
createNetworkCmd.netmask = "255.255.255.0"
createNetworkCmd.gateway = "10.10.10.1"
createNetworkCmd.startip = "10.10.10.10"
createNetworkCmd.gateway = "10.10.10.20"
createNetworkCmd.vlan = "1234"
createNetworkCmd.isolatedpvlan = "567"
createNetworkCmd.zoneid = self.zoneId
createNetworkCmd.networkofferingid = self.networkOfferingId
createNetworkResponse = self.apiClient.createNetwork(createNetworkCmd)
self.networkId = createNetworkResponse.id
self.broadcasttype = createNetworkResponse.broadcastdomaintype
self.broadcasturi = createNetworkResponse.broadcasturi
self.assertIsNotNone(createNetworkResponse.id, "Network failed to create")
self.assertTrue(createNetworkResponse.broadcastdomaintype, "Pvlan")
self.assertTrue(createNetworkResponse.broadcasturi, "pvlan://1234-i567")
self.debug("Clean up test pvlan network")
deleteNetworkCmd = deleteNetwork.deleteNetworkCmd()
deleteNetworkCmd.id = self.networkId;
self.apiClient.deleteNetwork(deleteNetworkCmd)
#Test invalid parameter
# CLOUDSTACK-2392: Should not allow create pvlan with ipv6
createNetworkCmd.ip6gateway="fc00:1234::1"
createNetworkCmd.ip6cidr="fc00:1234::/64"
createNetworkCmd.startipv6="fc00:1234::10"
createNetworkCmd.endipv6="fc00:1234::20"
err = 0;
try:
createNetworkResponse = self.apiClient.createNetwork(createNetworkCmd)
except Exception as e:
err = 1;
self.debug("Try alloc with ipv6, got:%s" % e)
self.assertEqual(err, 1, "Shouldn't allow create PVLAN network with IPv6");
| apache-2.0 | Python | |
0e2c7c255043d7ccc5042a8e4905557f2d43e2ca | Add Mandrill backend | hator/django-templated-email,hator/django-templated-email | templated_email/backends/mandrill.py | templated_email/backends/mandrill.py | import vanilla_django
from django.core.mail import EmailMessage
from django.conf import settings
from django.utils.translation import ugettext as _
# Make sure you have Mandrill as your email backend
class TemplateBackend(vanilla_django.TemplateBackend):
def __init__(self, *args, **kwargs):
vanilla_django.TemplateBackend.__init__(self, *args, **kwargs)
def send(self, template_name, from_email, recipient_list, context, cc=None,
bcc=None, fail_silently=False, headers=None, template_prefix=None,
template_suffix=None, template_dir=None, file_extension=None,
**kwargs):
msg = EmailMessage(from_email=from_email, to=recipient_list)
msg.template_name = template_name
msg.global_merge_vars = context
if cc:
msg.cc = cc
if bcc:
msg.bcc = bcc
msg.use_template_subject = kwargs.get('use_template_subject', True)
msg.use_template_from = kwargs.get('use_template_from', True)
msg.async = kwargs.get('async', True)
msg.send()
return msg.mandrill_response
| mit | Python | |
8da12f8f269746b086f5d208afb390e304f12e9b | Add stub for submit_glue.py | HazyResearch/metal,HazyResearch/metal | metal/mmtl/submit_glue.py | metal/mmtl/submit_glue.py | import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser(
description="Evaluate a model and make glue submission bundle", add_help=False
)
parser.add_argument("-mf", "--model-file")
args = parser.parse_args()
| apache-2.0 | Python | |
e1f8358e09cd3299effc112544206361d5a6c7dd | Add migration for latest-ing | thrive-refugee/thrive-refugee,thrive-refugee/thrive-refugee,thrive-refugee/thrive-refugee | refugee_manager/migrations/0003_auto_20141108_1609.py | refugee_manager/migrations/0003_auto_20141108_1609.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('refugee_manager', '0002_volunteer_mailing_address'),
]
operations = [
migrations.AlterModelOptions(
name='assessment',
options={'get_latest_by': 'date'},
),
]
| mit | Python | |
660e0955979b7d11b7442a00747673700413bf1d | Add a test of spline filtering vs. matrix solving. | mdhaber/scipy,rgommers/scipy,andyfaff/scipy,lhilt/scipy,mdhaber/scipy,jamestwebber/scipy,grlee77/scipy,jamestwebber/scipy,aeklant/scipy,perimosocordiae/scipy,perimosocordiae/scipy,person142/scipy,zerothi/scipy,WarrenWeckesser/scipy,anntzer/scipy,perimosocordiae/scipy,Stefan-Endres/scipy,andyfaff/scipy,vigna/scipy,jor-/scipy,tylerjereddy/scipy,lhilt/scipy,person142/scipy,Eric89GXL/scipy,andyfaff/scipy,lhilt/scipy,Stefan-Endres/scipy,endolith/scipy,ilayn/scipy,arokem/scipy,e-q/scipy,WarrenWeckesser/scipy,Stefan-Endres/scipy,anntzer/scipy,scipy/scipy,Eric89GXL/scipy,e-q/scipy,arokem/scipy,mdhaber/scipy,nmayorov/scipy,person142/scipy,zerothi/scipy,tylerjereddy/scipy,aarchiba/scipy,matthew-brett/scipy,matthew-brett/scipy,ilayn/scipy,aeklant/scipy,perimosocordiae/scipy,anntzer/scipy,WarrenWeckesser/scipy,jor-/scipy,zerothi/scipy,andyfaff/scipy,jamestwebber/scipy,WarrenWeckesser/scipy,aarchiba/scipy,vigna/scipy,pizzathief/scipy,rgommers/scipy,anntzer/scipy,WarrenWeckesser/scipy,aarchiba/scipy,gfyoung/scipy,rgommers/scipy,ilayn/scipy,pizzathief/scipy,scipy/scipy,pizzathief/scipy,zerothi/scipy,anntzer/scipy,andyfaff/scipy,aarchiba/scipy,lhilt/scipy,e-q/scipy,Stefan-Endres/scipy,nmayorov/scipy,tylerjereddy/scipy,aarchiba/scipy,endolith/scipy,jor-/scipy,nmayorov/scipy,scipy/scipy,vigna/scipy,gfyoung/scipy,tylerjereddy/scipy,arokem/scipy,mdhaber/scipy,matthew-brett/scipy,gertingold/scipy,person142/scipy,zerothi/scipy,matthew-brett/scipy,arokem/scipy,jamestwebber/scipy,scipy/scipy,vigna/scipy,scipy/scipy,rgommers/scipy,jamestwebber/scipy,aeklant/scipy,e-q/scipy,e-q/scipy,mdhaber/scipy,jor-/scipy,gertingold/scipy,aeklant/scipy,gertingold/scipy,zerothi/scipy,gfyoung/scipy,Stefan-Endres/scipy,rgommers/scipy,anntzer/scipy,aeklant/scipy,WarrenWeckesser/scipy,gfyoung/scipy,vigna/scipy,nmayorov/scipy,pizzathief/scipy,gfyoung/scipy,perimosocordiae/scipy,Eric89GXL/scipy,grlee77/scipy,mdhaber/scipy,matthew-brett/scipy,lhilt/scipy,grlee77/scipy,perimosocordiae/scipy,endolith/scipy,gertingold/scipy,ilayn/scipy,endolith/scipy,Stefan-Endres/scipy,gertingold/scipy,jor-/scipy,person142/scipy,andyfaff/scipy,endolith/scipy,pizzathief/scipy,grlee77/scipy,ilayn/scipy,Eric89GXL/scipy,arokem/scipy,nmayorov/scipy,grlee77/scipy,tylerjereddy/scipy,scipy/scipy,Eric89GXL/scipy,ilayn/scipy,endolith/scipy,Eric89GXL/scipy | scipy/ndimage/tests/test_splines.py | scipy/ndimage/tests/test_splines.py | """Tests for spline filtering."""
from __future__ import division, print_function, absolute_import
import numpy as np
import pytest
from numpy.testing import assert_almost_equal
from scipy import ndimage
def get_spline_knot_values(order):
"""Knot values to the right of a B-spline's center."""
knot_values = {0: [1],
1: [1],
2: [6, 1],
3: [4, 1],
4: [230, 76, 1],
5: [66, 26, 1]}
return knot_values[order]
def make_spline_knot_matrix(n, order, mode='mirror'):
"""Matrix to invert to find the spline coefficients."""
knot_values = get_spline_knot_values(order)
matrix = np.zeros((n, n))
for diag, knot_value in enumerate(knot_values):
indices = np.arange(diag, n)
if diag == 0:
matrix[indices, indices] = knot_value
else:
matrix[indices, indices - diag] = knot_value
matrix[indices - diag, indices] = knot_value
knot_values_sum = knot_values[0] + 2 * sum(knot_values[1:])
if mode == 'mirror':
start, step = 1, 1
elif mode == 'reflect':
start, step = 0, 1
elif mode == 'wrap':
start, step = -1, -1
else:
raise ValueError('unsupported mode {}'.format(mode))
for row in range(len(knot_values) - 1):
for idx, knot_value in enumerate(knot_values[row + 1:]):
matrix[row, start + step*idx] += knot_value
matrix[-row - 1, -start - 1 - step*idx] += knot_value
return matrix / knot_values_sum
@pytest.mark.parametrize('order', [0, 1, 2, 3, 4, 5])
def test_spline_filter_vs_matrix_solution(order):
n = 100
eye = np.eye(n, dtype=float)
spline_filter_axis_0 = ndimage.spline_filter1d(eye, axis=0, order=order)
spline_filter_axis_1 = ndimage.spline_filter1d(eye, axis=1, order=order)
matrix = make_spline_knot_matrix(n, order)
assert_almost_equal(eye, np.dot(spline_filter_axis_0, matrix))
assert_almost_equal(eye, np.dot(spline_filter_axis_1, matrix.T))
| bsd-3-clause | Python | |
da0794fd63960fa8a836e27ab540cd9f8b8713d7 | Test program for issue #81 | WojciechMula/pyahocorasick,pombredanne/pyahocorasick,WojciechMula/pyahocorasick,pombredanne/pyahocorasick,pombredanne/pyahocorasick,pombredanne/pyahocorasick,WojciechMula/pyahocorasick,WojciechMula/pyahocorasick | unresolved_bugs/bug_81.py | unresolved_bugs/bug_81.py | # -*- coding: utf-8 -*-
"""
Aho-Corasick string search algorithm.
Author : Wojciech Muła, wojciech_mula@poczta.onet.pl
WWW : http://0x80.pl
License : public domain
"""
import os
import sys
import ahocorasick
try:
range = xrange # for Py2
except NameError:
pass
def get_memory_usage():
# Linux only
pid = os.getpid()
lines = []
try:
with open('/proc/%d/status' % pid, 'rt') as f:
lines = f.readlines()
except:
pass
for line in lines:
if line.startswith('VmSize'):
return float(line.split()[1])
return 0
def test():
with open('README.rst', 'r') as f:
data = f.read().split()
ac = ahocorasick.Automaton()
for i, word in enumerate(data):
ac.add_word(word, i)
ac.make_automaton()
for i in range(1024):
s = list(ac.keys())
if __name__ == '__main__':
before = get_memory_usage()
test()
after = get_memory_usage()
print("Memory's usage growth: %s (before = %s, after = %s)" % (after - before, before, after))
assert(before == after)
| bsd-3-clause | Python | |
1d4137d43f1b91400d1347d131f56b1698163337 | Create rockpaper.py | jovian34/j34rockpaper | rockpaper.py | rockpaper.py | import players
def showdown(human_choice, computer_guess, h_obj, c_obj):
if (human_choice, computer_guess) == (1, 3):
print('Rock crushes scissors. Human Wins!')
print('-------------------------------------')
h_obj.add_to_score()
elif (human_choice, computer_guess) == (1, 2):
print('Paper smothers rock. Computer Wins!')
print('-------------------------------------')
c_obj.add_to_score()
elif (human_choice, computer_guess) == (2, 1):
print('Paper smothers rock. Human Wins!')
print('-------------------------------------')
h_obj.add_to_score()
elif (human_choice, computer_guess) == (2, 3):
print('Scissors cut paper. Computer Wins!')
print('-------------------------------------')
c_obj.add_to_score()
elif (human_choice, computer_guess) == (3, 2):
print('Scissors cut paper. Human Wins!')
print('-------------------------------------')
h_obj.add_to_score()
elif (human_choice, computer_guess) == (3, 1):
print('Rock crushes scissors. Computer Wins!')
print('-------------------------------------')
c_obj.add_to_score()
elif (human_choice, computer_guess) == (1, 1):
print('Were all getting our Rocks on today!')
print('------------------------------------')
elif (human_choice, computer_guess) == (2, 2):
print('Paper is on sale at Office Depot!!!!')
print('------------------------------------')
elif (human_choice, computer_guess) == (3, 3):
print('Scissors! Dead Again is a great movie!')
print('--------------------------------------')
else:
print('Hmmm... something is wrong with the code.')
def main():
human = players.Human()
computer = players.Computer()
while human.score < 10 and computer.score < 10:
print('Human: {} ... '
'Computer: {}'.format(human.score, computer.score))
print('====================================================')
showdown(human.user_output(),computer.get_random(), human, computer)
if human.score > computer.score:
print('Human wins by score '
'of {} to {}'.format(human.score, computer.score))
else:
print('Computer wins by score '
'of {} to {}'.format(computer.score, human.score))
if __name__ == '__main__':
main()
| apache-2.0 | Python | |
48fc7cad7eb4cec0b928aba3daca7e934d46d87c | Add unit tests for sdnvpn | opnfv/functest,mywulin/functest,mywulin/functest,opnfv/functest | functest/tests/unit/features/test_sdnvpn.py | functest/tests/unit/features/test_sdnvpn.py | #!/usr/bin/env python
# Copyright (c) 2017 Orange and others.
#
# All rights reserved. This program and the accompanying materials
# are made available under the terms of the Apache License, Version 2.0
# which accompanies this distribution, and is available at
# http://www.apache.org/licenses/LICENSE-2.0
# pylint: disable=missing-docstring
import logging
import unittest
from functest.opnfv_tests.features import sdnvpn
from functest.utils import constants
class SdnVpnTesting(unittest.TestCase):
logging.disable(logging.CRITICAL)
def setUp(self):
self.sdnvpn = sdnvpn.SdnVpnTests()
def test_init(self):
self.assertEqual(self.sdnvpn.project_name, "sdnvpn")
self.assertEqual(self.sdnvpn.case_name, "bgpvpn")
self.assertEqual(
self.sdnvpn.repo,
constants.CONST.__getattribute__("dir_repo_sdnvpn"))
self.assertEqual(
self.sdnvpn.cmd,
'cd {}/sdnvpn/test/functest && python ./run_tests.py'.format(
self.sdnvpn.repo))
if __name__ == "__main__":
unittest.main(verbosity=2)
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.