commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
a4b242ebd107f9321cc5b87aee2cf608940007f4 | Make permission name more consistent. | borderitsolutions/amadaa,borderitsolutions/amadaa,borderitsolutions/amadaa | product/migrations/0005_auto_20161015_1536.py | product/migrations/0005_auto_20161015_1536.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2016-10-15 15:36
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Rename the ProductCategory manage permission for consistency.

    Auto-generated Django migration: rewrites the model's ``permissions``
    Meta option so the custom permission codename is
    ``manage_productcategories`` (see commit subject: "Make permission
    name more consistent").
    """

    # Must be applied after the preceding product migration.
    dependencies = [
        ('product', '0004_auto_20161015_1534'),
    ]

    operations = [
        # AlterModelOptions only changes model Meta (no schema change); it
        # replaces the full ``permissions`` tuple for ProductCategory.
        migrations.AlterModelOptions(
            name='productcategory',
            options={'permissions': (('manage_productcategories', 'Manage product categories'),)},
        ),
    ]
| mit | Python | |
b0a4f510ed343825a8073a68c4dc0e3066b560ec | add example canICA | abenicho/isvr | nilearn/example_canICA.py | nilearn/example_canICA.py | # -*- coding: utf-8 -*-
from nilearn import datasets

# Download the ADHD resting-state dataset; ``func`` holds one 4D NIfTI
# file per subject.
adhd_dataset = datasets.fetch_adhd()
subject_imgs = adhd_dataset.func

### Apply CanICA ##############################################################
from nilearn.decomposition.canica import CanICA

n_components = 20
ica = CanICA(n_components=n_components, smoothing_fwhm=6.,
             memory="nilearn_cache", memory_level=5,
             threshold=3., verbose=10, random_state=0)
ica.fit(subject_imgs)

# Project the estimated independent components back into brain space.
components_img = ica.masker_.inverse_transform(ica.components_)

# components_img is a Nifti Image object and can be written to disk:
components_img.to_filename('canica_resting_state.nii.gz')

### Visualize the results #####################################################
import nibabel
import matplotlib.pyplot as plt
from nilearn.plotting import plot_stat_map

# Hoist the voxel data and affine out of the loop; one stat map per IC.
component_data = components_img.get_data()
affine = components_img.get_affine()
for i in range(n_components):
    component = nibabel.Nifti1Image(component_data[..., i], affine)
    plot_stat_map(component, display_mode="z", title="IC %d" % i,
                  cut_coords=1, colorbar=False)
plt.show()
| bsd-3-clause | Python | |
25c2502fce4556b5b72e96116745c83d1689677f | Add tests for artist serializers | FreeMusicNinja/api.freemusic.ninja | artists/tests/test_serializers.py | artists/tests/test_serializers.py | from unittest import TestCase
from ..models import Artist, Hyperlink
from ..serializers import ArtistSerializer, HyperlinkSerializer
class HyperlinkSerializerTest(TestCase):
    """Tests for the Hyperlink serializer."""

    def test_valid_fields(self):
        """Serializing a Hyperlink exposes id, name, display_name and url."""
        link = Hyperlink(id=4, name='jamendo',
                         url="http://www.jamendo.com/artist/1333")
        expected = {
            'id': 4,
            'name': 'jamendo',
            # display_name is not passed to the model here, so it must be
            # produced by the serializer itself.
            'display_name': "Jamendo",
            'url': "http://www.jamendo.com/artist/1333",
        }
        self.assertEqual(HyperlinkSerializer(link).data, expected)
class ArtistSerializerTest(TestCase):
    """Tests for the Artist serializer."""

    def test_no_links(self):
        """An artist without hyperlinks serializes with an empty links list."""
        artist = Artist(id=2, name="Brad Sucks")
        serializer = ArtistSerializer(artist)
        self.assertEqual(
            serializer.data,
            {'id': 2, 'name': "Brad Sucks", 'links': []},
        )
| bsd-3-clause | Python | |
5e2cb194b174b8e9b99777d125f1fdaaf0eddace | add config handling, #25 | openego/eDisGo,openego/eDisGo | edisgo/tools/config.py | edisgo/tools/config.py | """This file is part of eDisGo, a python package for distribution grid
analysis and optimization.
It is developed in the project open_eGo: https://openegoproject.wordpress.com
eDisGo lives at github: https://github.com/openego/edisgo/
The documentation is available on RTD: http://edisgo.readthedocs.io
Based on code by oemof development team
This module provides a highlevel layer for reading and writing config files.
There must be a file called "config.ini" in the root-folder of the project.
The file has to be of the following structure to be imported correctly.
# this is a comment \n
# the filestructure is like: \n
\n
[netCDF] \n
RootFolder = c://netCDF \n
FilePrefix = cd2_ \n
\n
[mySQL] \n
host = localhost \n
user = guest \n
password = root \n
database = znes \n
\n
[SectionName] \n
OptionName = value \n
Option2 = value2 \n
"""
__copyright__ = "Reiner Lemoine Institut gGmbH"
__license__ = "GNU Affero General Public License Version 3 (AGPL-3.0)"
__url__ = "https://github.com/openego/edisgo/blob/master/LICENSE"
__author__ = "nesnoj, gplssm"
import os
from glob import glob
import shutil
import edisgo
import logging
logger = logging.getLogger('edisgo')
# Prefer the Python 3 module name, falling back to the Python 2 one.
try:
    import configparser as cp
except:
    # to be compatible with Python2.7
    import ConfigParser as cp

# Module-level parser shared by all config accessors in this module.
cfg = cp.RawConfigParser()
# Tracks whether load_config() has been called at least once.
_loaded = False

# load config dirs
package_path = edisgo.__path__[0]
internal_config_file = os.path.join(package_path, 'config', 'config_internal')

# Read the package-internal config (e.g. default directory names).
# NOTE(review): RawConfigParser.read() does not raise for a missing file,
# so this except branch likely only fires on parse errors — confirm intent.
try:
    cfg.read(internal_config_file)
except:
    logger.exception('Internal config {} file not found.'.format(internal_config_file))
def load_config(filename):
    """Read ``filename`` from the user's config directory, seeding it from
    the packaged ``*_default.cfg`` template on first use."""
    global _loaded

    target = os.path.join(extend_root_path(get('dirs', 'config_dir')),
                          filename)

    if not os.path.isfile(target):
        # First run: copy the bundled default so the user has an editable copy.
        logger.info('Config file {} not found, I will create a default version'
                    .format(target))
        default_name = filename.replace('.cfg', '_default.cfg')
        shutil.copy(os.path.join(package_path, 'config', default_name), target)

    cfg.read(target)
    _loaded = True
def get(section, key):
    """Return the value of *key* in *section* of the main config file.

    The raw string value is cast to ``float``, ``int`` or ``bool`` (tried
    in that order); if no cast is successful the raw string is returned.
    Note that integer-looking values come back as ``float`` because the
    float cast is attempted first (preserved for backward compatibility).

    :param section: the section.
    :type section: str.
    :param key: the key.
    :type key: str.
    :returns: the (possibly casted) value.
    :raises cp.Error: if the section or key does not exist.
    """
    # NOTE: values are not loaded on demand; callers are expected to have
    # run load_config() first.  (The original only checked a flag here and
    # did nothing with it — that dead code has been removed.)
    #
    # BUG FIX: the original used bare ``except:`` clauses, which also
    # swallowed SystemExit/KeyboardInterrupt.  Catching only config and
    # cast errors keeps the original fall-through behaviour.
    try:
        return cfg.getfloat(section, key)
    except (cp.Error, ValueError):
        try:
            return cfg.getint(section, key)
        except (cp.Error, ValueError):
            try:
                return cfg.getboolean(section, key)
            except (cp.Error, ValueError):
                # Fall back to the raw string; a genuinely missing
                # section/key raises cp.Error from here, as before.
                return cfg.get(section, key)
def get_root_path():
    """Return the base eDisGo directory (``~/<root_dir>``), creating and
    seeding it with the default config files on first call."""
    root_path = os.path.join(os.path.expanduser('~'),
                             get('dirs', 'root_dir'))

    if not os.path.isdir(root_path):
        logger.info('eDisGo root path {} not found, I will create it.'
                    .format(root_path))
        os.mkdir(root_path)

        # Populate the fresh root with the packaged default config files,
        # dropping the '_default' suffix from each filename.
        config_path = extend_root_path(get('dirs', 'config_dir'))
        logger.info('I will create a default set of config files in {}'
                    .format(config_path))
        pattern = os.path.join(package_path, 'config', '*.cfg')
        for src in glob(pattern):
            dst_name = os.path.basename(src).replace('_default', '')
            shutil.copy(src, os.path.join(config_path, dst_name))

    return root_path
def extend_root_path(subdir):
    """Return ``<root>/<subdir>``, creating the directory if it is missing."""
    path = os.path.join(get_root_path(), subdir)
    if not os.path.isdir(path):
        os.mkdir(path)
        logger.info('Path {} not found, I will create it.'
                    .format(path))
    return path
| agpl-3.0 | Python | |
def call(mod, cmd, *args, **kwargs):
    """Call arbitrary python code by name.

    Arguments:
        mod   - The module to call into ("__builtins__" selects the
                built-in namespace)
        cmd   - The attribute (function, constant, ...) in said module
        *args - Any positional arguments to pass to it
        index - Optional keyword: a specific index of the result to return
        end   - Optional keyword: with ``index``, return ``result[index:end]``

    Use case:
        if you don't know what command you need to run at compile time
    """
    import types

    if mod == "__builtins__":
        # BUG FIX: in an imported module ``__builtins__`` may be a plain
        # dict rather than a module (CPython detail), which would break
        # getattr(); the ``builtins`` module works everywhere.
        import builtins as m
    else:
        m = __import__(mod)
    func = getattr(m, cmd)

    if args:
        r = func(*args)
    elif not isinstance(func, (types.FunctionType, types.BuiltinFunctionType)):
        # Non-function attributes (constants, classes, ...) are returned
        # as-is rather than called.
        r = func
    else:
        r = func()

    index = kwargs.get('index')
    end = kwargs.get('end')
    if index is not None and end is not None:
        return r[index:end]
    elif index is not None:
        return r[index]
    return r
def process(tup):
    """Check one (call-spec, expected-value) pair for parse().

    ``tup[0]`` is a tuple of arguments for call(); string items of the
    form "index=x" / "end=x" are extracted into the corresponding keyword
    arguments instead of being passed positionally.  ``tup[1]`` is the
    expected return value.

    Returns True when call() produces the expected value.
    """
    spec = tup[0]
    expected = tup[1]
    args = []
    index = None
    end = None
    for item in spec:
        # Idiom fix: isinstance()/startswith() instead of comparing
        # type objects and slicing with magic lengths.
        if isinstance(item, str) and item.startswith("index="):
            index = int(item[len("index="):])
        elif isinstance(item, str) and item.startswith("end="):
            end = int(item[len("end="):])
        else:
            args.append(item)
    result = call(*tuple(args), index=index, end=end)
    return result == expected
def parse(d):
    """Check a dict of python-call specs against their expected values.

    Dict format:
        Key:
            tuple:
                [0] - module from which the command is called (or "__builtins__")
                [1] - command which you are calling
                [*] - "index=x", where x is the index you wish
                [*] - "end=x", where x is the end of the range you wish returned
                [*] - all other arguments in the order the command is supposed
                      to receive them; keyed arguments are not supported
        Value:
            The expected return value

    Returns True only if every call produced its expected value.
    """
    from multiprocessing import Pool
    pairs = list(d.items())
    # BUG FIX: the original never closed the pool, leaking worker
    # processes; close()/join() shut it down deterministically.
    pool = Pool()
    try:
        results = pool.map(process, pairs)
    finally:
        pool.close()
        pool.join()
    return all(results)
| mit | Python | |
686da2bf6b71961ea82e72640b7e6ff16c4723d7 | Add bubblesort example. Have problems with type inference. | seibert/numba,jriehl/numba,pombredanne/numba,sklam/numba,seibert/numba,GaZ3ll3/numba,GaZ3ll3/numba,seibert/numba,sklam/numba,seibert/numba,IntelLabs/numba,stefanseefeld/numba,IntelLabs/numba,IntelLabs/numba,stonebig/numba,gmarkall/numba,ssarangi/numba,sklam/numba,numba/numba,jriehl/numba,ssarangi/numba,stonebig/numba,pitrou/numba,shiquanwang/numba,numba/numba,seibert/numba,cpcloud/numba,numba/numba,gdementen/numba,shiquanwang/numba,sklam/numba,pombredanne/numba,gdementen/numba,pombredanne/numba,pitrou/numba,stefanseefeld/numba,sklam/numba,ssarangi/numba,cpcloud/numba,gmarkall/numba,pitrou/numba,jriehl/numba,cpcloud/numba,cpcloud/numba,shiquanwang/numba,stuartarchibald/numba,cpcloud/numba,stefanseefeld/numba,GaZ3ll3/numba,gmarkall/numba,ssarangi/numba,GaZ3ll3/numba,pitrou/numba,pitrou/numba,stefanseefeld/numba,gdementen/numba,ssarangi/numba,stefanseefeld/numba,stuartarchibald/numba,stonebig/numba,gdementen/numba,jriehl/numba,stonebig/numba,stuartarchibald/numba,gmarkall/numba,jriehl/numba,IntelLabs/numba,stuartarchibald/numba,gmarkall/numba,numba/numba,gdementen/numba,pombredanne/numba,numba/numba,IntelLabs/numba,stuartarchibald/numba,GaZ3ll3/numba,pombredanne/numba,stonebig/numba | examples/bubblesort.py | examples/bubblesort.py | from numba import *
import numpy as np
from timeit import default_timer as timer
#@autojit
#def bubbleswap(X, i):
# tmp = X[i]
# X[i] = X[i + 1]
# X[i + 1] = tmp
def bubblesort(X, doprint):
    """Sort the 1-D numpy array ``X`` in place (ascending) by bubble sort.

    X       -- numpy array; mutated in place.
    doprint -- when True, print the array after every outer pass.

    Kept deliberately simple: the inline swap below reproduces a numba
    type-inference problem (see the comments), so do not restructure it.
    Uses Python 2 ``print`` statements elsewhere in this file.
    """
    N = X.shape[0]
    # After each outer pass the largest remaining element has bubbled to
    # position end-1, so the inner scan shrinks by one each time.
    for end in range(N, 1, -1):
        for i in range(end - 1):
            cur = X[i]
            if cur > X[i + 1]:
                # Works if the swap is in another function.
                # bubbleswap(X, i)
                # But, the following inline swap is causing the numba error.
                tmp = X[i]
                X[i] = X[i + 1]
                X[i + 1] = tmp

        if doprint:
            # Python 2 print statement (this example predates Python 3).
            print "Iteration:", i, X
# JIT-compiled variant of bubblesort; calling it below reproduces the
# numba type-inference error ("This fails").
bubblesort_fast = autojit(bubblesort)


def main():
    # Sort the same reversed array with both implementations and check
    # that they produce identical results.
    Xtest = np.array(list(reversed(range(8))))

    X0 = Xtest.copy()
    bubblesort(X0, True)

    X1 = Xtest.copy()
    bubblesort_fast(X1, True) # This fails

    print X0
    print X1
    assert all(X0 == X1)

# Benchmark code kept for reference (disabled while the jitted version fails):
#    REP = 10
#    N = 100
#
#    Xorig = np.array(list(reversed(range(N))))
#
#    t0 = timer()
#    for t in range(REP):
#        X0 = Xorig.copy()
#        bubblesort(X0, False)
#    tpython = (timer() - t0) / REP
#
#    t1 = timer()
#    for t in range(REP):
#        X1 = Xorig.copy()
#        bubblesort_fast(X1, False)
#    tnumba = (timer() - t1) / REP
#
#    assert all(X0 == X1)
#
#    print 'Python', tpython
#    print 'Numba', tnumba
#    print 'Speedup', tpython / tnumba, 'x'

if __name__ == '__main__':
    main()
| bsd-2-clause | Python | |
8c9ff0787d1d862765bbd657b09357d31a402e1f | add collector for https://torstatus.blutmagie.de/ | spantons/attacks-pages-collector | collectors/torstatus.blutmagie.py | collectors/torstatus.blutmagie.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import socket
import re
from bs4 import BeautifulSoup
import requests
import ipwhois
from pprint import pprint
def get_url(url):
    """GET ``url`` and return the response.

    Raises requests.exceptions.ConnectionError on DNS failure or on any
    non-200 HTTP status.
    """
    try:
        response = requests.get(url)
    except requests.exceptions.ConnectionError:
        raise requests.exceptions.ConnectionError("DNS lookup failures")
    if response.status_code != 200:
        raise requests.exceptions.ConnectionError(
            "the {}, answer with {} error".format(url, response.status_code))
    return response
def get_host(ip):
    """Reverse-resolve ``ip`` to a hostname.

    Retries up to 5 times on socket.herror and falls back to "undefined"
    when every attempt fails.
    """
    for _ in range(5):
        try:
            return socket.gethostbyaddr(ip)[0]
        except socket.herror:
            continue
    return "undefined"
def get_who_is_and_country(ip):
    """Return a ``(whois_text, country_code)`` pair for ``ip``.

    Reserved/private addresses yield ("Private-Use Networks", "undefined")
    and failed lookups yield ("undefined", "undefined").
    """
    try:
        record = ipwhois.IPWhois(ip).lookup(retry_count=5)
    except ipwhois.exceptions.IPDefinedError:
        return "Private-Use Networks", "undefined"
    except ipwhois.exceptions.WhoisLookupError:
        return "undefined", "undefined"
    return str(record), record['asn_country_code']
def gather():
    """Scrape the TOR node table from torstatus.blutmagie.de and print one
    record per node (IP, reverse-DNS host, whois info, country)."""
    base_url = "https://torstatus.blutmagie.de/"
    attack_type = "TOR"
    res = get_url(base_url)
    soup = BeautifulSoup(res.content, "lxml")
    table_info = soup.findAll('table', {"class": "displayTable"})
    for row in table_info[0].findAll('tr', {"class": "r"}):
        col = row.findAll('td', {"class": "TDS"})
        col = col[0].findAll('td', {"class": "iT"})
        # Cell text looks like "hostname [1.2.3.4]".
        items = str(col[0].text).split('[')
        host = items[0].strip()
        ip = items[1][:-1].strip()
        ip = re.findall(r'[0-9]+(?:\.[0-9]+){3}', ip)
        if not ip:
            # BUG FIX: the original tested ``ip == 0``, which is never true
            # for a list, so rows without an IPv4 match crashed on ip[0].
            continue
        ip_address = ip[0]
        who_is, country = get_who_is_and_country(ip_address)
        doc = {
            'IP': ip_address,
            'SourceInfo': base_url,
            'Type': attack_type,
            'Country': country,
            'Domain': host,
            'URL': host,
            'WhoIsInfo': who_is,
        }
        pprint(doc)
if __name__ == '__main__':
    # Run the scraper when invoked as a script.
    gather()
| mit | Python | |
265b47de5a54d7c3a6a7be70b10f16b05f40d0b2 | add tests for "$ oj login --check URL" | kmyk/online-judge-tools,kmyk/online-judge-tools | tests/command_login.py | tests/command_login.py | import os
import subprocess
import sys
import time
import unittest
import tests.utils
class LoginTest(unittest.TestCase):
    """Tests for ``oj login --check URL`` against the supported judges."""

    def snippet_call_login_check_failure(self, url):
        """Run ``oj login --check url`` with a fresh HOME (no saved
        cookies) and expect exit code 1 ("not logged in")."""
        ojtools = os.path.abspath('oj')
        with tests.utils.sandbox(files=[]) as tempdir:
            env = dict(**os.environ)
            env['HOME'] = tempdir  # isolate from the user's real cookie jar
            # BUG FIX: removed a stray no-op ``self.assertRaises``
            # expression statement that accessed the bound method without
            # calling it.
            proc = subprocess.run([ojtools, 'login', '--check', url], env=env, stdout=sys.stdout, stderr=sys.stderr)
            self.assertEqual(proc.returncode, 1)

    def test_call_login_check_atcoder_failure(self):
        self.snippet_call_login_check_failure('https://atcoder.jp/')

    def test_call_login_check_codeforces_failure(self):
        self.snippet_call_login_check_failure('https://codeforces.com/')

    def test_call_login_check_yukicoder_failure(self):
        self.snippet_call_login_check_failure('https://yukicoder.me/')

    # The "success" variants need real credentials, so they are skipped on CI.
    @unittest.skipIf('CI' in os.environ, 'login is required')
    def test_call_login_check_atcoder_success(self):
        ojtools = os.path.abspath('oj')
        subprocess.check_call([ojtools, 'login', '--check', 'https://atcoder.jp/'], stdout=sys.stdout, stderr=sys.stderr)

    @unittest.skipIf('CI' in os.environ, 'login is required')
    def test_call_login_check_codeforces_success(self):
        ojtools = os.path.abspath('oj')
        subprocess.check_call([ojtools, 'login', '--check', 'https://codeforces.com/'], stdout=sys.stdout, stderr=sys.stderr)

    @unittest.skipIf('CI' in os.environ, 'login is required')
    def test_call_login_check_yukicoder_success(self):
        ojtools = os.path.abspath('oj')
        subprocess.check_call([ojtools, 'login', '--check', 'https://yukicoder.me/'], stdout=sys.stdout, stderr=sys.stderr)
| mit | Python | |
0681d3833cd3c82d95ce80f12b492706f26b5ffa | add geco_slo_channel_plot in progress | stefco/geco_data,stefco/geco_data,stefco/geco_data | geco_slow_channel_plot.py | geco_slow_channel_plot.py | #!/usr/bin/env python
# (c) Stefan Countryman 2017
import matplotlib.pyplot as plt
import numpy as np
import geco_gwpy_dump as g
import gwpy.segments
import gwpy.time
import sys
# Load the job description, optionally from a path given on the command line.
if len(sys.argv) == 1:
    job = g.Job.load()
else:
    job = g.Job.load(sys.argv[1])

# Science-mode ("analysis ready") segments covering the job's time span.
segs = gwpy.segments.DataQualityFlag.query_segdb('L1:DMT-ANALYSIS_READY:1',
                                                 job.start, job.end)

INDEX_MISSING_FMT = ('{} index not found for segment {} of {}, time {}\n'
                     'Setting {} index to {}.')

for i, q in enumerate(job.full_queries):
    means = []
    mins = []
    maxs = []
    stds = []
    times = []
    t = q.read()
    for ii, s in enumerate(segs.active):
        # this next bit seems to be necessary due to a bug
        start = gwpy.time.to_gps(s.start).gpsSeconds
        end = gwpy.time.to_gps(s.end).gpsSeconds
        # the start index for this segment might be outside the full timeseries
        try:
            i_start = np.argwhere(t.times.value == (start // 60 * 60))[0][0]
        except IndexError:
            i_start = 0
            print(INDEX_MISSING_FMT.format('Start', ii, len(segs.active),
                                           start, 'start', i_start))
        # the end index for this segment might be outside the full timeseries
        try:
            i_end = np.argwhere(t.times.value == (end // 60 * 60 + 60))[0][0]
        except IndexError:
            i_end = -2
            print(INDEX_MISSING_FMT.format('End', ii, len(segs.active),
                                           end, 'end', i_end))
        tt = t[i_start:i_end+1]
        means.append(tt.mean().value)
        mins.append(tt.min().value)
        maxs.append(tt.max().value)
        stds.append(tt.std().value)
        times.append(tt.times.mean().value)
    # BUG FIX: the original subtracted plain Python lists (maxs - stds),
    # which raises TypeError; convert to arrays first.
    times_arr = np.asarray(times)
    maxs_arr = np.asarray(maxs)
    stds_arr = np.asarray(stds)
    # BUG FIX: plt.plot() returns a list of Line2D objects, which has no
    # set_title()/savefig(); use an explicit Figure/Axes.  The original
    # format strings like "o'black'" were also invalid matplotlib specs.
    fig, ax = plt.subplots()
    ax.plot(times_arr, means, 'o', color='black')
    ax.plot(times_arr, mins, 'v', color='red')
    ax.plot(times_arr, maxs_arr, '^', color='blue')
    # NOTE(review): the original plotted bands around the maxima
    # (maxs +/- stds); means +/- stds may have been intended — confirm.
    ax.plot(times_arr, maxs_arr - stds_arr, '1', color='pink')
    ax.plot(times_arr, maxs_arr + stds_arr, '2', color='teal')
    # BUG FIX: the original referenced an undefined name ``j``; the loaded
    # job is bound to ``job``.
    ax.set_title('{} from {} to {}'.format(t.channel.name,
                                           gwpy.time.from_gps(job.start),
                                           gwpy.time.from_gps(job.end)))
    fig.savefig('{}__{}__{}.png'.format(q.start, q.end, q.sanitized_channel))
| mit | Python | |
fa6f2e35db07571759d654088d77cb7a206c5722 | Create test.py | skellykiernan/pylearn,skellykiernan/pylearn | test.py | test.py | import unittest
import awesome
class TestMethods(unittest.TestCase):
    """Tests for the ``awesome`` module."""

    # Renamed from the copy-pasted ``test_add``: the assertion checks
    # awesome.smile(), not addition.  Still discovered by unittest thanks
    # to the ``test_`` prefix.
    def test_smile(self):
        self.assertEqual(awesome.smile(), ":)")
if __name__ == '__main__':
    # Discover and run the tests when executed directly.
    unittest.main()
| bsd-3-clause | Python | |
8bda92da85bd666aa91b657319a019e00bf27126 | add sample configuration file | TakeshiTseng/ryu,Zouyiran/ryu,zyq001/ryu,o3project/ryu-oe,pichuang/ryu,habibiefaried/ryu,gopchandani/ryu,zangree/ryu,ynkjm/ryu,takahashiminoru/ryu,torufuru/oolhackathon,gopchandani/ryu,yamada-h/ryu,zangree/ryu,John-Lin/ryu,habibiefaried/ryu,OpenState-SDN/ryu,elahejalalpour/ELRyu,haniehrajabi/ryu,lsqtongxin/ryu,umkcdcrg01/ryu_openflow,darjus-amzn/ryu,alanquillin/ryu,Tejas-Subramanya/RYU_MEC,fkakuma/ryu,lagopus/ryu-lagopus-ext,castroflavio/ryu,darjus-amzn/ryu,Tesi-Luca-Davide/ryu,zangree/ryu,Tesi-Luca-Davide/ryu,fujita/ryu,yamt/ryu,jalilm/ryu,muzixing/ryu,StephenKing/ryu,hisaharu/ryu,darjus-amzn/ryu,alyosha1879/ryu,alanquillin/ryu,darjus-amzn/ryu,evanscottgray/ryu,fujita/ryu,John-Lin/ryu,ynkjm/ryu,pichuang/ryu,evanscottgray/ryu,diogommartins/ryu,gareging/SDN_Framework,alanquillin/ryu,muzixing/ryu,fkakuma/ryu,fujita/ryu,lzppp/mylearning,haniehrajabi/ryu,diogommartins/ryu,yamt/ryu,lagopus/ryu-lagopus-ext,fujita/ryu,fkakuma/ryu,StephenKing/ryu,StephenKing/ryu,ynkjm/ryu,lsqtongxin/ryu,fkakuma/ryu,gareging/SDN_Framework,ysywh/ryu,takahashiminoru/ryu,castroflavio/ryu,takahashiminoru/ryu,alyosha1879/ryu,umkcdcrg01/ryu_openflow,iwaseyusuke/ryu,hisaharu/ryu,ntts-clo/ryu,lzppp/mylearning,o3project/ryu-oe,ynkjm/ryu,iwaseyusuke/ryu,lzppp/mylearning,ttsubo/ryu,jalilm/ryu,OpenState-SDN/ryu,OpenState-SDN/ryu,elahejalalpour/ELRyu,pichuang/ryu,alyosha1879/ryu,Tejas-Subramanya/RYU_MEC,sivaramakrishnansr/ryu,StephenKing/ryu,haniehrajabi/ryu,yamt/ryu,hisaharu/ryu,iwaseyusuke/ryu,hisaharu/ryu,sivaramakrishnansr/ryu,takahashiminoru/ryu,lzppp/mylearning,Tesi-Luca-Davide/ryu,John-Lin/ryu,elahejalalpour/ELRyu,Tejas-Subramanya/RYU_MEC,ynkjm/ryu,haniehrajabi/ryu,shinpeimuraoka/ryu,zangree/ryu,ysywh/ryu,Tejas-Subramanya/RYU_MEC,lagopus/ryu-lagopus-ext,ysywh/ryu,habibiefaried/ryu,openvapour/ryu,zyq001/ryu,gopchandani/ryu,pichuang/ryu,jazzmes/ryu,TakeshiTseng/ryu,jkoelker/ryu,jalilm/ryu,torufuru/oolhackathon,TakeshiTseng/ry
u,jalilm/ryu,ntts-clo/mld-ryu,jazzmes/ryu,Zouyiran/ryu,diogommartins/ryu,umkcdcrg01/ryu_openflow,ttsubo/ryu,yamada-h/ryu,lsqtongxin/ryu,pichuang/ryu,jkoelker/ryu,alyosha1879/ryu,haniehrajabi/ryu,TakeshiTseng/ryu,jazzmes/ryu,yamt/ryu,zyq001/ryu,osrg/ryu,StephenKing/summerschool-2015-ryu,alanquillin/ryu,zangree/ryu,muzixing/ryu,osrg/ryu,openvapour/ryu,ttsubo/ryu,lzppp/mylearning,osrg/ryu,Zouyiran/ryu,StephenKing/summerschool-2015-ryu,ttsubo/ryu,ysywh/ryu,darjus-amzn/ryu,StephenKing/summerschool-2015-ryu,hisaharu/ryu,osrg/ryu,alanquillin/ryu,gareging/SDN_Framework,muzixing/ryu,diogommartins/ryu,Tejas-Subramanya/RYU_MEC,iwaseyusuke/ryu,elahejalalpour/ELRyu,fkakuma/ryu,lsqtongxin/ryu,lagopus/ryu-lagopus-ext,gareging/SDN_Framework,Zouyiran/ryu,zyq001/ryu,habibiefaried/ryu,takahashiminoru/ryu,openvapour/ryu,torufuru/oolhackathon,gopchandani/ryu,castroflavio/ryu,lagopus/ryu-lagopus-ext,StephenKing/ryu,ttsubo/ryu,shinpeimuraoka/ryu,openvapour/ryu,muzixing/ryu,sivaramakrishnansr/ryu,sivaramakrishnansr/ryu,ntts-clo/ryu,shinpeimuraoka/ryu,gareging/SDN_Framework,OpenState-SDN/ryu,OpenState-SDN/ryu,fujita/ryu,elahejalalpour/ELRyu,ntts-clo/mld-ryu,yamt/ryu,umkcdcrg01/ryu_openflow,TakeshiTseng/ryu,shinpeimuraoka/ryu,shinpeimuraoka/ryu,Tesi-Luca-Davide/ryu,osrg/ryu,John-Lin/ryu,gopchandani/ryu,sivaramakrishnansr/ryu,openvapour/ryu,jalilm/ryu,zyq001/ryu,diogommartins/ryu,StephenKing/summerschool-2015-ryu,ysywh/ryu,iwaseyusuke/ryu,StephenKing/summerschool-2015-ryu,umkcdcrg01/ryu_openflow,John-Lin/ryu,habibiefaried/ryu,Zouyiran/ryu,lsqtongxin/ryu,Tesi-Luca-Davide/ryu,evanscottgray/ryu,jkoelker/ryu | ryu/services/protocols/bgp/bgp_sample_conf.py | ryu/services/protocols/bgp/bgp_sample_conf.py | import os
# =============================================================================
# BGP configuration.
# =============================================================================
BGP = {
    # General BGP configuration.
    'routing': {
        # ASN for this BGP instance.
        'local_as': 64512,
        # BGP Router ID.
        'router_id': '10.10.0.1',
        # We list all BGP neighbors below. We establish an EBGP session with a
        # peer whose AS number differs from the one configured above, and an
        # IBGP session if the AS number is the same.
        'bgp_neighbors': {
            '10.0.0.1': {
                'remote_as': 64513,
                # MED advertised to this eBGP peer.
                'multi_exit_disc': 100
            },
            '10.10.0.2': {
                'remote_as': 64514,
            },
        },
        # Prefixes advertised by this speaker.
        'networks': [
            '10.20.0.0/24',
            '10.30.0.0/24',
            '10.40.0.0/16',
            '10.50.0.0/16',
        ],
    },
}
# =============================================================================
# Logging configuration.
# =============================================================================
# logging.config.dictConfig-style configuration.
LOGGING = {
    # We use python logging package for logging.
    'version': 1,
    'disable_existing_loggers': False,
    'formatters': {
        'verbose': {
            'format': '%(levelname)s %(asctime)s %(module)s ' +
                      '[%(process)d %(thread)d] %(message)s'
        },
        'simple': {
            'format': '%(levelname)s %(asctime)s %(module)s %(lineno)s ' +
                      '%(message)s'
        },
        'stats': {
            'format': '%(message)s'
        },
    },
    'handlers': {
        # Outputs log to console.
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'simple'
        },
        'console_stats': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'stats'
        },
        # Rotates log file when its size reaches 10MB.
        'log_file': {
            'level': 'ERROR',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': os.path.join('.', 'bgpspeaker.log'),
            # BUG FIX: RotatingFileHandler expects an int here; the
            # original passed the string '10000000', which breaks the
            # rollover size comparison.
            'maxBytes': 10000000,
            'formatter': 'verbose'
        },
        'stats_file': {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': os.path.join('.', 'statistics_bgps.log'),
            'maxBytes': 10000000,
            'formatter': 'stats'
        },
    },
    # Fine-grained control of logging per instance.
    'loggers': {
        'bgpspeaker': {
            # BUG FIX: the original listed the 'handlers' key twice; the
            # second occurrence (['console']) silently won, so that is the
            # value kept here.  Add 'log_file' to also log to a file.
            'handlers': ['console'],
            'level': 'DEBUG',
            'propagate': False,
        },
        'stats': {
            'handlers': ['stats_file', 'console_stats'],
            'level': 'INFO',
            'propagate': False,
            # (removed an invalid 'formatter' key: dictConfig logger
            # entries do not take one; formatters attach to handlers)
        },
    },
    # Root loggers.
    'root': {
        'handlers': ['console', 'log_file'],
        'level': 'DEBUG',
        'propagate': True,
    },
}
| apache-2.0 | Python | |
1fb737426f69d5e5dbe48dd66a13a38918707f23 | Add tests to detcatscores | pySTEPS/pysteps | pysteps/tests/test_detcatscores.py | pysteps/tests/test_detcatscores.py | # -*- coding: utf-8 -*-
import pytest
import numpy as np
from pysteps.verification import det_cat_fcst
from numpy.testing import assert_array_almost_equal
# CREATE A LARGE DATASET TO MATCH
# EXAMPLES IN
# http://www.cawcr.gov.au/projects/verification/
# Contingency table: 82 hits, 38 false alarms, 23 misses and 222 correct
# negatives (365 samples total); observations/forecasts are 1.0 for
# "event" and 0.0 for "no event".
fct_hits = 1.0*np.ones(82)
obs_hits = 1.0*np.ones(82)
fct_fa = 1.0*np.ones(38)
obs_fa = 1.0*np.zeros(38)
fct_misses = 1.0*np.zeros(23)
obs_misses = 1.0*np.ones(23)
fct_cr = 1.0*np.zeros(222)
obs_cr = 1.0*np.zeros(222)
obs_data = np.concatenate([obs_hits, obs_fa, obs_misses, obs_cr])
fct_data = np.concatenate([fct_hits, fct_fa, fct_misses, fct_cr])

# Each tuple: (forecast, observation, threshold, score name(s), expected).
test_data = [
    ([0.], [0.], 0., None, []),
    ([1., 3.], [2., 5.], 0., None, []),
    ([1., 3.], [2., 5.], 0., 'CSI', [1.0]),
    ([1., 3.], [2., 5.], 0., ('CSI', 'FAR'), [1.0, 0.0]),
    ([1., 3.], [2., 5.], 0., ('lolo',), []),
    ([1., 3.], [2., 5.], 0., ('CSI', None, 'FAR'), [1.0, 0.0]),
    ([1., 3.], [2., 5.], 1.0, ('CSI', None, 'FAR'), [0.5, 0.0]),
    ([1., 3.], [2., 5.], 1.0, ('lolo'), []),  # test unknown score
    (fct_data, obs_data, 0.0, ('ACC'), [0.83287671]),  # ACCURACY score
    (fct_data, obs_data, 0.0, ('BIAS'), [1.1428571]),  # BIAS score
    (fct_data, obs_data, 0.0, ('POD'), [0.7809524]),  # POD score
    (fct_data, obs_data, 0.0, ('FAR'), [0.316667]),  # FAR score
    # Probability of false detection (false alarm rate)
    (fct_data, obs_data, 0.0, ('FA'), [0.146154]),
    # CSI score
    (fct_data, obs_data, 0.0, ('CSI'), [0.573426]),
    # Heidke Skill Score
    (fct_data, obs_data, 0.0, ('HSS'), [0.608871]),
    # Hanssen-Kuipers Discriminant
    (fct_data, obs_data, 0.0, ('HK'), [0.6348]),
    # Gilbert Skill Score
    (fct_data, obs_data, 0.0, ('GSS'), [0.437682]),
    # Gilbert Skill Score (ETS is an alias of GSS)
    (fct_data, obs_data, 0.0, ('ETS'), [0.437682]),
    # Symmetric extremal dependence index
    (fct_data, obs_data, 0.0, ('SEDI'), [0.789308]),
]
@pytest.mark.parametrize("pred, obs, thr, scores, expected", test_data)
def test_det_cat_fcst(pred, obs, thr, scores, expected):
    """det_cat_fcst returns the expected categorical score values
    (to ~6 decimal places) for every case listed in test_data."""
    assert_array_almost_equal(det_cat_fcst(pred, obs, thr, scores), expected)
| bsd-3-clause | Python | |
42af700af58588fccaa84f5348a5c854d095d1a9 | Add ex2.2: multiple simple requests | MA3STR0/PythonAsyncWorkshop | code/ex2.2-simple_requests.py | code/ex2.2-simple_requests.py | from urllib.request import urlopen
import time
# Endpoints served by the workshop's local test server; the same URL is
# listed three times so three sequential requests are made.
URLS = [
    'http://127.0.0.1:8000',
    'http://127.0.0.1:8000',
    'http://127.0.0.1:8000',
]
def request_greetings():
    """Fetch every URL in URLS sequentially and return the response bodies
    joined with newlines.

    Intentionally synchronous: each request blocks until the previous one
    has finished, which is what this exercise measures.
    """
    responses = []
    for url in URLS:
        # BUG FIX: close the HTTP response when done (the original leaked
        # the connection); ``with`` also closes it if read()/decode() fails.
        with urlopen(url) as resp:
            responses.append(resp.read().decode('utf-8'))
    return '\n'.join(responses)
if __name__ == "__main__":
t1 = time.time()
greetings = request_greetings()
print(time.time() - t1, "seconds passed")
print(greetings)
| mit | Python | |
f2ac12e66aa2209f98e6eed2283005f2a9e74768 | Create perm_missing_elem.py | py-in-the-sky/challenges,py-in-the-sky/challenges,py-in-the-sky/challenges | codility/perm_missing_elem.py | codility/perm_missing_elem.py | """
https://codility.com/programmers/task/perm_missing_elem/
Given array A of integers. N = len(A). Integers are distinct and taken from
range 1..(N+1), which means exactly one integer from 1..(N+1) is missing from
A. Find the missing integer.
Runtime: O(N)
Extra Space: O(1)
"""
def solution(A):
    """Return the single integer from 1..(N+1) missing from A.

    A contains N distinct integers drawn from 1..(N+1).  The missing
    element is the difference between the expected sum of 1..(N+1) and
    the actual sum of A.

    Runtime: O(N)
    Extra Space: O(1)
    """
    n = len(A) + 1
    # Gauss closed form replaces the original sum(xrange(...)), which did
    # O(N) extra work and was Python-2-only (xrange is gone in Python 3).
    expected = n * (n + 1) // 2
    return expected - sum(A)
| mit | Python | |
785af4d73a158fdf43ceff8bb4c974a0215606fd | add missing test file | douban/python-libmemcached,douban/python-libmemcached | tests/test_set_long.py | tests/test_set_long.py | import cmemcached
import unittest
import subprocess
import time
TEST_SERVER = "localhost"
memcached_process = None
def setup():
    """Module-level test fixture: start a local memcached server."""
    global memcached_process
    memcached_process = subprocess.Popen(['memcached'])
    # Give the daemon a moment to start listening before tests connect.
    time.sleep(0.5)
def teardown():
    """Module-level test fixture: stop the memcached server from setup().

    NOTE(review): terminate() is not followed by wait(), so the child may
    linger as a zombie until interpreter exit — confirm this is acceptable.
    """
    memcached_process.terminate()
class TestCmemcached_for_long(unittest.TestCase):
    """Round-trip tests: ``long`` values must come back as ``long``.

    Python 2 only: uses the ``long`` type and ``L`` literals.
    """

    def setUp(self):
        # comp_threshold: values above this many bytes get compressed.
        self.mc = cmemcached.Client([TEST_SERVER], comp_threshold=1024)

    def test_set_get_long(self):
        """Small and large longs survive set/get with their type preserved."""
        self.mc.set("key_long_short", long(1L))
        v = self.mc.get("key_long_short")
        self.assertEqual(v, 1L)
        self.assertEqual(type(v), long)

        big = 1233345435353543L
        self.mc.set("key_long_big", big)
        v = self.mc.get("key_long_big")
        self.assertEqual(v, big)
        self.assertEqual(type(v), long)
| bsd-3-clause | Python | |
d59e49ddf95c20b51ac285dccb0a1b43936d97ef | Test that some credential setting properly switches endpoint schemes | typepad/python-typepad-api | tests/test_tpclient.py | tests/test_tpclient.py | # Copyright (c) 2009-2010 Six Apart Ltd.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of Six Apart Ltd. nor the names of its contributors may
# be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import unittest
from urlparse import urlsplit
from oauth.oauth import OAuthConsumer, OAuthToken
import typepad.tpclient
class TestTypePadClient(unittest.TestCase):
    """The client endpoint should switch to https only when OAuth
    credentials are registered for the api.typepad.com domain."""

    def assertScheme(self, url, *args):
        """Assert that the scheme component of ``url`` matches."""
        return self.assertEquals(urlsplit(url)[0], *args)

    def test_adjust_scheme(self):
        client = typepad.tpclient.TypePadClient()
        client.endpoint = 'http://api.typepad.com'
        client.clear_credentials()
        self.assertScheme(client.endpoint, 'http')

        # None of these credential combinations should flip the scheme:
        # basic auth (with or without the right domain), and OAuth on the
        # wrong domain or no domain at all.
        client.add_credentials('a', 'b')
        self.assertScheme(client.endpoint, 'http')
        client.add_credentials('a', 'b', domain='api.typepad.com')
        self.assertScheme(client.endpoint, 'http')
        client.add_credentials(OAuthConsumer('a', 'b'), OAuthToken('c', 'd'))
        self.assertScheme(client.endpoint, 'http')
        client.add_credentials(OAuthConsumer('a', 'b'), OAuthToken('c', 'd'), domain='api.example.com')
        self.assertScheme(client.endpoint, 'http')
        client.add_credentials(OAuthConsumer('a', 'b'), OAuthToken('c', 'd'), domain='typepad.com')
        self.assertScheme(client.endpoint, 'http')

        # OAuth credentials for exactly api.typepad.com switch to https,
        # and doing it a second time is idempotent.
        client.add_credentials(OAuthConsumer('a', 'b'), OAuthToken('c', 'd'), domain='api.typepad.com')
        self.assertScheme(client.endpoint, 'https')
        client.add_credentials(OAuthConsumer('a', 'b'), OAuthToken('c', 'd'), domain='api.typepad.com')
        self.assertScheme(client.endpoint, 'https')

        # Clearing credentials reverts to plain http.
        client.clear_credentials()
        self.assertScheme(client.endpoint, 'http')
| bsd-3-clause | Python | |
4008a230fb64d059d9449a39f32abe245692d582 | Create VirgilContext implementation | VirgilSecurity/virgil-sdk-python | virgil_sdk/api/virgil_context.py | virgil_sdk/api/virgil_context.py | # Copyright (C) 2016 Virgil Security Inc.
#
# Lead Maintainer: Virgil Security Inc. <support@virgilsecurity.com>
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# (1) Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# (2) Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# (3) Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ''AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from virgil_sdk.client import CardValidator
from virgil_sdk.client import VirgilClient
from virgil_sdk.cryptography import VirgilCrypto
from virgil_sdk.storage import DefaultKeyStorage
class VirgilContext(object):
    """
    The class manages the Virgil api dependencies during run time.
    It also contains a list of properties used to configure the high-level components.
    """
    def __init__(
            self,
            access_token=None,  # type: Optional[str]
            credentials=None,  # type: Optional[Credentials]
            card_verifiers=None,  # type: Optional[list]
            crypto=None,  # type: Optional[Crypto]
            key_storage=None,  # type: Optional[KeyStorage]
            client_params=None  # type: Optional[dict]
    ):
        # type: (...) -> None
        """Initializes a new instance of the VirgilContext class.

        Every dependency may be injected; anything left as None is
        lazily built with a default implementation by the matching
        property below.
        """
        self.access_token = access_token
        self.credentials = credentials
        self.client_params = client_params
        self._card_verifiers = card_verifiers
        self._crypto = crypto
        self._key_storage = key_storage
        self._client = None

    @property
    def card_verifiers(self):
        """Gets the list of Virgil Card verifiers (defaults to one
        CardValidator built on top of :attr:`crypto`)."""
        if not self._card_verifiers:
            self._card_verifiers = [CardValidator(self.crypto)]
        return self._card_verifiers

    @property
    def crypto(self):
        """Gets the Virgil crypto API instance (defaults to VirgilCrypto)."""
        if not self._crypto:
            self._crypto = VirgilCrypto()
        return self._crypto

    @property
    def key_storage(self):
        """Gets a cryptographic keys storage (defaults to DefaultKeyStorage)."""
        if not self._key_storage:
            self._key_storage = DefaultKeyStorage()
        return self._key_storage

    @property
    def client(self):
        """Gets a Virgil Security services client.

        NOTE(review): built without access_token/client_params -- confirm
        VirgilClient picks those up elsewhere.
        """
        if not self._client:
            self._client = VirgilClient()
        return self._client
| bsd-3-clause | Python | |
class Pque(object):
    """Singly linked priority queue.

    Priorities are intended to range from 0 (greatest) down to -99
    (least): a larger number means higher priority.  Ties are served
    first-come-first-popped (FIFO within equal priority).

    Instances double as the queue's internal nodes; each node carries
    ``value``, ``priority`` and ``next_node``.
    """

    def __init__(self):
        self.next_node = None
        self.priority = 0
        self.value = None
        self.tail = None
        self.head = None
        self.size = 0

    def insert(self, value, priority=-99):
        """Insert ``value`` with the given ``priority`` (default -99).

        The node is placed after every queued node of equal or higher
        priority, preserving FIFO order among equal priorities.
        """
        node = Pque()
        node.value = value
        node.priority = priority
        if self.head is None:
            # Empty queue: the new node is both head and tail.
            self.head = node
            self.tail = node
        elif priority > self.head.priority:
            # Strictly higher priority than everything queued: new head.
            node.next_node = self.head
            self.head = node
        else:
            # Walk to the last node whose priority is >= the new one
            # ('<=' here keeps equal priorities FIFO).
            prev = self.head
            while (prev.next_node is not None
                   and priority <= prev.next_node.priority):
                prev = prev.next_node
            node.next_node = prev.next_node
            prev.next_node = node
            if node.next_node is None:
                self.tail = node
        # Count exactly once per insert (the original double-counted
        # mid-list insertions, corrupting ``size``).
        self.size += 1

    def peek(self):
        """Return the highest-priority value without removing it."""
        if self.head is None:
            raise IndexError('que is empty')
        return self.head.value

    def pop(self):
        """Remove and return the highest-priority value."""
        if self.head is None:
            raise IndexError('que is empty')
        temp_val = self.head.value
        self.head = self.head.next_node
        if self.head is None:
            # Queue emptied: drop the stale tail reference too.
            self.tail = None
        self.size -= 1
        return temp_val
| mit | Python | |
b480a21d58c1fc1b8ad66442bd65a7d892d8efa1 | Create Blink.py | AnandVetcha/IntelEdison_Python | Blink.py | Blink.py | import time
import mraa

###### Program Begins ####

# Report the mraa library version, then blink the LED wired to GPIO
# pin 4 at 1 Hz forever (stop with Ctrl-C).
print(mraa.getVersion())

Led = mraa.Gpio(4)
Led.dir(mraa.DIR_OUT)  # configure the pin as an output
Led.write(1)

value = 0
while 1:
    # toggle between 0 and 1 on every pass
    if value == 0:
        value = 1
    else:
        value = 0
    Led.write(value)
    print("LED " + str(value))  # fixed: was a Python-2-only print statement
    time.sleep(1)
| mit | Python | |
36af8c98005bfb6d51344b80a59cb6e48c8b55fb | Add outputter to display overstate stages | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/output/overstatestage.py | salt/output/overstatestage.py | '''
Display clean output of an overstate stage
'''
#[{'group2': {'match': ['fedora17-2', 'fedora17-3'],
# 'require': ['group1'],
# 'sls': ['nginx', 'edit']}
# }
# ]
# Import Salt libs
import salt.utils
def output(data):
    '''
    Format the data for printing stage information from the overstate system
    '''
    # __opts__ is injected by Salt's loader; 'color' toggles ANSI colors.
    colors = salt.utils.get_colors(__opts__.get('color'))
    ostr = ''
    # data is a list of single-entry dicts: [{stage_name: stage_opts}, ...]
    for comp in data:
        for name, stage in comp.items():
            ostr += '{0}{1}:{2}\n'.format(colors['LIGHT_BLUE'], name, colors['ENDC'])
            # stage keys (match/require/sls/...) printed in sorted order
            for key in sorted(stage):
                ostr += '    {0}{1}:{2}{3}\n'.format(
                        colors['LIGHT_BLUE'],
                        key,
                        stage[key],
                        colors['ENDC'])
    return ostr
| apache-2.0 | Python | |
c278a1659e488df5917d77dd7e002d065e1c7586 | test db mysql | Fy-Network/fysql | tests/test_db_mysql.py | tests/test_db_mysql.py | # -*- coding: utf-8 -*-
import unittest
from fysql import *
database = MySQLDatabase('fysql', host='localhost', user='fysql', passwd='dev')
class DevTables(Table):
    # Shared base: every test table uses the module-level MySQL connection.
    db = database


class User(DevTables):
    firstname = CharColumn(max_length=150)
    lastname = CharColumn(max_length=150)
    role = CharColumn(index=True, unique=True)


class Post(DevTables):
    title = CharColumn(default='Post title', max_length=255)
    # Foreign key to User, exposed on fetched rows as post.user.
    id_user = FKeyColumn(table=User, reference='user')
class TestMySQL(unittest.TestCase):
    # Integration tests against a live local MySQL server (db 'fysql',
    # user 'fysql').  Methods are numbered because they must run in this
    # exact order: each test relies on rows left behind by the previous.

    def test1_create_drop(self):
        # NOTE(review): despite the name, nothing is dropped here.
        User.create_table()
        Post.create_table()
        d = str(database.get_tables())
        r = '[u\'post\', u\'user\']'
        self.assertEqual(d, r)

    def test2_create_all(self):
        # create_all() must be idempotent: same table list afterwards.
        database.create_all()
        d = str(database.get_tables())
        r = '[u\'post\', u\'user\']'
        self.assertEqual(d, r)

    def test3_insert_select(self):
        # Inserts two users and three posts, then checks the JSON-ish
        # repr of both result sets (posts embed their joined user row).
        user1 = User.create(firstname='Florian', lastname='Gasquez', role='Admin')
        user2 = User.create(firstname='Jean', lastname='Bon', role='Noob')
        Post.create(title='Mon super post 1', id_user=user1.id)
        Post.create(title='Mon super post 2', id_user=user2.id)
        Post.create(title='Mon super post 3', id_user=user1.id)
        d = str(User.select().result) + str (Post.select().result)
        r = '[{"id": 1, "lastname": "Gasquez", "role": "Admin", "firstname": "Florian"}, {"id": 2, "lastname": "Bon", "role": "Noob", "firstname": "Jean"}][{"id": 1, "id_user": 1, "user": {"id": 1, "lastname": "Gasquez", "role": "Admin", "firstname": "Florian"}, "title": "Mon super post 1"}, {"id": 2, "id_user": 2, "user": {"id": 2, "lastname": "Bon", "role": "Noob", "firstname": "Jean"}, "title": "Mon super post 2"}, {"id": 3, "id_user": 1, "user": {"id": 1, "lastname": "Gasquez", "role": "Admin", "firstname": "Florian"}, "title": "Mon super post 3"}]'
        self.assertEqual(d, r)

    def test4_get_update(self):
        # Mutating a fetched row and its joined user, then save() must
        # persist both and be reflected in repr().
        post = Post.get(Post.id==3)
        post.title = 'Mon giga post 3'
        post.user.lastname = 'Haha!'
        post.save()
        d = repr(post)
        r = '{"id": 3, "id_user": 1, "user": {"id": 1, "lastname": "Haha!", "role": "Admin", "firstname": "Florian"}, "title": "Mon giga post 3"}'
        self.assertEqual(d, r)

    def test5_count_all(self):
        d = Post.count_all()
        r = 3
        self.assertEqual(d, r)

    def test6_count_where(self):
        # Two of the three posts belong to user 1.
        d = Post.count().where(User.id==1).result
        r = 2
        self.assertEqual(d, r)

    def test7_remove(self):
        # get() on a removed row returns False rather than raising.
        post = Post.get(Post.id==3)
        post.remove()
        d = Post.get(Post.id==3)
        r = False
        self.assertEqual(d, r)
| mit | Python | |
1085114668dc13d86dac8de70557cd4242ab9d20 | Add tests for parse_csv, weighted_mode and weighted_replicate (#210) | Chipe1/aima-python,AWPorter/aima-python,JoeLaMartina/AlphametricProject,abbeymiles/aima-python,Fruit-Snacks/aima-python,grantvk/aima-python,willhess/aima-python,JoeLaMartina/AlphametricProject,willhess/aima-python,JamesDickenson/aima-python,jo-tez/aima-python,JamesDickenson/aima-python,chandlercr/aima-python,WhittKinley/ConnectProject,austinban/aima-python,AWPorter/aima-python,JoeLaMartina/aima-python,AmberJBlue/aima-python,SnShine/aima-python,armadill-odyssey/aima-python,NolanBecker/aima-python,WmHHooper/aima-python,SimeonFritz/aima-python,AmberJBlue/aima-python,WmHHooper/aima-python,Fruit-Snacks/aima-python,SeanCameronConklin/aima-python,WhittKinley/ConnectProject,jottenlips/aima-python,reachtarunhere/aima-python,phaller0513/aima-python,jo-tez/aima-python,aimacode/aima-python,jottenlips/aima-python,phaller0513/aima-python,zayneanderson/aima-python,austinban/aima-python,SeanCameronConklin/aima-python,armadill-odyssey/aima-python,JoeLaMartina/aima-python,WmHHooper/aima-python,SimeonFritz/aima-python,willhess/aima-python,Chipe1/aima-python,sofmonk/aima-python,abbeymiles/aima-python,JamesDickenson/aima-python,phaller0513/aima-python,chandlercr/aima-python,JoeLaMartina/aima-python,reachtarunhere/aima-python,WmHHooper/aima-python,chandlercr/aima-python,NolanBecker/aima-python,NolanBecker/aima-python,grantvk/aima-python,AWPorter/aima-python,aimacode/aima-python,abbeymiles/aima-python,WhittKinley/aima-python,JoeLaMartina/AlphametricProject,WhittKinley/ConnectProject,zayneanderson/aima-python,armadill-odyssey/aima-python,WhittKinley/aima-python,austinban/aima-python,AmberJBlue/aima-python,SnShine/aima-python,Fruit-Snacks/aima-python,SeanCameronConklin/aima-python,grantvk/aima-python,jottenlips/aima-python,SimeonFritz/aima-python,WhittKinley/aima-python,sofmonk/aima-python,zayneanderson/aima-python | tests/test_learning.py | tests/test_learning.py | import 
pytest
from learning import parse_csv, weighted_mode, weighted_replicate
def test_parse_csv():
    # Numeric fields are converted; non-numeric ones ('na') stay strings.
    assert parse_csv('1, 2, 3 \n 0, 2, na') == [[1, 2, 3], [0, 2, 'na']]


def test_weighted_mode():
    # 'b' wins: its weights sum to 2+3=5 vs 'a' at 1+1+2=4.
    assert weighted_mode('abbaa', [1,2,3,1,2]) == 'b'


def test_weighted_replicate():
    # Weights 1:2:1 scaled to 4 draws -> A once, B twice, C once.
    assert weighted_replicate('ABC', [1,2,1], 4) == ['A', 'B', 'B', 'C']
| mit | Python | |
8d88336d831eeb5e6603b9ff66f2e4906c4f9e2e | Create test.py | YigengZhang/DL4SRL | test.py | test.py | print "Hello world!"
| apache-2.0 | Python | |
a72a80ce70b8c08ef5e33c41ee467f6ad7157f71 | test contains scripts for making and trying out new colormaps | kthyng/cmocean,matplotlib/cmocean | test.py | test.py | '''
Making and testing colormaps.
'''
import numpy as np
from matplotlib import cm, colors
import matplotlib.pyplot as plt
import pdb
import matplotlib as mpl
from skimage import color
def cmap(rgbin, N=10):
    '''
    Input an array of rgb values to generate a colormap.

    Inputs:
     rgbin  [mx3] array of color triplets which are interpolated between
            to make the colormap that is returned. Hex values can be
            input instead, as [mx1] in single quotes with a #.
     N      number of levels to be interpolated to.
            NOTE(review): N is currently unused -- from_list() falls back
            to its own default level count; confirm before relying on it.
    '''

    # rgb inputs here
    if not mpl.cbook.is_string_like(rgbin[0]):
        # normalize to be out of 1 if out of 256 instead
        if rgbin.max() > 1:
            rgbin = rgbin/256.

    cmap = mpl.colors.LinearSegmentedColormap.from_list('mycmap', rgbin)

    return cmap
def test(cmap):
    '''
    Test colormap by plotting.

    Input a colormap instance. Use a named one with cm.get_cmap(colormap)
    '''

    # indices to step through colormap
    x = np.linspace(0.0, 1.0, 100)

    # will plot colormap and lightness
    rgb = cmap(x)[np.newaxis,:,:3]
    # rgb = cm.get_cmap(cmap)(x)[np.newaxis,:,:3]
    # CIELab: channel 0 is perceptual lightness (L*)
    lab = color.rgb2lab(rgb)

    fig = plt.figure()
    ax = fig.add_subplot(111)
    # each sample colored by its colormap value; y axis is its lightness
    ax.scatter(x, lab[0,:,0], c=x, cmap=cmap, s=300, linewidths=0.)
def read(varin):
    '''
    Read in dataset for variable var
    '''

    # Column order of MS2_L10.mat.txt; '*' lines are comments.
    columns = ['lat', 'lon', 'depth', 'temp', 'density', 'sigma', 'oxygen',
               'voltage 2', 'voltage 3', 'fluorescence-CDOM',
               'fluorescence-ECO', 'turbidity', 'pressure', 'salinity',
               'RINKO temperature', 'RINKO DO - CTD temp',
               'RINKO DO - RINKO temp', 'bottom', 'PAR']

    data = np.loadtxt('MS2_L10.mat.txt', comments='*')

    # lat, lon, depth plus the requested variable's column
    idx = columns.index(varin)
    return data[:, 0], data[:, 1], data[:, 2], data[:, idx]
def show(cmap, var, vmin=None, vmax=None):
    '''
    Show a colormap for a chosen input variable var side by side with
    black and white and jet colormaps.

    Inputs:
     cmap        colormap instance
     var         variable to plot (a column name understood by read())
     vmin, vmax  optional color limits shared by all three panels
    '''

    # get variable data
    lat, lon, z, data = read(var)

    fig = plt.figure(figsize=(16,12))

    # Plot with grayscale
    ax = fig.add_subplot(3,1,1)
    map1 = ax.scatter(lon, -z, c=data, cmap='gray', s=10, linewidths=0., vmin=vmin, vmax=vmax)
    plt.colorbar(map1, ax=ax)

    # Plot with jet
    ax = fig.add_subplot(3,1,2)
    map1 = ax.scatter(lon, -z, c=data, cmap='jet', s=10, linewidths=0., vmin=vmin, vmax=vmax)
    plt.colorbar(map1, ax=ax)

    # Plot with cmap (the candidate colormap under evaluation)
    ax = fig.add_subplot(3,1,3)
    map1 = ax.scatter(lon, -z, c=data, cmap=cmap, s=10, linewidths=0., vmin=vmin, vmax=vmax)
    ax.set_xlabel('Longitude [degrees]')
    ax.set_ylabel('Depth [m]')
    plt.colorbar(map1, ax=ax)

    plt.suptitle(var)
| mit | Python | |
6aec92990790dd5ba04ca1079dbc5fe9106f8747 | Add test suite | Maome/pygooglesms | test.py | test.py | import sys
import unittest
import pygooglesms
from pygooglesms import GoogleSMS
class TestPyGoogleSMS(unittest.TestCase):
    """Integration tests for pygooglesms.GoogleSMS.

    Requires live Google credentials: fill in the CHANGEME constants
    before running.  Negative tests now fail loudly when the expected
    exception is NOT raised (originally they passed silently).
    """

    GOOD_LOGIN = 'CHANGEME'
    GOOD_PASSWD = 'CHANGEME'
    TEST_NUMBER = 'CHANGEME'

    BAD_LOGIN = 'nobody@gmail.com'
    BAD_PASSWD = 'terrible'

    BAD_AUTH_MSG = 'No auth token provided by server (Bad account?)'
    NOT_LOGGED_IN_MSG = 'Not logged in'

    def test_good_login(self):
        """Valid credentials must authenticate without raising."""
        GoogleSMS(self.GOOD_LOGIN, self.GOOD_PASSWD)

    def test_bad_login(self):
        """Bogus account: expect GoogleAuthError with the bad-account message."""
        try:
            GoogleSMS(self.BAD_LOGIN, self.BAD_PASSWD)
        except pygooglesms.GoogleAuthError as error:
            if error.msg == self.BAD_AUTH_MSG:
                return
            raise  # unexpected auth error: surface it
        # Fix: the original returned (i.e. passed) when nothing was raised.
        self.fail('expected GoogleAuthError for bad credentials')

    def test_bad_login_good_user(self):
        """Real account, wrong password: same GoogleAuthError expected."""
        try:
            GoogleSMS(self.GOOD_LOGIN, self.BAD_PASSWD)
        except pygooglesms.GoogleAuthError as error:
            if error.msg == self.BAD_AUTH_MSG:
                return
            raise
        self.fail('expected GoogleAuthError for wrong password')

    def test_sms_with_bad_login(self):
        """Sending after a failed re-login must raise 'Not logged in'."""
        sms = GoogleSMS(self.GOOD_LOGIN, self.GOOD_PASSWD)
        try:
            sms.login(self.BAD_LOGIN, self.BAD_PASSWD)
        except Exception:
            pass  # the re-login is expected to fail; state is what we test
        try:
            sms.send(self.TEST_NUMBER, 'test_message')
        except pygooglesms.GoogleVoiceError as error:
            if error.msg == self.NOT_LOGGED_IN_MSG:
                return
            raise
        self.fail('expected GoogleVoiceError when not logged in')

    def test_sms_with_good_login(self):
        """Happy path: send must not raise."""
        sms = GoogleSMS(self.GOOD_LOGIN, self.GOOD_PASSWD)
        sms.send(self.TEST_NUMBER, 'test_message')
        # some way to validate this? send message to self?
if __name__ == "__main__":
    # Run the suite and exit non-zero on any failure so CI can detect it.
    suite = unittest.TestLoader().loadTestsFromTestCase(TestPyGoogleSMS)
    result = unittest.TextTestRunner(verbosity=2).run(suite)
    error = len(result.errors) + len(result.failures)
    sys.exit(error)
| mit | Python | |
197685058df0fdee0cefa0e59f0faa038d809730 | Rename and use Maze base class | tijko/PyMaze | Prims.py | Prims.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
import random
from maze import *
from itertools import product
from collections import defaultdict
os.environ['SDL_VIDEO_WINDOW_POS'] = '600, 30'
class Prims(Maze):
    """Randomized-Prim's maze generator.

    Drawing and geometry helpers (screen, wall, mv_chk, lt_chk,
    run_maze_loop) presumably come from the Maze base in maze.py --
    confirm there.  The board is a 720x720 pygame surface on a 20px grid.
    """

    def __init__(self):
        super(Prims, self).__init__()
        self.maze = list()
        self.explored = set()
        # frontier maps weight -> [candidate_cell, cell_it_was_reached_from]
        self.frontier = defaultdict(list)
        # Random weights 0..1295 assigned to every 20px grid cell; the
        # weight ordering is what randomizes Prim's frontier pops.
        maze_coords = list(range(1296))
        random.shuffle(maze_coords)
        self.matrix = dict(zip(maze_coords, product(range(0, 720, 20),
                                                    range(0, 720, 20))))
        self.maze_structure = self.gen_maze()
        # Guarantee an opening near the top-right corner of the maze.
        if (not ((700, 40) in self.maze_structure) and
                not ((680, 20) in self.maze_structure)):
            if ((660, 20) in self.maze_structure):
                self.maze_structure.append((700, 40,))
            else:
                self.maze_structure.append((680, 20,))
        self.maze_structure.extend([(700, 20,), (720, 20,), (20, 700,)])

    @property
    def get_first_wall(self):
        # NOTE: property with side effects -- seeds self.pos/self.prev,
        # the frontier and the first drawn wall; accessed for effect only.
        #XXX: works because the keys match up with the length but
        # wouldn't if keys = 'str' (better random.choice just keys)
        self.pos = random.choice(self.matrix)
        for weight in self.matrix:
            if self.matrix[weight] in self.all_mvs:
                self.frontier[weight].extend([self.matrix[weight], self.pos])
        self.explored.update(tuple(self.pos))
        self.screen.blit(self.wall, self.pos)
        self.prev = self.pos

    @property
    def all_mvs(self):
        # Orthogonal 20px neighbours of self.pos that stay inside the border.
        directions = {1:[0, -20], 2:[0, 20], 3:[-20, 0], 4:[20, 0]}
        moves = [v for v in [tuple(map(self.mv_chk, self.pos, directions[i]))
                 for i in directions] if all(j>0 and j<700 for j in v)]
        return moves

    @property
    def vec_chk(self):
        # Cells around self.pos -- chosen relative to the direction of
        # travel from self.prev -- that must stay empty so a new wall
        # never merges into an existing wall run.
        diff = tuple(map(self.lt_chk, self.pos, self.prev))
        nodes = {(0, 20,):[[-20, 0], [20, 0], [20, 20], [-20, 20], [0, 20]],
                 (0, -20,):[[-20, 0], [20, 0], [-20, -20], [20, -20], [0, -20]],
                 (20, 0,):[[20, 0], [20, -20], [20, 20], [0, -20], [0, 20]],
                 (-20, 0,):[[-20, 0], [-20, -20], [-20, 20], [0, -20], [0, 20]]
                }
        moves = [v for v in [tuple(map(self.mv_chk, self.pos, i))
                 for i in nodes[diff]]]
        return moves

    def gen_maze(self):
        """Carve walls by repeatedly popping the lowest-weight frontier cell."""
        self.get_first_wall
        while self.frontier:
            self.explored.update([tuple(self.frontier[k][0])
                                  for k in self.frontier])
            lowest_weight = min(self.frontier)
            self.pos = self.frontier[lowest_weight][0]
            self.prev = self.frontier[lowest_weight][1]
            del self.frontier[lowest_weight]
            # Only cells inside the border and not already part of the maze.
            if (all(i > 0 and i < 700 for i in self.pos) and
                    self.pos not in self.maze):
                # Reject the cell if placing it would touch existing walls.
                if not any(i for i in self.vec_chk if i in self.maze):
                    self.screen.blit(self.wall, self.pos)
                    pygame.display.flip()
                    time.sleep(0.05)
                    # Push the new cell's unexplored neighbours on the frontier.
                    nxt_mvs = self.all_mvs
                    for cell in self.matrix:
                        if (self.matrix[cell] in nxt_mvs and
                                tuple(self.matrix[cell]) not in self.explored):
                            self.frontier[cell].extend([self.matrix[cell],
                                                        self.pos])
                    self.maze.append(self.pos)
        return self.maze


if __name__ == '__main__':
    prims = Prims()
    prims.run_maze_loop(prims.maze_structure)
| mit | Python | |
b7ff7ee179f7d973051ca7f70a04f27322c07cf2 | Create Redis.py | wolfdale/Redis | Redis.py | Redis.py | #Intro to Redis (NoSql)
import redis
| mit | Python | |
7d8ad6124cd838f3b2507e43c2c89e8a4873465e | fix case with no provider | simonvh/genomepy | test.py | test.py | from genomepy.provider import ProviderBase
from genomepy import Genome
import sys

# Ad-hoc check: does Ensembl's first search hit for this genome's
# taxonomy id match the locally installed assembly's accession?
genome_name = "ce10"
genome = Genome(genome_name)
tax_id = genome.tax_id

#p = ProviderBase.create("UCSC")
#print(p.assembly_accession("ci3"))
#sys.exit()

p = ProviderBase.create("Ensembl")
# search() yields rows of (name, accession, ...); take the first hit only
name, accession, *rest = [row for row in p.search(tax_id)][0]
print(name, tax_id)
if accession == genome.assembly_accession:
    print(f"Ensembl {name} matches {genome_name} by accession")
else:
    print(f"Could not find a matching genome in Ensembl")
| mit | Python | |
eabe6103860fd7b04e52f2e5181affbb55e93273 | add wsgi script | Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok,Cal-CS-61A-Staff/ok | wsgi.py | wsgi.py | #!/usr/bin/env python3
# To run:
# gunicorn -b 0.0.0.0:5000 wsgi:app
import os
# NOTE(review): generate, db and User look unused here -- possibly
# imported for their side effects or for gunicorn shell access; confirm
# before removing.
from server import create_app, generate
from server.models import db, User

# OK_ENV selects the settings module to load (defaults to 'dev').
env = os.environ.get('OK_ENV', 'dev')
app = create_app('settings/%s.py' % env)
| apache-2.0 | Python | |
89c4f92d56ab445d86dfdd321bca6c7e0f30855e | Create admin.py | yuhengqiu/mezzanine-weibopub | weibopub/admin.py | weibopub/admin.py | from __future__ import unicode_literals
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import truncatechars
from mezzanine.weibopub import get_auth_settings
FORMFIELD_HTML = """
<div class='send_weibo_container'>
<input id='id_send_weibo' name='send_weibo' type='checkbox'>
<label class='vCheckboxLabel' for='id_send_weibo'>%s</label>
</div>
"""
class WeiboAdminMixin(object):
    """
    Admin mixin that adds a "Send to Weibo" checkbox to the add/change
    views, which when checked, will send a weibo with the title、pic and link
    to the object being saved.
    """

    def formfield_for_dbfield(self, db_field, **kwargs):
        """
        Adds the "Send to Weibo" checkbox after the "status" field,
        provided by any ``Displayable`` models. The approach here is
        quite a hack, however the sane approach of using a custom
        form with a boolean field defined, and then adding it to the
        formssets attribute of the admin class fell apart quite
        horrifically.
        """
        formfield = super(WeiboAdminMixin,
                          self).formfield_for_dbfield(db_field, **kwargs)
        # Only inject the checkbox when Weibo OAuth settings are configured.
        if db_field.name == "status" and get_auth_settings():
            def wrapper(render):
                # Wrap the widget's render() so the checkbox HTML is
                # appended right after the status field's own markup.
                def wrapped(*args, **kwargs):
                    rendered = render(*args, **kwargs)
                    label = _("Pub to Weibo")
                    return mark_safe(rendered + FORMFIELD_HTML % label)
                return wrapped
            formfield.widget.render = wrapper(formfield.widget.render)
        return formfield

    def save_model(self, request, obj, form, change):
        """
        Sends a weibo with the title/pic/short_url if applicable.
        """
        super(WeiboAdminMixin, self).save_model(request, obj, form, change)
        if request.POST.get("send_weibo", False):
            auth_settings = get_auth_settings()
            obj.set_short_url()
            # Leave room for the short URL and a separator within 140 chars.
            message = truncatechars(obj, 140 - len(obj.short_url) - 1)
            # NOTE(review): `Api` is not imported anywhere in this module --
            # this line would raise NameError at runtime; confirm the
            # intended Weibo client import.
            api = Api(*auth_settings)
            # NOTE(review): hard-coded local picture path -- clearly a
            # development placeholder; must be replaced before shipping.
            api.update.post(u'%s。[阅读全文:%s]'%(message,obj.short_url),pic=open('/Users/test.png'))
| mit | Python | |
626662f0f3ef2ce7de63c424da89263443243e97 | Fix SpiderState bug in Windows platforms | yarikoptic/scrapy,godfreyy/scrapy,coderabhishek/scrapy,coderabhishek/scrapy,yidongliu/scrapy,yusofm/scrapy,hectoruelo/scrapy,csalazar/scrapy,Timeship/scrapy,wangjun/scrapy,Zephor5/scrapy,profjrr/scrapy,Geeglee/scrapy,olafdietsche/scrapy,Geeglee/scrapy,beni55/scrapy,kimimj/scrapy,mlyundin/scrapy,amboxer21/scrapy,ramiro/scrapy,lacrazyboy/scrapy,agusc/scrapy,Allianzcortex/scrapy,yarikoptic/scrapy,yidongliu/scrapy,legendtkl/scrapy,famorted/scrapy,arush0311/scrapy,olorz/scrapy,WilliamKinaan/scrapy,agusc/scrapy,fafaman/scrapy,IvanGavran/scrapy,moraesnicol/scrapy,elacuesta/scrapy,1yvT0s/scrapy,csalazar/scrapy,TarasRudnyk/scrapy,KublaikhanGeek/scrapy,finfish/scrapy,stenskjaer/scrapy,godfreyy/scrapy,shaform/scrapy,CENDARI/scrapy,dgillis/scrapy,hwsyy/scrapy,pablohoffman/scrapy,Partoo/scrapy,rahul-c1/scrapy,Zephor5/scrapy,dacjames/scrapy,CENDARI/scrapy,johnardavies/scrapy,curita/scrapy,tntC4stl3/scrapy,gbirke/scrapy,foromer4/scrapy,profjrr/scrapy,eLRuLL/scrapy,umrashrf/scrapy,redapple/scrapy,moraesnicol/scrapy,carlosp420/scrapy,dacjames/scrapy,Slater-Victoroff/scrapy,arush0311/scrapy,songfj/scrapy,cyberplant/scrapy,heamon7/scrapy,KublaikhanGeek/scrapy,tntC4stl3/scrapy,sigma-random/scrapy,tliber/scrapy,hwsyy/scrapy,wzyuliyang/scrapy,barraponto/scrapy,WilliamKinaan/scrapy,jiezhu2007/scrapy,agusc/scrapy,emschorsch/scrapy,chekunkov/scrapy,gnemoug/scrapy,nguyenhongson03/scrapy,xiao26/scrapy,dangra/scrapy,zackslash/scrapy,kalessin/scrapy,arush0311/scrapy,darkrho/scrapy-scrapy,rolando-contrib/scrapy,moraesnicol/scrapy,nowopen/scrapy,snowdream1314/scrapy,fpy171/scrapy,pawelmhm/scrapy,elijah513/scrapy,hansenDise/scrapy,nguyenhongson03/scrapy,tagatac/scrapy,huoxudong125/scrapy,agreen/scrapy,Slater-Victoroff/scrapy,Adai0808/scrapy-1,eliasdorneles/scrapy,dangra/scrapy,ENjOyAbLE1991/scrapy,Djlavoy/scrapy,zorojean/scrapy,fqul/scrapy,fontenele/scrapy,aivarsk/scrapy,avtoritet/scrapy,joshlk/scrapy,yusofm/scra
py,Slater-Victoroff/scrapy,joshlk/scrapy,mgedmin/scrapy,WilliamKinaan/scrapy,zjuwangg/scrapy,legendtkl/scrapy,zhangtao11/scrapy,wenyu1001/scrapy,ssh-odoo/scrapy,finfish/scrapy,wenyu1001/scrapy,pablohoffman/scrapy,dhenyjarasandy/scrapy,rdowinton/scrapy,ENjOyAbLE1991/scrapy,dhenyjarasandy/scrapy,dracony/scrapy,pablohoffman/scrapy,Allianzcortex/scrapy,z-fork/scrapy,Djlavoy/scrapy,cyrixhero/scrapy,devGregA/scrapy,bmess/scrapy,smaty1/scrapy,xiao26/scrapy,beni55/scrapy,haiiiiiyun/scrapy,scrapy/scrapy,nowopen/scrapy,cursesun/scrapy,Timeship/scrapy,codebhendi/scrapy,pfctdayelise/scrapy,wangjun/scrapy,jiezhu2007/scrapy,finfish/scrapy,zjuwangg/scrapy,elijah513/scrapy,elijah513/scrapy,kazitanvirahsan/scrapy,taito/scrapy,rahulsharma1991/scrapy,devGregA/scrapy,rklabs/scrapy,nikgr95/scrapy,pfctdayelise/scrapy,pombredanne/scrapy,nfunato/scrapy,Parlin-Galanodel/scrapy,ssh-odoo/scrapy,fpy171/scrapy,kimimj/scrapy,snowdream1314/scrapy,songfj/scrapy,legendtkl/scrapy,scorphus/scrapy,GregoryVigoTorres/scrapy,dracony/scrapy,tntC4stl3/scrapy,nfunato/scrapy,cleydson/scrapy,kashyap32/scrapy,scrapy/scrapy,yarikoptic/scrapy,stenskjaer/scrapy,Adai0808/scrapy-1,ramiro/scrapy,zhangtao11/scrapy,olorz/scrapy,Zephor5/scrapy,starrify/scrapy,godfreyy/scrapy,w495/scrapy,cleydson/scrapy,rootAvish/scrapy,umrashrf/scrapy,Allianzcortex/scrapy,dgillis/scrapy,nikgr95/scrapy,nfunato/scrapy,jdemaeyer/scrapy,raphaelfruneaux/scrapy,livepy/scrapy,cyrixhero/scrapy,sardok/scrapy,foromer4/scrapy,Ryezhang/scrapy,jc0n/scrapy,Parlin-Galanodel/scrapy,beni55/scrapy,w495/scrapy,KublaikhanGeek/scrapy,Cnfc19932/scrapy,tagatac/scrapy,tliber/scrapy,raphaelfruneaux/scrapy,Digenis/scrapy,jeffreyjinfeng/scrapy,1yvT0s/scrapy,shaform/scrapy,hbwzhsh/scrapy,ENjOyAbLE1991/scrapy,agreen/scrapy,wujuguang/scrapy,URXtech/scrapy,redapple/scrapy,rolando/scrapy,crasker/scrapy,Adai0808/scrapy-1,wzyuliyang/scrapy,liyy7/scrapy,foromer4/scrapy,nikgr95/scrapy,famorted/scrapy,URXtech/scrapy,liyy7/scrapy,wangjun/scrapy,rahulsharma1991/scrapy,Lucif
er-Kim/scrapy,livepy/scrapy,hbwzhsh/scrapy,rootAvish/scrapy,hectoruelo/scrapy,irwinlove/scrapy,1yvT0s/scrapy,codebhendi/scrapy,Bourneer/scrapy,Timeship/scrapy,elacuesta/scrapy,starrify/scrapy,z-fork/scrapy,lacrazyboy/scrapy,bmess/scrapy,scorphus/scrapy,CodeJuan/scrapy,farhan0581/scrapy,YeelerG/scrapy,nguyenhongson03/scrapy,Preetwinder/scrapy,huoxudong125/scrapy,haiiiiiyun/scrapy,webmakin/scrapy,ArturGaspar/scrapy,CodeJuan/scrapy,pombredanne/scrapy,cursesun/scrapy,chekunkov/scrapy,cursesun/scrapy,farhan0581/scrapy,curita/scrapy,TarasRudnyk/scrapy,Preetwinder/scrapy,agreen/scrapy,zackslash/scrapy,barraponto/scrapy,emschorsch/scrapy,wenyu1001/scrapy,rahul-c1/scrapy,redapple/scrapy,fqul/scrapy,jc0n/scrapy,dracony/scrapy,eliasdorneles/scrapy,Djlavoy/scrapy,dhenyjarasandy/scrapy,scrapy/scrapy,bmess/scrapy,fontenele/scrapy,rdowinton/scrapy,URXtech/scrapy,CENDARI/scrapy,kmike/scrapy,Chenmxs/scrapy,OpenWhere/scrapy,smaty1/scrapy,farhan0581/scrapy,taito/scrapy,barraponto/scrapy,jiezhu2007/scrapy,Digenis/scrapy,heamon7/scrapy,kmike/scrapy,jorik041/scrapy,hansenDise/scrapy,github-account-because-they-want-it/scrapy,amboxer21/scrapy,AaronTao1990/scrapy,ssteo/scrapy,johnardavies/scrapy,tagatac/scrapy,zorojean/scrapy,olafdietsche/scrapy,ramiro/scrapy,rolando/scrapy,OpenWhere/scrapy,hyrole/scrapy,yidongliu/scrapy,curita/scrapy,rolando-contrib/scrapy,avtoritet/scrapy,nowopen/scrapy,z-fork/scrapy,w495/scrapy,amboxer21/scrapy,Digenis/scrapy,scorphus/scrapy,rolando/scrapy,pranjalpatil/scrapy,darkrho/scrapy-scrapy,hwsyy/scrapy,rklabs/scrapy,fafaman/scrapy,aivarsk/scrapy,olorz/scrapy,carlosp420/scrapy,GregoryVigoTorres/scrapy,eLRuLL/scrapy,AaronTao1990/scrapy,jorik041/scrapy,ssteo/scrapy,eliasdorneles/scrapy,emschorsch/scrapy,irwinlove/scrapy,huoxudong125/scrapy,Partoo/scrapy,IvanGavran/scrapy,ArturGaspar/scrapy,crasker/scrapy,rahul-c1/scrapy,carlosp420/scrapy,famorted/scrapy,chekunkov/scrapy,rolando-contrib/scrapy,jorik041/scrapy,gnemoug/scrapy,Chenmxs/scrapy,Cnfc19932/scrapy,ssh-odoo/s
crapy,kimimj/scrapy,webmakin/scrapy,cyberplant/scrapy,elacuesta/scrapy,darkrho/scrapy-scrapy,avtoritet/scrapy,jdemaeyer/scrapy,joshlk/scrapy,rahulsharma1991/scrapy,devGregA/scrapy,heamon7/scrapy,kalessin/scrapy,OpenWhere/scrapy,smaty1/scrapy,zjuwangg/scrapy,sardok/scrapy,gbirke/scrapy,dangra/scrapy,irwinlove/scrapy,jdemaeyer/scrapy,YeelerG/scrapy,nett55/scrapy,Lucifer-Kim/scrapy,jamesblunt/scrapy,jc0n/scrapy,cyrixhero/scrapy,cyberplant/scrapy,jeffreyjinfeng/scrapy,stenskjaer/scrapy,hansenDise/scrapy,fafaman/scrapy,tliber/scrapy,Parlin-Galanodel/scrapy,nett55/scrapy,github-account-because-they-want-it/scrapy,hyrole/scrapy,hectoruelo/scrapy,zorojean/scrapy,xiao26/scrapy,kazitanvirahsan/scrapy,Bourneer/scrapy,Lucifer-Kim/scrapy,csalazar/scrapy,JacobStevenR/scrapy,rklabs/scrapy,pawelmhm/scrapy,ashishnerkar1/scrapy,Bourneer/scrapy,sigma-random/scrapy,webmakin/scrapy,dacjames/scrapy,umrashrf/scrapy,hbwzhsh/scrapy,jeffreyjinfeng/scrapy,IvanGavran/scrapy,nett55/scrapy,kmike/scrapy,CodeJuan/scrapy,ylcolala/scrapy,raphaelfruneaux/scrapy,pranjalpatil/scrapy,aivarsk/scrapy,hyrole/scrapy,Geeglee/scrapy,crasker/scrapy,Ryezhang/scrapy,songfj/scrapy,YeelerG/scrapy,Partoo/scrapy,ssteo/scrapy,rootAvish/scrapy,mlyundin/scrapy,wujuguang/scrapy,kazitanvirahsan/scrapy,mgedmin/scrapy,livepy/scrapy,kashyap32/scrapy,fontenele/scrapy,github-account-because-they-want-it/scrapy,profjrr/scrapy,fqul/scrapy,starrify/scrapy,snowdream1314/scrapy,TarasRudnyk/scrapy,dgillis/scrapy,zhangtao11/scrapy,coderabhishek/scrapy,pombredanne/scrapy,mlyundin/scrapy,johnardavies/scrapy,liyy7/scrapy,ArturGaspar/scrapy,yusofm/scrapy,wzyuliyang/scrapy,ylcolala/scrapy,shaform/scrapy,codebhendi/scrapy,cleydson/scrapy,pawelmhm/scrapy,rdowinton/scrapy,AaronTao1990/scrapy,lacrazyboy/scrapy,ashishnerkar1/scrapy,taito/scrapy,Cnfc19932/scrapy,Ryezhang/scrapy,kalessin/scrapy,ylcolala/scrapy,haiiiiiyun/scrapy,kashyap32/scrapy,jamesblunt/scrapy,olafdietsche/scrapy,pranjalpatil/scrapy,wujuguang/scrapy,fpy171/scrapy,GregoryVigoT
orres/scrapy,mgedmin/scrapy,eLRuLL/scrapy,zackslash/scrapy,JacobStevenR/scrapy,JacobStevenR/scrapy,Chenmxs/scrapy,Preetwinder/scrapy,pfctdayelise/scrapy | scrapy/contrib/spiderstate.py | scrapy/contrib/spiderstate.py | import os, cPickle as pickle
from scrapy import signals
class SpiderState(object):
    """Persist a spider's ``state`` dict across the runs of a job.

    When a JOBDIR is configured the state is pickled (binary, protocol 2)
    to ``<jobdir>/spider.state`` on close and restored on open; without a
    JOBDIR the spider simply starts with an empty state dict.
    """

    def __init__(self, jobdir=None):
        self.jobdir = jobdir

    @classmethod
    def from_crawler(cls, crawler):
        """Build the extension from crawler settings and hook its signals."""
        ext = cls(crawler.settings.get('JOBDIR'))
        crawler.signals.connect(ext.spider_closed, signal=signals.spider_closed)
        crawler.signals.connect(ext.spider_opened, signal=signals.spider_opened)
        return ext

    def spider_closed(self, spider):
        """Dump the spider's state to disk, if persistence is enabled."""
        if not self.jobdir:
            return
        with open(self.statefn, 'wb') as statefile:
            pickle.dump(spider.state, statefile, protocol=2)

    def spider_opened(self, spider):
        """Attach a state dict to the spider, restored from disk when possible."""
        spider.state = {}
        if self.jobdir and os.path.exists(self.statefn):
            with open(self.statefn, 'rb') as statefile:
                spider.state = pickle.load(statefile)

    @property
    def statefn(self):
        """Path of the pickle file inside the job directory."""
        return os.path.join(self.jobdir, 'spider.state')
| import os, cPickle as pickle
from scrapy import signals
class SpiderState(object):
    """Store and load spider state during a scraping job.

    With a JOBDIR configured, the spider's ``state`` dict is pickled to
    ``<jobdir>/spider.state`` on close and restored on open.
    """

    def __init__(self, jobdir=None):
        self.jobdir = jobdir

    @classmethod
    def from_crawler(cls, crawler):
        """Instantiate from settings and connect open/close signals."""
        obj = cls(crawler.settings.get('JOBDIR'))
        crawler.signals.connect(obj.spider_closed, signal=signals.spider_closed)
        crawler.signals.connect(obj.spider_opened, signal=signals.spider_opened)
        return obj

    def spider_closed(self, spider):
        # Persist the state dict when a job directory is configured.
        if self.jobdir:
            with open(self.statefn, 'wb') as f:
                pickle.dump(spider.state, f, protocol=2)

    def spider_opened(self, spider):
        if self.jobdir and os.path.exists(self.statefn):
            # Fix: open in binary mode -- pickles written with 'wb' get
            # corrupted by text-mode newline translation on Windows.
            with open(self.statefn, 'rb') as f:
                spider.state = pickle.load(f)
        else:
            spider.state = {}

    @property
    def statefn(self):
        # The state file lives inside the job directory.
        return os.path.join(self.jobdir, 'spider.state')
| bsd-3-clause | Python |
af6eef23e60b88209ce4e9601f0cf457ee421bdc | add zone no crawler | EclipseXuLu/DataHouse,EclipseXuLu/DataHouse | DataHouse/crawler/zone_no_crawler.py | DataHouse/crawler/zone_no_crawler.py | """
not finish yet
"""
import requests
from bs4 import BeautifulSoup
def crawl():
    """Fetch the area-code index page and print each listed entry."""
    url = 'http://quhao.tianqi.com/'
    # Browser-like headers so the site serves the normal page.
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Host': 'quhao.tianqi.com',
        'Referer': 'https://www.baidu.com/link?url=dt9Ft7DGXOxzDe8CX8pIybsRFMUsEzSbE3udUXkowquCMcMgXumd-ruQM4nr4uBD&wd=&eqid=a75b74b50001af66000000035b082294',
        'Upgrade-Insecure-Requests': '1',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36'
    }
    response = requests.get(url, headers=headers)
    if response.status_code == 200:
        soup = BeautifulSoup(response.text, 'html5lib')
        # each <li> inside div.box is one area-code entry
        for li in soup.find('div', class_="box").find_all('li'):
            print(li.text)
def crawl_detail(place):
    """Fetch the detail page for *place*.

    NOTE(review): unfinished -- the parsed soup is built but never used
    (the module docstring says "not finish yet").
    """
    url = 'http://quhao.tianqi.com/%s' % place
    headers = {
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
        'Host': 'quhao.tianqi.com',
        'Referer': 'http://quhao.tianqi.com/',
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.119 Safari/537.36'
    }
    response = requests.get(url, headers=headers)
    if response.status_code == 200:
        soup = BeautifulSoup(response.text, 'html5lib')


if __name__ == '__main__':
    crawl()
| mit | Python | |
5fc6d9fc05bc4cae5588489c576744a518155461 | Add Sin, Cos and Tan function classes | jackromo/mathLibPy | trig.py | trig.py | import function
import math
class Sin(function.Function):
    # Sine of x (radians), delegated to math.sin.
    def _evaluate(self, x):
        return math.sin(x)


class Cos(function.Function):
    # Cosine of x (radians), delegated to math.cos.
    def _evaluate(self, x):
        return math.cos(x)


class Tan(function.Function):
    # Tangent computed as sin/cos so that an exactly-zero cosine raises.
    def _evaluate(self, x):
        # NOTE(review): fresh Sin/Cos instances on every evaluation --
        # cheap, but they could be module-level constants.
        sin = Sin()
        cos = Cos()
        if cos(x) == 0:
            # Exact float equality: only fires where math.cos returns 0.0.
            raise ZeroDivisionError()
        return sin(x) / cos(x)
def main():
    # Smoke test: evaluation at 0 plus operator composition (Function
    # subclasses support '+' and are callable).
    sin = Sin()
    cos = Cos()
    tan = Tan()
    assert(sin(0) == 0)
    assert(cos(0) == 1)
    assert(tan(0) == 0)
    assert((tan + cos + sin)(0) == 1)


if __name__ == "__main__":
main() | mit | Python | |
def convert_temperature(temperature, to='celsius'):
    """Convert *temperature* between Fahrenheit and Celsius.

    to='celsius'      treats *temperature* as Fahrenheit, returns Celsius.
    any other value   treats *temperature* as Celsius, returns Fahrenheit.
    """
    if to == 'celsius':
        return (temperature - 32) * 5 / 9
    return (temperature * 9 / 5) + 32
| mit | Python | |
f375be2ac11aa9648c77392cec36900db900c6ef | Add Flask server. | vmlaker/wabbit,vmlaker/wabbit,vmlaker/wabbit,vmlaker/wabbit | serve.py | serve.py | import sys
import flask
import sqlalchemy as sa
import coils
import tables
app = flask.Flask(__name__)
@app.route('/')
def root():
    """Render every row of the ``image`` table as a simple HTML listing."""
    # Load configuration file (path may be given as the first CLI argument).
    # NOTE(review): reading sys.argv inside a request handler works only
    # because the value never changes after startup; consider hoisting.
    CONFIG = sys.argv[1] if len(sys.argv)>=2 else 'wabbit.cfg'
    config = coils.Config(CONFIG)
    # Connect to database engine.
    engine = sa.create_engine(
        'mysql://{}:{}@{}/{}'.format(
            config['username'], config['password'],
            config['host'], config['db_name']))
    conn = engine.connect()
    try:
        # Select and render each row.
        result = ''
        s = sa.sql.select([tables.image])
        rows = conn.execute(s)
        for row in rows:
            result += '{:s}<br>'.format(row)
    finally:
        # Close the connection even when the query raises, so failed
        # requests do not leak database connections.
        conn.close()
    return result
@app.route('/pics')
def pics():
    """Placeholder endpoint for the pictures page."""
    return 'Pictures!'
# Run the Flask development server when the script is executed directly.
if __name__ == '__main__':
    app.run()
| mit | Python | |
f3ab43a3c24851ca5a6e68c04e56af8c8f9a0fd1 | add setup.py | jupyter/jupyter-drive,jupyter/jupyter-drive,jupyter/jupyter-drive,Carreau/jupyter-drive,Carreau/jupyter-drive | setup.py | setup.py | from setuptools import setup, find_packages # Always prefer setuptools over distutils
from codecs import open  # To use a consistent encoding
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
#with open(path.join(here, 'DESCRIPTION.rst'), encoding='utf-8') as f:
#    long_description = f.read()
setup(
    name='jupyter-drive',
    # Versions should comply with PEP440. For a discussion on single-sourcing
    # the version across setup.py and the project code, see
    # https://packaging.python.org/en/latest/development.html#single-sourcing-the-version
    version='0.0.1',
    # Typo fixed: "Interation" -> "Integration".
    description='Integration of IPython/Jupyter with google drive',
    long_description='',
    # The project's main homepage.
    url='https://github.com/jupyter/jupyter-drive',
    # Author details
    author='Matthias Bussonnier, Kester Tong, Kyle Kelley, Thomas Kluyver, The IPython team',
    author_email='ipython-dev@scipy.org',
    # Choose your license
    license='BSD',
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are
        #   3 - Alpha
        #   4 - Beta
        #   5 - Production/Stable
        'Development Status :: 3 - Alpha',
        # Indicate who your project is intended for
        'Intended Audience :: Developers',
        # Pick your license as you wish (should match "license" above)
        'License :: OSI Approved :: BSD License',
        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.3',
        'Programming Language :: Python :: 3.4',
        # NOTE(review): this is not a registered trove classifier; it should
        # probably be 'Framework :: IPython' -- confirm before release.
        'Programming Language :: Framework :: IPython',
    ],
    # What does your project relate to?
    keywords='ipython jupyter google drive notebook',
    # You can just specify the packages manually here if your project is
    # simple. Or you can use find_packages().
    packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
    # List run-time dependencies here. These will be installed by pip when your
    # project is installed. For an analysis of "install_requires" vs pip's
    # requirements files see:
    # https://packaging.python.org/en/latest/technical.html#install-requires-vs-requirements-files
    install_requires=['IPython'],
    # have to be included in MANIFEST.in as well.
    package_data={
        'jupyter-drive': ['*'],
    },
    # Although 'package_data' is the preferred approach, in some case you may
    # need to place data files outside of your packages.
    # see http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files
    # In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
    # The stray '},' line that used to follow data_files made this file a
    # SyntaxError; it has been removed.
    data_files=[('jupyter-drive', ['*'])],
)
| bsd-2-clause | Python | |
1b77c721b53d59e1b6242906780941d262d070e8 | add basic setup.py | Kyria/EsiPy,a-tal/EsiPy | setup.py | setup.py | from setuptools import setup
from esipy import __version__
# install requirements
install_requirements = [
    "requests",
    "pyswagger",
    "six"
]
# test requirements
# Test-only dependencies; the runtime requirements are appended so the
# test environment can also import the package itself.
test_requirements = [
    "coverage",
    "coveralls",
    "httmock",
    "nose",
    "mock",
    "future",
    "python-memcached"
] + install_requirements
# Packaging metadata for the EsiPy distribution.
setup(
    name='EsiPy',
    version=__version__,
    packages=['esipy'],
    url='https://github.com/Kyria/EsiPy',
    license='BSD 3-Clause License',
    author='Kyria',
    author_email='anakhon@gmail.com',
    description='Swagger Client for the ESI API for EVE Online',
    install_requires=install_requirements,
    tests_require=test_requirements,
    test_suite='nose.collector',
    classifiers=[
        "Development Status :: 4 - Beta",
        "Intended Audience :: Developers",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Programming Language :: Python :: 3.4",
        "Programming Language :: Python :: Implementation :: PyPy",
    ]
)
| bsd-3-clause | Python | |
518e1d56a23eb1d5b4bb31ae46c958e519addfc0 | add setup file | lewismc/topik,kcompher/topik,ContinuumIO/topik,lewismc/topik,kcompher/topik,kcompher/topik,lewismc/topik,ContinuumIO/topik | setup.py | setup.py | #!/usr/bin/env python
from os.path import exists
from setuptools import setup, find_packages

def _read_requirements():
    """Return the dependency list from requirements.txt, one name per line."""
    # A context manager closes the handle deterministically; the previous
    # inline open() relied on garbage collection.
    with open('requirements.txt') as fobj:
        return fobj.read().strip().split('\n')

def _read_long_description():
    """Return the contents of README.rst, or '' when the file is absent."""
    if not exists('README.rst'):
        return ''
    with open('README.rst') as fobj:
        return fobj.read()

setup(name='topik',
      version='0.1.0',
      description='A Topic Modeling high-level interface',
      url='http://github.com/ContinuumIO/topik/',
      author='Christine Doig',
      author_email='christine.doig@continuum.io',
      license='BSD',
      keywords='topic modeling lda nltk gensim',
      packages=find_packages(),
      install_requires=_read_requirements(),
      long_description=_read_long_description(),
      zip_safe=False)
| bsd-3-clause | Python | |
07cda3fee1215f4d28e3885796c5d5f02ec28918 | add beginning for D4 qcodes wrapper | Rubenknex/SPI-rack,peendebak/SPI-rack | D4/D4.py | D4/D4.py | from qcodes import Instrument
from qcodes.utils.validators import Numbers
from .D4_module import D4_module
class D4(Instrument):
    """
    Qcodes driver for the D4 ADC SPI-rack module.
    """
    def __init__(self, name, spi_rack, module, **kwargs):
        # Forward the instrument name and any qcodes options to Instrument.
        super().__init__(name, **kwargs)
        # Low-level SPI-rack module object that talks to the hardware.
        self.d4 = D4_module(spi_rack, module)
        # Placeholder loop, presumably one iteration per ADC channel; no
        # parameters are registered yet (initial-commit scaffolding) --
        # TODO implement.
        for i in range(2):
            pass
| mit | Python | |
0cce9a108d97b61bc36c1d6873a801ae5a02ee10 | Add setup.py script. | anthrotype/booleanOperations,moyogo/booleanoperations,typemytype/booleanOperations | setup.py | setup.py | from distutils.core import setup
# Packaging metadata for booleanOperations; sources live under Lib/.
setup(
    name="booleanOperations",
    version="0.1",
    description="Boolean operations on paths.",
    author="Frederik Berlaen",
    url="https://github.com/typemytype/booleanOperations",
    license="MIT",
    packages=["booleanOperations"],
    package_dir={"": "Lib"},
    # NOTE(review): shipping a prebuilt pyClipper.so is platform specific --
    # confirm this is intentional.
    package_data={"booleanOperations": ["pyClipper.so"]}
)
| mit | Python | |
6edea0e1f15c3905251793238fe88641a5935fed | Update version to 3.6.2 | zl352773277/django-redis,smahs/django-redis,yanheng/django-redis,lucius-feng/django-redis,GetAmbassador/django-redis | setup.py | setup.py | from setuptools import setup
# Human-readable summary reused below (stripped) as the description.
description = """
Full featured redis cache backend for Django.
"""
# Packaging metadata for the django-redis distribution (release 3.6.2).
setup(
    name = "django-redis",
    url = "https://github.com/niwibe/django-redis",
    author = "Andrei Antoukh",
    author_email = "niwi@niwi.be",
    version='3.6.2',
    packages = [
        "redis_cache",
        "redis_cache.client"
    ],
    description = description.strip(),
    install_requires=[
        'redis>=2.9.0',
    ],
    zip_safe=False,
    include_package_data = True,
    package_data = {
        '': ['*.html'],
    },
    classifiers = [
        "Development Status :: 5 - Production/Stable",
        "Operating System :: OS Independent",
        "Environment :: Web Environment",
        "Framework :: Django",
        "License :: OSI Approved :: BSD License",
        "Intended Audience :: Developers",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Topic :: Software Development :: Libraries",
        "Topic :: Utilities",
    ],
)
| from setuptools import setup
# Human-readable summary reused below (stripped) as the description.
description = """
Full featured redis cache backend for Django.
"""
# Packaging metadata for the django-redis distribution (release 3.6.1).
setup(
    name = "django-redis",
    url = "https://github.com/niwibe/django-redis",
    author = "Andrei Antoukh",
    author_email = "niwi@niwi.be",
    version='3.6.1',
    packages = [
        "redis_cache",
        "redis_cache.client"
    ],
    description = description.strip(),
    install_requires=[
        'redis>=2.9.0',
    ],
    zip_safe=False,
    include_package_data = True,
    package_data = {
        '': ['*.html'],
    },
    classifiers = [
        "Development Status :: 5 - Production/Stable",
        "Operating System :: OS Independent",
        "Environment :: Web Environment",
        "Framework :: Django",
        "License :: OSI Approved :: BSD License",
        "Intended Audience :: Developers",
        "Programming Language :: Python",
        "Programming Language :: Python :: 2",
        "Programming Language :: Python :: 2.7",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.2",
        "Programming Language :: Python :: 3.3",
        "Topic :: Software Development :: Libraries",
        "Topic :: Utilities",
    ],
)
| bsd-3-clause | Python |
709aef34e608eb86dc7cbffd5635f78d8b5f59f3 | add fasta2bed.py | likit/BioUtils,likit/BioUtils | fasta2bed.py | fasta2bed.py | '''Credit: Elijah Lowe'''
'''Read in sequences in FASTA format and print out BED format.'''
import screed, sys
# First (and only) CLI argument: path to the input FASTA file.
infile = sys.argv[1]
# Emit one BED line per sequence: <name> 0 <length>.
# NOTE: Python 2 print statement; the trailing comma after "\t0\t" inserts
# a space before the length in the output. 'n' from enumerate is unused.
for n, record in enumerate(screed.open(infile)):
    print record['name']+"\t0\t",len(record['sequence'])
| bsd-2-clause | Python | |
58e4c8c5cd3c47fb4dc4e6e772b0f300fe890225 | Add missed setup.py | vine/mysql-prefetcher | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
# Packaging metadata for the MySQL replication prefetcher.
setup(
    name='myprefetch',
    version='0.1',
    description='MySQL Replication Prefetcher',
    packages=find_packages(),
    # NOTE(review): these inline open() calls leave the handles to GC and
    # raise at install time when the files are missing -- consider guarding.
    long_description=open('README.md').read(),
    license=open('LICENSE').read(),
    url='https://github.com/vine/mysql-prefetcher',
    install_requires=[
        'MySQL-python',
    ],
)
| apache-2.0 | Python | |
46b723ad4b48e29225cf8fcc44fa90bc9cfc3e21 | Enable distutils for package | c-w/GettyArt | setup.py | setup.py | from distutils.core import setup
# Packaging metadata for the Getty art scraper.
setup(
    name='Getty',
    version='0.0.1',
    author='Clemens Wolff',
    author_email='clemens.wolff+pypi@gmail.com',
    packages=['getty'],
    url='https://github.com/c-w/Getty',
    download_url='http://pypi.python.org/pypi/Getty',
    license='LICENSE.txt',
    description='Scraper for art available from getty.edu',
    # README.rst is read eagerly; the handle is closed only by GC.
    long_description=open('README.rst').read(),
)
| mit | Python | |
b4a41b129a33361ebcb45de87a236952943ab3c3 | Create setup.py | jbeliao/swipe-installer,jbeliao/swipe-installer,jbeliao/swipe-installer,jbeliao/swipe-installer | setup.py | setup.py | #!/usr/bin/env python
#
# Copyright (c) 2009-2013 Kyle Gorman
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# setup.py: for building SWIG bridge to Python
# Kyle Gorman
from distutils.core import setup, Extension
# Build the SWIG-generated _swipe extension; it links against libsndfile,
# LAPACK and FFTW3, which must be installed system-wide.
setup(name='swipe', version='1.5', author='Kyle Gorman',
      description="""SWIPE' pitch estimator""", py_modules=['swipe'],
      ext_modules=[Extension('_swipe', sources=['vector.c', 'swipe.c', 'swipe_wrap.c'], libraries=['sndfile', 'lapack', 'fftw3'])])
| mit | Python | |
3af11eab7373a937f8df9858efbc41cdc9cfc940 | Package Linehaul | pypa/linehaul | setup.py | setup.py | from setuptools import setup, find_packages
# Collect runtime dependencies from requirements/main.in, one per line.
install_requires = []
with open("requirements/main.in", "r") as fp:
    for line in fp:
        # str.strip() returns a new string; the previous code discarded the
        # result, so lines containing only "\n" were still truthy and blank
        # lines ended up in install_requires.
        line = line.strip()
        if line:
            install_requires.append(line)
setup(
    name="linehaul",
    use_scm_version={
        "local_scheme": lambda v: "+{.node}{}".format(v, ".dirty" if v.dirty else ""),
        "version_scheme": lambda v: "3.{.distance}.0".format(v),
    },
    packages=find_packages(exclude=["tests*"]),
    package_data={"linehaul": ["schema.json"]},
    entry_points={"console_scripts": ["linehaul = linehaul.cli:main"]},
    install_requires=install_requires,
    setup_requires=["setuptools_scm"],
)
| apache-2.0 | Python | |
f743220b6d6868b3a489a1843dda329ed0a7d5c4 | Add Python setup file | Twi/amaya | setup.py | setup.py | from setuptools import setup
# Packaging metadata for the amaya IRC bot framework.
setup(name='amaya',
      version='0.1',
      description='IRCv3 capable bot framework',
      url='http://github.com/bookhorse/amaya',
      author='Nicole Brennan',
      author_email='twipony.ts@gmail.com',
      license='ZLib',
      packages=['amaya'])
| mit | Python | |
250261038893c7f5b004776c4aec01ebfc1d9012 | Fix a typo | moypray/flocker,AndyHuu/flocker,moypray/flocker,1d4Nf6/flocker,1d4Nf6/flocker,w4ngyi/flocker,hackday-profilers/flocker,LaynePeng/flocker,moypray/flocker,hackday-profilers/flocker,mbrukman/flocker,agonzalezro/flocker,agonzalezro/flocker,agonzalezro/flocker,wallnerryan/flocker-profiles,hackday-profilers/flocker,Azulinho/flocker,w4ngyi/flocker,beni55/flocker,Azulinho/flocker,lukemarsden/flocker,adamtheturtle/flocker,w4ngyi/flocker,mbrukman/flocker,wallnerryan/flocker-profiles,jml/flocker,jml/flocker,LaynePeng/flocker,beni55/flocker,lukemarsden/flocker,beni55/flocker,jml/flocker,Azulinho/flocker,runcom/flocker,runcom/flocker,runcom/flocker,achanda/flocker,adamtheturtle/flocker,AndyHuu/flocker,achanda/flocker,LaynePeng/flocker,mbrukman/flocker,1d4Nf6/flocker,wallnerryan/flocker-profiles,achanda/flocker,adamtheturtle/flocker,lukemarsden/flocker,AndyHuu/flocker | setup.py | setup.py | # Copyright Hybrid Logic Ltd. See LICENSE file for details.
#
# Generate a Flocker package that can be deployed onto cluster nodes.
#
import os.path
from setuptools import setup
# Read the canonical version string out of the package source tree.
# NOTE(review): the bytes literal b"flocker/version" joined with a str
# dirname is Python-2 specific; under Python 3 this join would fail.
path = os.path.join(os.path.dirname(__file__), b"flocker/version")
with open(path) as fObj:
    version = fObj.read().strip()
del path
setup(
    # This is the human-targeted name of the software being packaged.
    name="Flocker",
    # This is a string giving the version of the software being packaged. For
    # simplicity it should be something boring like X.Y.Z.
    version=version,
    # This identifies the creators of this software. This is left symbolic for
    # ease of maintenance.
    author="HybridCluster Team",
    # This is contact information for the authors.
    author_email="support@hybridcluster.com",
    # Here is a website where more information about the software is available.
    url="http://hybridcluster.com/",
    # This defines *Python* packages (in other words, things that can be
    # imported) which are part of the package. Most of what they contain will
    # be included in the package automatically by virtue of the packages being
    # mentioned here. These aren't recursive so each sub-package must also be
    # explicitly included.
    packages=[
        "flocker", "flocker.test",
    ],
    # This defines extra non-source files that live in the source tree that
    # need to be included as part of the package.
    package_data={
        # This is the canonical definition of the source form of the cluster
        # version.
        "flocker": ["version"],
    },
    extras_require={
        # This extra allows you to build the documentation for Flocker.
        "doc": ["Sphinx==1.2", "sphinx-rtd-theme==0.1.6"],
        # This extra is for developers who need to work on Flocker itself.
        "dev": ["pyflakes==0.8.1"]
    },
)
| # Copyright Hybrid Logic Ltd. See LICENSE file for details.
#
# Generate a Flocker package that can be deployed onto cluster nodes.
#
import os.path
from setuptools import setup
# Read the canonical version string out of the package source tree.
path = os.path.join(os.path.dirname(__file__), b"flocker/version")
with open(path) as fObj:
    version = fObj.read().strip()
del path
setup(
    # This is the human-targeted name of the software being packaged.
    name="Flocker",
    # This is a string giving the version of the software being packaged. For
    # simplicity it should be something boring like X.Y.Z.
    version=version,
    # This identifies the creators of this software. This is left symbolic for
    # ease of maintenance.
    author="HybridCluster Team",
    # This is contact information for the authors.
    author_email="support@hybridcluster.com",
    # Here is a website where more information about the software is available.
    url="http://hybridcluster.com/",
    # This defines *Python* packages (in other words, things that can be
    # imported) which are part of the package. Most of what they contain will
    # be included in the package automatically by virtue of the packages being
    # mentioned here. These aren't recursive so each sub-package must also be
    # explicitly included.
    packages=[
        "flocker", "flocker.test",
    ],
    # This defines extra non-source files that live in the source tree that
    # need to be included as part of the package.
    package_data={
        # This is the canonical definition of the source form of the cluster
        # version.
        "flocker": ["version"],
    },
    extras_require={
        # This extra allows you to build the documentation for Flocker.
        "doc": ["Sphinx==1.2", "sphinx-rtd-theme==0.1.6"],
        # This extra is for developers who need to work on Flocker itself.
        "dev": ["pyflakes==0.8.1"]
    },
)
| apache-2.0 | Python |
b46119ada62fbcb4791cd8ce210e34a43564df5b | Add setup_tools cruft | garethr/mnml,bradleywright/mnml,bradwright/mnml | setup.py | setup.py | from distutils.core import setup
# Packaging metadata for MNML, distributed as a single module.
setup(name = 'MNML',
      description = 'A very lightweight WSGI Python web framework',
      author = 'Bradley Wright',
      author_email = 'me@bradleywright.name',
      url = 'http://github.com/bradleywright/mnml',
      version = '0.1',
      py_modules = ['mnml'],
) | mit | Python | |
328204f4158a829c6922019dcd83d3afbca2536d | bump to 0.4 | pbs/zencoder-py,torchbox/zencoder-py,zencoder/zencoder-py | setup.py | setup.py |
from distutils.core import setup
# Packaging metadata for the Zencoder integration library (release 0.4).
# NOTE(review): plain distutils setup() does not understand
# install_requires; setuptools would be needed for the httplib2
# dependency to take effect -- confirm.
setup(name='zencoder',
      version='0.4',
      description='Integration library for Zencoder',
      author='Alex Schworer',
      author_email='alex.schworer@gmail.com',
      url='http://github.com/schworer/zencoder-py',
      license="MIT License",
      install_requires=['httplib2'],
      packages=['zencoder']
      )
|
from distutils.core import setup
# Packaging metadata for the Zencoder integration library (release 0.3).
setup(name='zencoder',
      version='0.3',
      description='Integration library for Zencoder',
      author='Alex Schworer',
      author_email='alex.schworer@gmail.com',
      url='http://github.com/schworer/zencoder-py',
      license="MIT License",
      install_requires=['httplib2'],
      packages=['zencoder']
      )
| mit | Python |
bd41935801fb01d85d7f3f600c6b94f077cdf82f | Add solid motor utilities. | mvernacc/proptools | solid.py | solid.py | ''' Solid rocket motor equations.
Matt Vernacchia
proptools
2016 Aug 22
'''
def chamber_pressure(K_n, a, n, rho_solid, c_star):
    ''' Chamber pressure due to solid propellant combustion.
    See equation 12-6 in Rocket Propulsion Elements 8th edition.
    Args:
        K_n (scalar): Ratio of burning area to throat area, A_b/A_t [units: none].
        a (scalar): Propellant burn rate coefficient [units: meter second**-1 pascal**-n].
        n (scalar): Propellant burn rate exponent [units: none].
        rho_solid (scalar): Solid propellant density [units: kilogram meter**-3].
        c_star (scalar): Propellant combustion charateristic velocity [units: meter second**-1].
    Returns:
        Chamber pressure [units: pascal].
    '''
    # Name the intermediate quantities for readability.
    base = K_n * rho_solid * a * c_star
    exponent = 1 / (1 - n)
    return base ** exponent
| mit | Python | |
62d817cde6a8c58372125f551d8122cc303ac4b5 | Add a new gclient-new-workdir script which clones an existing gclient working directory much like git-new-workdir, but takes into account all sub projects as well. | azunite/chrome_build,smikes/depot_tools,azureplus/chromium_depot_tools,Midrya/chromium,sarvex/depot-tools,Midrya/chromium,Midrya/chromium,kromain/chromium-tools,ajohnson23/depot_tools,chinmaygarde/depot_tools,duanwujie/depot_tools,Chilledheart/depot_tools,SuYiling/chrome_depot_tools,airtimemedia/depot_tools,HackFisher/depot_tools,primiano/depot_tools,ajohnson23/depot_tools,npe9/depot_tools,Chilledheart/depot_tools,cybertk/depot_tools,eatbyte/depot_tools,primiano/depot_tools,duongbaoduy/gtools,yetu/repotools,CoherentLabs/depot_tools,liaorubei/depot_tools,sarvex/depot-tools,hsharsha/depot_tools,fanjunwei/depot_tools,Chilledheart/depot_tools,smikes/depot_tools,michalliu/chromium-depot_tools,cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools,gcodetogit/depot_tools,npe9/depot_tools,npe9/depot_tools,disigma/depot_tools,eatbyte/depot_tools,yetu/repotools,kaiix/depot_tools,HackFisher/depot_tools,aleonliao/depot_tools,chinmaygarde/depot_tools,CoherentLabs/depot_tools,G-P-S/depot_tools,fracting/depot_tools,kaiix/depot_tools,xuyuhan/depot_tools,azunite/chrome_build,fanjunwei/depot_tools,HackFisher/depot_tools,Neozaru/depot_tools,duanwujie/depot_tools,Phonebooth/depot_tools,disigma/depot_tools,aleonliao/depot_tools,liaorubei/depot_tools,Chilledheart/depot_tools,mlufei/depot_tools,withtone/depot_tools,hsharsha/depot_tools,eatbyte/depot_tools,duanwujie/depot_tools,kaiix/depot_tools,smikes/depot_tools,npe9/depot_tools,chinmaygarde/depot_tools,primiano/depot_tools,airtimemedia/depot_tools,azunite/chrome_build,Neozaru/depot_tools,Neozaru/depot_tools,xuyuhan/depot_tools,xuyuhan/depot_tools,fanjunwei/depot_tools,fanjunwei/depot_tools,withtone/depot_tools,Neozaru/depot_tools,cybertk/depot_tools,fracting/depot_tools,xuyuhan/depot_tools,azureplus/chromium
_depot_tools,sarvex/depot-tools,liaorubei/depot_tools,smikes/depot_tools,disigma/depot_tools,G-P-S/depot_tools,azureplus/chromium_depot_tools,smikes/depot_tools,G-P-S/depot_tools,withtone/depot_tools,eatbyte/depot_tools,gcodetogit/depot_tools,hsharsha/depot_tools,michalliu/chromium-depot_tools,airtimemedia/depot_tools,G-P-S/depot_tools,mlufei/depot_tools,kromain/chromium-tools,Phonebooth/depot_tools,gcodetogit/depot_tools,duongbaoduy/gtools,ajohnson23/depot_tools,Phonebooth/depot_tools,Chilledheart/depot_tools,fracting/depot_tools,SuYiling/chrome_depot_tools,SuYiling/chrome_depot_tools,Neozaru/depot_tools,cybertk/depot_tools,mlufei/depot_tools,airtimemedia/depot_tools,liaorubei/depot_tools,Phonebooth/depot_tools,cybertk/depot_tools,duongbaoduy/gtools,cybertk/depot_tools,sarvex/depot-tools,kromain/chromium-tools,yetu/repotools,michalliu/chromium-depot_tools,cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools,michalliu/chromium-depot_tools,kromain/chromium-tools,aleonliao/depot_tools,cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools,HackFisher/depot_tools | gclient-new-workdir.py | gclient-new-workdir.py | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# Usage:
# gclient-new-workdir.py <repository> <new_workdir> [<branch>]
#
import os
import shutil
import subprocess
import sys
def parse_options(argv):
  """Validate the command line and return (repository, new_workdir)."""
  assert not sys.platform.startswith("win")
  if len(argv) != 3:
    print("usage: gclient-new-workdir.py <repository> <new_workdir>")
    sys.exit(1)
  repository, new_workdir = argv[1], argv[2]
  if not os.path.exists(repository):
    print("Repository does not exist: " + repository)
    sys.exit(1)
  if os.path.exists(new_workdir):
    print("New workdir already exists: " + new_workdir)
    sys.exit(1)
  return repository, new_workdir
def main(argv):
  """Create a new gclient workdir mirroring an existing checkout."""
  # Resolve and validate the two positional arguments.
  repository, new_workdir = parse_options(argv)
  gclient = os.path.join(repository, ".gclient")
  if not os.path.exists(gclient):
    # Warning only; a missing .gclient is not treated as fatal here.
    print("No .gclient file: " + gclient)
  gclient_entries = os.path.join(repository, ".gclient_entries")
  if not os.path.exists(gclient_entries):
    print("No .gclient_entries file: " + gclient_entries)
  os.mkdir(new_workdir)
  # Share the gclient metadata with the original checkout via symlinks.
  os.symlink(gclient, os.path.join(new_workdir, ".gclient"))
  os.symlink(gclient_entries, os.path.join(new_workdir, ".gclient_entries"))
  # Mirror every git checkout found anywhere under the repository.
  for root, dirs, _ in os.walk(repository):
    if ".git" in dirs:
      workdir = root.replace(repository, new_workdir, 1)
      make_workdir(os.path.join(root, ".git"),
                   os.path.join(workdir, ".git"))
def make_workdir(repository, new_workdir):
  """Create a git workdir at *new_workdir* that shares object storage with
  *repository* (both paths end in '.git'), then check out the tree."""
  print("Creating: " + new_workdir)
  os.makedirs(new_workdir)
  # Pieces of the .git directory that are shared with the source checkout.
  GIT_DIRECTORY_WHITELIST = [
    "config",
    "info",
    "hooks",
    "logs/refs",
    "objects",
    "packed-refs",
    "refs",
    "remotes",
    "rr-cache",
    "svn"
  ]
  for entry in GIT_DIRECTORY_WHITELIST:
    make_symlink(repository, new_workdir, entry)
  # HEAD must be a private copy so the new workdir can sit on its own branch.
  shutil.copy2(os.path.join(repository, "HEAD"),
               os.path.join(new_workdir, "HEAD"))
  # The old code used new_workdir.rstrip(".git"), but str.rstrip strips a
  # *character set* (any trailing '.', 'g', 'i', 't'), not a suffix;
  # os.path.dirname expresses "parent of the .git directory" safely.
  subprocess.check_call(["git", "checkout", "-f"],
                        cwd=os.path.dirname(new_workdir))
def make_symlink(repository, new_workdir, link):
  """Symlink *link* from the source .git directory into the new workdir,
  creating intermediate directories; missing sources are skipped."""
  source = os.path.join(repository, link)
  if not os.path.exists(source):
    return
  target = os.path.join(new_workdir, link)
  parent = os.path.dirname(target)
  if not os.path.exists(parent):
    os.makedirs(parent)
  os.symlink(source, target)
if __name__ == '__main__':
  sys.exit(main(sys.argv))
| bsd-3-clause | Python | |
a45c78d0fcc6d8cd2d8e702917a2dabd7bfc0444 | Add command createaccount | mociepka/saleor,mociepka/saleor,mociepka/saleor | saleor/account/management/commands/createserviceaccount.py | saleor/account/management/commands/createserviceaccount.py | import json
from typing import Any, Dict, List, Optional
import requests
from django.contrib.auth.models import Permission
from django.contrib.sites.models import Site
from django.core.management import BaseCommand, CommandError
from django.core.management.base import CommandParser
from requests.exceptions import RequestException
from ....core.permissions import get_permissions, get_permissions_enum_list
from ...models import ServiceAccount
class Command(BaseCommand):
    """Create a service account, optionally POST its credentials to a
    target URL, and print the resulting data as JSON."""
    help = "Used to create service account"

    def add_arguments(self, parser: CommandParser) -> None:
        parser.add_argument("name", type=str)
        parser.add_argument(
            "--permission",
            action="append",
            default=[],
            dest="permissions",
            help="Assign new permission to Service Account. "
            "Argument can be specified multiple times.",
        )
        parser.add_argument("--is_active", default=True, dest="is_active")
        parser.add_argument(
            "--target_url",
            dest="target_url",
            help="Url which will receive newly created data of service account object.",
        )

    def validate_permissions(self, required_permissions: List[str]):
        """Raise CommandError when any requested permission is unknown."""
        permissions = list(map(lambda x: x[1], get_permissions_enum_list()))
        for perm in required_permissions:
            if perm not in permissions:
                raise CommandError(
                    # Message typos fixed: "Permisssion" -> "Permission",
                    # "Avaiable" -> "Available".
                    f"Permission: {perm} doesn't exist in Saleor."
                    f" Available permissions: {permissions}"
                )

    def clean_permissions(self, required_permissions: List[str]) -> List[Permission]:
        """Resolve permission names to Permission model instances."""
        permissions = get_permissions(required_permissions)
        return permissions

    def send_service_account_data(self, target_url, data: Dict[str, Any]):
        """POST *data* to *target_url* with the current site domain in the
        x-saleor-domain header; raise CommandError on any failure."""
        domain = Site.objects.get_current().domain
        headers = {"x-saleor-domain": domain}
        try:
            response = requests.post(target_url, json=data, headers=headers, timeout=15)
        except RequestException as e:
            raise CommandError(f"Request failed. Exception: {e}")
        if response.status_code != 200:
            raise CommandError(
                f"Failed to send service account data to {target_url}. "  # type: ignore
                f"Status code: {response.status_code}, content: {response.content}"
            )

    def handle(self, *args: Any, **options: Any) -> Optional[str]:
        """Create the account, its token, and report the credentials."""
        name = options["name"]
        is_active = options["is_active"]
        target_url = options["target_url"]
        # De-duplicate permissions given via repeated --permission flags.
        permissions = list(set(options["permissions"]))
        self.validate_permissions(permissions)
        service_account = ServiceAccount.objects.create(name=name, is_active=is_active)
        permissions_qs = self.clean_permissions(permissions)
        service_account.permissions.add(*permissions_qs)
        token_obj = service_account.tokens.create()
        data = {
            "auth_token": token_obj.auth_token,
            "name": name,
            "permissions": permissions,
        }
        if target_url:
            self.send_service_account_data(target_url, data)
        return json.dumps(data)
| bsd-3-clause | Python | |
def solution(L):
    """Classify how the list L can be sorted with one operation.

    Returns a tuple (kind, i, j) with 1-based positions:
      ('yes', 0, 0)      -- already sorted,
      ('swap', i, j)     -- sorted after swapping elements i and j,
      ('reverse', i, j)  -- sorted after reversing the segment i..j,
      ('no', 0, 0)       -- not fixable with a single swap or reversal.
    """
    def isAscending(L):
        # Linear scan for any descent.
        m = 1
        while m < len(L):
            if L[m] < L[m - 1]:
                return False
            m += 1
        return True
    # Work on a copy so the caller's list is never mutated.
    # (The old list(L[:]) copied the list twice; once is enough.)
    buffer = list(L)
    # i: first index that breaks the ascending order (left edge).
    i = 1
    while i < len(buffer):
        if buffer[i] < buffer[i - 1]:
            i -= 1
            break
        i += 1
    # j: one past the last index that breaks the order (right edge).
    j = len(buffer) - 1
    while j > i:
        if buffer[j] < buffer[j - 1]:
            j += 1
            break
        j -= 1
    else:
        # The while-else runs only when no descent was found: already sorted.
        return 'yes', 0, 0
    # Try swapping the two edge elements.
    buffer[i], buffer[j - 1] = buffer[j - 1], buffer[i]
    if isAscending(buffer):
        return 'swap', i + 1, j
    else:
        # Undo the swap; if the middle segment is strictly non-increasing,
        # reversing it may sort the list.
        buffer[i], buffer[j - 1] = buffer[j - 1], buffer[i]
        k = i + 1
        while k < j:
            if buffer[k] > buffer[k - 1]:
                break
            k += 1
        else:
            buffer[i:j] = reversed(buffer[i:j])
            if isAscending(buffer):
                return 'reverse', i + 1, j
            else:
                return 'no', 0, 0
    return 'no', 0, 0
# HackerRank driver: read the element count (unused) and the list, then
# report whether one swap or one reversal sorts it.
n = int(input())
L = list(map(int, input().split()))
ans, i, j = solution(L)
if ans == 'no' or ans == 'yes':
    print(ans)
else:
    # A one-operation fix exists: print 'yes' plus the operation and span.
    print('yes')
    print(ans, i, j)
| mit | Python | |
e57c1b157b39eac278552fd6c9a16e004d8be501 | Create task_1_2.py | Mariaanisimova/pythonintask | INBa/2014/Andreev_F_I/task_1_2.py | INBa/2014/Andreev_F_I/task_1_2.py | # Задача 1, Вариант 2
# Write a program that reports the occupation and the pen name under which
# Martin Andersen is hidden. After printing the information the program
# must wait for the user to press Enter before exiting.
# Andreev F.I.
# 23.05.2016
print('Мартин Андерсен известный датский писатель-коммунист, его псевдоним Нексе')
input("Нажмите Enter для выхода")
| apache-2.0 | Python | |
1d55ad8fb8309918e7d41b4f443e16ebefbb1895 | Add script for populating elasticsearch. | eggpi/similarity,eggpi/similarity,eggpi/similarity | populate_elasticsearch.py | populate_elasticsearch.py | #!/usr/bin/env python
import os
import sys
import json
import requests
import multiprocessing
import mwparserfromhell as mwp
ES_URL = 'http://localhost:9200'
SECTIONS_TO_REMOVE = set([
'references', 'see also', 'external links', 'footnotes'
])
def put_document(path):
    """Load a JSON document from *path*, strip boilerplate wiki sections,
    and PUT it into the Elasticsearch index named by sys.argv[2].
    NOTE: Python 2 code ('print' statement, builtin file()); 'id' shadows
    the builtin of the same name.
    """
    id = os.path.basename(path)
    doc = json.load(file(path))
    wdoc = mwp.parse(doc['wikitext'])
    # Drop the sections listed in SECTIONS_TO_REMOVE (references etc.).
    for section in wdoc.get_sections(include_headings = True):
        try:
            title = section.get(0).title.strip().lower()
            if title in SECTIONS_TO_REMOVE:
                wdoc.remove(section)
        except (IndexError, AttributeError):
            # No heading or empty section?
            pass
    # Replace the raw wikitext with its plain-text rendering.
    doc['wikitext'] = wdoc.strip_code()
    response = requests.put(
        ES_URL + '/' + sys.argv[2] + '/' + id, json.dumps(doc))
    print response.content
# Fan the uploads out across a process pool; sys.argv[1] is a directory
# containing one JSON document per file.
pool = multiprocessing.Pool()
pool.map(put_document, [
    os.path.join(sys.argv[1], id)
    for id in os.listdir(sys.argv[1])])
| mit | Python | |
def fun():
    """Print a short introduction and return the string "end"."""
    name = "av"
    # int(19) was a redundant conversion of an int literal.
    age = 19
    print("%s is %d years old."% (name, age))
    return "end"
print(fun())
| mit | Python | |
def some_function(name):
    """Return a friendly greeting for *name*."""
    return "Hello, %s!" % name
ff2b86d90ecbc2da25ddc05b0430555861104cac | Add an example for HybridContentsManager. | quantopian/pgcontents | examples/hybrid_manager_example.py | examples/hybrid_manager_example.py | # This example shows how to configure Jupyter/IPython to use the more complex
# HybridContentsManager.
# A HybridContentsManager implements the contents API by delegating requests to
# other contents managers. Each sub-manager is associated with a root
# directory, and all requests for data within that directory are routed to the
# sub-manager.
# A HybridContentsManager needs two pieces of information at configuration time:
# 1. ``manager_classes``, a map from root directory to the type of contents
# manager to use for that root directory.
# 2. ``manager_kwargs``, a map from root directory to a dict of keywords to
# pass to the associated sub-manager.
from pgcontents.pgmanager import PostgresContentsManager
from pgcontents.hybridmanager import HybridContentsManager
# Using Jupyter (IPython >= 4.0).
# from notebook.services.contents.filemanager import FileContentsManager
# Using Legacy IPython.
from IPython.html.services.contents.filemanager import FileContentsManager
c = get_config()
c.NotebookApp.contents_manager_class = HybridContentsManager
c.HybridContentsManager.manager_classes = {
# Associate the root directory with a PostgresContentsManager.
# This manager will receive all requests that don't fall under any of the
# other managers.
'': PostgresContentsManager,
# Associate /directory with a FileContentsManager.
'directory': FileContentsManager,
# Associate /other_directory with another FileContentsManager.
'other_directory': FileContentsManager,
}
c.HybridContentsManager.manager_kwargs = {
# Args for root PostgresContentsManager.
'': {
'db_url': 'postgresql://ssanderson@/pgcontents_testing',
'user_id': 'my_awesome_username',
'max_file_size_bytes': 1000000, # Optional
},
# Args for the FileContentsManager mapped to /directory
'directory': {
'root_dir': '/home/ssanderson/some_local_directory',
},
# Args for the FileContentsManager mapped to /other_directory
'other_directory': {
'root_dir': '/home/ssanderson/some_other_local_directory',
}
}
| apache-2.0 | Python | |
b53eec31cdb77690bebf17427a30ba6e36156cad | Add some tests for DestroyPool(). | stratis-storage/stratis-cli,stratis-storage/stratis-cli | tests/dbus/manager/test_destroy.py | tests/dbus/manager/test_destroy.py | # Copyright 2016 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Test DestroyPool.
"""
import time
import unittest
from stratis_cli._constants import TOP_OBJECT
from stratis_cli._dbus import Manager
from stratis_cli._dbus import Pool
from stratis_cli._dbus import get_object
from stratis_cli._stratisd_constants import StratisdErrorsGen
from .._constants import _DEVICES
from .._misc import _device_list
from .._misc import Service
class Destroy1TestCase(unittest.TestCase):
"""
Test 'destroy' on empty database.
'destroy' should always succeed on an empty database.
"""
_POOLNAME = 'deadpool'
def setUp(self):
"""
Start the stratisd daemon with the simulator.
"""
self._service = Service()
self._service.setUp()
time.sleep(1)
self._proxy = get_object(TOP_OBJECT)
def tearDown(self):
"""
Stop the stratisd simulator and daemon.
"""
self._service.tearDown()
def testExecution(self):
"""
Destroy should succeed.
"""
(rc, message) = Manager(self._proxy).DestroyPool(
self._POOLNAME
)
self.assertEqual(rc, StratisdErrorsGen.get_object().STRATIS_OK)
self.assertEqual(type(rc), int)
self.assertEqual(type(message), str)
(_, rc1, _) = Manager(self._proxy).GetPoolObjectPath(
self._POOLNAME
)
expected_rc = StratisdErrorsGen.get_object().STRATIS_POOL_NOTFOUND
self.assertEqual(rc1, expected_rc)
class Destroy2TestCase(unittest.TestCase):
"""
Test 'destroy' on database which contains the given pool.
"""
_POOLNAME = 'deadpool'
def setUp(self):
"""
Start the stratisd daemon with the simulator.
"""
self._service = Service()
self._service.setUp()
time.sleep(1)
self._proxy = get_object(TOP_OBJECT)
Manager(self._proxy).CreatePool(
self._POOLNAME,
[d.device_node for d in _device_list(_DEVICES, 1)],
0
)
def tearDown(self):
"""
Stop the stratisd simulator and daemon.
"""
self._service.tearDown()
def testExecution(self):
"""
The pool was just created, so must be destroyable.
"""
(rc, message) = Manager(self._proxy).DestroyPool(
self._POOLNAME
)
self.assertEqual(rc, StratisdErrorsGen.get_object().STRATIS_OK)
self.assertEqual(type(rc), int)
self.assertEqual(type(message), str)
(_, rc1, _) = Manager(self._proxy).GetPoolObjectPath(
self._POOLNAME
)
expected_rc = StratisdErrorsGen.get_object().STRATIS_POOL_NOTFOUND
self.assertEqual(rc1, expected_rc)
class Destroy3TestCase(unittest.TestCase):
"""
Test 'destroy' on database which contains the given pool and a volume.
"""
_POOLNAME = 'deadpool'
_VOLNAME = 'vol'
def setUp(self):
"""
Start the stratisd daemon with the simulator.
Create a pool and a filesystem.
"""
self._service = Service()
self._service.setUp()
time.sleep(1)
self._proxy = get_object(TOP_OBJECT)
(poolpath, _, _) = Manager(self._proxy).CreatePool(
self._POOLNAME,
[d.device_node for d in _device_list(_DEVICES, 1)],
0
)
(_, _, _) = Pool(get_object(poolpath)).CreateVolumes(
[(self._VOLNAME, '', '')]
)
def tearDown(self):
"""
Stop the stratisd simulator and daemon.
"""
self._service.tearDown()
@unittest.expectedFailure
def testExecution(self):
"""
This should fail since it has a filesystem on it.
"""
(rc, message) = Manager(self._proxy).DestroyPool(
self._POOLNAME
)
self.assertNotEqual(rc, StratisdErrorsGen.get_object().STRATIS_OK)
self.assertEqual(type(rc), int)
self.assertEqual(type(message), str)
| apache-2.0 | Python | |
d68109c2fb7bb324c93506d26a1a7cf996134da3 | Allow `soulmate_finder` to be imported | erkghlerngm44/r-anime-soulmate-finder | soulmate_finder/__init__.py | soulmate_finder/__init__.py | # allow `soulmate_finder` to be imported
# FIXME: This is bad
from .__main__ import *
| mit | Python | |
79fe576ec71552633c7e5a2a646567beecfa3b5b | Add a test | KaiSforza/pywer | test/pkgbuildtest.py | test/pkgbuildtest.py | #!/usr/bin/env python3
import sys
import os
import unittest
sys.path[0] = os.path.abspath('..')
import libaur.PKGBUILD as P
class PkgbuildTest(unittest.TestCase):
KNOWN_VALUES = [
('''pkgname=foobar\n''',
{'pkgname':['foobar']}),
('''pkgname=(foobar)\n''',
{'pkgname':['foobar']}),
('''pkgname=('foobar' 'pacman')\n''',
{'pkgname':['foobar', 'pacman']}),
# This one fails. Need better parsing for non-quoted strings
#('''pkgname=(foobar pacman)\n''',
# {'pkgname':['foobar', 'pacman']}),
('''pkgver=123
456
''', {'pkgver':['123']}),
('''depends=('foobar' 'pacman')\n''',
{'depends':['foobar', 'pacman']}),
('''depends=("foobar" 'pacman')\n''',
{'depends':['foobar', 'pacman']}),
('''depends=("foobar" "pacman")\n''',
{'depends':['foobar', 'pacman']}),
('''depends=(
'foobar'
'pacman')\n''',
{'depends':['foobar', 'pacman']}),
('''depends=(
'foobar'
'pacman'
)\n''',
{'depends':['foobar', 'pacman']}),
('''depends=(
# One dep
'foobar'
# Two dep
'pacman'
)\n''',
{'depends':['foobar', 'pacman']}),
('''source=(git://foobar.git#branch=git
)\n''',
{'source':['git://foobar.git#branch=git']}),
('''source=(git://foobar.git#some comment
)\n''',
{'source':['git://foobar.git']}),
]
def test_known_values(self):
'''parse_pkgbuild should return the values listed above'''
for pkgbuild, output in self.KNOWN_VALUES:
# This is a default .Add it to the known output dictionary.
output['epoch'] = ['0']
self.assertDictEqual(P.parse_pkgbuild(full_str=pkgbuild), output)
if __name__ == '__main__':
unittest.main()
| mit | Python | |
44a785b456ad1d1bd9c866b79cadaec4c1d5bab5 | Add sample template file (#24) | tianhao64/vsphere-automation-sdk-python,tianhao64/vsphere-automation-sdk-python,pgbidkar/vsphere-automation-sdk-python,pgbidkar/vsphere-automation-sdk-python | samples/vsphere/common/sample_template.py | samples/vsphere/common/sample_template.py | #!/usr/bin/env python
"""
* *******************************************************
* Copyright (c) VMware, Inc. 2017. All Rights Reserved.
* SPDX-License-Identifier: MIT
* *******************************************************
*
* DISCLAIMER. THIS PROGRAM IS PROVIDED TO YOU "AS IS" WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, WHETHER ORAL OR WRITTEN,
* EXPRESS OR IMPLIED. THE AUTHOR SPECIFICALLY DISCLAIMS ANY IMPLIED
* WARRANTIES OR CONDITIONS OF MERCHANTABILITY, SATISFACTORY QUALITY,
* NON-INFRINGEMENT AND FITNESS FOR A PARTICULAR PURPOSE.
"""
__author__ = 'VMware, Inc.'
__vcenter_version__ = '6.5+'
import atexit
from com.vmware.vcenter_client import VM
from samples.vsphere.common import vapiconnect
from samples.vsphere.common.sample_util import parse_cli_args
class Sample:
"""
Demonstrates getting list of VMs present in vCenter
Sample Prerequisites:
- vCenter
"""
def __init__(self):
self.vm_service = None # Service used by the sample code.
self.cleardata = None
def setup(self):
server, username, password, cleardata, skip_verification = \
parse_cli_args()
stub_config = vapiconnect.connect(server, username, password,
skip_verification)
self.vm_service = VM(stub_config)
self.cleardata = cleardata
atexit.register(vapiconnect.logout, stub_config)
def run(self):
vms = self.vm_service.list()
print(vms)
def cleanup(self):
if self.cleardata:
pass
def main():
sample = Sample()
sample.setup()
sample.run()
sample.cleanup()
# Start program
if __name__ == '__main__':
main()
| mit | Python | |
7d5407a98ef8b0d025532d9675b7109a4c4713f4 | add send sms python post code | fullstackpython/blog-code-examples,fullstackpython/blog-code-examples,fullstackpython/blog-code-examples | send-sms-text-messages-python/send_sms.py | send-sms-text-messages-python/send_sms.py | # we import the Twilio client from the dependency we just installed
from twilio.rest import TwilioRestClient
# the following line needs your Twilio Account SID and Auth Token
client = TwilioRestClient("ACxxxxxxxxxxxxxx", "zzzzzzzzzzzzz")
# change the "from_" number to your Twilio number and the "to" number
# to the phone number you signed up for Twilio with, or upgrade your
# account to send SMS to any phone number
client.messages.create(to="+19732644152", from_="+12023358536",
body="Hello from Python!")
| mit | Python | |
9e200c1e5d666c3ab151c96fcc1190c70ddcb02c | Add pagination utility. | kurtraschke/cadors-parse,kurtraschke/cadors-parse | src/cadorsfeed/views/pagination.py | src/cadorsfeed/views/pagination.py | from werkzeug import cached_property
from flask import url_for
class Pagination(object):
def __init__(self, db, key, per_page, page, endpoint):
self.db = db
self.query = key
self.per_page = per_page
self.page = page
self.endpoint = endpoint
@cached_property
def count(self):
return self.db.zcard(self.query)
@cached_property
def entries(self):
start = (self.page - 1) * self.per_page
return self.db.zrevrange(self.query, start, (start + self.per_page) - 1)
has_previous = property(lambda x: x.page > 1)
has_next = property(lambda x: x.page < x.pages)
previous = property(lambda x: url_for(x.endpoint, page=x.page - 1))
next = property(lambda x: url_for(x.endpoint, page=x.page + 1))
pages = property(lambda x: max(0, x.count - 1) // x.per_page + 1)
| mit | Python | |
9f3c1ee68e6bb40c519e898a8a5aedd1ec42bc1f | add radar proxy | MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI,MaxMorgenstern/EmeraldAI | testing/ROS/serial_reader_radar.py | testing/ROS/serial_reader_radar.py | #!/usr/bin/env python
from __future__ import division
import serial
import rospy
import os
import sys
import math
import tf
from sensor_msgs.msg import Range
def RadianToDegree(rad):
return (rad * 4068) / 71.0
def DegreeToRadian(deg):
return (deg * 71) / 4068.0
if __name__=="__main__":
uid = str(os.getpid())
rospy.init_node("serial_reader_{0}".format(uid))
rospy.loginfo("ROS Serial Python Node '{0}'".format(uid))
ultasonicPubFront = rospy.Publisher('/radar/Ultrasonic/Front', Range, queue_size=10)
ultasonicPubBack = rospy.Publisher('/radar/Ultrasonic/Back', Range, queue_size=10)
transformBroadcaster = tf.TransformBroadcaster()
port_name = "/dev/ttyUSB0"
#baud = 57600 # 230400
baud = 230400
if len(sys.argv) >= 2 :
port_name = sys.argv[1]
if len(sys.argv) >= 3 :
baud = int(sys.argv[2])
ser = serial.Serial(port_name, baud)
rangeParentFrameID = "/radar_ultrasonic_mount"
rangeFrameID = "/radar_ultrasonic_{0}"
rangeMsg = Range()
rangeMsg.radiation_type = 0
rangeMsg.min_range = 0.05
rangeMsg.max_range = 2.50
rangeMsg.field_of_view = (math.pi/4/45*10) # 10deg
rangeMsg.radiation_type = 0
while True:
line = ser.readline().rstrip()
if(len(line) <= 1):
continue
data = line.split("|")
if(len(data) <= 1):
continue
#print data
# we expect 4 values from the ultrasonic node
if(len(data) > 4):
continue
messageType = data[0]
moduleName = data[1].lower()
modulePosition = int(data[2])
moduleRange = int(data[3])
rangeMsg.header.frame_id = rangeFrameID.format(moduleName)
rangeMsg.range = moduleRange / 100.0
rangeMsg.header.stamp = rospy.Time.now()
#rospy.loginfo(rangeMsg)
if moduleName == "front":
ultasonicPubFront.publish(rangeMsg)
if moduleName == "back":
ultasonicPubBack.publish(rangeMsg)
# translation (x,y,z), rotation(yaw-pitch-roll (ZYX) ), time, child, parent
transformBroadcaster.sendTransform((0, 0, 0),
tf.transformations.quaternion_from_euler(0, 0, DegreeToRadian(modulePosition)),
rospy.Time.now(),
rangeMsg.header.frame_id,
rangeParentFrameID)
print "Bye!"
| apache-2.0 | Python | |
c03b1c16938572aea70dd22b838459aeec585b0d | add tests for `dvc tag` | dmpetrov/dataversioncontrol,efiop/dvc,dmpetrov/dataversioncontrol,dataversioncontrol/dvc,efiop/dvc,dataversioncontrol/dvc | tests/test_tag.py | tests/test_tag.py | import os
import shutil
import filecmp
from dvc.main import main
from dvc.logger import logger
from tests.basic_env import TestDvc
from tests.utils import reset_logger_standard_output, reset_logger_error_output
from tests.utils.logger import MockLoggerHandlers, ConsoleFontColorsRemover
class TestTag(TestDvc):
def test(self):
fname = "file"
shutil.copyfile(self.FOO, fname)
stages = self.dvc.add(fname)
self.assertEqual(len(stages), 1)
stage = stages[0]
self.assertTrue(stage is not None)
ret = main(["tag", "add", "v1", stage.path])
self.assertEqual(ret, 0)
os.unlink(fname)
shutil.copyfile(self.BAR, fname)
ret = main(["repro", stage.path])
self.assertEqual(ret, 0)
ret = main(["tag", "add", "v2", stage.path])
self.assertEqual(ret, 0)
ret = main(["tag", "list", stage.path])
self.assertEqual(ret, 0)
ret = main(["checkout", stage.path + "@v1"])
self.assertEqual(ret, 0)
self.assertTrue(filecmp.cmp(fname, self.FOO, shallow=False))
ret = main(["checkout", stage.path + "@v2"])
self.assertEqual(ret, 0)
self.assertTrue(filecmp.cmp(fname, self.BAR, shallow=False))
class TestTagAll(TestDvc):
def test(self):
ret = main(["add", self.FOO, self.BAR])
self.assertEqual(ret, 0)
with MockLoggerHandlers(logger), ConsoleFontColorsRemover():
reset_logger_standard_output()
ret = main(["tag", "list"])
self.assertEqual(ret, 0)
self.assertEqual("", logger.handlers[0].stream.getvalue())
ret = main(["tag", "add", "v1"])
self.assertEqual(ret, 0)
with MockLoggerHandlers(logger), ConsoleFontColorsRemover():
reset_logger_standard_output()
ret = main(["tag", "list"])
self.assertEqual(ret, 0)
self.assertEqual(
logger.handlers[0].stream.getvalue(),
"bar.dvc:\n"
" bar:\n"
" v1:\n"
" md5: 8978c98bb5a48c2fb5f2c4c905768afa\n"
"foo.dvc:\n"
" foo:\n"
" v1:\n"
" md5: acbd18db4cc2f85cedef654fccc4a4d8\n"
"\n",
)
ret = main(["tag", "remove", "v1"])
self.assertEqual(ret, 0)
with MockLoggerHandlers(logger), ConsoleFontColorsRemover():
reset_logger_standard_output()
ret = main(["tag", "list"])
self.assertEqual(ret, 0)
self.assertEqual("", logger.handlers[0].stream.getvalue())
class TestTagAddNoChecksumInfo(TestDvc):
def test(self):
ret = main(["run", "-o", self.FOO, "--no-exec"])
self.assertEqual(ret, 0)
with MockLoggerHandlers(logger), ConsoleFontColorsRemover():
reset_logger_error_output()
ret = main(["tag", "add", "v1", "foo.dvc"])
self.assertEqual(ret, 0)
self.assertEqual(
"Warning: missing checksum info for 'foo'\n",
logger.handlers[1].stream.getvalue(),
)
class TestTagRemoveNoTag(TestDvc):
def test(self):
ret = main(["add", self.FOO])
self.assertEqual(ret, 0)
with MockLoggerHandlers(logger), ConsoleFontColorsRemover():
reset_logger_error_output()
ret = main(["tag", "remove", "v1", "foo.dvc"])
self.assertEqual(ret, 0)
self.assertEqual(
"Warning: tag 'v1' not found for 'foo'\n",
logger.handlers[1].stream.getvalue(),
)
| apache-2.0 | Python | |
3506cb01b0ce03d834c61ff28dd5d35785b999d3 | add initial coverage implementation (#39) | crobby/oshinko-cli,radanalyticsio/oshinko-cli,radanalyticsio/oshinko-cli,radanalyticsio/oshinko-cli,tmckayus/oshinko-cli,tmckayus/oshinko-cli,crobby/oshinko-cli,crobby/oshinko-cli,tmckayus/oshinko-cli | tools/coverage.py | tools/coverage.py | #!/bin/env python
"""coverage.py
This script is for checking the code coverage of unit tests in the
oshinko-rest project. It is meant to be invoked from the top level of the
repository.
Example invocation:
$ tools/coverage.py -h
"""
import argparse
import copy
import re
import subprocess
oshinko_repo = 'github.com/redhatanalytics/oshinko-rest/'
oshinko_test_package = oshinko_repo + 'tests/unit'
coverage_packages = [
'handlers',
'helpers/authentication',
'helpers/containers',
'helpers/deploymentconfigs',
'helpers/info',
'helpers/logging',
'helpers/podtemplates',
'helpers/services',
'helpers/uuid',
'version',
]
def main(args):
    # Run `go test -coverpkg ...` for the oshinko packages and print the
    # coverage percentage parsed from the tool's output.
    def run_and_print(cmd):
        # Launch the test command and extract the first "NN.N%" figure
        # from its stdout; print " unknown" when no percentage appears.
        proc = subprocess.Popen(cmd,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        # NOTE(review): under Python 3 proc.stdout.read() returns bytes,
        # so this str-pattern search would raise TypeError -- the script
        # appears to target Python 2; confirm before porting.
        match = re.search('[0-9]{1,3}\.[0-9]%', proc.stdout.read())
        if match is not None:
            print(' ' + match.group(0))
        else:
            print(' unknown')
    print('starting coverage scan')
    base_cmd = ['go', 'test']
    if args.coverprofile is not None:
        base_cmd = base_cmd + ['-coverprofile', args.coverprofile]
    if args.individual is True:
        # One `go test` invocation per package, each with its own report.
        for pkg in coverage_packages:
            print(' - scanning ' + pkg)
            cmd = base_cmd + ['-coverpkg', oshinko_repo+pkg,
                              oshinko_test_package]
            run_and_print(cmd)
    else:
        # Single invocation covering every package at once.
        print(' - scanning all packages')
        pkg_list = ','.join([oshinko_repo+p for p in coverage_packages])
        cmd = base_cmd + ['-coverpkg', pkg_list, oshinko_test_package]
        run_and_print(cmd)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Run coverage analysis.')
parser.add_argument('-i', '--individual', dest='individual',
action='store_true',
help='Print coverage analysis for each package.')
parser.add_argument('-c', '--coverprofile', dest='coverprofile',
action='store',
help='Write coverage profile to this file.')
args = parser.parse_args()
main(args)
| apache-2.0 | Python | |
63eaadad7a5169ec6219d33f9b39ce27859684c2 | Add script to automate notebooks testing | openfisca/openfisca-tunisia,openfisca/openfisca-tunisia | notebooks/test_notebooks.py | notebooks/test_notebooks.py | # -*- coding: utf-8 -*-
'''
Checks notebook execution result.
Equal to this command + error management:
jupyter nbconvert --to notebook --execute --ExecutePreprocessor.timeout=60 --output executed_notebook.ipynb demo.ipynb
For jupyter configuration information, run: jupyter --path
'''
# Dependencies: nbformat, nbconvert, jupyter-client, ipykernel
import io
import nbformat
from nbconvert.preprocessors import ExecutePreprocessor
from nbconvert.preprocessors import CellExecutionError
notebook_filename = 'demo.ipynb'
run_path = '.'
notebook_filename_out = 'executed_notebook.ipynb'
with io.open(notebook_filename) as f:
nb = nbformat.read(f, as_version=4)
ep = ExecutePreprocessor(timeout=600, kernel_name='python')
try:
out = ep.preprocess(nb, {'metadata': {'path': run_path}})
except CellExecutionError:
out = None
msg = 'Error executing the notebook "%s".\n\n' % notebook_filename
msg += 'See notebook "%s" for the traceback.' % notebook_filename_out
print(msg)
raise
finally:
with io.open(notebook_filename_out, mode='wt') as f: # io.open avoids UnicodeEncodeError
nbformat.write(nb, f)
| agpl-3.0 | Python | |
37c151de6b2241e68f7287349b43f6dce1150093 | add an API module for core.ui | dmsurti/mayavi,alexandreleroux/mayavi,dmsurti/mayavi,liulion/mayavi,alexandreleroux/mayavi,liulion/mayavi | enthought/mayavi/core/ui/api.py | enthought/mayavi/core/ui/api.py | from enthought.mayavi.tools.mlab_scene_model import MlabSceneModel
from enthought.mayavi.core.ui.mayavi_scene import MayaviScene
from enthought.tvtk.pyface.scene_editor import SceneEditor
from enthought.mayavi.core.ui.engine_view import EngineView
from enthought.mayavi.core.ui.engine_rich_view import EngineRichView
| bsd-3-clause | Python | |
4fc098ade74a9b46f11937e229cba75b83f0c9a4 | Create Utils.py | abulbasar/machine-learning,abulbasar/machine-learning,abulbasar/machine-learning | Utils.py | Utils.py | class Batchable:
def __init__(self, X, y, batch_size = 256, seed = None):
import math
import numpy as np
if seed:
np.random.seed(seed)
idx = np.arange(X.shape[1])
np.random.shuffle(idx)
self.X = X[:, idx]
self.y = y[:, idx]
self.start = 0
self.batch_size = batch_size
self.num_batches = math.ceil(X.shape[0] / batch_size)
def next(self):
end = self.start + self.batch_size
if end > self.X.shape[1]:
end = self.X.shape[1] - 1
return self.X[:, self.start: (end + 1)], self.y[:, self.start: (end + 1)]
| apache-2.0 | Python | |
65bc771c1cff47fcd49486915a0ff27d635b4056 | Add Missionaries_and_cannibals_problem | ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,arijitkar98/al-go-rithms,arijitkar98/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,arijitkar98/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,arijitkar98/al-go-rithms,EUNIX-TRIX/al-go-rithms,Deepak345/al-go-rithms,Cnidarias/al-go-rithms,manikTharaka/al-go-rithms,Cnidarias/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,EUNIX-TRIX/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,manikTharaka/al-go-rithms,manikTharaka/al-go-rithms,EUNIX-TRIX/al-go-rithms,manikTharaka/al-go-rithms,manikTharaka/al-go-rithms,EUNIX-TRIX/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,arijitkar98/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,EUNIX-TRIX/al-go-rithms,EUNIX-TRIX/al-go-rithms,arijitkar98/al-go-rithms,Cnidarias/al-go-rithms,arijitkar98/al-go-rithms,EUNIX-TRIX/al-go-rithms,ZoranPandovski/al-go-rithms,arijitkar98/al-go-rithms,Cnidarias/al-go-rithms,arijitkar98/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,arijitkar98/al-go-rithms,Deepak345/al-go-rithms,EUNIX-TRIX/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,Deepak345/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski
/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,EUNIX-TRIX/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,Cnidarias/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms | data_structures/dictionary/missionaries_and_cannibals_problem.py | data_structures/dictionary/missionaries_and_cannibals_problem.py | shore = {1:['m1','m2','m3','c1','c2','c3'],2:[]}
boat = {1:True,2:False}
boat_cap = 0
boat_hold = []
choice = 'y'
count = 0
glob = 1
def pick():
    # Prompt for the next passenger id (e.g. 'm1' or 'c2'); an empty
    # string means "no more passengers for this trip".
    print("Pick a person to put on boat/or press enter")
    return input()
def check(person, flag, avail_p):
    """Return True when *person* is an acceptable pick: the empty string
    (skip), someone on the active shore, or someone already on the boat.

    ``avail_p`` is accepted for signature compatibility but unused, as in
    the original.
    """
    return (person == ""
            or person in shore[flag]
            or person in boat_hold)
def check_conditions(shore, flag, cflag, boatf):
    """Return a truthy value when the proposed move is legal.

    A side is safe when it holds no missionaries at all, or at least as
    many missionaries as cannibals.  The original rejected any side
    where cannibals outnumbered missionaries even when *zero*
    missionaries were present, which made the classic puzzle unsolvable
    (the standard solution requires leaving cannibals alone); it also
    printed debug counters on every call.  Returns a bool instead of the
    original False/1, which callers use only for truthiness.
    """
    def _is_safe(people):
        # People are identified by ids like 'm1' / 'c2'; the first
        # character distinguishes missionaries from cannibals.
        missionaries = sum(1 for person in people if person[0] == 'm')
        cannibals = len(people) - missionaries
        return missionaries == 0 or missionaries >= cannibals

    # The departing shore keeps shore[flag]; the arriving side will hold
    # its current occupants plus everyone on the boat.
    return _is_safe(shore[flag]) and _is_safe(shore[cflag] + boatf)
while(choice == 'Y' or choice == 'y'):
count = 0
win = 0
while(count <= 25):
def again(shore,boat_hold):
print("People On Shore "+str(flag)+ " are: ")
print()
print(shore[flag] + boat_hold)
shore[flag] = shore[flag] + boat_hold
avail_p = shore[flag] + boat_hold
boat_hold =[]
for i in range(2):
print("For Person " + str(i+1))
print()
person = pick()
if(i == 0 and person == ""):
while(person == ""):
print("Boat Cannot Be Empty")
person = pick()
if((not person == "") and person in avail_p):
boat_hold.append(person)
del avail_p[avail_p.index(person)]
del shore[flag][shore[flag].index(person)]
elif(person not in avail_p and not person == ""):
print("Invalid Choice")
person = pick()
while(not check(person,flag,avail_p)):
person = pick()
if(person == ""):
break
glob = 0
return boat_hold
if(boat[1]):
flag = 1
cflag = 2
else:
flag = 2
cflag = 1
if(glob == 1):
boat_hold = again(shore,boat_hold)
while(not check_conditions(shore,flag,cflag,boat_hold)):
print()
print("** Invalid Move,Try Again **")
print()
boat_hold = again(shore,boat_hold)
print("Sending Boat")
print()
avail_p = shore[cflag] + boat_hold
boat[cflag] = True
boat[flag] = False
glob = 1
if(len(shore[1]) == 0):
win = 1
print("Congratulation, You Solved The Problem.")
break
count = count + 1
if(win == 0):
print()
print("*** Sorry, Number of moves exceeeded ***")
print()
print("Do You Want To Replay? Y/N",end=" ")
choice = input()
| cc0-1.0 | Python | |
87564dcb55e1130a0c4c306a241649d9f9bfd378 | Add categorize_forwarded_files_from_Sumologic.py | BinhMisfit/Sumologic-plugins | categorize_forwarded_files_from_Sumologic.py | categorize_forwarded_files_from_Sumologic.py | # --------------------------------------------------
# Author: Binh Nguyen
# Email: "binh@misfitwearables.com" or ntbinhptnk@gmail.com
# Feel free to ask me any question.
# --------------------------------------------------
# Description:
# When working with Sumologic, one usually uses the feature "Data Forwarding" for backing log data up.
# For example:
# + Data Forwarding
# Data Forwarding is active and uploading to your AWS S3 bucket.
# Bucket Name: my_sumologic_logs
# Description: Forward all logs from all collectors to your AWS S3 bucket.
# Status Active
# + Each log file forwarded to the bucket "my_sumologic_logs" has the following name format:
# 1393151887000-1393151889000--9223372036853041134.csv.gz
# + How can we categorize these logs:
# - Firstly, we can group all log files by day. For example, all log files from the day 2014/07/31 will store in a folder "2014-07-31"
# - Secondly, we will unzip them and store them in another folder named "unzipped_logs/2014-07-31"
import boto
import pymongo
import traceback
import sys
import bson
from datetime import *
import os
from collections import OrderedDict
from datetime import datetime, timedelta
from optparse import OptionParser
import re
import shutil
import getopt
import ast
import gzip
import ast
import os.path
import datetime
from boto.s3.connection import S3Connection
import StringIO
import cStringIO
import gzip
from boto.s3.key import Key
S3_ACCESS_KEY="your_S3_access_key"
S3_SECRET_KEY="your_S3_secret_key"
def parsing_forwarded_filename(filename):
    """Return every standalone run of digits in *filename*, left to right."""
    pattern = re.compile(r'\b\d+\b')
    return [match.group(0) for match in pattern.finditer(filename)]
def date_info(filepath):
    # Input: s3://my_sumologic_logs/1393151887000-1393151889000--9223372036853041134.csv.gz -> Output: from timestamp 1393151887000 -> Date ?
    # The third-from-last digit run is the file's start timestamp in
    # epoch *milliseconds*; format it as YYYY-MM-DD.
    # NOTE(review): fromtimestamp() uses the host's local timezone, so
    # the day a log lands in depends on TZ -- confirm that is intended.
    numeric_words=parsing_forwarded_filename(filepath)
    size=len(numeric_words)
    return datetime.datetime.fromtimestamp(int(numeric_words[size-3])/1000).strftime('%Y-%m-%d')
def copy_file(src_bucket_name, dst_bucket_name, filekey, newfilekey, connection):
    """Server-side copy of *filekey* from the source bucket into the
    destination bucket under *newfilekey*.

    copy_key() only needs the source bucket's *name*, so the original's
    unused get_bucket() call on the source (an extra S3 round-trip) was
    dropped.
    """
    dst_bucket = connection.get_bucket(dst_bucket_name)
    dst_bucket.copy_key(newfilekey, src_bucket_name, filekey)
def move_file(src_bucket_name,dst_bucket_name,filekey,newfilekey,connection):
    # Copy *filekey* into the destination bucket, then delete the
    # original -- an S3 "move".  Not atomic: a failure between the copy
    # and the delete leaves the object present in both buckets.
    src_bucket = connection.get_bucket(src_bucket_name)
    dst_bucket = connection.get_bucket(dst_bucket_name)
    dst_bucket.copy_key(newfilekey, src_bucket_name, filekey)
    src_bucket.delete_key(filekey)
def extract_filename(filepath):
    """Rebuild the forwarded log's ``.csv.gz`` file name from the last
    three digit runs in *filepath* (start-ms, end-ms, id)."""
    parts = parsing_forwarded_filename(filepath)[-3:]
    return parts[0] + '-' + parts[1] + '--' + parts[2] + '.csv.gz'
def extract_unzipped_filename(filepath):
    """Rebuild the decompressed ``.csv`` file name from the last three
    digit runs in *filepath* (start-ms, end-ms, id)."""
    parts = parsing_forwarded_filename(filepath)[-3:]
    return parts[0] + '-' + parts[1] + '--' + parts[2] + '.csv'
def S3_decompress(connection,src_bucket_name,srcFileKey,dst_bucket_name,dstFileKey):
    # Download a gzipped S3 object into memory, decompress it, and
    # upload the plain contents under dstFileKey in the target bucket.
    # NOTE(review): the object is buffered in RAM twice (compressed and
    # decompressed) -- fine for small log chunks, risky for large ones.
    src_bucket = connection.get_bucket(src_bucket_name)
    dst_bucket = connection.get_bucket(dst_bucket_name)
    src_key=src_bucket.get_key(srcFileKey)
    f = cStringIO.StringIO()
    src_key.get_file(f)
    f.seek(0) #This is crucial: rewind before gzip reads the buffer
    gzf = gzip.GzipFile(fileobj=f)
    file_content = gzf.read()
    dst_key=dst_bucket.new_key(dstFileKey)
    dst_key.set_contents_from_string(file_content)
    gzf.close()
    f.close()
def check_not_empty_bucket(conn, bucket_name):
    """Return 1 if the bucket contains at least one key, else 0.

    Short-circuits on the first key instead of walking the entire
    listing as the original did (O(n) over every key in the bucket).
    """
    bucket = conn.get_bucket(bucket_name)
    for _ in bucket.list():
        return 1
    return 0
if __name__ == "__main__":
source_bucket="my_sumologic_logs"
target_bucket="my_categorized_sumologic_logs_by_day"
uncompress_folder="unzipped_logs"
conn = boto.connect_s3(S3_ACCESS_KEY,S3_SECRET_KEY)
bucket = conn.lookup(source_bucket)
#check_empty_bucket="test_empty"
#while check_not_empty_bucket(conn,source_bucket):
while True:
bucket = conn.lookup(source_bucket)
for key in bucket:
srcFilePath=key.name
dest_folder=''.join([uncompress_folder,"/",date_info(key.name)])
srcFileName=extract_filename(srcFilePath)
dest_FileKey=''.join([dest_folder,"/",extract_unzipped_filename(srcFilePath)])
S3_decompress(conn,source_bucket,srcFilePath,target_bucket,dest_FileKey)
dest_move_FileKey=''.join([date_info(key.name),"/",extract_filename(srcFilePath)])
move_file(source_bucket,target_bucket,srcFilePath,dest_move_FileKey,conn)
| mit | Python | |
4083cac3c0ec107df68cdecb8fc52c00e2684b08 | Add b3 format benchmark tests (#1489) | open-telemetry/opentelemetry-python,open-telemetry/opentelemetry-python | propagator/opentelemetry-propagator-b3/tests/performance/benchmarks/trace/propagation/test_benchmark_b3_format.py | propagator/opentelemetry-propagator-b3/tests/performance/benchmarks/trace/propagation/test_benchmark_b3_format.py | # Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import opentelemetry.propagators.b3 as b3_format
import opentelemetry.sdk.trace as trace
from opentelemetry.trace.propagation.textmap import DictGetter
FORMAT = b3_format.B3Format()
def test_extract_single_header(benchmark):
benchmark(
FORMAT.extract,
DictGetter(),
{
FORMAT.SINGLE_HEADER_KEY: "bdb5b63237ed38aea578af665aa5aa60-c32d953d73ad2251-1-11fd79a30b0896cd285b396ae102dd76"
},
)
def test_inject_empty_context(benchmark):
tracer = trace.TracerProvider().get_tracer("sdk_tracer_provider")
with tracer.start_as_current_span("Root Span"):
with tracer.start_as_current_span("Child Span"):
benchmark(
FORMAT.inject,
dict.__setitem__,
{
FORMAT.TRACE_ID_KEY: "bdb5b63237ed38aea578af665aa5aa60",
FORMAT.SPAN_ID_KEY: "00000000000000000c32d953d73ad225",
FORMAT.PARENT_SPAN_ID_KEY: "11fd79a30b0896cd285b396ae102dd76",
FORMAT.SAMPLED_KEY: "1",
},
)
| apache-2.0 | Python | |
4a5767c18b3d75420c5498341012fa98e74edba6 | Create 2string_join.py | avsingh999/Learn_python | string/2string_join.py | string/2string_join.py | s1="hello"
s2="world"
print(s1+s2)
print("\n"," ".join(s2),"\n")
print("length of s1=",len(s1))
| mit | Python | |
c1f02399ad3ce9c4009e297be58aa6e1f10337cb | Add utility getrecord.py to retrieve single records from lib | olympiag3/olypy,olympiag3/olypy,olympiag3/olypy,olympiag3/olypy | getrecord.py | getrecord.py | #!/usr/bin/python
#
# retrieve specified record from lib
#
import sys
import olypy.oio as oio
from olypy.oid import to_oid, to_int
import olypy.dbck as dbck
import pathlib
from jinja2 import Environment, PackageLoader, select_autoescape
from olymap.loc import build_complete_loc_dict
from olymap.ship import build_complete_ship_dict
from olymap.char import build_complete_char_dict
from olymap.item import build_complete_item_dict
from olymap.skill import build_complete_skill_dict
from olymap.storm import build_complete_storm_dict
from olymap.player import build_complete_player_dict
import olymap.utilities as u
import olymap.reports as reports
from olymap.maps import write_index, write_map_leaves, write_top_map, write_bitmap
from olymap.legacy import create_map_matrix, write_legacy_bitmap, write_legacy_top_map, write_legacy_map_leaves
inlib = sys.argv[1]
data = oio.read_lib(inlib)
dbck.check_db(data, fix=True, checknames=True)
rec_id = ' '
rec_id = input('Enter record id ("0" to exit): ')
while rec_id != '0':
try:
rec_id_conv = to_int(rec_id)
try:
print(data[rec_id_conv])
except:
print('Invalid key')
except:
print('Invalid key')
rec_id = input('Enter record id ("0" to exit): ')
| apache-2.0 | Python | |
502a95b4bcf54792b5755c9ea6f03a8f9572a271 | test resize | embali/imgpy | tests/test_resize.py | tests/test_resize.py | from tempfile import TemporaryFile
import pytest
from imgpy import Img
@pytest.mark.parametrize('image', ({
'sub': 'anima/bordered.gif',
'size': (100, 100)
}, {
'sub': 'anima/clear.gif',
'size': (100, 100)
}, {
'sub': 'fixed/bordered.jpg',
'size': (100, 100)
}, {
'sub': 'fixed/clear.jpg',
'size': (100, 100)
}, ))
def test_resize(path, image):
with Img(fp=path(image['sub'])) as src:
src.resize(image['size'])
with TemporaryFile() as tf:
src.save(fp=tf)
tf.seek(0)
with Img(fp=tf) as dest:
res = (dest.width, dest.height)
assert res == image['size']
| mit | Python | |
f4bb4d17214f4e359455cf7b5fb7ab973508049b | Add missing module for merge script | comphist/cora,comphist/cora,comphist/cora,comphist/cora,comphist/cora | bin/diffMatcher.py | bin/diffMatcher.py | #!/usr/bin/python
# coding=utf-8
import subprocess
class DiffMatcher(object):
def __init__(self, listA, listB):
self.listA = listA
self.listB = listB
def create_diff(self, listA, listB,case_sensitive):
new_list = []
#compare the two files
try:
if (case_sensitive):
#ignore case sensitiveness
inp = subprocess.check_output(['diff', '-iy', listA.name, listB.name])
else:
inp = subprocess.check_output(['diff', '-y', listA.name, listB.name])
# diff exits with 1 if outputs mismatch... grml
except subprocess.CalledProcessError, e:
inp = e.output
inp = inp.decode("utf-8").split("\n")
#create list of difference
for entry in inp:
g = entry.replace("\t"," ")
g = g.split()
new_list.append(g)
del new_list[-1]
return new_list
| mit | Python | |
7b0044d3ccb617e92ee8523be949966f1188c742 | add unittest | elkingtonmcb/nupic,arhik/nupic,cngo-github/nupic,blueburningcoder/nupic,numenta-ci/nupic,passiweinberger/nupic,neuroidss/nupic,mcanthony/nupic,lscheinkman/nupic,neuroidss/nupic,rcrowder/nupic,pap/nupic,go-bears/nupic,numenta-ci/nupic,breznak/nupic,SaganBolliger/nupic,glorizen/nupic,blueburningcoder/nupic,go-bears/nupic,runt18/nupic,badlogicmanpreet/nupic,metaml/nupic,fergalbyrne/nupic,alfonsokim/nupic,blueburningcoder/nupic,marionleborgne/nupic,cogmission/nupic,sambitgaan/nupic,BeiLuoShiMen/nupic,cogmission/nupic,subutai/nupic,pap/nupic,subutai/nupic,elkingtonmcb/nupic,pulinagrawal/nupic,neuroidss/nupic,eranchetz/nupic,rayNymous/nupic,fergalbyrne/nupic,rayNymous/nupic,alfonsokim/nupic,vitaly-krugl/nupic,scottpurdy/nupic,scottpurdy/nupic,virneo/nupic,breznak/nupic,go-bears/nupic,mcanthony/nupic,EricSB/nupic,rcrowder/nupic,ben-hopps/nupic,markneville/nupic,SaganBolliger/nupic,scottpurdy/nupic,elkingtonmcb/nupic,akhilaananthram/nupic,numenta-ci/nupic,cogmission/nupic,GeraldLoeffler/nupic,markneville/nupic,cngo-github/nupic,rhyolight/nupic,rhyolight/nupic,chanceraine/nupic,glorizen/nupic,pulinagrawal/nupic,marionleborgne/nupic,lscheinkman/nupic,brev/nupic,wanghaven/nupic,loretoparisi/nupic,numenta/nupic,wanghaven/nupic,lscheinkman/nupic,BoltzmannBrain/nupic,arhik/nupic,darshanthaker/nupic,breznak/nupic,EricSB/nupic,virneo/nupic,badlogicmanpreet/nupic,fergalbyrne/nupic,glorizen/nupic,darshanthaker/nupic,runt18/nupic,BoltzmannBrain/nupic,chen0031/nupic,chen0031/nupic,metaml/nupic,pap/nupic,allanino/nupic,vitaly-krugl/nupic,rcrowder/nupic,loretoparisi/nupic,jcasner/nupic,cngo-github/nupic,ywcui1990/nupic,alfonsokim/nupic,vamsirajendra/nupic,vitaly-krugl/nupic,jcasner/nupic,BoltzmannBrain/nupic,subutai/nupic,eranchetz/nupic,passiweinberger/nupic,rayNymous/nupic,akhilaananthram/nupic,passiweinberger/nupic,numenta/nupic,darshanthaker/nupic,markneville/nupic,allanino/nupic,sambitgaan/nupic,ywcui1990/nupic,SaganBolliger/
nupic,virneo/nupic,vamsirajendra/nupic,brev/nupic,arhik/nupic,BeiLuoShiMen/nupic,ben-hopps/nupic,chanceraine/nupic,pulinagrawal/nupic,sambitgaan/nupic,BeiLuoShiMen/nupic,marionleborgne/nupic,numenta/nupic,akhilaananthram/nupic,ywcui1990/nupic,rhyolight/nupic,eranchetz/nupic,ben-hopps/nupic,loretoparisi/nupic,runt18/nupic,GeraldLoeffler/nupic,jcasner/nupic,wanghaven/nupic,mcanthony/nupic,allanino/nupic,metaml/nupic,chanceraine/nupic,GeraldLoeffler/nupic,vamsirajendra/nupic,badlogicmanpreet/nupic,brev/nupic,chen0031/nupic,EricSB/nupic | tests/unit/py2/nupic/encoders/utility_test.py | tests/unit/py2/nupic/encoders/utility_test.py | #!/usr/bin/env python
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2013, Numenta, Inc. Unless you have purchased from
# Numenta, Inc. a separate commercial license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import unittest2 as unittest
from nupic.encoders.scalar import ScalarEncoder
from nupic.encoders.vector import VectorEncoder
from nupic.encoders.utility import UtilityEncoder
class UtilityEncoderTest(unittest.TestCase):
"""testing Utility encoder"""
def setUp(self):
self.data = [1,2,3]
# encoder for score: 0..100, fine-grained to 0.5
self.scoreEnc = ScalarEncoder(3, 0, 100, resolution=0.5, name='score')
# encoder for the input (data) part
elem = ScalarEncoder(1,0,3,resolution=1)
self.dataEnc = VectorEncoder(len(self.data), elem, typeCastFn=int, name='data')
# utility encoder
def sumAll(list):
return sum(list)
self.fn = sumAll
self.utilityEnc = None
def testInitialization(self):
"""creating a utility encoder"""
util = UtilityEncoder(self.dataEnc, self.scoreEnc, feval=self.fn, name='starter')
assert True==isinstance(util, UtilityEncoder)
##########################################################
if __name__ == '__main__':
unittest.main()
| agpl-3.0 | Python | |
2ac66bef27652dec67b90cb428031e4954da8e21 | Create download-search.py | DataViva/dataviva-scripts,DataViva/dataviva-scripts | lattes/download-search.py | lattes/download-search.py | import urllib2
import codecs
import os
try: os.makedirs('data')
except: pass
inc = 10000
offset = 0
limit = 211056
while (offset < 211057):
urlpath = 'http://buscatextual.cnpq.br/buscatextual/busca.do?metodo=forwardPaginaResultados®istros=' + str(offset) + ';' + str(inc) + '&query=%28+%2Bidx_particao%3A1+%2Bidx_nacionalidade%3Ae%29+or+%28+%2Bidx_particao%3A1+%2Bidx_nacionalidade%3Ab%29&analise=cv&tipoOrdenacao=null&paginaOrigem=index.do&mostrarScore=false&mostrarBandeira=true&modoIndAdhoc=null'
response = urllib2.urlopen(urlpath)
html = response.read().decode('ISO-8859-1') # read and decode the response
f = codecs.open('data/' + str(offset) + '-' + str(inc) + '.html', 'w', 'utf-8')
f.write(html)
f.close()
offset += inc
exit(1)
prog = re.compile("abreDetalhe\((.*?)\)")
for a in prog.finditer(html):
print a.group(1).split(',')
prog = re.compile("<img alt='(.*?)'")
for a in prog.finditer(html):
print a.group(1)
| mit | Python | |
6a95d0df59f5ab03cb8537014e0102e5300a544a | Add Docker driver | redixin/rally-ci,redixin/rally-ci,redixin/rally-ci,aarexer/rally-ci,aarexer/rally-ci | docker.py | docker.py |
import random, string
import sys, subprocess
from log import logging
LOG = logging.getLogger(__name__)
class Driver(object):
def __init__(self, name, dockerfilepath):
self.name = name
self.dockerfilepath = dockerfilepath
self.tag = "rallyci:" + dockerfilepath
self.number = 0
self.current = self.tag
self.names = []
def _run(self, cmd, stdout, stdin=None):
pipe = subprocess.Popen(cmd, stdin=stdin,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
for line in iter(pipe.stdout.readline, b''):
stdout.write(line)
return pipe.returncode
def build(self, stdout):
cmd = ["docker", "build", "-t", self.tag, self.dockerfilepath]
LOG.debug("Building image %r" % cmd)
return self._run(cmd, stdout)
def run(self, cmd, stdout, stdin=None):
name = "".join(random.sample(string.letters, 12))
self.names.append(name)
command = ["docker", "run", "--name", name]
if stdin:
command += ["-i"]
command += [self.current]
command += cmd.split(" ")
LOG.debug("Running command %r" % command)
returncode = self._run(command, stdout, stdin=stdin)
self.current = subprocess.check_output(
["docker", "commit", name]).strip()
return returncode
def cleanup(self):
for name in self.names:
subprocess.check_output(["docker", "rm", name])
subprocess.check_output(["docker", "rmi", self.current])
| apache-2.0 | Python | |
f3c622a3dc9573c8244bf01408c1ff4620080c99 | Create views module | mpiannucci/crosswynds-promo,mpiannucci/crosswynds-promo,mpiannucci/crosswynds-promo,mpiannucci/crosswynds-promo | views/__init__.py | views/__init__.py | mit | Python | ||
07148136d8dcc165fc72d3ef264d721c652db025 | Test cases for ZPool | Xaroth/libzfs-python,Xaroth/libzfs-python,Xaroth/libzfs-python | test/002_test_zpool.py | test/002_test_zpool.py | import unittest
import os
from .test_utils import _LibZFSHandleCase
from libzfs.zpool import ZPool, zpool_prop_t
LIBZFS_TEST_POOL = os.environ.get("LIBZFS_TEST_POOL", False)
@unittest.skipUnless(LIBZFS_TEST_POOL, "LIBZFS_TEST_POOL not set, so we do not test to a specific pool")
class Test_ZPool(_LibZFSHandleCase):
def test_001_iter_zpools(self):
pools = ZPool.list()
assert len(pools) > 0
def test_002_get_zpool(self):
pool = ZPool.get(name=LIBZFS_TEST_POOL)
assert pool is not None
def test_003_get_zpool_properties(self):
pool = ZPool.get(name=LIBZFS_TEST_POOL)
props = pool.properties
assert len(props.keys()) > 0
assert props.get(zpool_prop_t.ZPOOL_PROP_NAME) == LIBZFS_TEST_POOL
assert props.get(zpool_prop_t.ZPOOL_PROP_SIZE) > 0
def test_004_get_zpool_config(self):
pool = ZPool.get(name=LIBZFS_TEST_POOL)
config = pool.config
pool.refresh_stats()
oldconfig = pool.old_config
assert len(config.keys()) > 0
assert len(oldconfig.keys()) > 0
assert config == oldconfig
| mit | Python | |
ba8be59db72c958e2ff20b9ae7fe81c400b40f9c | Make start of ongoing and deadline activities just a date | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | bluebottle/time_based/migrations/0008_auto_20201023_1443.py | bluebottle/time_based/migrations/0008_auto_20201023_1443.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.17 on 2020-10-23 12:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('time_based', '0007_auto_20201023_1433'),
]
operations = [
migrations.AlterField(
model_name='ongoingactivity',
name='start',
field=models.DateField(blank=True, null=True, verbose_name='Start of activity'),
),
migrations.AlterField(
model_name='withadeadlineactivity',
name='start',
field=models.DateField(blank=True, null=True, verbose_name='Start of activity'),
),
]
| bsd-3-clause | Python | |
b869748e4bc0ee6986fa280aa69027aaf8607dcb | allow set_ev_handler without dispatchers | unifycore/ryu,diogommartins/ryu,torufuru/OFPatchPanel,jalilm/ryu,gopchandani/ryu,darjus-amzn/ryu,umkcdcrg01/ryu_openflow,elahejalalpour/ELRyu,openvapour/ryu,lzppp/mylearning,fkakuma/ryu,torufuru/oolhackathon,ykaneko/ryu,torufuru/oolhackathon,TakeshiTseng/ryu,alyosha1879/ryu,jazzmes/ryu,fujita/ryu,umkcdcrg01/ryu_openflow,ysywh/ryu,StephenKing/summerschool-2015-ryu,diogommartins/ryu,hisaharu/ryu,alanquillin/ryu,iwaseyusuke/ryu,samrussell/ryu,yamt/ryu,umkcdcrg01/ryu_openflow,torufuru/oolhackathon,sivaramakrishnansr/ryu,zyq001/ryu,pichuang/ryu,darjus-amzn/ryu,StephenKing/summerschool-2015-ryu,gareging/SDN_Framework,mikhaelharswanto/ryu,TakeshiTseng/ryu,osrg/ryu,hisaharu/ryu,ysywh/ryu,shinpeimuraoka/ryu,yamt/ryu,elahejalalpour/ELRyu,fujita/ryu,openvapour/ryu,ttsubo/ryu,takahashiminoru/ryu,umkcdcrg01/ryu_openflow,fkakuma/ryu,lsqtongxin/ryu,John-Lin/ryu,OpenState-SDN/ryu,ntts-clo/mld-ryu,alanquillin/ryu,OpenState-SDN/ryu,fkakuma/ryu,John-Lin/ryu,umkcdcrg01/ryu_openflow,ynkjm/ryu,darjus-amzn/ryu,gopchandani/ryu,haniehrajabi/ryu,TakeshiTseng/ryu,takahashiminoru/ryu,iwaseyusuke/ryu,ttsubo/ryu,zyq001/ryu,citrix-openstack-build/ryu,lsqtongxin/ryu,gareging/SDN_Framework,Zouyiran/ryu,osrg/ryu,pichuang/ryu,Zouyiran/ryu,osrg/ryu,lagopus/ryu-lagopus-ext,lagopus/ryu-lagopus-ext,zyq001/ryu,ykaneko/ryu,muzixing/ryu,diogommartins/ryu,Tesi-Luca-Davide/ryu,ynkjm/ryu,yamada-h/ryu,o3project/ryu-oe,haniehrajabi/ryu,Zouyiran/ryu,fujita/ryu,Zouyiran/ryu,lsqtongxin/ryu,osrg/ryu,sivaramakrishnansr/ryu,alanquillin/ryu,Tejas-Subramanya/RYU_MEC,lzppp/mylearning,TakeshiTseng/ryu,openvapour/ryu,torufuru/OFPatchPanel,gopchandani/ryu,zangree/ryu,elahejalalpour/ELRyu,evanscottgray/ryu,fkakuma/ryu,jazzmes/ryu,Zouyiran/ryu,ntts-clo/mld-ryu,StephenKing/summerschool-2015-ryu,iwaseyusuke/ryu,alyosha1879/ryu,StephenKing/summerschool-2015-ryu,pichuang/ryu,muzixing/ryu,lagopus/ryu-lagopus-ext,Tejas-Subramanya/RYU_MEC,jalilm/ry
u,sivaramakrishnansr/ryu,John-Lin/ryu,StephenKing/ryu,iwaseyusuke/ryu,Tejas-Subramanya/RYU_MEC,darjus-amzn/ryu,gareging/SDN_Framework,unifycore/ryu,Tesi-Luca-Davide/ryu,openvapour/ryu,Tejas-Subramanya/RYU_MEC,fkakuma/ryu,castroflavio/ryu,elahejalalpour/ELRyu,jkoelker/ryu,evanscottgray/ryu,yamt/ryu,Tesi-Luca-Davide/ryu,lzppp/mylearning,hisaharu/ryu,takahashiminoru/ryu,yamt/ryu,alanquillin/ryu,lagopus/ryu-lagopus-ext,castroflavio/ryu,TakeshiTseng/ryu,habibiefaried/ryu,haniehrajabi/ryu,StephenKing/summerschool-2015-ryu,John-Lin/ryu,evanscottgray/ryu,OpenState-SDN/ryu,zangree/ryu,zangree/ryu,takahashiminoru/ryu,haniehrajabi/ryu,habibiefaried/ryu,mikhaelharswanto/ryu,ysywh/ryu,takahashiminoru/ryu,zangree/ryu,haniehrajabi/ryu,o3project/ryu-oe,Tesi-Luca-Davide/ryu,sivaramakrishnansr/ryu,citrix-openstack/build-ryu,zyq001/ryu,StephenKing/ryu,samrussell/ryu,shinpeimuraoka/ryu,jazzmes/ryu,pichuang/ryu,ntts-clo/ryu,alyosha1879/ryu,lsqtongxin/ryu,habibiefaried/ryu,muzixing/ryu,citrix-openstack/build-ryu,alyosha1879/ryu,gopchandani/ryu,lzppp/mylearning,elahejalalpour/ELRyu,habibiefaried/ryu,ntts-clo/ryu,darjus-amzn/ryu,muzixing/ryu,hisaharu/ryu,ynkjm/ryu,StephenKing/ryu,John-Lin/ryu,iwaseyusuke/ryu,Tesi-Luca-Davide/ryu,jkoelker/ryu,OpenState-SDN/ryu,ttsubo/ryu,jalilm/ryu,sivaramakrishnansr/ryu,citrix-openstack-build/ryu,StephenKing/ryu,zangree/ryu,ttsubo/ryu,diogommartins/ryu,gareging/SDN_Framework,diogommartins/ryu,shinpeimuraoka/ryu,zyq001/ryu,openvapour/ryu,Tejas-Subramanya/RYU_MEC,shinpeimuraoka/ryu,habibiefaried/ryu,lzppp/mylearning,fujita/ryu,gareging/SDN_Framework,yamt/ryu,ysywh/ryu,jalilm/ryu,fujita/ryu,muzixing/ryu,ysywh/ryu,lagopus/ryu-lagopus-ext,ynkjm/ryu,jalilm/ryu,OpenState-SDN/ryu,yamada-h/ryu,osrg/ryu,hisaharu/ryu,lsqtongxin/ryu,ttsubo/ryu,alanquillin/ryu,gopchandani/ryu,StephenKing/ryu,jkoelker/ryu,ynkjm/ryu,pichuang/ryu,shinpeimuraoka/ryu,castroflavio/ryu | ryu/controller/handler.py | ryu/controller/handler.py | # Copyright (C) 2011, 2012 Nippon Telegraph and 
Telephone Corporation.
# Copyright (C) 2011, 2012 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import logging
from ryu.controller import ofp_event
LOG = logging.getLogger('ryu.controller.handler')
# just represent OF datapath state. datapath specific so should be moved.
HANDSHAKE_DISPATCHER = "handshake"
CONFIG_DISPATCHER = "config"
MAIN_DISPATCHER = "main"
DEAD_DISPATCHER = "dead"
# should be named something like 'observe_event'
def set_ev_cls(ev_cls, dispatchers=None):
def _set_ev_cls_dec(handler):
handler.ev_cls = ev_cls
handler.dispatchers = _listify(dispatchers)
handler.observer = ev_cls.__module__
return handler
return _set_ev_cls_dec
def set_ev_handler(ev_cls, dispatchers=None):
def _set_ev_cls_dec(handler):
handler.ev_cls = ev_cls
handler.dispatchers = _listify(dispatchers)
return handler
return _set_ev_cls_dec
def _is_ev_cls(meth):
return hasattr(meth, 'ev_cls')
def _listify(may_list):
if may_list is None:
may_list = []
if not isinstance(may_list, list):
may_list = [may_list]
return may_list
def register_instance(i):
for _k, m in inspect.getmembers(i, inspect.ismethod):
# LOG.debug('instance %s k %s m %s', i, _k, m)
if _is_ev_cls(m):
i.register_handler(m.ev_cls, m)
| # Copyright (C) 2011, 2012 Nippon Telegraph and Telephone Corporation.
# Copyright (C) 2011, 2012 Isaku Yamahata <yamahata at valinux co jp>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import inspect
import logging
from ryu.controller import ofp_event
LOG = logging.getLogger('ryu.controller.handler')
# just represent OF datapath state. datapath specific so should be moved.
HANDSHAKE_DISPATCHER = "handshake"
CONFIG_DISPATCHER = "config"
MAIN_DISPATCHER = "main"
DEAD_DISPATCHER = "dead"
# should be named something like 'observe_event'
def set_ev_cls(ev_cls, dispatchers=None):
def _set_ev_cls_dec(handler):
handler.ev_cls = ev_cls
handler.dispatchers = _listify(dispatchers)
handler.observer = ev_cls.__module__
return handler
return _set_ev_cls_dec
def set_ev_handler(ev_cls, dispatchers):
def _set_ev_cls_dec(handler):
handler.ev_cls = ev_cls
handler.dispatchers = _listify(dispatchers)
return handler
return _set_ev_cls_dec
def _is_ev_cls(meth):
return hasattr(meth, 'ev_cls')
def _listify(may_list):
if may_list is None:
may_list = []
if not isinstance(may_list, list):
may_list = [may_list]
return may_list
def register_instance(i):
for _k, m in inspect.getmembers(i, inspect.ismethod):
# LOG.debug('instance %s k %s m %s', i, _k, m)
if _is_ev_cls(m):
i.register_handler(m.ev_cls, m)
| apache-2.0 | Python |
7f49a34e605d701168ee88c8cff0e3b8ed9a68d6 | Add GPG-related minor migration | artefactual/archivematica-storage-service,artefactual/archivematica-storage-service,artefactual/archivematica-storage-service,artefactual/archivematica-storage-service | storage_service/locations/migrations/0017_gpg_space_minor_migration.py | storage_service/locations/migrations/0017_gpg_space_minor_migration.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('locations', '0016_mirror_location_aip_replication'),
]
operations = [
migrations.AlterField(
model_name='space',
name='access_protocol',
field=models.CharField(
help_text='How the space can be accessed.',
max_length=8,
verbose_name='Access protocol',
choices=[
(b'ARKIVUM', 'Arkivum'),
(b'DV', 'Dataverse'),
(b'DC', 'DuraCloud'),
(b'DSPACE', 'DSpace via SWORD2 API'),
(b'FEDORA', 'FEDORA via SWORD2'),
(b'GPG', 'GPG encryption on Local Filesystem'),
(b'FS', 'Local Filesystem'),
(b'LOM', 'LOCKSS-o-matic'),
(b'NFS', 'NFS'),
(b'PIPE_FS', 'Pipeline Local Filesystem'),
(b'SWIFT', 'Swift')]),
),
]
| agpl-3.0 | Python | |
579d21e001f5cd61702dc086d36c1a5f764ffb45 | Add app.wsgi to run under Apache | johnmarcampbell/is-democracy-on-fire,johnmarcampbell/is-democracy-on-fire | app.wsgi | app.wsgi | import app
site = app.create_app()
site.run()
| mit | Python | |
194abec2ae2066a4ab77db2d46822115350d8086 | Add script brand-exclusion.py | SnakeHunt2012/word2vec,SnakeHunt2012/word2vec,SnakeHunt2012/word2vec,SnakeHunt2012/word2vec,SnakeHunt2012/word2vec | brand-exclusion.py | brand-exclusion.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from sys import getsizeof
from time import time
from codecs import open
from numpy import array, matrix
from numpy.random import random
from argparse import ArgumentParser
from itertools import combinations
from heapq import nlargest
BRAND_EXCLUDED = False
def duration(start, end):
second = (end - start) % 60
minute = (end - start) % 3600 / 60
hour = (end - start) / 3600
return "%d:%02d:%02d" % (hour, minute, second)
def load_dict(dict_file):
brand_set = set()
category_dict = {}
with open(dict_file, 'r') as fd:
for line in fd:
splited_line = line.split()
if len(splited_line) < 2:
continue
category = int(splited_line.pop(0))
brand = " ".join(splited_line)
brand_set.add(brand)
if brand in category_dict:
category_dict[brand].add(category)
else:
category_dict[brand] = set([category])
return brand_set, category_dict
def main():
parser = ArgumentParser()
parser.add_argument("dict_file", help = "category-brand file in space splited file format")
parser.add_argument("sim_file", help = "query to bid sim file in tsv format")
args = parser.parse_args()
dict_file = args.dict_file
sim_file = args.sim_file
print "loading dict_file ..."
start_time = time()
brand_set, category_dict = load_dict(dict_file)
end_time = time()
print "loading dict_file done", duration(start_time, end_time)
with open(sim_file, 'r') as fd:
for line in fd:
splited_line = line.strip().split("\t")
if len(splited_line) < 2:
continue
query = splited_line.pop(0)
bidword_list = "".join(splited_line).strip(";").split(";")
query_brand_set = set()
query_category_set = set()
for query_seg in query.split():
if query_seg in category_dict:
query_brand_set.add(query_seg)
query_category_set |= category_dict[query_seg]
res_list = []
exc_list = []
for bidword in bidword_list:
if len(set(bidword_seg for bidword_seg in bidword.split() if bidword_seg in category_dict) & query_brand_set) > 0:
res_list.append(bidword)
break
is_exclusive = False
for bidword_seg in bidword.split():
if bidword_seg in category_dict:
if len(category_dict[bidword_seg] & query_category_set) > 0:
is_exclusive = True
exc_list.append(bidword)
break
if not is_exclusive:
res_list.append(bidword)
if BRAND_EXCLUDED:
if len(exc_list) > 0:
print "%s\t%s" % (query, ";".join(exc_list))
else:
if len(res_list) > 0:
print "%s\t%s" % (query, ";".join(res_list))
if __name__ == "__main__":
main()
| apache-2.0 | Python | |
38090ac06a48a4205cbc2318e3ad9296d5b08ea5 | Add migration to populate Broadcast.base_language | pulilab/rapidpro,tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,tsotetsi/textily-web,pulilab/rapidpro,pulilab/rapidpro | temba/msgs/migrations/0069_populate_broadcast_base_lang.py | temba/msgs/migrations/0069_populate_broadcast_base_lang.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from collections import defaultdict
from django.db import migrations
from temba.utils import chunk_list
def do_populate(Broadcast, FlowStep):
BroadcastSteps = FlowStep.broadcasts.through
broadcast_ids = list(Broadcast.objects.values_list('id', flat=True).order_by('org_id'))
num_processed = 0
if broadcast_ids:
print("Starting population of Broadcast.base_language for %d total broadcasts..." % len(broadcast_ids))
for id_batch in chunk_list(broadcast_ids, 1000):
broadcast_steps = BroadcastSteps.objects.filter(broadcast_id__in=id_batch).distinct('broadcast_id')
broadcast_steps = broadcast_steps.prefetch_related('flowstep__run__flow')
# dict of language codes to lists of broadcast ids
broadcasts_by_lang = defaultdict(list)
for broadcast_step in broadcast_steps:
flow = broadcast_step.flowstep.run.flow
if flow.base_language:
broadcasts_by_lang[flow.base_language].append(broadcast_step.broadcast_id)
# update each set of broadcasts associated with a particular flow
num_updated = 0
for lang, bcast_ids in broadcasts_by_lang.items():
Broadcast.objects.filter(id__in=bcast_ids).update(base_language=lang)
num_updated += len(bcast_ids)
num_processed += len(id_batch)
print(" > Processed %d of %d broadcasts (updated %d with %d different languages)"
% (num_processed, len(broadcast_ids), num_updated, len(broadcasts_by_lang)))
if broadcast_ids:
print("Finished population of Broadcast.base_language for %d total broadcasts" % len(broadcast_ids))
def apply_as_migration(apps, schema_editor):
Broadcast = apps.get_model('msgs', 'Broadcast')
FlowStep = apps.get_model('flows', 'FlowStep')
do_populate(Broadcast, FlowStep)
def apply_offline():
from temba.flows.models import FlowStep
from temba.msgs.models import Broadcast
do_populate(Broadcast, FlowStep)
class Migration(migrations.Migration):
dependencies = [
('msgs', '0068_broadcast_base_language'),
]
operations = [
migrations.RunPython(apply_as_migration)
]
| agpl-3.0 | Python | |
42cc997aea3f71d9b0db37d36a895e68994616ea | Add Jansson (#2287) | TheTimmy/spack,krafczyk/spack,tmerrick1/spack,TheTimmy/spack,iulian787/spack,iulian787/spack,tmerrick1/spack,LLNL/spack,LLNL/spack,lgarren/spack,mfherbst/spack,tmerrick1/spack,krafczyk/spack,EmreAtes/spack,LLNL/spack,matthiasdiener/spack,matthiasdiener/spack,lgarren/spack,EmreAtes/spack,lgarren/spack,TheTimmy/spack,matthiasdiener/spack,krafczyk/spack,mfherbst/spack,krafczyk/spack,skosukhin/spack,TheTimmy/spack,mfherbst/spack,mfherbst/spack,LLNL/spack,EmreAtes/spack,EmreAtes/spack,tmerrick1/spack,skosukhin/spack,tmerrick1/spack,matthiasdiener/spack,lgarren/spack,LLNL/spack,matthiasdiener/spack,TheTimmy/spack,krafczyk/spack,iulian787/spack,iulian787/spack,mfherbst/spack,lgarren/spack,iulian787/spack,EmreAtes/spack,skosukhin/spack,skosukhin/spack,skosukhin/spack | var/spack/repos/builtin/packages/jansson/package.py | var/spack/repos/builtin/packages/jansson/package.py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Jansson(CMakePackage):
"""Jansson is a C library for encoding, decoding and manipulating JSON
data."""
homepage = "http://www.digip.org/jansson/"
url = "https://github.com/akheron/jansson/archive/v2.9.tar.gz"
version('2.9', 'd2db25c437b359fc5a065ed938962237')
depends_on('cmake', type='build')
| lgpl-2.1 | Python | |
9cbcf5ce7b6363e267e8923fe88e4ebf78107f8a | Add extractor for synthesis of Symbtr scores | MTG/pycompmusic | compmusic/extractors/makam/scoresynthesis.py | compmusic/extractors/makam/scoresynthesis.py | import json
import urllib2
import compmusic.dunya.conn
import compmusic.dunya.docserver
import compmusic.extractors
import pydub
from symbtrsynthesis.adaptivesynthesizer import AdaptiveSynthesizer
from symbtrsynthesis.musicxmlreader import MusicXMLReader
class ScoreSynthesis(compmusic.extractors.ExtractorModule):
_version = "0.1"
_sourcetype = "symbtrxml"
_slug = "synthesis"
def set_settings(self, recid='aeu'):
self._output = {recid: {'mp3': {"extension": "mp3",
"mimetype": "audio/mp3"},
'onsets': {"extension": "json",
"mimetype": "application/json"}}
}
def set_parameters(self, measures, bpm, stablenotes, tnc_sym):
self.measures = measures
self.bpm = bpm
self.stablenotes = stablenotes
self.tnc_sym = tnc_sym
def run(self, musicbrainzid, fname):
try:
# synthesize
audio_karplus, onsets_karplus = AdaptiveSynthesizer.synth_from_tuning(
measures=self.measures, bpm=self.bpm,
stable_notes=self.stablenotes, tonic_sym=self.tnc_sym,
synth_type='karplus', verbose=True)
# mp3 conversion
audio_obj = pydub.AudioSegment(data=audio_karplus)
audio_mp3 = audio_obj.export()
return {musicbrainzid: {'mp3': audio_mp3,
'onsets': onsets_karplus}
}
except compmusic.dunya.conn.HTTPError:
print(workid, 'is not exist')
token = ''
compmusic.dunya.conn.set_token(token)
# fetch symbtrs
symbtrs = compmusic.dunya.get_collection('makam-symbtr')['documents']
response = urllib2.urlopen('https://raw.githubusercontent.com/MTG/otmm_tuning_intonation_dataset/master/dataset.json')
dataset = json.loads(response.read())
MAKAMS = dataset.keys()
synthesizer = ScoreSynthesis()
for xx, symbtr in enumerate(symbtrs):
workid = symbtr['external_identifier']
try:
# get metadata
metadata = json.loads(compmusic.dunya.docserver.file_for_document(
workid, 'scoreanalysis', 'metadata'))
makam = metadata['makam']['attribute_key']
# fetch score
musicxml = compmusic.dunya.docserver.file_for_document(
recordingid=workid, thetype='score', subtype='xmlscore')
# read musicxml
(measures, makam, usul, form, time_sigs, keysig, work_title,
composer, lyricist, bpm, tnc_sym) = MusicXMLReader.read(musicxml)
# check if it is in selected makams
if makam in MAKAMS:
recids = dataset[makam]
for recid in recids:
stablenotes = json.loads(compmusic.dunya.file_for_document(
recid, 'audioanalysis', 'note_models'))
synthesizer.set_settings(recid)
synthesizer.set_parameters(measures=measures, bpm=bpm,
stablenotes=stablenotes,
tnc_sym=tnc_sym)
print(xx, makam, recid)
synthesizer.run(recid, None)
print('aeu', makam)
synthesizer.set_settings()
synthesizer.set_parameters(measures=measures, bpm=bpm,
stablenotes=None, tnc_sym=tnc_sym)
synthesizer.run('aeu', None)
except compmusic.dunya.conn.HTTPError:
print(workid, 'is not exist')
| agpl-3.0 | Python | |
e63a623452d9aa64c2dd392442f1f09f8e0924ef | make it work on python 2.6 | vstoykov/django-pipeline,lexqt/django-pipeline,mweibel/django-pipeline,pombredanne/django-pipeline-1,theatlantic/django-pipeline,cyberdelia/django-pipeline,Tekco/django-pipeline,perdona/django-pipeline,edwinlunando/django-pipeline,edwinlunando/django-pipeline,caioariede/django-pipeline,zapier/django-pipeline,novapost/django-pipeline,zapier/django-pipeline,Kobold/django-pipeline,almost/django-pipeline,demux/django-pipeline,sjhewitt/django-pipeline,caioariede/django-pipeline,camilonova/django-pipeline,skirsdeda/django-pipeline,fahhem/django-pipeline,simudream/django-pipeline,floppym/django-pipeline,sideffect0/django-pipeline,TwigWorld/django-pipeline,pombredanne/django-pipeline-1,leonardoo/django-pipeline,jazzband/django-pipeline,sjhewitt/django-pipeline,theatlantic/django-pipeline,chipx86/django-pipeline,yuvadm/django-pipeline,apendleton/django-pipeline,ei-grad/django-pipeline,perdona/django-pipeline,floppym/django-pipeline,Kami/django-pipeline,sideffect0/django-pipeline,vbabiy/django-pipeline,wienczny/django-pipeline,d9pouces/django-pipeline,caioariede/django-pipeline,sideffect0/django-pipeline,tayfun/django-pipeline,lexqt/django-pipeline,necaris/django-pipeline,vbabiy/django-pipeline,jensenbox/django-pipeline,chipx86/django-pipeline,leonardoo/django-pipeline,jazzband/django-pipeline,TwigWorld/django-pipeline,skirsdeda/django-pipeline,edwinlunando/django-pipeline,mgorny/django-pipeline,leonardoo/django-pipeline,skolsuper/django-pipeline,airtonix/django-pipeline,adamcharnock/django-pipeline,mweibel/django-pipeline,theatlantic/django-pipeline,jensenbox/django-pipeline,adamcharnock/django-pipeline,apendleton/django-pipeline,botify-labs/django-pipeline,demux/django-pipeline,ei-grad/django-pipeline,lydell/django-pipeline,yuvadm/django-pipeline,zapier/django-pipeline,almost/django-pipeline,hyperoslo/django-pipeline,Nivl/django-pipeline,kronion/django-pipeline,jensenbox/django-pipeline,almost/django-pi
peline,novapost/django-pipeline,necaris/django-pipeline,lydell/django-pipeline,Nivl/django-pipeline,adamcharnock/django-pipeline,teozkr/django-pipeline,TwigWorld/django-pipeline,Kobold/django-pipeline,sjhewitt/django-pipeline,skolsuper/django-pipeline,joshkehn/django-pipeline,cyberdelia/django-pipeline,Kami/django-pipeline,botify-labs/django-pipeline,pdr/django-pipeline,kronion/django-pipeline,beedesk/django-pipeline,chipx86/django-pipeline,hyperoslo/django-pipeline,camilonova/django-pipeline,wienczny/django-pipeline,skolsuper/django-pipeline,jwatson/django-pipeline,novapost/django-pipeline,letolab/django-pipeline,simudream/django-pipeline,beedesk/django-pipeline,ei-grad/django-pipeline,botify-labs/django-pipeline,pdr/django-pipeline,cyberdelia/django-pipeline,tayfun/django-pipeline,vstoykov/django-pipeline,kronion/django-pipeline,Tekco/django-pipeline,Tekco/django-pipeline,wienczny/django-pipeline,joshkehn/django-pipeline,Kami/django-pipeline,Kobold/django-pipeline,floppym/django-pipeline,lexqt/django-pipeline,letolab/django-pipeline,simudream/django-pipeline,tayfun/django-pipeline,lydell/django-pipeline,jazzband/django-pipeline,beedesk/django-pipeline,camilonova/django-pipeline,d9pouces/django-pipeline,yuvadm/django-pipeline,vbabiy/django-pipeline,perdona/django-pipeline,fabiosantoscode/django-pipeline,mgorny/django-pipeline,apendleton/django-pipeline,fahhem/django-pipeline,edx/django-pipeline,jwatson/django-pipeline,mgorny/django-pipeline,edx/django-pipeline,edx/django-pipeline,airtonix/django-pipeline,fabiosantoscode/django-pipeline,demux/django-pipeline,d9pouces/django-pipeline,hyperoslo/django-pipeline,joshkehn/django-pipeline,jwatson/django-pipeline,teozkr/django-pipeline,skirsdeda/django-pipeline | compress/filters/css_url_replace/__init__.py | compress/filters/css_url_replace/__init__.py | import re
from django.conf import settings
from compress.filter_base import FilterBase
CSS_REPLACE = getattr(settings, 'COMPRESS_CSS_URL_REPLACE', [])
class CSSURLReplace(FilterBase):
def filter_css(self, css):
for pattern, repl in CSS_REPLACE.iteritems():
css = re.sub(pattern, repl, css)
if self.verbose:
print 'Replaced "%s" with "%s"' % (pattern, repl)
return css
| import re
from django.conf import settings
from compress.filter_base import FilterBase
CSS_REPLACE = getattr(settings, 'COMPRESS_CSS_URL_REPLACE', [])
class CSSURLReplace(FilterBase):
def filter_css(self, css):
for pattern, repl in CSS_REPLACE.iteritems():
css = re.sub(pattern, repl, css, flags=re.UNICODE | re.IGNORECASE)
if self.verbose:
print 'Replaced "%s" with "%s"' % (pattern, repl)
return css
| mit | Python |
feee17d37fdef9b2b511366f42599ceb1b7fdd50 | Add migration | qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq | corehq/apps/sms/migrations/0019_add_new_registration_invitation_fields.py | corehq/apps/sms/migrations/0019_add_new_registration_invitation_fields.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import jsonfield.fields
class Migration(migrations.Migration):
    """Add three fields to SelfRegistrationInvitation:
    android_only, custom_user_data (JSON blob), and require_email.
    """
    dependencies = [
        ('sms', '0018_check_for_phone_number_migration'),
    ]
    operations = [
        migrations.AddField(
            model_name='selfregistrationinvitation',
            name='android_only',
            field=models.BooleanField(default=False),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='selfregistrationinvitation',
            name='custom_user_data',
            field=jsonfield.fields.JSONField(default=dict),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='selfregistrationinvitation',
            name='require_email',
            field=models.BooleanField(default=False),
            preserve_default=True,
        ),
    ]
| bsd-3-clause | Python | |
0002ec236c63bf64a112eb3767137588ebe83403 | Add band-pass fourier filter. | rshkarin/afm-particle-analysis | band_pass.py | band_pass.py | import numpy as np
import matplotlib.pyplot as plt
def next_length_pow2(x):
    """Return the smallest power of two >= abs(x), as an int.

    Used to choose a padded side length for the FFT.  `x` must be
    non-zero (log2(0) is undefined).  Returning an int (instead of the
    float that 2 ** np.ceil(...) produces) lets callers use the result
    directly as an array shape or slice bound.
    """
    return int(2 ** np.ceil(np.log2(abs(x))))
def filter(fft_data, filter_large_dia, filter_small_dia):
    """Apply a Gaussian band-pass filter in the frequency domain.

    Completed port of the ImageJ FFTFilter logic that was previously
    left half-translated (the body ended in a dangling statement
    followed by pasted Java).  Structures larger than
    ``filter_large_dia`` (pixels) are attenuated by an inverted
    Gaussian (high-pass) and structures smaller than
    ``filter_small_dia`` by a Gaussian (low-pass).

    Parameters
    ----------
    fft_data : 2-D square ndarray
        Frequency-domain data (e.g. the output of np.fft.fft2).
    filter_large_dia : float
        Diameter (pixels) of the largest structures to keep.
    filter_small_dia : float
        Diameter (pixels) of the smallest structures to keep.

    Returns
    -------
    (filtered, filter_mask) : tuple of ndarray
        A filtered copy of ``fft_data`` and the real-valued attenuation
        mask that was applied.  Row/column 0 (the DC terms) are left
        untouched, matching the reference implementation, which loops
        from index 1.
    """
    # NOTE: this function shadows the builtin `filter`; the name is kept
    # for interface compatibility.
    side_len = fft_data.shape[0]
    # Gaussian exponent scale factors derived from the two diameters.
    scale_large = (2.0 * filter_large_dia / side_len) ** 2
    scale_small = (2.0 * filter_small_dia / side_len) ** 2
    filtered = np.array(fft_data, copy=True)
    filter_mask = np.ones(fft_data.shape, dtype=np.float32)
    # Per-index Gaussian terms for rows/columns 1 .. side_len/2 - 1.
    idx = np.arange(1, side_len // 2)
    fact_large = np.exp(-(idx * idx) * scale_large)
    fact_small = np.exp(-(idx * idx) * scale_small)
    # Quadrant attenuation: high-pass (1 - G_large) times low-pass G_small,
    # i.e. (1 - rowFactLarge*colFactLarge) * rowFactSmall*colFactSmall.
    factor = (1.0 - np.outer(fact_large, fact_large)) * np.outer(fact_small, fact_small)
    rows, cols = idx[:, None], idx[None, :]
    # Apply symmetrically to all four quadrants (row/backrow, col/backcol).
    for r in (rows, side_len - rows):
        for c in (cols, side_len - cols):
            filtered[r, c] *= factor
            filter_mask[r, c] *= factor
    return filtered, filter_mask
def main():
    """Load a raw 16-bit 512x512 image, zero-pad it to the next
    power-of-two square, compute the centered 2-D FFT, and display the
    padded input."""
    # NOTE(review): hard-coded Windows path -- consider taking it from argv.
    data_path = "E:\\fiji-win64\\AllaData\\data_16bit_512x512.raw"
    data = np.memmap(data_path, dtype=np.int16, shape=(512, 512), mode='r')
    height, width = data.shape
    # int(...) because array shapes must be integers (next_length_pow2
    # historically returned a float from 2 ** np.ceil(...)).
    pad_height = int(next_length_pow2(height + 1))
    pad_width = int(next_length_pow2(width + 1))
    padded_data = np.zeros((pad_height, pad_width), dtype=np.int16)
    # Center the image in the padded canvas; // keeps the offsets ints.
    pad_offset_y = pad_height // 2 - height // 2
    pad_offset_x = pad_width // 2 - width // 2
    # BUG FIX: the x-extent previously used `height`; it must be `width`
    # (harmless only while the input happens to be square).
    padded_data[pad_offset_y:pad_offset_y + height,
                pad_offset_x:pad_offset_x + width] = data
    fft_data = np.fft.fft2(padded_data)
    fft_data = np.fft.fftshift(fft_data)
    plt.imshow(padded_data)
    plt.show()
if __name__ == "__main__":
    main()
| mit | Python | |
c54f7c8157b390aa73d3a395bd00a00de3b632c8 | Add twisted bot | honza/nigel | ircLogBot.py | ircLogBot.py | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
An example IRC log bot - logs a channel's events to a file.
If someone says the bot's name in the channel followed by a ':',
e.g.
<foo> logbot: hello!
the bot will reply:
<logbot> foo: I am a log bot
Run this script with two arguments, the channel name the bot should
connect to, and file to log to, e.g.:
$ python ircLogBot.py test test.log
will log channel #test to the file 'test.log'.
"""
# twisted imports
from twisted.words.protocols import irc
from twisted.internet import reactor, protocol
from twisted.python import log
# system imports
import time, sys
class MessageLogger:
    """Timestamped chat logger.

    Kept separate from the protocol class so that application logic
    (logging) stays independent of protocol logic.
    """
    def __init__(self, file):
        self.file = file
    def log(self, message):
        """Append `message` to the log, prefixed with a [HH:MM:SS] stamp."""
        now = time.localtime(time.time())
        line = '%s %s\n' % (time.strftime("[%H:%M:%S]", now), message)
        self.file.write(line)
        self.file.flush()
    def close(self):
        """Release the underlying file object."""
        self.file.close()
class LogBot(irc.IRCClient):
    """A logging IRC bot.

    Joins the single channel configured on its factory and records every
    connect/disconnect, join, message, action, and nick change through a
    MessageLogger.
    """
    nickname = "twistedbot"
    def connectionMade(self):
        # Open the log in append mode so reconnects extend the same file.
        irc.IRCClient.connectionMade(self)
        self.logger = MessageLogger(open(self.factory.filename, "a"))
        self.logger.log("[connected at %s]" %
                        time.asctime(time.localtime(time.time())))
    def connectionLost(self, reason):
        # Record the disconnect and release the log file handle.
        irc.IRCClient.connectionLost(self, reason)
        self.logger.log("[disconnected at %s]" %
                        time.asctime(time.localtime(time.time())))
        self.logger.close()
    # callbacks for events
    def signedOn(self):
        """Called when bot has successfully signed on to server."""
        self.join(self.factory.channel)
    def joined(self, channel):
        """This will get called when the bot joins the channel."""
        self.logger.log("[I have joined %s]" % channel)
    def privmsg(self, user, channel, msg):
        """This will get called when the bot receives a message."""
        # Keep only the nick portion of the 'nick!user@host' prefix.
        user = user.split('!', 1)[0]
        self.logger.log("<%s> %s" % (user, msg))
        # Check to see if they're sending me a private message
        if channel == self.nickname:
            msg = "It isn't nice to whisper! Play nice with the group."
            self.msg(user, msg)
            return
        # Otherwise check to see if it is a message directed at me
        if msg.startswith(self.nickname + ":"):
            msg = "%s: I am a log bot" % user
            self.msg(channel, msg)
            self.logger.log("<%s> %s" % (self.nickname, msg))
    def action(self, user, channel, msg):
        """This will get called when the bot sees someone do an action."""
        user = user.split('!', 1)[0]
        self.logger.log("* %s %s" % (user, msg))
    # irc callbacks
    def irc_NICK(self, prefix, params):
        """Called when an IRC user changes their nickname."""
        old_nick = prefix.split('!')[0]
        new_nick = params[0]
        self.logger.log("%s is now known as %s" % (old_nick, new_nick))
    # For fun, override the method that determines how a nickname is changed on
    # collisions. The default method appends an underscore.
    def alterCollidedNick(self, nickname):
        """
        Generate an altered version of a nickname that caused a collision in an
        effort to create an unused related name for subsequent registration.
        """
        return nickname + '^'
class LogBotFactory(protocol.ClientFactory):
    """A factory for LogBots.
    A new protocol instance will be created each time we connect to the server.
    """
    def __init__(self, channel, filename):
        # Channel to join and log-file path, read by each LogBot instance.
        self.channel = channel
        self.filename = filename
    def buildProtocol(self, addr):
        # Give the protocol a back-reference so it can reach channel/filename.
        p = LogBot()
        p.factory = self
        return p
    def clientConnectionLost(self, connector, reason):
        """If we get disconnected, reconnect to server."""
        connector.connect()
    def clientConnectionFailed(self, connector, reason):
        # Python 2 print statement; stop the reactor so the process exits.
        print "connection failed:", reason
        reactor.stop()
if __name__ == '__main__':
    # initialize logging
    log.startLogging(sys.stdout)
    # create factory protocol and application
    # argv[1] = channel name (without '#'), argv[2] = log file path
    f = LogBotFactory(sys.argv[1], sys.argv[2])
    # connect factory to this host and port
    reactor.connectTCP("irc.freenode.net", 6667, f)
    # run bot (blocks until the reactor is stopped)
    reactor.run()
| bsd-2-clause | Python | |
1cee75aec336b6ba7d21cb9aa18b238e68ce2fd0 | add script to export data from database | DeepController/tellina,DeepController/tellina,DeepController/tellina | website/scripts/export_pairs.py | website/scripts/export_pairs.py | from website.models import Annotation
# Export every annotation's natural-language description and command
# string to parallel, line-aligned text files (nl.txt / cm.txt).
# Streaming both files in a single pass produces identical output while
# avoiding buffering the whole table in two intermediate lists.
with open('nl.txt', 'w') as nl_f, open('cm.txt', 'w') as cm_f:
    for annotation in Annotation.objects.all():
        nl_f.write('{}\n'.format(annotation.nl.str.strip()))
        cm_f.write('{}\n'.format(annotation.cmd.str.strip()))
74df88d572c8b6efabcde5e1803245d1bf31cc39 | Switch to GitHub-esque event names | mvaled/sentry,JamesMura/sentry,looker/sentry,jokey2k/sentry,alexm92/sentry,boneyao/sentry,Kryz/sentry,TedaLIEz/sentry,llonchj/sentry,fotinakis/sentry,nicholasserra/sentry,wong2/sentry,beeftornado/sentry,imankulov/sentry,JackDanger/sentry,kevinastone/sentry,argonemyth/sentry,zenefits/sentry,ifduyue/sentry,imankulov/sentry,gg7/sentry,JackDanger/sentry,JamesMura/sentry,daevaorn/sentry,beeftornado/sentry,BuildingLink/sentry,ngonzalvez/sentry,JackDanger/sentry,zenefits/sentry,jean/sentry,fuziontech/sentry,JTCunning/sentry,vperron/sentry,kevinlondon/sentry,imankulov/sentry,Kryz/sentry,boneyao/sentry,mvaled/sentry,ngonzalvez/sentry,Kryz/sentry,jean/sentry,TedaLIEz/sentry,pauloschilling/sentry,1tush/sentry,korealerts1/sentry,Natim/sentry,daevaorn/sentry,Natim/sentry,songyi199111/sentry,jean/sentry,fuziontech/sentry,jean/sentry,pauloschilling/sentry,fuziontech/sentry,gg7/sentry,wong2/sentry,BuildingLink/sentry,korealerts1/sentry,fotinakis/sentry,drcapulet/sentry,wujuguang/sentry,JamesMura/sentry,TedaLIEz/sentry,llonchj/sentry,kevinastone/sentry,Natim/sentry,gg7/sentry,jean/sentry,drcapulet/sentry,kevinlondon/sentry,felixbuenemann/sentry,daevaorn/sentry,argonemyth/sentry,gencer/sentry,mvaled/sentry,wujuguang/sentry,gencer/sentry,zenefits/sentry,songyi199111/sentry,argonemyth/sentry,ewdurbin/sentry,llonchj/sentry,BayanGroup/sentry,nicholasserra/sentry,camilonova/sentry,BayanGroup/sentry,ewdurbin/sentry,mvaled/sentry,vperron/sentry,kevinlondon/sentry,hongliang5623/sentry,drcapulet/sentry,mvaled/sentry,zenefits/sentry,JamesMura/sentry,ewdurbin/sentry,looker/sentry,alexm92/sentry,BuildingLink/sentry,1tush/sentry,ifduyue/sentry,ngonzalvez/sentry,wujuguang/sentry,vperron/sentry,looker/sentry,mitsuhiko/sentry,daevaorn/sentry,camilonova/sentry,hongliang5623/sentry,alexm92/sentry,ifduyue/sentry,ifduyue/sentry,looker/sentry,wong2/sentry,boneyao/sentry,korealerts1/sentry,fotinakis/sentry,BayanGroup/sentry,mv
aled/sentry,beeftornado/sentry,jokey2k/sentry,camilonova/sentry,BuildingLink/sentry,fotinakis/sentry,nicholasserra/sentry,1tush/sentry,jokey2k/sentry,JTCunning/sentry,JTCunning/sentry,zenefits/sentry,looker/sentry,pauloschilling/sentry,JamesMura/sentry,gencer/sentry,mitsuhiko/sentry,felixbuenemann/sentry,hongliang5623/sentry,gencer/sentry,BuildingLink/sentry,kevinastone/sentry,felixbuenemann/sentry,ifduyue/sentry,songyi199111/sentry,gencer/sentry | src/sentry/models/auditlogentry.py | src/sentry/models/auditlogentry.py | """
sentry.models.auditlogentry
~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from sentry.db.models import (
Model, BoundedPositiveIntegerField, GzippedDictField,
sane_repr
)
class AuditLogEntryEvent(object):
    """Integer codes for the member-management actions that get audited."""
    MEMBER_INVITE = 1
    MEMBER_ADD = 2
    MEMBER_ACCEPT = 3
    MEMBER_EDIT = 4
    MEMBER_REMOVE = 5
class AuditLogEntry(Model):
    """Record of a member-management action performed in an organization."""
    organization = models.ForeignKey('sentry.Organization')
    # The user who performed the action.
    actor = models.ForeignKey('sentry.User', related_name='audit_actors')
    # Primary key of whatever object the action targeted, if any.
    target_object = BoundedPositiveIntegerField(null=True)
    # The user the action was aimed at (e.g. the invited member), if any.
    target_user = models.ForeignKey('sentry.User', null=True, related_name='audit_targets')
    # NOTE(review): the choice keys are ints (AuditLogEntryEvent.*) but the
    # column is a CharField -- presumably Django stringifies on save; an
    # IntegerField would match the keys. Confirm the intended storage format.
    event = models.CharField(max_length=64, choices=(
        # We emulate github a bit with event naming
        (AuditLogEntryEvent.MEMBER_INVITE, _('org.invite-member')),
        (AuditLogEntryEvent.MEMBER_ADD, _('org.add-member')),
        (AuditLogEntryEvent.MEMBER_ACCEPT, _('org.accept-invite')),
        # Typo fix: label was 'org.rempve-member'.
        (AuditLogEntryEvent.MEMBER_REMOVE, _('org.remove-member')),
        (AuditLogEntryEvent.MEMBER_EDIT, _('org.edit-member')),
    ))
    # Arbitrary extra context for the event, stored compressed.
    data = GzippedDictField()
    datetime = models.DateTimeField(default=timezone.now)
    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_auditlogentry'
    # NOTE(review): 'type' is not a field on this model (the field is
    # 'event') -- confirm sane_repr tolerates missing attributes.
    __repr__ = sane_repr('organization_id', 'type')
| """
sentry.models.auditlogentry
~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
from django.db import models
from django.utils import timezone
from django.utils.translation import ugettext_lazy as _
from sentry.db.models import (
Model, BoundedPositiveIntegerField, GzippedDictField,
sane_repr
)
class AuditLogEntryEvent(object):
    """Integer codes for the member-management actions that get audited."""
    MEMBER_INVITE = 1
    MEMBER_ADD = 2
    MEMBER_ACCEPT = 3
    MEMBER_EDIT = 4
    MEMBER_REMOVE = 5
class AuditLogEntry(Model):
    """Record of a member-management action performed in an organization."""
    organization = models.ForeignKey('sentry.Organization')
    # The user who performed the action.
    actor = models.ForeignKey('sentry.User', related_name='audit_actors')
    # Primary key of whatever object the action targeted, if any.
    target_object = BoundedPositiveIntegerField(null=True)
    # The user the action was aimed at (e.g. the invited member), if any.
    target_user = models.ForeignKey('sentry.User', null=True, related_name='audit_targets')
    event = BoundedPositiveIntegerField(choices=(
        (AuditLogEntryEvent.MEMBER_INVITE, _('Invited member')),
        (AuditLogEntryEvent.MEMBER_ADD, _('Added member')),
        (AuditLogEntryEvent.MEMBER_ACCEPT, _('Accepted Invite')),
        (AuditLogEntryEvent.MEMBER_REMOVE, _('Removed member')),
        (AuditLogEntryEvent.MEMBER_EDIT, _('Edited member')),
    ))
    # Arbitrary extra context for the event, stored compressed.
    data = GzippedDictField()
    datetime = models.DateTimeField(default=timezone.now)
    class Meta:
        app_label = 'sentry'
        db_table = 'sentry_auditlogentry'
    # NOTE(review): 'type' is not a field on this model (the field is
    # 'event') -- confirm sane_repr tolerates missing attributes.
    __repr__ = sane_repr('organization_id', 'type')
| bsd-3-clause | Python |
2457eaf72e0379d0e8915906ba7f7faa9b3f2f03 | add plotter for glitch offset | zlongshen/InertialNav,zlongshen/InertialNav,flyingk/InertialNav,zlongshen/InertialNav,ek99800/InertialNav,flyingk/InertialNav,vergil1874/InertialNav,flyingk/InertialNav,ek99800/InertialNav,priseborough/InertialNav,vergil1874/InertialNav,vergil1874/InertialNav,AlexHAHA/InertialNav,priseborough/InertialNav,AlexHAHA/InertialNav,priseborough/InertialNav,AlexHAHA/InertialNav,priseborough/InertialNav,ek99800/InertialNav,flyingk/InertialNav | code/plot_glitchOffset.py | code/plot_glitchOffset.py | #!/bin/python
import matplotlib as mpl
import matplotlib.pyplot as plt
import matplotlib.cbook as cbook
import numpy as np
import math
# Columns: time [s], north/east glitch offsets [m]; the first and last
# lines of the log are skipped (header/footer).
data = np.genfromtxt('GlitchOffsetOut.txt', delimiter=' ', skip_header=1,
                     skip_footer=1, names=['time', 'north', 'east'])
# Two stacked panels: north offset on top, east offset below.
fig = plt.figure()
ax1 = fig.add_subplot(211)
ax1.set_title("GPS glitch offset")
#ax1.set_xlabel('time (s)')
ax1.set_ylabel('north position (m)')
# NOTE(review): label= is set on both plots but no legend() is drawn --
# either call ax.legend() or drop the labels.
ax1.plot(data['time'], data['north'], color='b', label='north')
ax2 = fig.add_subplot(212)
ax2.set_xlabel('time (s)')
ax2.set_ylabel('east position (m)')
ax2.plot(data['time'], data['east'], color='b', label='east')
plt.show()
| bsd-3-clause | Python | |
526faad8c83d1385cc31ed3db85249a9f5882893 | Create myproject.py | nickczj/puts,nickczj/puts,nickczj/puts | myproject.py | myproject.py | from flask import Flask, render_template
# Single-page Flask app serving templates/home.html at the site root.
app = Flask(__name__)
@app.route("/")
def home():
    """Render the landing page."""
    return render_template("home.html")
if __name__ == "__main__":
    # NOTE(review): debug=True enables the Werkzeug debugger while
    # listening on all interfaces (0.0.0.0) -- keep debug off outside
    # local development.
    app.run(host='0.0.0.0', debug=True)
| mit | Python | |
c6fbea313571cff4383ce57c689e5aac25537144 | add command to run VCLWriter | tsuru/varnishapi,tsuru/varnishapi | run_vcl_writer.py | run_vcl_writer.py | # Copyright 2014 varnishapi authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
import argparse
from feaas import api, vcl_writer
def run(storage):
    """Parse command-line options and run the VCL writer loop.

    Blocks forever in ``writer.loop()``.

    :param storage: backend handed to VCLWriter for persisting state.
    """
    # FIX: pass the text as description= -- the first positional argument
    # of ArgumentParser is `prog`, which would replace the real program
    # name in the usage line instead of adding help text.
    parser = argparse.ArgumentParser(description="VCL Writer runner")
    parser.add_argument("-i", "--interval",
                        help="Interval for running VCLWriter (in seconds)",
                        default=10, type=int)
    parser.add_argument("-n", "--max-items",
                        help="Maximum number of units to process at a time",
                        type=int)
    args = parser.parse_args()
    # --max-items defaults to None when omitted; VCLWriter receives it as-is.
    writer = vcl_writer.VCLWriter(storage, args.interval, args.max_items)
    writer.loop()
if __name__ == "__main__":
    # Wire the API's storage backend into the writer loop (blocks forever).
    manager = api.get_manager()
    run(manager.storage)
| bsd-3-clause | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.