commit stringlengths 40 40 | subject stringlengths 1 1.49k | old_file stringlengths 4 311 | new_file stringlengths 4 311 | new_contents stringlengths 1 29.8k | old_contents stringlengths 0 9.9k | lang stringclasses 3 values | proba float64 0 1 |
|---|---|---|---|---|---|---|---|
f069a3feda43ebc436e404dad66dfaa06055e35a | Add h5sh python package (#14001) | var/spack/repos/builtin/packages/py-h5sh/package.py | var/spack/repos/builtin/packages/py-h5sh/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyH5sh(PythonPackage):
    """Shell-like environment for HDF5."""

    homepage = "https://pypi.python.org/pypi/h5sh"
    url = "https://github.com/sethrj/h5sh/archive/v0.1.1.tar.gz"

    maintainers = ['sethrj']

    version('0.1.1', sha256='111989d8200d1da8e150aee637a907e524ca0f98d5005a55587cba0d94d9c4a0')

    # Runtime dependencies; 'build' type is needed as well because the
    # setuptools-based install imports the package environment at build time.
    depends_on('py-setuptools', type=('build', 'run'))
    depends_on('py-h5py', type=('build', 'run'))
    depends_on('py-numpy', type=('build', 'run'))
    depends_on('py-prompt-toolkit@2:', type=('build', 'run'))
    depends_on('py-pygments', type=('build', 'run'))
    depends_on('py-six', type=('build', 'run'))
    # Only exercised when running the package's test suite.
    depends_on('py-pytest', type='test')
| Python | 0 | |
5d3a774587bef551553a731d4f9dcfeefdb70ed3 | Add package for scalpel@0.5.3 (#5901) | var/spack/repos/builtin/packages/scalpel/package.py | var/spack/repos/builtin/packages/scalpel/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Scalpel(MakefilePackage):
    """Scalpel is a software package for detecting INDELs (INsertions and
    DELetions) mutations in a reference genome which has been sequenced
    with next-generation sequencing technology.
    """
    homepage = "http://scalpel.sourceforge.net/index.html"
    url = "https://downloads.sourceforge.net/project/scalpel/scalpel-0.5.3.tar.gz"

    version('0.5.3', '682c9f1cd6ab2cb11c6866f303c673f0')

    depends_on('cmake')
    depends_on('perl@5.10.0:')

    # bamtools needs to build before the others.
    parallel = False

    @run_before('install')
    def filter_sbang(self):
        """Run before install so that the standard Spack sbang install hook
        can fix up the path to the perl|python binary.
        """
        with working_dir(self.stage.source_path):
            # string=False: the pattern is treated as a regex (note the ^).
            kwargs = {'ignore_absent': True, 'backup': False, 'string': False}
            match = '^#!/usr/bin/env perl'
            perl = self.spec['perl'].command
            substitute = "#!{perl}".format(perl=perl)
            files = ['FindDenovos.pl', 'scalpel-export',
                     'scalpel-discovery', 'FindVariants.pl',
                     'FindSomatic.pl']
            filter_file(match, substitute, *files, **kwargs)

    # Scalpel doesn't actually *have* an install step.  The authors
    # expect you to unpack the tarball, build it in the resulting
    # directory, and add that directory to your PATH.  The Perl
    # scripts use `FindBin` to discover the directory in which they
    # live and they run their own dedicated copies of {bam,sam}tools
    # and etc... by explicitly naming the executables in their directory.
    #
    # Rather than trying to fix their code I just copied the juicy
    # bits into prefix.bin.  It's not normal, but....
    #
    def install(self, spec, prefix):
        destdir = prefix.bin  # see the note above....
        mkdirp(destdir)
        # Top-level Perl entry points and modules.
        files = ['FindSomatic.pl', 'HashesIO.pm', 'MLDBM.pm',
                 'scalpel-export', 'Utils.pm', 'FindDenovos.pl',
                 'FindVariants.pl', 'scalpel-discovery',
                 'SequenceIO.pm', 'Usage.pm']
        for f in files:
            install(f, destdir)
        # Bundled Perl library directories.
        dirs = ['Text', 'MLDBM', 'Parallel', ]
        for d in dirs:
            install_tree(d, join_path(destdir, d))
        # Bundled copies of the external tools the scripts call by path.
        install_tree('bamtools-2.3.0/bin',
                     join_path(destdir, 'bamtools-2.3.0', 'bin'))
        install_tree('bamtools-2.3.0/lib',
                     join_path(destdir, 'bamtools-2.3.0', 'lib'))
        mkdirp(join_path(destdir, 'bcftools-1.1'))
        install('bcftools-1.1/bcftools', join_path(destdir, 'bcftools-1.1'))
        mkdirp(join_path(destdir, 'Microassembler'))
        install('Microassembler/Microassembler',
                join_path(destdir, 'Microassembler'))
        mkdirp(join_path(destdir, 'samtools-1.1'))
        install('samtools-1.1/samtools', join_path(destdir, 'samtools-1.1'))
| Python | 0.000002 | |
# Test that sys.stdout can be overridden (redirected into a StringIO).
try:
    import uio as io  # MicroPython name for the io module
except ImportError:
    try:
        import io
    except ImportError:
        print("SKIP")
        raise SystemExit
import sys

try:
    # Probe: on some ports sys.stdout is read-only, so assignment raises.
    sys.stdout = sys.stdout
except AttributeError:
    print("SKIP")
    raise SystemExit

buf = io.StringIO()
sys.stdout = buf
# This output is captured in ``buf`` instead of the real stdout.
print(1, "test", 10 + 20)
| Python | 0.000009 | |
855d10b768fbfec7772f8e5df4c181d971fe0dd4 | add tests. | tests/test_primesieve_array.py | tests/test_primesieve_array.py | from primesieve.array import n_primes, primes
def assert_array_equal(have, want):
    """Fail unless the iterable *have* contains exactly the items of *want*."""
    actual = [item for item in have]
    assert actual == want
def test_primes_array():
    """primes(n) lists primes below n; primes(a, b) lists primes in [a, b]."""
    assert_array_equal(primes(10), [2,3,5,7])
    assert_array_equal(primes(10, 20), [11,13,17,19])
def test_n_primes_array():
    """n_primes(k) lists the first k primes; n_primes(k, m) the first k primes >= m."""
    assert_array_equal(n_primes(7), [2,3,5,7,11,13,17])
    assert_array_equal(n_primes(5, 100), [101,103,107,109,113])
| Python | 0 | |
9de475e8007b209d005ed222686cb46bddef053d | Integrate LLVM at llvm/llvm-project@9e37b1e5a0c1 | third_party/llvm/workspace.bzl | third_party/llvm/workspace.bzl | """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
    """Imports LLVM."""

    # Pinned llvm-project commit and the sha256 of its GitHub archive tarball.
    LLVM_COMMIT = "9e37b1e5a0c15f36c5642406d5aa02a657a0b19c"
    LLVM_SHA256 = "e2cca91a76ee6b44a6af91874e429af582b248b96ccd139373fec69ed0b0215f"

    tf_http_archive(
        name = name,
        sha256 = LLVM_SHA256,
        strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
        # Mirror first, upstream GitHub as fallback.
        urls = [
            "https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
            "https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
        ],
        build_file = "//third_party/llvm:llvm.BUILD",
        # Local patches applied on top of the pinned commit.
        patch_file = [
            "//third_party/llvm:infer_type.patch",  # TODO(b/231285230): remove once resolved
            "//third_party/llvm:build.patch",
            "//third_party/llvm:toolchains.patch",
        ],
        link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
    )
| """Provides the repository macro to import LLVM."""
load("//third_party:repo.bzl", "tf_http_archive")
def repo(name):
    """Imports LLVM."""

    # Pinned llvm-project commit and the sha256 of its GitHub archive tarball.
    LLVM_COMMIT = "ab85996e475ceddfda82255c314229ac0c0f4994"
    LLVM_SHA256 = "140b4198fa4f0ec1917a0e252feec5e19ccd9d7e96fc818c555b5551c796ec5b"

    tf_http_archive(
        name = name,
        sha256 = LLVM_SHA256,
        strip_prefix = "llvm-project-{commit}".format(commit = LLVM_COMMIT),
        # Mirror first, upstream GitHub as fallback.
        urls = [
            "https://storage.googleapis.com/mirror.tensorflow.org/github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
            "https://github.com/llvm/llvm-project/archive/{commit}.tar.gz".format(commit = LLVM_COMMIT),
        ],
        build_file = "//third_party/llvm:llvm.BUILD",
        # Local patches applied on top of the pinned commit.
        patch_file = [
            "//third_party/llvm:infer_type.patch",  # TODO(b/231285230): remove once resolved
            "//third_party/llvm:build.patch",
            "//third_party/llvm:toolchains.patch",
        ],
        link_files = {"//third_party/llvm:run_lit.sh": "mlir/run_lit.sh"},
    )
| Python | 0.000003 |
c6b0b5a8cef752481d5ec6672313ec8829d4299f | Create saving.py | Webpage/cgi-bin/saving.py | Webpage/cgi-bin/saving.py | #!/usr/bin/python3
import os
import os.path
import cgi, cgitb
import re
import pickle
#own packages
import dbcPattern
def dbc_main():  # NEW except for the call to processInput
    """CGI entry point: read the form fields and regenerate the saved page.

    Always returns -1 (the return value is not used by the CGI harness).
    """
    form = cgi.FieldStorage()  # standard cgi script lines to here!
    # use format of next two lines with YOUR names and default data
    pagedata = form['webpage'].value
    SRate = form['SampleRates'].value
    StartVal = form['StartVal'].value
    if pagedata:
        contents = processInput(pagedata, SRate, StartVal)  # process input into a page
        print(contents)
    return -1
def processInput(pagedata, SRate, StartVal):
    """Persist the page data plus sample rates / start values to ../saved.txt.

    ``SRate`` and ``StartVal`` are comma-separated strings; either may be
    empty.  Returns the regenerated HTML from createHTML().

    Fixes vs. the original: the invalid statement ``i++`` (a syntax error in
    Python), str+int concatenation of the counter, the second loop writing
    ``rate`` instead of the start value, and the file handle is now closed
    even on error via ``with``.
    """
    srates_list = SRate.split(',') if SRate else []
    startval_list = StartVal.split(',') if StartVal else []
    with open("../saved.txt", "w") as out:
        out.write(pagedata)
        out.write("\nEndeHTML")
        # The counter is shared across both sections, matching the original's
        # single ``i`` that was never reset between the loops.
        i = 0
        for rate in srates_list:
            out.write('SampleRate %d: %s' % (i, rate))
            i += 1
        out.write("\nEndeRates")
        for value in startval_list:
            out.write('StartValue %d: %s' % (i, value))
            i += 1
        out.write("\nEndeValues")
    return createHTML()
def createHTML(sig_num=0, sig_list=None):
    """Rebuild the HTML page from the saved header template.

    ``sig_num`` and ``sig_list`` are currently unused; defaults were added so
    the zero-argument call in processInput() no longer raises TypeError
    (the original signature required both arguments).
    """
    with open("Header_Saved.html") as template:
        html_string = template.read()
    # NOTE(review): this re-opens Header_Saved.html and appends its leading
    # lines again; it looks like the second read was meant to target the file
    # written by processInput ("../saved.txt") -- confirm before changing.
    with open("Header_Saved.html") as savings:
        for line in savings:
            if re.match("EndeHTML", line):
                break
            html_string += line
    return html_string
# NOTE: must later be copied into the main program
# (original German comment: "Muss später ins Hauptprogramm kopiert werden").
try:  # NEW
    cgitb.enable()
    print("Content-Type: text/html;charset:UTF-8")  # say generating html
    print("\n\n")
    dbc_main()
except:
    cgi.print_exception()  # catch and print errors
| Python | 0.000001 | |
8c6335c7ba7ebb34566603eb2943752fd3f524db | Add Exercise 9.13. | Kane1985/Chapter5/Ex9.13.py | Kane1985/Chapter5/Ex9.13.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Exercise 9.13 from Kane 1985."""
from __future__ import division
from sympy import expand, symbols
from sympy.physics.mechanics import ReferenceFrame, Point
from sympy.physics.mechanics import dynamicsymbols

from util import msprint, partial_velocities
from util import function_from_partials, generalized_active_forces

# Generalized coordinates, their time derivatives, and generalized speeds.
q1, q2 = q = dynamicsymbols('q1:3')
q1d, q2d = qd = dynamicsymbols('q1:3', level=1)
u1, u2 = u = dynamicsymbols('u1:3')
# L1, L2 are the natural (unstretched) lengths of the springs.
alpha, beta, L1, L2, k1, k2 = symbols('α β L1 L2 k1 k2',
                                      real=True, positive=True)

# reference frames
N = ReferenceFrame('N')

# define points
pO = Point('O')  # point O is fixed on the wall
pB1 = pO.locatenew('B1', (L1 + q1)*N.x)  # treat block 1 as a point mass
pB2 = pB1.locatenew('B2', (L2 + q2)*N.x)  # treat block 2 as a point mass
pB1.set_vel(N, pB1.pos_from(pO).dt(N))
pB2.set_vel(N, pB2.pos_from(pO).dt(N))

# kinematic differential equations: map each qdot to the matching u
kde_map = dict(zip(map(lambda x: x.diff(), q), u))

# forces (only the dashpot forces contribute to the dissipation function)
#spring_forces = [(pB1, -k1 * q1 * N.x),
#                 (pB1, k2 * q2 * N.x),
#                 (pB2, -k2 * q2 * N.x)]
dashpot_forces = [(pB1, beta * q2d * N.x),
                  (pB2, -beta * q2d * N.x),
                  (pB2, -alpha * (q1d + q2d) * N.x)]
#forces = spring_forces + dashpot_forces

partials_c = partial_velocities(zip(*dashpot_forces)[0], u, N, kde_map)
Fr_c, _ = generalized_active_forces(partials_c, dashpot_forces)
#print('generalized active forces due to dashpot forces')
#for i, fr in enumerate(Fr_c, 1):
#    print('(F{0})c = {1} = -∂ℱ/∂u{0}'.format(i, msprint(fr)))

# Recover the dissipation function ℱ from (F_r)c = -∂ℱ/∂u_r.
dissipation_function = function_from_partials(
    map(lambda x: -x.subs(kde_map), Fr_c), u, zero_constants=True)

print('ℱ = {0}'.format(msprint(dissipation_function)))
dissipation_function_expected = (alpha*u1**2 + 2*alpha*u1*u2 +
                                 (alpha + beta)*u2**2)/2
# Sanity check against the closed-form expected result.
assert expand(dissipation_function - dissipation_function_expected) == 0
| Python | 0.000001 | |
5a2f8967ac09b3aa1fc1cda21fd6dc5cf1d3f896 | Add gesture recognition prototype | gesture_recognition/__init__.py | gesture_recognition/__init__.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import pygame
from pygame import camera
from pygame.constants import QUIT, K_ESCAPE, KEYDOWN
import numpy as np
class Capture(object):
    """Webcam capture loop that displays a thresholded frame-difference image."""

    def __init__(self):
        camera.init()
        self.size = (640, 480, )
        # create a display surface. standard pygame stuff
        self.display = pygame.display.set_mode(self.size, 0)

        # this is the same as what we saw before
        self.clist = pygame.camera.list_cameras()
        if not self.clist:
            raise ValueError("Sorry, no cameras detected.")
        self.cam = pygame.camera.Camera(self.clist[0], self.size)
        self.cam.start()

        # create a surface to capture to. for performance purposes
        # bit depth is the same as that of the display surface.
        self.snapshot = pygame.surface.Surface(self.size, 0, self.display)
        self.thresholded = pygame.surface.Surface(self.size, 0, self.display)
        # Previous frame as a pixel array; None until the first frame arrives.
        self.previous_pixels = None

    def get_and_flip(self):
        """Grab a frame (if ready), show the thresholded diff vs. the last frame."""
        # if you don't want to tie the framerate to the camera, you can check
        # if the camera has an image ready. note that while this works
        # on most cameras, some will never return true.
        if self.cam.query_image():
            self.snapshot = self.cam.get_image(self.snapshot)
            pixels = pygame.surfarray.array3d(self.snapshot).astype(np.int)  # np.int to make it signed
            if self.previous_pixels is not None:
                # Get image difference
                p = np.subtract(pixels, self.previous_pixels)
                # Reset all pixels below threshold
                threshold = 30
                bool_matrix = np.logical_and(p < threshold, p > -threshold)
                p[bool_matrix] = 0
                # p[np.invert(bool_matrix)] = 200
                # Show differential image
                self.snapshot = pygame.surfarray.make_surface(p)
            self.previous_pixels = pixels

        # blit it to the display surface. simple!
        self.display.blit(self.snapshot, (0, 0))
        pygame.display.flip()

    def main(self):
        """Event loop: run until the window closes or Escape is pressed."""
        going = True
        while going:
            events = pygame.event.get()
            for e in events:
                if e.type == QUIT or (e.type == KEYDOWN and e.key == K_ESCAPE):
                    # close the camera safely
                    self.cam.stop()
                    going = False

            self.get_and_flip()
Capture().main()
| Python | 0.000002 | |
41ee54414845c3d8c1592048fe2f7cee57153eee | Add Python Numpy and Pandas cheatsheet | pythonCheatsheet.py | pythonCheatsheet.py | #!/usr/local/bin/python
# Python Numpy and Pandas Cheatsheet
| Python | 0.000001 | |
a813d79ccd63c9ff40afaf3fda4e2c8c0a37ee25 | Add wsgi file | readthedocs/wsgi.py | readthedocs/wsgi.py | import os
# Point Django at the project's settings module before the app loads.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")

# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()

# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| Python | 0.000001 | |
3d0ac61fa03ab27c567155f989db0ceb2134c9e0 | adding test_hello | test_hello.py | test_hello.py | print "Hello World"
| Python | 0.999096 | |
f0e80e3ab82b5edae6ee4e71ecc243cb7c49a4a0 | Revert "move to directory" | pathmark/medbookAdapterPATHMARK.py | pathmark/medbookAdapterPATHMARK.py | #!/usr/bin/env python
"""
medbookAdapterPATHMARK.py
by Robert Baertsch
"""
import logging, math, os, random, re, shutil, sys, types, zipfile
from copy import deepcopy
from optparse import OptionParser
## logger
logging.basicConfig(filename = "medbook-pathmark.log", level = logging.INFO)
## executables
print "startup"
bin_dir = os.path.dirname(os.path.abspath(__file__))
print "bindir ", bin_dir
signature_exec = os.path.join(bin_dir, "signature.py")
pathmark_exec = os.path.join(bin_dir, "PATHMARK.py")
print "signature.py and pathmark.py ok"
## functions
def zipDirectory(directory, zip):
    """Walk *directory* recursively and add every regular file to the
    already-open ZipFile *zip* (entries keep their on-disk relative paths).
    """
    for dirpath, _dirnames, filenames in os.walk(directory):
        for filename in filenames:
            zip.write(os.path.join(dirpath, filename))
def main():
    """Run signature.py, then PATHMARK.py, then collect report outputs.

    Each stage is launched as a separate process via os.system; jobTree state
    directories are renamed between stages so the second run starts fresh.
    """
    jobtree_dir = bin_dir + ".."
    sys.path.append(jobtree_dir)

    ## check for fresh run
    if os.path.exists(".jobTree") or os.path.exists("jobTree"):
        logging.warning("WARNING: '.jobTree' directory found, remove it first to start a fresh run\n")

    ## parse arguments
    parser = OptionParser(usage = "%prog [options] data_matrix phenotype_matrix pathway_file")
    parser.add_option("-b", "--bootstrap", dest = "bootstrap_size", default = 0,
                      help = "number of bootstrap samples to estimate subnetwork robustness")
    parser.add_option("-n", "--null", dest = "null_size", default = 0,
                      help = "number of null samples to estimate subnetwork signifiance")
    parser.add_option("-p", "--permute", dest = "null_permute", default = "paradigm",
                      help = "permutation method for generation of null samples")
    parser.add_option("-m", "--method", dest = "signature_method", default = "sam",
                      help = "differential method for computing signatures")
    parser.add_option("-f", "--filter", dest = "filter_parameters", default = "0.0;0.0",
                      help = "filter threshold coefficients")
    parser.add_option("-t", "--heat", dest = "heat_diffusion", default = "0.0",
                      help = "diffusion time for heat diffusion of signature scores across the network")
    parser.add_option("-u", "--hub", dest = "hub_filter", action = "store_true", default = False,
                      help = "apply hub filter that includes hubs with high representation of its children")
    parser.add_option("-z", "--seed", dest = "seed", default = None,
                      help = "random seed used for bootstrap and null generation")
    parser.add_option("--bs", "--batchSystem", dest = "batch_system", default = None,
                      help = "override default batch system used by jobTree")
    parser.add_option("--oz", "--output-zip", dest = "output_zip", default = None,
                      help = "output files into a zipfile")
    parser.add_option("--os", "--output-signature", dest = "output_signature", default = None,
                      help = "output signature file")
    options, args = parser.parse_args()
    logging.info("options: %s" % (str(options)))

    work_dir = os.path.abspath("./")
    if len(args) != 3:
        logging.error("ERROR: incorrect number of arguments\n")
        sys.exit(1)
    data_file = os.path.abspath(args[0])
    phenotype_file = os.path.abspath(args[1])
    pathway_file = os.path.abspath(args[2])

    ## run signature.py
    # Stage 1: compute differential signatures from data + phenotypes.
    cmd = "%s %s" % (sys.executable, signature_exec)
    print "cmd", cmd
    if options.batch_system is not None:
        cmd += " --batchSystem %s" % (options.batch_system)
    cmd += " -b %s" % (options.bootstrap_size)
    cmd += " -n %s" % (options.null_size)
    cmd += " -p %s" % (options.null_permute)
    cmd += " -m %s" % (options.signature_method)
    if options.seed is not None:
        cmd += " -z %s" % (options.seed)
    cmd += " %s %s" % (data_file, phenotype_file)
    os.system(cmd)
    # Preserve the jobTree state of this stage under a stage-specific name.
    if os.path.exists(".jobTree_previous"):
        shutil.move(".jobTree_previous", ".jobTree_signature")
    elif os.path.exists("jobTree_previous"):
        shutil.move("jobTree_previous", "jobTree_signature")
    logging.info("system: %s" % (cmd))

    ## run PATHMARK.py
    # Stage 2: run PATHMARK on the signatures produced by stage 1.
    cmd = "%s %s" % (sys.executable, pathmark_exec)
    if options.batch_system is not None:
        cmd += " --batchSystem %s" % (options.batch_system)
    if os.path.exists("null_signature.tab"):
        cmd += " -n %s" % ("null_signature.tab")
    if os.path.exists("bootstrap_signature.tab"):
        cmd += " -b %s" % ("bootstrap_signature.tab")
    cmd += " -f \"%s\"" % (options.filter_parameters)
    cmd += " -t %s" % (options.heat_diffusion)
    if options.hub_filter:
        cmd += " -u"
    cmd += " signature.tab %s" % (pathway_file)
    os.system(cmd)
    if os.path.exists(".jobTree_previous"):
        shutil.move(".jobTree_previous", ".jobTree_pathmark")
    elif os.path.exists("jobTree_previous"):
        shutil.move("jobTree_previous", "jobTree_pathmark")
    logging.info("system: %s" % (cmd))

    ## prepare outputs
    report_dir = "report"
    if options.output_zip is not None:
        zip_file = zipfile.ZipFile("report.zip", "w")
        zipDirectory(report_dir, zip_file)
        zip_file.close()
        shutil.copy(os.path.join(work_dir, "report.zip"), options.output_zip)
    if options.output_signature is not None:
        shutil.copy(os.path.join(work_dir, "signature.tab"), options.output_signature)
    print "work_dir", work_dir
    print "output signature", options.output_signature
    from os import listdir
    from os.path import isfile, join
    # Write the list of report files (one absolute path per line).
    onlyfiles = [ f for f in listdir(report_dir) if isfile(join(report_dir,f)) ]
    outlist = open('report.list', 'w')
    for f in onlyfiles:
        file = os.path.join(work_dir, "report", f)
        outlist.write(file)
        outlist.write('\n')
    outlist.close()
    print "reports", onlyfiles
| Python | 0 | |
7e9794dc98a268479f0f57128effc67f88586c8f | Add default message for list pages | bvspca/core/migrations/0025_auto_20180202_1214.py | bvspca/core/migrations/0025_auto_20180202_1214.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.9 on 2018-02-02 19:14
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Give ContentIndexPage.empty_message a default value ('Empty')."""

    dependencies = [
        ('core', '0024_contentindexpage_empty_message'),
    ]

    operations = [
        migrations.AlterField(
            model_name='contentindexpage',
            name='empty_message',
            field=models.CharField(default='Empty', max_length=200),
        ),
    ]
| Python | 0.000001 | |
63db1dc6c23c4afd41bca5cf06207e383c982b51 | structure of the commandprocessor | app/core/commandprocessor.py | app/core/commandprocessor.py | class CommandProcessor:
    def parseCommand(self):
        """Parse the raw command text into its components (not yet implemented)."""
        pass
    def constructUrl(self):
        """Build the request URL from the parsed command (not yet implemented)."""
        pass
    def processCommand(self):
        """Run the full parse -> construct-URL pipeline (not yet implemented)."""
        pass
| Python | 0.000015 | |
2c687118a9aa248d6e6f28259d8a81217ee9cb1d | add solution for Number of Digit One | algorithms/numberOfDigitOne/numberOfDigitOne.py | algorithms/numberOfDigitOne/numberOfDigitOne.py | class Solution:
# @param {integer} n
# @return {integer}
def countDigitOne(self, n):
res = prev = 0
x = 1
while n > 0: # n = 23[y]xxx
y = n % 10
n /= 10
if y > 1:
res += x # 23[2]xxx
elif y == 1:
res += prev + 1 # 23[1]xxx
res += n * x # 0[1]xxx ~ 22[1]xxx
prev += y * x
x *= 10
return res
| Python | 0.999957 | |
67dfcd5abb73aff7fd416f665de0d8461ba3e8b4 | Create Subset.py | Tests/Subset.py | Tests/Subset.py | __author__ = 'Marius Wirtz'
from TM1py import TM1Queries, Subset
import uuid
import json
import unittest
class TestAnnotationMethods(unittest.TestCase):
    # Integration tests for TM1py Subset CRUD against a live TM1 server.
    # NOTE(review): relies on unittest's alphabetical test ordering
    # (create -> delete -> get -> update) and on a reachable server — confirm.

    q = TM1Queries(ip='', port=8008, user='admin', password='apple', ssl=True)
    # Random subset names so repeated runs don't collide on the server.
    random_string1 = str(uuid.uuid4()).replace('-', '_')
    random_string2 = str(uuid.uuid4()).replace('-', '_')

    # 1. create subset
    def test_create_subset(self):
        # Static subset: explicit element list.
        s = Subset(dimension_name='plan_business_unit', subset_name=self.random_string1,
                   elements=['10110', '10300', '10210', '10000'])
        response = self.q.create_subset(s)
        print(response)
        response_as_dict = json.loads(response)
        name_in_response = response_as_dict['Name']
        self.assertEqual(self.random_string1, name_in_response)
        # Dynamic subset: defined by an MDX expression.
        s = Subset(dimension_name='plan_business_unit', subset_name=self.random_string2,
                   expression='{ HIERARCHIZE( {TM1SUBSETALL( [plan_business_unit] )} ) }')
        response = self.q.create_subset(s)
        response_as_dict = json.loads(response)
        name_in_response = response_as_dict['Name']
        self.assertEqual(self.random_string2, name_in_response)

    # 2. get subset
    def test_get_subset(self):
        s = self.q.get_subset(name_dimension='plan_business_unit', name_subset='static_subset_for_unit_test')
        self.assertIsInstance(s, Subset)
        s = self.q.get_subset(name_dimension='plan_business_unit', name_subset='dynamic_subset_for_unit_test')
        self.assertIsInstance(s, Subset)

    # 3. update subset
    def test_update_subset(self):
        s = self.q.get_subset(name_dimension='plan_business_unit', name_subset='static_subset_for_unit_test')
        s.add_elements(['10110'])
        self.q.update_subset(s)
        s = self.q.get_subset(name_dimension='plan_business_unit', name_subset='dynamic_subset_for_unit_test')
        s.set_expression('{ HIERARCHIZE( {TM1SUBSETALL( [plan_business_unit] )} ) }')
        self.q.update_subset(s)

    # 4. delete subset
    def test_delete_subset(self):
        response = self.q.delete_subset('plan_business_unit', self.random_string1)
        self.assertEqual(response, '')
        response = self.q.delete_subset('plan_business_unit', self.random_string2)
        self.assertEqual(response, '')
if __name__ == '__main__':
unittest.main()
| Python | 0.000001 | |
93d6915c0e45d1873a48c298749d6956edbc337e | add remote ssh capability to fs-drift.py | ssh_thread.py | ssh_thread.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
'''
ssh_thread.py -- manages parallel execution of shell commands on remote hosts
Copyright 2012 -- Ben England
Licensed under the Apache License at http://www.apache.org/licenses/LICENSE-2.0
See Appendix on this page for instructions pertaining to license.
'''
import threading
import os
# this class is just used to create a python thread
# for each remote host that we want to use as a workload generator
# the thread just executes an ssh command to run this program on a remote host
class ssh_thread(threading.Thread):
    """One worker thread per remote host: runs a single command over ssh
    (X11 forwarding off, host-key checking disabled) and records the shell
    exit status in ``status``.
    """

    ssh_prefix = 'ssh -x -o StrictHostKeyChecking=no '

    def __init__(self, remote_host, remote_cmd_in):
        threading.Thread.__init__(self)
        self.remote_host = remote_host
        # Quote the remote command so it survives the local shell.
        self.remote_cmd = '{0} {1} "{2}"'.format(
            self.ssh_prefix, self.remote_host, remote_cmd_in)
        # print('thread cmd %s'%self.remote_cmd)
        self.status = None  # set by run()

    def __str__(self):
        return 'ssh-thread:%s:%s:%s' % (
            self.remote_host, str(self.status), self.remote_cmd)

    def run(self):
        # os.system blocks until the remote command completes.
        self.status = os.system(self.remote_cmd)
| Python | 0.000001 | |
def satisfiesF(L):
    """
    Assumes L is a list of strings
    Assume function f is already defined for you and it maps a string to a Boolean
    Mutates L such that it contains all of the strings, s, originally in L such
    that f(s) returns True, and no other elements
    Returns the length of L after mutation
    """
    # Slice assignment mutates L in place (the caller's list object).
    L[:] = [s for s in L if f(s)]
    return len(L)
#-----used for submitting-----
# run_satisfiesF(L, satisfiesF)
#-----test case-----
# def f(s):
# return 'a' in s
#
# L = ['a', 'b', 'bc', 'c', 'ab']
# print satisfiesF(L)
# print L | Python | 0 | |
820fe44762f0037eaacba9b7bf4129a29e25e799 | add migration | accelerator/migrations/0036_add_user_deferrable_modal.py | accelerator/migrations/0036_add_user_deferrable_modal.py | # Generated by Django 2.2.10 on 2021-03-03 17:08
from django.conf import settings
from django.db import (
migrations,
models,
)
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create UserDeferrableModal: per-user deferral state for a deferrable modal."""

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('accelerator', '0035_add_deferrable_modal_model'),
    ]

    operations = [
        migrations.CreateModel(
            name='UserDeferrableModal',
            fields=[
                ('id', models.AutoField(
                    auto_created=True,
                    primary_key=True,
                    serialize=False,
                    verbose_name='ID')),
                ('created_at', models.DateTimeField(
                    auto_now_add=True,
                    null=True)),
                ('updated_at', models.DateTimeField(
                    auto_now=True,
                    null=True)),
                ('is_deferred', models.BooleanField(default=False)),
                # When set, the modal stays hidden until this timestamp.
                ('deferred_to', models.DateTimeField(
                    blank=True,
                    null=True)),
                ('deferrable_modal', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE,
                    to=settings.ACCELERATOR_DEFERRABLEMODAL_MODEL)),
                ('user', models.ForeignKey(
                    on_delete=django.db.models.deletion.CASCADE,
                    to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'verbose_name': 'User Deferrable Modal',
                'abstract': False,
                'managed': True,
                'swappable': None,
            },
        ),
    ]
| Python | 0 | |
a5cabf4b778d03cac472e22b0e62bc262796b5ff | Add tests for `cms.templatetags.pagination`. | cms/tests/test_templatetags_pagination.py | cms/tests/test_templatetags_pagination.py | from django.http import Http404
from django.test import RequestFactory, TestCase
from ..templatetags.pagination import paginate, pagination, pagination_url
class Object(object):
    # Minimal stand-in for a page object; pagination() reads its ``paginator``.
    paginator = None
class PaginationTest(TestCase):
    """Tests for the cms.templatetags.pagination template helpers."""

    def setUp(self):
        self.factory = RequestFactory()
        self.request = self.factory.get('/')

    def test_paginate(self):
        paginate_response = paginate({'request': self.request}, [])
        self.assertEqual(repr(paginate_response), '<Page 1 of 1>')

        # page=2 is out of range for an empty object list -> 404.
        with self.assertRaises(Http404):
            self.request = self.factory.get('/?page=2')
            paginate({'request': self.request}, [])

    def test_pagination(self):
        obj = Object()
        pagination_response = pagination({'request': self.request}, obj)
        self.assertDictEqual(pagination_response, {
            'paginator': None,
            'pagination_key': 'page',
            'page_obj': obj,
            'request': self.request,
        })

    def test_pagination_url(self):
        # Page 1 is the bare path; later pages get an explicit query param.
        self.assertEqual(pagination_url({'request': self.request}, 1), '/')
        self.assertEqual(pagination_url({'request': self.request}, 2), '/?page=2')
| Python | 0 | |
aa218407a9efdde9daa53d638fdfdacff873f14b | test change | clients/python/flask-server/tests/client_tests.py | clients/python/flask-server/tests/client_tests.py | """
integration test in python
!!!! NEED PYTHON 2.7.8
"""
import unittest
import urllib2
import urllib
import thread
import time
import json
from client import app
from multiprocessing import Process
class ServerHandlerTest(unittest.TestCase):
    """Integration tests: run the Flask app in a child process and exercise
    its endpoints over real HTTP."""

    server = Process(target=app.run)

    @classmethod
    def setUpClass(cls):
        cls.server.start()
        time.sleep(1)  # give the server a moment to bind its port

    @classmethod
    def tearDownClass(cls):
        cls.server.terminate()
        cls.server.join()

    def assertContent(self, content, response):
        """Assert that *content* appears as a full line of *response*.

        Fix: ``found`` was only bound inside the loop, so a response with no
        matching line raised NameError instead of failing the assertion.
        """
        found = False
        for line in response.readlines():
            if line == content:
                found = True
        self.assertTrue(found)

    def test_should_call_get(self):
        response = urllib2.urlopen("http://localhost:5000/")
        self.assertContent('hello world', response)

    def test_should_call_post_ping(self):
        data = urllib.urlencode({'q': 'Ping'})
        response = urllib2.urlopen("http://localhost:5000/ping", data)
        self.assertContent('pong', response)

    def test_should_call_post_order(self):
        req = urllib2.Request('http://localhost:5000/order')
        req.add_header('Content-Type', 'application/json')
        response = urllib2.urlopen(req, json.dumps({'q': 'Path'}))
        self.assertEqual(json.loads(response.read()), {u'total' : 1000})

    @unittest.expectedFailure
    def test_should_call_post_unknown(self):
        # Unknown endpoint: the server responds with an error status.
        data = urllib.urlencode({'answer': 'hello'})
        urllib2.urlopen("http://localhost:5000/unknown", data)
if __name__ == '__main__':
unittest.main()
| Python | 0.000002 | |
e2ed85ae1bb3f647095abb00b118cf06ae7549aa | add setup (even if not really needed) | 0_Python/setup.py | 0_Python/setup.py | #!/usr/bin/python
from distutils.core import setup
from Cython.Distutils import build_ext
from distutils.extension import Extension
# Declare the Cython extension module built from inside_polygon.pyx.
cy_mod = Extension("inside_polygon",
                   sources= ["inside_polygon.pyx"])

# Use Cython's build_ext so the .pyx source is cythonized during build.
setup(ext_modules=[cy_mod],
      cmdclass={'build_ext': build_ext})
| Python | 0 | |
cd1c88c519a7079b2cef752473e5da3ddb4224e3 | Add stress package (#3695) | var/spack/repos/builtin/packages/stress/package.py | var/spack/repos/builtin/packages/stress/package.py | ##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Stress(AutotoolsPackage):
    """stress is a deliberately simple workload generator for POSIX systems.
    It imposes a configurable amount of CPU, memory, I/O, and disk stress on
    the system. It is written in C, and is free software licensed under the
    GPLv2."""

    homepage = "https://people.seas.harvard.edu/~apw/stress/"
    url = "https://people.seas.harvard.edu/~apw/stress/stress-1.0.4.tar.gz"

    # Second argument is the md5 checksum of the release tarball.
    version('1.0.4', '890a4236dd1656792f3ef9a190cf99ef')
| Python | 0 | |
9ce90bc43cfcc5a56be958671f304e7929eb0446 | Add missing migration step dua changes in model | cmsplugin_collapse/migrations/0002_auto_20160210_0651.py | cmsplugin_collapse/migrations/0002_auto_20160210_0651.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Adds a help_text (and default) to AccordionHeader.show_first.

    Auto-generated by Django's makemigrations; depends on the app's
    initial migration.
    """

    dependencies = [
        ('cmsplugin_collapse', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='accordionheader',
            name='show_first',
            field=models.BooleanField(default=True, help_text='If selected, the first collapsible will be displayed in the open state.'),
            preserve_default=True,
        ),
    ]
| Python | 0 | |
b0ea743fa320f0df6e35b4381e6bd778906a5532 | Add caching mechanism | labmanager/rlms/caches.py | labmanager/rlms/caches.py | import calendar
# stdlib
import time  # used by LastModifiedNoDate (time.time / time.strftime) -- was missing
from email.utils import formatdate, parsedate, parsedate_tz

# third party
import requests  # used by get_cached_session (requests.Session) -- was missing
from cachecontrol import CacheControl
from cachecontrol.caches import FileCache
from cachecontrol.heuristics import LastModified, TIME_FMT
class LastModifiedNoDate(LastModified):
    """ This takes the original LastModified implementation of
    cachecontrol, but defaults the date in case it is not provided.

    When a response carries a Last-Modified but no Date header, the
    current time is substituted so the response can still be cached.
    """
    def __init__(self, require_date = True, error_margin = None):
        # error_margin is the fraction of the (Date - Last-Modified) age
        # the response is considered fresh for; a larger margin is used
        # when the Date header is allowed to be absent (less reliable).
        if error_margin is None:
            if require_date:
                self.error_margin = 0.1
            else:
                self.error_margin = 0.2
        else:
            self.error_margin = error_margin
        self.require_date = require_date

    def update_headers(self, resp):
        """Return extra headers ({'expires': ...}) if the response may be
        cached heuristically, otherwise an empty dict (do not cache)."""
        headers = resp.headers
        # An explicit Expires or a non-public Cache-Control wins: no heuristic.
        if 'expires' in headers:
            return {}
        if 'cache-control' in headers and headers['cache-control'] != 'public':
            return {}
        # cacheable_by_default_statuses comes from the cachecontrol base class.
        if resp.status not in self.cacheable_by_default_statuses:
            return {}
        if 'last-modified' not in headers:
            return {}
        parsed_date = parsedate_tz(headers.get('date'))
        if self.require_date and parsed_date is None:
            return {}
        if parsed_date is None:
            # No Date header: pretend the response was generated right now.
            date = time.time()
            faked_date = True
        else:
            date = calendar.timegm(parsed_date)
            faked_date = False
        last_modified = parsedate(headers['last-modified'])
        if last_modified is None:
            return {}
        now = time.time()
        current_age = max(0, now - date)
        # Freshness = a fraction of how old the resource already was,
        # capped at 24 hours.
        delta = date - calendar.timegm(last_modified)
        freshness_lifetime = max(0, min(delta * self.error_margin, 24 * 3600))
        if freshness_lifetime <= current_age:
            return {}
        expires = date + freshness_lifetime
        new_headers = {'expires': time.strftime(TIME_FMT, time.gmtime(expires))}
        if faked_date:
            # Make the faked Date visible to downstream cache logic.
            new_headers['date'] = time.strftime(TIME_FMT, time.gmtime(date))
        return new_headers

    def warning(self, resp):
        # Suppress the Warning header cachecontrol would normally add
        # for heuristically-cached responses.
        return None
def get_cached_session(cache_dir='web_cache'):
    """Build a requests session with transparent on-disk HTTP caching.

    Responses are stored under *cache_dir* (created on demand by
    FileCache) and freshness is decided by LastModifiedNoDate, so
    responses lacking a Date header are still cached.

    :param cache_dir: directory for the file cache (default 'web_cache',
        preserving the previous hard-coded behavior).
    :return: a CacheControl-wrapped requests.Session.
    """
    return CacheControl(
        requests.Session(),
        cache=FileCache(cache_dir),
        heuristic=LastModifiedNoDate(require_date=False))
| Python | 0.000001 | |
b6ac6a73cf10372be3384dbeb99b82b137a9daa2 | Use chevrons instead of arrows in sortable links | oscar/templatetags/sorting_tags.py | oscar/templatetags/sorting_tags.py | # This is a rewrite of django-sorting but with added support for i18n title
# strings.
# See https://github.com/directeur/django-sorting
from django import template
from django.conf import settings
register = template.Library()

# Icon markup shown next to the active sort column; both can be overridden
# via Django settings.  Bootstrap chevron icons by default.
DEFAULT_SORT_UP = getattr(
    settings, 'DEFAULT_SORT_UP',
    '<i class="icon-chevron-up"></i>')
DEFAULT_SORT_DOWN = getattr(
    settings, 'DEFAULT_SORT_DOWN',
    '<i class="icon-chevron-down"></i>')

# Maps the current 'dir' GET parameter to the icon to render and the
# direction the link should switch to.  '' covers the first click.
sort_directions = {
    'asc': {'icon': DEFAULT_SORT_UP, 'inverse': 'desc'},
    'desc': {'icon': DEFAULT_SORT_DOWN, 'inverse': 'asc'},
    '': {'icon': DEFAULT_SORT_DOWN, 'inverse': 'asc'},
}
def anchor(parser, token):
    """Template-tag compiler: {% anchor field [title] %}.

    The first argument is the sort field; the optional second one is the
    link title (defaults to the capitalized field name).
    """
    bits = token.split_contents()
    if len(bits) < 2:
        raise template.TemplateSyntaxError(
            "anchor tag takes at least 1 argument")
    title = bits[2] if len(bits) > 2 else bits[1].capitalize()
    return SortAnchorNode(bits[1].strip(), title.strip())
class SortAnchorNode(template.Node):
    """Renders an <a> whose href sorts the current page by a field,
    flipping direction when the field is already the active sort."""

    def __init__(self, field, title):
        # Both are template variables so they can be resolved per-request.
        self.field = template.Variable(field)
        self.title = template.Variable(title)

    def render(self, context):
        field = self.field.resolve(context)
        title = self.title.resolve(context)
        request = context['request']
        get_vars = request.GET.copy()
        # QueryDict.pop returns a list of values; take the first.
        sort_field = get_vars.pop('sort', [None])[0]
        icon = ''
        if sort_field == field:
            # We are already sorting on this field, so we set the inverse
            # direction within the GET params that get used within the href.
            direction = get_vars.pop('dir', [''])[0]
            get_vars['dir'] = sort_directions[direction]['inverse']
            icon = sort_directions[direction]['icon']
        href = u'%s?sort=%s' % (request.path, field)
        if len(get_vars) > 0:
            # Preserve any other GET parameters (filters, pagination, ...).
            href += "&%s" % get_vars.urlencode()
        if icon:
            title = u"%s %s" % (title, icon)
        # NOTE(review): title/field are interpolated without HTML escaping;
        # presumably they come from trusted template literals -- confirm.
        return u'<a href="%s">%s</a>' % (href, title)

# Register the compiler function under the tag name 'anchor'.
anchor = register.tag(anchor)
| # This is a rewrite of django-sorting but with added support for i18n title
# strings.
# See https://github.com/directeur/django-sorting
from django import template
from django.conf import settings
register = template.Library()
DEFAULT_SORT_UP = getattr(settings, 'DEFAULT_SORT_UP', '↑')
DEFAULT_SORT_DOWN = getattr(settings, 'DEFAULT_SORT_DOWN', '↓')
sort_directions = {
'asc': {'icon': DEFAULT_SORT_UP, 'inverse': 'desc'},
'desc': {'icon': DEFAULT_SORT_DOWN, 'inverse': 'asc'},
'': {'icon': DEFAULT_SORT_DOWN, 'inverse': 'asc'},
}
def anchor(parser, token):
bits = token.split_contents()
if len(bits) < 2:
raise template.TemplateSyntaxError(
"anchor tag takes at least 1 argument")
try:
title = bits[2]
except IndexError:
title = bits[1].capitalize()
return SortAnchorNode(bits[1].strip(), title.strip())
class SortAnchorNode(template.Node):
def __init__(self, field, title):
self.field = template.Variable(field)
self.title = template.Variable(title)
def render(self, context):
field = self.field.resolve(context)
title = self.title.resolve(context)
request = context['request']
get_vars = request.GET.copy()
sort_field = get_vars.pop('sort', [None])[0]
icon = ''
if sort_field == field:
# We are already sorting on this field, so we set the inverse
# direction within the GET params that get used within the href.
direction = get_vars.pop('dir', [''])[0]
get_vars['dir'] = sort_directions[direction]['inverse']
icon = sort_directions[direction]['icon']
href = u'%s?sort=%s' % (request.path, field)
if len(get_vars) > 0:
href += "&%s" % get_vars.urlencode()
if icon:
title = u"%s %s" % (title, icon)
return u'<a href="%s">%s</a>' % (href, title)
anchor = register.tag(anchor)
| Python | 0 |
38f28bd0e5d4ea5af69ac7ccc553403a85ac61be | add problem 053 | problem_053.py | problem_053.py | #!/usr/bin/env python
#-*-coding:utf-8-*-
'''
There are exactly ten ways of selecting three from five, 12345:
123, 124, 125, 134, 135, 145, 234, 235, 245, and 345
In combinatorics, we use the notation, 5C3 = 10.
In general,
nCr =
n!
r!(n−r)!
,where r ≤ n, n! = n×(n−1)×...×3×2×1, and 0! = 1.
It is not until n = 23, that a value exceeds one-million: 23C10 = 1144066.
How many, not necessarily distinct, values of nCr,
for 1 ≤ n ≤ 100, are greater than one-million?
'''
from math import factorial
import timeit
def calc(nlim, lim):
    """Count the nCr values with 0 <= r <= n <= nlim that exceed lim.

    Uses exact integer arithmetic: the original used true division '/',
    which under Python 3 converts huge factorials to float and raises
    OverflowError (and loses precision); '//' is exact and behaves
    identically for these integer operands under Python 2.

    @param nlim: largest n to consider (inclusive)
    @param lim: threshold; values strictly greater are counted
    @return: number of (n, r) pairs with nCr > lim
    """
    cnt = 0
    for n in range(nlim + 1):
        fn = factorial(n)  # hoisted: invariant over the inner loop
        for r in range(n + 1):
            ncr = fn // (factorial(r) * factorial(n - r))
            if ncr > lim:
                cnt += 1
    return cnt
if __name__ == '__main__':
    # Project Euler 53 answer for n <= 100, threshold one million.
    print calc(100, 1000000)
    # Time one full run of the computation.
    print timeit.Timer('problem_053.calc(100, 1000000)', 'import problem_053').timeit(1)
| Python | 0.000383 | |
ac2d5c10e7895515acd63e2ca91924e99ec17003 | add (Failing) test | test/test_writing.py | test/test_writing.py | import RMF
RMF.set_log_level("trace")

# Round-trip test: write a node with particle + fragment attributes,
# close the file, reopen read-only and check the decorators survive.
path = RMF._get_temporary_file_path("writing.rmf")
print path
fh = RMF.create_rmf_file(path)
fh.add_frame("frame", RMF.FRAME)
fn = fh.get_root_node().add_child("frag", RMF.REPRESENTATION)
pf = RMF.ParticleFactory(fh)
ff = RMF.FragmentFactory(fh)
pf.get(fn).set_radius(1.0)
pf.get(fn).set_mass(2.0)
pf.get(fn).set_coordinates([1,2,3])
ff.get(fn).set_indexes([1,2,3,4])
# Deleting the handle flushes and closes the file on disk.
del fh

fh = RMF.open_rmf_file_read_only(path)
fh.set_current_frame(RMF.FrameID(0))
fn = fh.get_root_node().get_children()[0]
pf = RMF.ParticleFactory(fh)
# The node written above must still look like a particle.
assert(pf.get_is(fn))
3c7b4f727f5f4a061e3e2d8bcabdc007175ab4db | Add cache structures from pycrest (as base work) | esipy/cache.py | esipy/cache.py | # -*- encoding: utf-8 -*-
import hashlib
import zlib
import os
try:
import pickle
except ImportError: # pragma: no cover
import cPickle as pickle
import logging
logger = logging.getLogger("esipy.cache")
class BaseCache(object):
    """Abstract cache interface: the three operations esipy expects
    from any cache backend, plus a shared key-hashing helper."""

    def put(self, key, value):
        """Store *value* under *key*."""
        raise NotImplementedError

    def get(self, key):
        """Return the value stored under *key*."""
        raise NotImplementedError

    def invalidate(self, key):
        """Drop *key* from the cache."""
        raise NotImplementedError

    def _hash(self, data):
        """Derive a stable string key from any picklable object.

        The 'pyc_' prefix lets several applications share one keyspace.
        """
        digest = hashlib.new('md5', pickle.dumps(data)).hexdigest()
        return 'pyc_' + digest
class FileCache(BaseCache):
    """ BaseCache implementation using files to store the data.
    This implementation is 'persistent' as data are stored on the
    disc and not only in the memory.

    Values are pickled then zlib-compressed into one file per key under
    *path*; an in-memory dict mirrors writes so repeated reads of keys
    written by this process skip the disk.
    """
    def __init__(self, path):
        # _cache is a write-through memory mirror, not a full index of
        # what is on disk (keys written by other processes miss it).
        self._cache = {}
        self.path = path
        if not os.path.isdir(self.path):
            # 0o700: cache may hold private data; owner-only access.
            os.mkdir(self.path, 0o700)

    def _getpath(self, key):
        """Filesystem path of the cache file backing *key*."""
        return os.path.join(self.path, self._hash(key) + '.cache')

    def put(self, key, value):
        with open(self._getpath(key), 'wb') as f:
            f.write(
                zlib.compress(
                    pickle.dumps(value,
                                 pickle.HIGHEST_PROTOCOL)))
        self._cache[key] = value

    def get(self, key):
        # Fast path: value already seen in memory.
        if key in self._cache:
            return self._cache[key]
        try:
            with open(self._getpath(key), 'rb') as f:
                # NOTE(review): disk hits are not copied into _cache,
                # so every read of a foreign key touches the disk.
                return pickle.loads(zlib.decompress(f.read()))
        except IOError as ex:
            logger.debug('IOError: {0}'.format(ex))
            if ex.errno == 2:  # file does not exist (yet)
                return None
            else:  # pragma: no cover
                raise

    def invalidate(self, key):
        self._cache.pop(key, None)
        try:
            os.unlink(self._getpath(key))
        except OSError as ex:
            if ex.errno == 2:  # does not exist
                pass
            else:  # pragma: no cover
                raise
class DictCache(BaseCache):
    """Volatile in-memory cache: a thin wrapper around a plain dict."""

    def __init__(self):
        self._dict = {}

    def put(self, key, value):
        self._dict[key] = value

    def get(self, key):
        # Missing keys read as None rather than raising.
        return self._dict.get(key, None)

    def invalidate(self, key):
        # Removing an absent key is a silent no-op.
        self._dict.pop(key, None)
class DummyCache(BaseCache):
    """ Base cache implementation that provide a fake cache that
    allows a "no cache" use without breaking everything.

    Every lookup misses and every write is discarded.  The original
    allocated an unused ``self._dict`` in ``__init__``; since nothing is
    ever stored, no state (and no ``__init__``) is needed at all.
    """

    def get(self, key):
        # Always a cache miss.
        return None

    def put(self, key, value):
        # Intentionally discard the value.
        pass

    def invalidate(self, key):
        # Nothing is ever stored, so nothing to drop.
        pass
class MemcachedCache(BaseCache):
    """ BaseCache implementation backed by a memcached server.

    Keys are run through BaseCache._hash so arbitrary picklable keys
    map to memcached-safe strings.
    """
    def __init__(self, memcache_client):
        """ memcache_client must be an instance of memcache.Client().
        """
        # Imported lazily so the python-memcached dependency is only
        # required when this backend is actually used.
        import memcache
        if not isinstance(memcache_client, memcache.Client):
            raise ValueError('cache must be an instance of memcache.Client')
        self._mc = memcache_client

    def get(self, key):
        # memcache returns None on a miss.
        return self._mc.get(self._hash(key))

    def put(self, key, value):
        return self._mc.set(self._hash(key), value)

    def invalidate(self, key):
        return self._mc.delete(self._hash(key))
| Python | 0 | |
492d90e1197803f2dbce0b07417d12497c9031fe | Implement away-notify | txircd/modules/ircv3/awaynotify.py | txircd/modules/ircv3/awaynotify.py | from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class AwayNotify(ModuleData):
    """IRCv3 away-notify capability module.

    Announces the capability to clients and, whenever a user's 'away'
    metadata changes, sends an AWAY line (with the message when going
    away, bare when returning) to every client that negotiated it.
    """
    implements(IPlugin, IModuleData)

    name = "AwayNotify"

    def actions(self):
        return [ ("usermetadataupdate", 10, self.sendAwayNotice),
                ("capabilitylist", 10, self.addCapability) ]

    def load(self):
        # On a reload, the capability is still registered: skip re-adding.
        if "unloading-away-notify" in self.ircd.dataCache:
            del self.ircd.dataCache["unloading-away-notify"]
            return
        if "cap-add" in self.ircd.functionCache:
            self.ircd.functionCache["cap-add"]("away-notify")

    def unload(self):
        # Mark a reload in progress so load() keeps the capability.
        self.ircd.dataCache["unloading-away-notify"] = True

    def fullUnload(self):
        del self.ircd.dataCache["unloading-away-notify"]
        if "cap-del" in self.ircd.functionCache:
            self.ircd.functionCache["cap-del"]("away-notify")

    def addCapability(self, capList):
        capList.append("away-notify")

    def sendAwayNotice(self, user, key, oldValue, value, visibility, setByUser, fromServer):
        """Broadcast the away change to every away-notify-capable client.

        The original duplicated the broadcast loop for the set/unset
        cases; only the parameter list differs, so one loop suffices.
        """
        if key != "away":
            return
        # Truthy value: user went away (include the message).
        # Falsy: user is back (bare AWAY per the away-notify spec).
        params = (value,) if value else ()
        for noticeUser in self.ircd.users.itervalues():
            if "capabilities" in noticeUser.cache and "away-notify" in noticeUser.cache["capabilities"]:
                noticeUser.sendMessage("AWAY", *params, sourceuser=user)

awayNotify = AwayNotify()
4d883c28053ff40802433d8ad8efe463e9c05638 | add tiny migration management command | cyder/management/commands/tiny_migrate.py | cyder/management/commands/tiny_migrate.py | #!/usr/bin/env python
from django.core.management.base import LabelCommand
from django.core.exceptions import ValidationError
from cyder.models import (Domain, TXT, AddressRecord, Nameserver, MX, CNAME,
SOA, PTR, SRV, StaticInterface, View)
import subprocess
"""
' txt
'fqdn:s:ttl:timestamp:lo
+ a record without ptr
+fqdn:ip:ttl:timestamp:lo
. nameserver + ar + soa
.fqdn:ip:x:ttl:timestamp:lo
: ?????
:fqdn:n:rdata:ttl:timestamp:lo
@ mx
@fqdn:ip:x:dist:ttl:timestamp:lo
C cname
Cfqdn:p:ttl:timestamp:lo
Z soa
Zfqdn:mname:rname:ser:ref:ret:exp:min:ttl:timestamp:lo
^ ptr
^fqdn:p:ttl:timestamp:lo
"""
# Fetched at import time: these rows must already exist in the database
# before this module is loaded (NOTE(review): import-time DB access).
public = View.objects.get(name="public")
private = View.objects.get(name="private")


def ensure_domain(name):
    """Create every ancestor Domain of *name* (rightmost label first)
    and return the Domain object for *name* itself."""
    parts = name.split('.')
    parts, dom = parts[:-1], parts[-1]
    while parts:
        Domain.objects.get_or_create(name=dom)
        # Prepend the next label: 'edu' -> 'oregonstate.edu' -> ...
        parts, dom = parts[:-1], ".".join([parts[-1], dom])
    dom, _ = Domain.objects.get_or_create(name=dom)
    return dom
def diglet(rdtype, target, ns='ns1.oregonstate.edu'):
    """Query *ns* for rdtype/target with dig; return the stripped
    short-form answer, or None when the answer is empty."""
    command = 'dig {0} {1} @{2} +short +norecurse'.format(rdtype, target, ns)
    answer = subprocess.check_output(command.split(' ')).strip()
    return answer if answer else None
def get_label_domain(fqdn):
    """Split *fqdn* into (label, Domain), creating the domain.

    Any record whose fqdn equals the would-be domain name conflicts with
    domain creation, so such records are temporarily renamed, the domain
    is created, and the records are re-homed under it with an empty label.
    """
    conflict_objects = [MX, StaticInterface, AddressRecord, CNAME]
    label, domain_name = tuple(fqdn.split('.', 1))
    objs = []
    for obj_type in conflict_objects:
        objs.extend(list(obj_type.objects
                         .filter(fqdn=domain_name).exclude(label="")))
    if objs:
        for obj in objs:
            # Park the record under a throwaway label so the domain
            # name is free to be created.
            obj.label = "not_a_real_label_please_delete"
            obj.save()
    domain, _ = Domain.objects.get_or_create(name=domain_name)
    for obj in objs:
        # Re-home the record: empty label directly on the new domain.
        obj.label = ""
        obj.domain = domain
        obj.save()
    objs = []
    for obj_type in conflict_objects:
        objs.extend(list(obj_type.objects
                         .filter(fqdn=domain_name).exclude(label="")))
    # Sanity check: no conflicting labeled records may remain.
    assert not objs
    ensure_domain(domain_name)
    domain, _ = Domain.objects.get_or_create(name=domain_name)
    return label, domain
def tiny2txt(fqdn, s, ttl=3600):
    """tinydns ' line -> TXT record."""
    ttl = int(ttl)
    label, domain = get_label_domain(fqdn)
    # tinydns escapes ':' inside data as octal \072; restore it.
    s = s.replace(r'\\072', ':')
    txt, _ = TXT.objects.get_or_create(label=label, domain=domain,
                                       txt_data=s, ttl=ttl)
    return txt


def tiny2ar(fqdn, ip):
    """tinydns + line -> AddressRecord (A record without PTR)."""
    label, domain = get_label_domain(fqdn)
    if AddressRecord.objects.filter(label=label, domain=domain,
                                    ip_str=ip).exists():
        print "AddressRecord %s already exists." % domain.name
        return
    ar, _ = AddressRecord.objects.get_or_create(label=label, domain=domain,
                                                ip_str=ip)
    return ar


def tiny2ns(fqdn, ip, x, ttl=86400, timestamp=None, lo=None):
    """tinydns . line -> Nameserver record.

    ip/timestamp/lo are accepted to match the tinydns field layout but
    are not used here.
    """
    ttl = int(ttl)
    domain = ensure_domain(fqdn)
    ns, _ = Nameserver.objects.get_or_create(domain=domain, server=x, ttl=ttl)
    return ns
def tiny2wut(fqdn, n, rdata, ttl=86400):
    """tinydns : line (generic record, numeric rdtype *n*).

    Only SRV (33) and AAAA (28) are supported; the actual record data is
    re-fetched live via dig rather than parsed from *rdata*.
    """
    n = int(n)
    if n == 33:
        # SRV: dig returns 'priority weight port target'.
        digged = diglet('SRV', fqdn)
        if not digged:
            print "No SRV candidate for %s" % fqdn
            return
        priority, weight, port, target = digged.split(' ')
        target = target.rstrip('.')
        label, domain = get_label_domain(fqdn)
        try:
            srv, _ = SRV.objects.get_or_create(label=label, domain=domain,
                                               target=target, port=port,
                                               priority=priority, ttl=ttl,
                                               weight=weight)
            return srv
        except ValidationError, e:
            print "INVALID: %s for SRV %s" % (e, fqdn)
    elif n == 28:
        # AAAA: address record with ip_type '6'.
        digged = diglet('AAAA', fqdn)
        label, domain = get_label_domain(fqdn)
        if AddressRecord.objects.filter(label=label, domain=domain,
                                        ip_str=digged).exists():
            print "AddressRecord %s already exists." % domain.name
            return
        ar, _ = AddressRecord.objects.get_or_create(label=label, domain=domain,
                                                    ip_str=digged, ttl=ttl,
                                                    ip_type='6')
        return ar
    else:
        raise Exception("Unknown rdtype %s for %s" % (n, fqdn))
def tiny2mx(fqdn, ip, x, dist=5, ttl=600):
    """tinydns @ line -> MX record (ip is part of the format, unused)."""
    dist = int(dist)
    ttl = int(ttl)
    domain = ensure_domain(fqdn)
    existing = MX.objects.filter(label="", domain=domain,
                                 server=x, priority=dist)
    if existing.exists():
        return
    mx = MX(label="", domain=domain, server=x, priority=dist, ttl=ttl)
    mx.save()
    return mx


def tiny2cname(fqdn, p):
    """tinydns C line -> CNAME pointing at *p*."""
    label, domain = get_label_domain(fqdn)
    cname, _ = CNAME.objects.get_or_create(label=label, domain=domain,
                                           target=p)
    return cname


def tiny2soa(fqdn, mname, rname, ser, ref=300, ret=900, exp=604800, _min=86400,
             ttl=86400, timestamp=None, lo=None):
    """tinydns Z line -> SOA (ser/timestamp/lo unused; Cyder manages
    serials itself)."""
    domain = ensure_domain(fqdn)
    if SOA.objects.filter(root_domain=domain).exists():
        print "SOA %s already exists." % domain.name
        return
    soa, _ = SOA.objects.get_or_create(root_domain=domain, primary=mname,
                                       contact=rname, retry=ret, refresh=ref,
                                       expire=exp, minimum=_min, ttl=ttl)
    return soa


def tiny2ptr(fqdn, p, ttl=3600):
    """tinydns ^ line -> PTR record(s).

    The address is resolved live (both A and AAAA are tried); created
    PTRs get both views immediately.  Always returns None.
    """
    label, domain = get_label_domain(p)
    for rdtype in ['A', 'AAAA']:
        ip_type = '6' if rdtype == 'AAAA' else '4'
        ip_str = diglet(rdtype, p)
        if ip_str:
            try:
                ptr, _ = PTR.objects.get_or_create(label=label, domain=domain,
                                                   ip_str=ip_str,
                                                   ip_type=ip_type)
                ptr.views.add(public)
                ptr.views.add(private)
            except ValidationError, e:
                print "INVALID: %s for PTR %s" % (e, p)
        else:
            print "No %s candidate for PTR %s" % (rdtype, p)
class Command(LabelCommand):
    """Management command: import a tinydns data file into Cyder models.

    Each line's first character selects the record type; the remainder
    is a colon-separated field list passed to the matching tiny2*
    converter.
    """

    def handle_label(self, label, **options):
        """Process the tinydns data file named by *label* line by line."""
        tinyprefixes = {"'": tiny2txt,
                        "+": tiny2ar,
                        ".": tiny2ns,
                        ":": tiny2wut,
                        "@": tiny2mx,
                        "C": tiny2cname,
                        "Z": tiny2soa,
                        "^": tiny2ptr}
        # 'with' guarantees the handle is closed even when a converter
        # raises; the original leaked it from a bare open().
        with open(label) as tiny_file:
            for line in tiny_file:
                line = line.strip()
                # Skip blank lines and tinydns comments.
                if not line or line[0] == '#':
                    continue
                rdtype, line = line[0], line[1:]
                if rdtype not in tinyprefixes:
                    raise Exception("Unknown prefix: %s" % rdtype)
                tiny2cyder = tinyprefixes[rdtype]
                obj = tiny2cyder(*line.split(':'))
                # View-aware records are made visible in both views.
                if obj and hasattr(obj, 'views'):
                    obj.views.add(public)
                    obj.views.add(private)
| Python | 0.000001 | |
8fc45acd2b0447988e6dd18cc6de75c4316d60e5 | Create wally_test.py | run_tests/shaker_run/wally_test.py | run_tests/shaker_run/wally_test.py | import ConfigParser
import base64
import json
import urllib2
# Testrail API
class APIClient:
def __init__(self, base_url):
self.user = ''
self.password = ''
if not base_url.endswith('/'):
base_url += '/'
self.__url = base_url + 'index.php?/api/v2/'
def send_get(self, uri):
return self.__send_request('GET', uri, None)
def send_post(self, uri, data):
return self.__send_request('POST', uri, data)
def __send_request(self, method, uri, data):
url = self.__url + uri
request = urllib2.Request(url)
if (method == 'POST'):
request.add_data(json.dumps(data))
auth = base64.b64encode('%s:%s' % (self.user, self.password))
request.add_header('Authorization', 'Basic %s' % auth)
request.add_header('Content-Type', 'application/json')
e = None
try:
response = urllib2.urlopen(request).read()
except urllib2.HTTPError as e:
response = e.read()
if response:
result = json.loads(response)
else:
result = {}
if e != None:
if result and 'error' in result:
error = '"' + result['error'] + '"'
else:
error = 'No additional error message received'
raise APIError('TestRail API returned HTTP %s (%s)' %
(e.code, error))
return result
class APIError(Exception):
pass
client = APIClient('https://mirantis.testrail.com/')
client.user = 'sgudz@mirantis.com'
client.password = 'qwertY123'
run_id = 18466
root@node-4:~#
root@node-4:~#
root@node-4:~#
root@node-4:~# python wally.py
Name Random direct performance, 4 KiB blocks; Read, IOPS, id 11643665
root@node-4:~# vim wally.py
import ConfigParser
import base64
import json
import urllib2
# Testrail API
class APIClient:
    """Minimal TestRail v2 API client (Python 2 / urllib2).

    Set .user and .password before issuing requests; they are sent as
    HTTP Basic auth on every call.
    """
    def __init__(self, base_url):
        self.user = ''
        self.password = ''
        if not base_url.endswith('/'):
            base_url += '/'
        # All endpoints hang off TestRail's index.php API entry point.
        self.__url = base_url + 'index.php?/api/v2/'

    def send_get(self, uri):
        return self.__send_request('GET', uri, None)

    def send_post(self, uri, data):
        return self.__send_request('POST', uri, data)

    def __send_request(self, method, uri, data):
        url = self.__url + uri
        request = urllib2.Request(url)
        if (method == 'POST'):
            request.add_data(json.dumps(data))
        auth = base64.b64encode('%s:%s' % (self.user, self.password))
        request.add_header('Authorization', 'Basic %s' % auth)
        request.add_header('Content-Type', 'application/json')
        e = None
        try:
            response = urllib2.urlopen(request).read()
        except urllib2.HTTPError as e:
            # Error responses still carry a JSON body worth parsing.
            response = e.read()
        if response:
            result = json.loads(response)
        else:
            result = {}
        # NOTE(review): referencing 'e' after the except block only works
        # on Python 2; Python 3 unbinds the exception variable.
        if e != None:
            if result and 'error' in result:
                error = '"' + result['error'] + '"'
            else:
                error = 'No additional error message received'
            raise APIError('TestRail API returned HTTP %s (%s)' %
                (e.code, error))
        return result
class APIError(Exception):
    """Raised when the TestRail API returns a non-2xx HTTP response."""
    pass
# SECURITY: real credentials are hard-coded in source here; they should
# be moved to environment variables or a config file and rotated.
client = APIClient('https://mirantis.testrail.com/')
client.user = 'sgudz@mirantis.com'
client.password = 'qwertY123'
# Test run whose tests are queried below.
run_id = 18466
def get_tests_ids():
    """Return {test title: test id} for every test in run *run_id*.

    The original also accumulated a separate list of ids that was never
    used or returned; that dead work is dropped.
    """
    tests = client.send_get('get_tests/{}'.format(run_id))
    return dict((item['title'], item['id']) for item in tests)
list_t = get_tests_ids()
for item in list_t.keys():
if "4 KiB blocks; Read" in item:
test_4kib_read = list_t[item]
elif "4 KiB blocks; Write" in item:
test_4kib_write = list_t[item]
elif "16MiB blocks; Read":
test_16mib_read = list_t[item]
elif "16MiB blocks; Write":
test_16mib_write = list_t[item]
elif "latency 10ms" in item:
test_latency_10ms = list_t[item]
elif "latency 30ms" in item:
test_latency_30ms = list_t[item]
elif "latency 100ms" in item:
test_latency_100ms = list_t[item]
print test_4kib_read, test_4kib_write, test_16mib_read, test_16mib_write, test_latency_10ms, test_latency_30ms, test_latency_100ms
| Python | 0.000055 | |
f230b3bbc4946527dd6776ad9e8cf60831da4256 | cache module | brocade/cache.py | brocade/cache.py | # -*- coding: utf-8 -*-
""" KVSキャッシュモジュール """
class BaseCache(object):
    """ Abstract base class for KVS cache backends. """
    def get(self, key, default = None):
        """ Fetch a value.
        @param key: key
        @param default: value returned when the key cannot be fetched
        @return: the fetched value
        """
        raise NotImplementedError()
    def set(self, key, value):
        """ Store a value.
        @param key: key
        @param value: value
        @return: this cache object (for chaining)
        """
        raise NotImplementedError()
    def delete(self, key):
        """ Remove a key.
        @param key: key
        @return: this cache object (for chaining)
        """
        raise NotImplementedError()
class DictCache(BaseCache):
    """In-memory cache backed by a plain dict.

    Contents disappear with the object, but with no disk or IPC involved
    every operation is extremely fast.
    """
    def __init__(self):
        self.__store = {}

    def get(self, key, default = None):
        return self.__store.get(key, default)

    def set(self, key, value):
        self.__store[key] = value
        return self

    def delete(self, key):
        # pop with a default makes deleting an absent key a no-op.
        self.__store.pop(key, None)
        return self
class ChainCache(BaseCache):
    """Chain of several cache objects consulted in order.

    Useful when stores of different speed (memory / disk / network)
    should be tried fastest-first.
    """
    def __init__(self, *cache_list):
        """@param cache_list: caches; earlier ones take priority in get()"""
        self.__caches = cache_list

    def get(self, key, default = None):
        """Return the first hit, backfilling every cache that missed."""
        for position, cache in enumerate(self.__caches):
            value = cache.get(key, default)
            if value != default:
                # Propagate the hit into the faster caches that missed.
                for missed in self.__caches[:position]:
                    missed.set(key, value)
                return value
        return default

    def set(self, key, value):
        """Store the value in every chained cache."""
        for cache in self.__caches:
            cache.set(key, value)
        return self

    def delete(self, key):
        """Remove the key from every chained cache."""
        for cache in self.__caches:
            cache.delete(key)
        return self
def test():
    """ Self-test for DictCache and ChainCache. """
    # DictCache: basic set/get and default-on-miss.
    dict_cache1 = DictCache()
    dict_cache1.set("a", 1)
    assert dict_cache1.get("a") == 1
    assert dict_cache1.get("b") == None
    # ChainCache: a hit in the second cache is backfilled into the first.
    dict_cache0 = DictCache()
    dict_cache0.set("b", 2)
    chain_cache = ChainCache(dict_cache0, dict_cache1)
    assert dict_cache0.get("a") == None
    assert chain_cache.get("a") == 1
    assert dict_cache0.get("a") == 1
    assert chain_cache.get("b") == 2
    # Backfill only runs for caches *before* the hit, so cache1 stays empty.
    assert dict_cache1.get("b") == None
    print("OK")

if __name__ == "__main__":
    test()
| Python | 0.999508 | |
a6ca9fdb71eacffe94fad476712650f82870bb2e | Add base code for choosing solver | pyoommf/sim.py | pyoommf/sim.py | import os
from drivers.llg import LLG
import oommfmif as o
class Sim(object):
    """Assembles an OOMMF micromagnetic simulation and runs it by writing
    a MIF file and invoking OOMMF's boxsi on it."""

    # Evolvers OOMMF accepts; see the Oxs_RungeKuttaEvolve documentation.
    VALID_SOLVERS = ('rk2', 'rk2heun', 'rk4', 'rkf54', 'rkf54m', 'rkf54s')

    def __init__(self, mesh, Ms, name=None):
        self.mesh = mesh
        self.Ms = Ms
        self.name = name
        self.gamma = 2.21e5
        # Default evolver; override via set_solver().
        self.solver = 'rk4'
        self.energies = []
        self.N_Sims_Run = 0
        # Want some kind of persistent 'knowledge' of number of runs
        # and the sequence these occur in for data analysis
        # when we call a simulation multiple times to either
        # advance time or change parameters. Will need to think carefully
        # about situations such as changing H_applied - should we recreate this
        # data from the output files?
        # Advantage of this is recreating sim object if needed.

    def add(self, energy):
        """Register an energy term; its MIF snippet is emitted later."""
        self.energies.append(energy)

    def set_solver(self, solver='rk4'):
        """
        Select the time-integration solver OOMMF should use.

        Available solvers in OOMMF:
        rk2, rk2heun, rk4, rkf54, rkf54m, rkf54s

        The original version accepted the argument but silently discarded
        it; the choice is now validated and stored on self.solver.
        """
        if solver not in self.VALID_SOLVERS:
            raise ValueError("Unknown solver '%s'; expected one of: %s"
                             % (solver, ', '.join(self.VALID_SOLVERS)))
        self.solver = solver

    def set_m(self, m_init):
        """Set the initial magnetisation configuration."""
        self.m_init = m_init

    def create_mif(self, overwrite=True):
        """Write the MIF file for the current state.

        NOTE(review): the 'overwrite' flag is accepted but not consulted;
        an existing file is always overwritten -- confirm intent.
        """
        if self.name is None:
            self.name = 'unnamed'
        self.mif_filename = self.name + '_iteration' + \
            str(self.N_Sims_Run) + '.mif'
        if os.path.isfile(self.mif_filename):
            print("DEBUG: This simulation name already exists.")
            print("DEBUG: Overwriting MIF.")
        # Context manager closes the file even if a section writer raises.
        with open(self.mif_filename, 'w') as mif_file:
            mif_file.write('# MIF 2.1\n\n')
            mif_file.write(self.mesh.atlas_mif())
            mif_file.write(self.mesh.mesh_mif())
            for energy in self.energies:
                mif_file.write(energy.get_mif())
            mif_file.write(self.llg.get_mif())
            mif_file.write('Destination mags mmArchive\n\n')
            mif_file.write(
                'Schedule Oxs_TimeDriver::Magnetization mags Stage 1\n\n')

    def run_until(self, t, alpha=0.1, gamma=2.21e5):
        """Integrate the LLG equation up to time t, then run OOMMF."""
        self.llg = LLG(t, self.m_init, self.Ms, alpha, gamma, name=self.name)
        self.create_mif()
        self.execute_mif()

    def execute_mif(self):
        """Run OOMMF's boxsi on the generated MIF file and wait for it."""
        path = o.retrieve_oommf_path()
        executable = o.retrieve_oommf_executable(path)
        process = o.call_oommf('boxsi ' + self.mif_filename)
        process.wait()
| import os
from drivers.llg import LLG
import oommfmif as o
class Sim(object):
def __init__(self, mesh, Ms, name=None):
self.mesh = mesh
self.Ms = Ms
self.name = name
self.gamma = 2.21e5
self.energies = []
self.N_Sims_Run = 0
# Want some kind of persistent 'knowledge' of number of runs
# and the sequence these occur in for data analysis
# when we call a simulation multiple times to either
# advance time or change parameters. Will need to think carefully
# about situations such as changing H_applied - should we recreate this
# data from the output files?
# Advantage of this is recreating sim object if needed.
def add(self, energy):
self.energies.append(energy)
def set_m(self, m_init):
self.m_init = m_init
def create_mif(self, overwrite=True):
if self.name is None:
self.name = 'unnamed'
self.mif_filename = self.name + '_iteration' + \
str(self.N_Sims_Run) + '.mif'
if os.path.isfile(self.mif_filename):
print("DEBUG: This simulation name already exists.")
print("DEBUG: Overwriting MIF.")
mif_file = open(self.mif_filename, 'w')
mif_file.write('# MIF 2.1\n\n')
mif_file.write(self.mesh.atlas_mif())
mif_file.write(self.mesh.mesh_mif())
for energy in self.energies:
mif_file.write(energy.get_mif())
mif_file.write(self.llg.get_mif())
mif_file.write('Destination mags mmArchive\n\n')
mif_file.write(
'Schedule Oxs_TimeDriver::Magnetization mags Stage 1\n\n')
mif_file.close()
def run_until(self, t, alpha=0.1, gamma=2.21e5):
self.llg = LLG(t, self.m_init, self.Ms, alpha, gamma, name=self.name)
self.create_mif()
self.execute_mif()
def execute_mif(self):
path = o.retrieve_oommf_path()
executable = o.retrieve_oommf_executable(path)
process = o.call_oommf('boxsi ' + self.mif_filename)
process.wait()
| Python | 0.000001 |
cc78dc401b16ff189b86466e3c0cb4609a72af0d | add tester | batch/depute/test.py | batch/depute/test.py | #!/usr/bin/env python
import os, sys, json
# CLI: test.py [field [split [splitval]]]
# field: JSON key to tally (default "all" = tally key presence);
# a second argument (its value is ignored) turns on splitting of
# ' / '-separated values; a third selects one split position.
split = False
splitval = False
if len(sys.argv) > 1:
    field = sys.argv[1]
    if len(sys.argv) > 2:
        split = True
        if len(sys.argv) > 3:
            splitval = int(sys.argv[3])
else:
    field = "all"

# value -> occurrence count across all parsed files
values = {}
def add_value(val):
    """Tally *val*; in split mode, recurse on each ' / ' component
    (optionally only the one at index splitval)."""
    if split and ' / ' in val:
        for i,v in enumerate(val.split(' / ')):
            # splitval is False (not an int) unless given on the CLI.
            if type(splitval) != int or splitval == i:
                add_value(v)
        return
    if val not in values:
        values[val] = 0
    values[val] += 1
# ids of records where the requested field is absent or blank
MISSING = []
for dep in os.listdir('out'):
    with open(os.path.join('out', dep)) as f:
        data = json.load(f)
    if field == "all":
        # Tally which keys are populated (non-empty, not the [""] marker).
        for k in data:
            if data[k] and (type(data[k]) != list or data[k] != [""]):
                add_value(k)
        continue
    if field in data:
        if type(data[field]) == list:
            # [""] is this dataset's "empty list" marker.
            if data[field] == [""]:
                MISSING.append(data["id_institution"])
            for i in data[field]:
                if i:
                    add_value(i)
        else: add_value(data[field])
    else: MISSING.append(data["id_institution"])

miss = len(MISSING)
# NOTE(review): max() raises ValueError if no value was ever tallied --
# assumes 'out' contains at least one matching record.
if miss <= 3 and max(values.values()) == 1:
    print "ALL UNIQUE FIELDS (", len(values), ")"
    sys.exit(0)
if miss > 3:
    print miss, "MISSING:", MISSING
# Print values most-frequent first.
order = sorted(values, key=lambda x: values[x])
order.reverse()
for k in order:
    print k.encode('utf-8'), ":", values[k]
| Python | 0.000001 | |
b674a3e4de86728139e97bb02fa90a62a7700c31 | add speech processing test | testing/miniBrain.py | testing/miniBrain.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
import sys
import time
from os.path import dirname, abspath
sys.path.append(dirname(dirname(abspath(__file__))))
reload(sys)
sys.setdefaultencoding('utf-8')
from EmeraldAI.Pipelines.InputProcessing.ProcessInput import ProcessInput
from EmeraldAI.Pipelines.ScopeAnalyzer.AnalyzeScope import AnalyzeScope
from EmeraldAI.Pipelines.ResponseProcessing.ProcessResponse import ProcessResponse
from EmeraldAI.Pipelines.TextToSpeech.TTS import TTS
from EmeraldAI.Pipelines.Trainer.Trainer import Trainer
from EmeraldAI.Entities.User import User
from EmeraldAI.Entities.Context import Context
from EmeraldAI.Entities.PipelineArgs import PipelineArgs
from EmeraldAI.Config.Config import *
from EmeraldAI.Logic.Audio.SoundMixer import *
def ProcessSpeech(data):
    """Run one utterance through the full EmeraldAI pipeline:
    input processing -> scope analysis -> response -> TTS -> trainer.

    A stopword cancels any playing audio and suppresses TTS for this
    utterance, but the pipeline still runs (so the trainer sees it).
    Blocks until audio playback finishes.
    """
    print "ProcessSpeech - Go"
    cancelSpeech = False
    stopwordList = Config().GetList("Bot", "StoppwordList")
    if(data in stopwordList):
        cancelSpeech = True
        SoundMixer().Stop()
    print "ProcessSpeech - No Stopword"
    pipelineArgs = PipelineArgs(data)
    print "ProcessSpeech - Pipeline Args Created"
    pipelineArgs = ProcessInput().ProcessAsync(pipelineArgs)
    print "ProcessSpeech - Process Async completed"
    pipelineArgs = AnalyzeScope().Process(pipelineArgs)
    print "ProcessSpeech - Scope analyzed"
    pipelineArgs = ProcessResponse().Process(pipelineArgs)
    print "ProcessSpeech - Response processed"
    if(not cancelSpeech):
        if(pipelineArgs.Animation != None):
            # NOTE(review): animation is only logged, never triggered.
            print "There should have been an animation", pipelineArgs.Animation
        pipelineArgs = TTS().Process(pipelineArgs)
        print "ProcessSpeech - TTS Triggered"
    trainerResult = Trainer().Process(pipelineArgs)
    print "ProcessSpeech - Trainer Done"
    Context().History.append(pipelineArgs)
    # Debug dump of the whole interaction.
    print "Pipeline Args", pipelineArgs.toJSON()
    print "Main User", User().toJSON()
    print "Trainer Result: ", trainerResult
    print "Input: ", data
    print "Response: ", pipelineArgs.Response
    # Wait for TTS playback to finish before returning.
    while SoundMixer().IsPlaying():
        time.sleep(1)
print "Set user..."
User().SetUserByCVTag("Max")
print "Start Speech processing"
ProcessSpeech("Warmup")
#ProcessSpeech("Guten Abend")
#ProcessSpeech("Wer ist Angela Merkel")
ProcessSpeech("Wieviel ist 432 plus 68")
ProcessSpeech("Wieviel ist 4 + 32 / 6")
#ProcessSpeech("Bist du ein Mensch")
#ProcessSpeech("TRIGGER_FACEAPP_OFF")
#ProcessSpeech("Was ist eine Süßkartoffel")
exit()
ProcessSpeech("xxx")
ProcessSpeech("xxx")
ProcessSpeech("xxx")
| Python | 0.000001 | |
0942d6bcc1d15b16b4d3170a1574fc5218b2c53b | add python_tips.py | python_tips.py | python_tips.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import argparse
import subprocess
# Prefer the print() function over the Python 2 print statement.
print('Hello World')
# Do not use the statement form below.  It is shown as a comment because
# a bare `print 'Hello World'` is a SyntaxError under Python 3 and made
# this whole tips module unimportable there:
# print 'Hello World'
def subprocess_test():
    """Demonstrate child-process APIs: os.popen vs the subprocess module."""
    # Capture child-process output; prefer the subprocess module over os.popen.
    text = os.popen('echo 123').read()
    print(text)
    # subprocess
    # call() blocks until the child process finishes and returns its exit code.
    subprocess.call(['ls', '-l'])
    return_code = subprocess.call('echo subprocess.call', shell=True)
    print(return_code)
    # Popen() does not wait; call wait() explicitly to reap the child.
    child = subprocess.Popen(['ls', '-l'])
    child.wait()
    '''
    child.poll() # 检查子进程状态
    child.kill() # 终止子进程
    child.send_signal() # 向子进程发送信号
    child.terminate() # 终止子进程
    '''
    print('parent process')
    # communicate
    # Pipe child1's stdout into child2's stdin; avoids shell=True pipelines.
    child1 = subprocess.Popen(['ls', '-l'], stdout=subprocess.PIPE)
    child2 = subprocess.Popen(['wc'], stdin=child1.stdout, stdout=subprocess.PIPE)
    out = child2.communicate()
    print('wc:', out)
def os_file_demo():
    """Demonstrate directory listing and path-splitting helpers."""
    # Print each entry of the current directory as a joined path.
    base_dir = '.'
    for entry in os.listdir(base_dir):
        print(os.path.join(base_dir, entry))
    # Split a file name into (stem, extension).
    print(os.path.splitext(os.path.basename('/a/b/c.txt')))
def main():
    """Run the subprocess demos, then the os/path demos."""
    subprocess_test()
    os_file_demo()
if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='desc')
    # Positional arguments (currently disabled):
    # parser.add_argument('wan', help='eth0')
    # parser.add_argument('lan', help='wlan0')
    # Optional flags:
    parser.add_argument('-v', dest='version', default=None, help='version')
    parser.add_argument('-u', dest='uin', default=None, help='uin')
    args = parser.parse_args()
    #print(args.wan, args.lan, args.version, args.uin)
    main()
| Python | 0.000419 | |
842c796a223ee9cb78c69ccb59416a2afe0fcee0 | Add tests for Permission class. | tests/permissions.py | tests/permissions.py | import unittest
from permission import Permission, PERMISSION_DELIMITER
class BasicPermissionTests(unittest.TestCase):
    """Unit tests for Permission equality, grant checks and segmenting."""

    def setUp(self):
        # Two distinct permissions, a bare one-segment permission, and a
        # duplicate of the first to exercise value-equality semantics.
        self.p1 = Permission("test{0}1{0}hello".format(PERMISSION_DELIMITER))
        self.p2 = Permission("test{0}2{0}hello".format(PERMISSION_DELIMITER))
        self.p3 = Permission("test")
        self.p4 = Permission("test{0}1{0}hello".format(PERMISSION_DELIMITER))
        self.ps1 = {self.p1, self.p2}
        self.ps2 = {self.p1, self.p4}
        self.ps3 = {self.p1}

    def test_equal(self):
        # Equality is by value; equal permissions collapse inside sets.
        self.assertEqual(self.p1, self.p4)
        self.assertNotEqual(self.p1, self.p2)
        self.assertNotEqual(self.p1, self.p3)
        self.assertEqual(self.ps2, self.ps3)

    def test_grants_permission(self):
        # A permission grants itself and its duplicates, nothing broader
        # or narrower.
        self.assertTrue(self.p1.grants_permission(self.p1))
        self.assertTrue(self.p1.grants_permission(self.p4))
        self.assertFalse(self.p1.grants_permission(self.p2))
        self.assertFalse(self.p1.grants_permission(self.p3))
        self.assertFalse(self.p3.grants_permission(self.p1))

    def test_grants_any_permission(self):
        # Membership in a set of granted permissions.
        self.assertTrue(self.p1.grants_any_permission(self.ps1))
        self.assertTrue(self.p2.grants_any_permission(self.ps1))
        self.assertFalse(self.p3.grants_any_permission(self.ps1))
        self.assertTrue(self.p4.grants_any_permission(self.ps1))

    def test_segments(self):
        # Segments are the delimiter-split parts of the permission string.
        self.assertEqual(self.p1.segments, ["test", "1", "hello"])
        self.assertEqual(self.p2.segments, ["test", "2", "hello"])
        self.assertEqual(self.p3.segments, ["test"])
        self.assertEqual(self.p1.segments, self.p4.segments)
if __name__ == "__main__":
    # Allow running this test module directly.
    unittest.main()
| Python | 0 | |
dbd453155797829bc1acc472a8939f1dba15f37c | Add very basic tests | tests/spoke-tests.py | tests/spoke-tests.py | import spoke
import unittest2
from datetime import datetime
# Shared fixture values used by the tests below.
CUSTOMER_NAME = 'abc123'
CUSTOMER_KEY = 'abc123'
FAUX_ADDRESS = '123 Fake St'
FAUX_CITY = 'Funkytown'
FAUX_FIRST_NAME = 'Xavier'
FAUX_LAST_NAME = 'Ample'
FAUXN_NUMBER = '555 555 5555'
FAUX_ZIP = '12345'
FAUX_STATE = 'IL'
class SpokeTests(unittest2.TestCase):
    """Validation tests for the Spoke client constructor, new() and update().

    Each *_required_fields test drops one required key at a time and expects
    a ValidationError; *_optional_fields tests must not raise; *_extra_fields
    tests expect unknown keys to be rejected.
    """

    def test_constructor_required_fields(self):
        """Dropping any required constructor field raises ValidationError."""
        params = dict(
            Customer = CUSTOMER_NAME,
            Key = CUSTOMER_KEY,
            production = False,
        )
        for k in params.iterkeys():
            copy = params.copy()
            del copy[k]
            self.assertRaises(spoke.ValidationError, spoke.Spoke, **copy)

    def test_constructor_optional_fields(self):
        """Optional fields (Logo) are accepted by the constructor."""
        spoke.Spoke(
            Customer = CUSTOMER_NAME,
            Key = CUSTOMER_KEY,
            production = False,
            Logo = spoke.Image(
                ImageType = 'jpg',
                Url = 'file:///tmp/test.jpg',
            ),
        )

    def test_constructor_extra_fields(self):
        """Unknown constructor fields are rejected."""
        self.assertRaises(spoke.ValidationError, spoke.Spoke,
            Customer = CUSTOMER_NAME,
            Key = CUSTOMER_KEY,
            production = False,
            Extra = 17,
        )

    def test_new_required_fields(self):
        """Dropping any required order field raises ValidationError."""
        sp = spoke.Spoke(
            Customer = CUSTOMER_NAME,
            Key = CUSTOMER_KEY,
            production = False,
        )
        params = dict(
            Cases = [dict(
                CaseId = 1234,
                CaseType = 'iph4tough',
                PrintImage = dict(
                    ImageType = 'jpg',
                    Url = 'http://threadless.com/nothing.jpg',
                ),
                Quantity = 1,
            )],
            OrderId = 2,
            OrderInfo = dict(
                Address1 = FAUX_ADDRESS,
                City = FAUX_CITY,
                CountryCode = 'US',
                FirstName = FAUX_FIRST_NAME,
                LastName = FAUX_LAST_NAME,
                OrderDate = datetime.now(),
                PhoneNumber = FAUXN_NUMBER,
                PostalCode = FAUX_ZIP,
                State = FAUX_STATE,
            ),
            ShippingMethod = 'FirstClass',
        )
        for k in params.iterkeys():
            copy = params.copy()
            del copy[k]
            # NOTE(review): this asserts on spoke.Spoke, not sp.new -- the
            # constructor would reject these order params regardless of which
            # key is dropped. Looks like a copy-paste slip; confirm intent.
            self.assertRaises(spoke.ValidationError, spoke.Spoke, **copy)

    def test_new_optional_fields(self):
        """new() accepts optional PackSlip and Comments fields."""
        sp = spoke.Spoke(
            Customer = CUSTOMER_NAME,
            Key = CUSTOMER_KEY,
            production = False,
        )
        sp.new(
            Cases = [dict(
                CaseId = 1234,
                CaseType = 'iph4tough',
                PrintImage = dict(
                    ImageType = 'jpg',
                    Url = 'http://threadless.com/nothing.jpg',
                ),
                Quantity = 1,
            )],
            OrderId = 2,
            OrderInfo = dict(
                Address1 = FAUX_ADDRESS,
                City = FAUX_CITY,
                CountryCode = 'US',
                FirstName = FAUX_FIRST_NAME,
                LastName = FAUX_LAST_NAME,
                OrderDate = datetime.now(),
                PhoneNumber = FAUXN_NUMBER,
                PostalCode = FAUX_ZIP,
                State = FAUX_STATE,
            ),
            ShippingMethod = 'FirstClass',
            PackSlip = spoke.Image(
                ImageType = 'jpg',
                Url = 'file:///tmp/nothing.jpg',
            ),
            Comments = [dict(
                Type = 'Printer',
                CommentText = 'testing',
            )]
        )

    def test_new_extra_fields(self):
        """new() rejects unknown fields."""
        sp = spoke.Spoke(
            Customer = CUSTOMER_NAME,
            Key = CUSTOMER_KEY,
            production = False,
        )
        self.assertRaises(spoke.ValidationError, sp.new,
            Cases = [dict(
                CaseId = 1234,
                CaseType = 'iph4tough',
                PrintImage = dict(
                    ImageType = 'jpg',
                    Url = 'http://threadless.com/nothing.jpg',
                ),
                Quantity = 1,
            )],
            OrderId = 2,
            OrderInfo = dict(
                Address1 = FAUX_ADDRESS,
                City = FAUX_CITY,
                CountryCode = 'US',
                FirstName = FAUX_FIRST_NAME,
                LastName = FAUX_LAST_NAME,
                OrderDate = datetime.now(),
                PhoneNumber = FAUXN_NUMBER,
                PostalCode = FAUX_ZIP,
                State = FAUX_STATE,
            ),
            ShippingMethod = 'FirstClass',
            PackSlip = spoke.Image(
                ImageType = 'jpg',
                Url = 'file:///tmp/nothing.jpg',
            ),
            Comments = [dict(
                Type = 'Printer',
                CommentText = 'testing',
            )],
            Extra = 'foo',
        )

    def test_update_required_fields(self):
        """Dropping any required update field raises ValidationError."""
        sp = spoke.Spoke(
            Customer = CUSTOMER_NAME,
            Key = CUSTOMER_KEY,
            production = False,
        )
        params = dict(
            OrderId = 1,
            OrderInfo = dict(
                Address1 = FAUX_ADDRESS,
                City = FAUX_CITY,
                CountryCode = 'US',
                FirstName = FAUX_FIRST_NAME,
                LastName = FAUX_LAST_NAME,
                OrderDate = datetime.now(),
                PhoneNumber = FAUXN_NUMBER,
                PostalCode = FAUX_ZIP,
                State = FAUX_STATE,
            ),
        )
        for k in params.iterkeys():
            copy = params.copy()
            del copy[k]
            self.assertRaises(spoke.ValidationError, sp.update, **copy)

    def test_update_extra_fields(self):
        """update() rejects unknown fields."""
        sp = spoke.Spoke(
            Customer = CUSTOMER_NAME,
            Key = CUSTOMER_KEY,
            production = False,
        )
        self.assertRaises(spoke.ValidationError, sp.update,
            OrderId = 1,
            OrderInfo = dict(
                Address1 = FAUX_ADDRESS,
                City = FAUX_CITY,
                CountryCode = 'US',
                FirstName = FAUX_FIRST_NAME,
                LastName = FAUX_LAST_NAME,
                OrderDate = datetime.now(),
                PhoneNumber = FAUXN_NUMBER,
                PostalCode = FAUX_ZIP,
                State = FAUX_STATE,
            ),
            Extra = 'foo',
        )
| Python | 0 | |
3bb78837f26e1a16ba5261cf14b0b76f55e67a1c | Add test cases for columns | tests/test_column.py | tests/test_column.py | import cygroonga as grn
import os
import shutil
import tempfile
def test_create_column():
    """A persistent scalar column is created with the expected name and path."""
    work_dir = tempfile.mkdtemp()
    try:
        with grn.Groonga():
            with grn.Context() as ctx:
                db_path = os.path.join(work_dir, "test.db")
                ctx.create_database(db_path)
                table1_name = "Table1"
                table1 = ctx.create_table(table1_name,
                                          grn.OBJ_TABLE_HASH_KEY | grn.OBJ_PERSISTENT,
                                          ctx.at(grn.DB_SHORT_TEXT))
                column1_name = "column1"
                column1_path = db_path + "." + column1_name
                column1 = table1.create_column(column1_name,
                                               grn.OBJ_PERSISTENT | grn.OBJ_COLUMN_SCALAR,
                                               ctx.at(grn.DB_TEXT),
                                               path=column1_path)
                # Column names are qualified by table; the path must exist
                # on disk for a persistent column.
                assert column1.name() == table1_name + "." + column1_name
                assert column1.path() == column1_path
                assert os.path.isfile(column1_path)
    finally:
        # Always clean up the scratch database directory.
        shutil.rmtree(work_dir)
def test_open_column():
    """A closed persistent column can be reopened and keeps its identity."""
    work_dir = tempfile.mkdtemp()
    try:
        with grn.Groonga():
            with grn.Context() as ctx:
                db_path = os.path.join(work_dir, "test.db")
                ctx.create_database(db_path)
                table1_name = "Table1"
                table1 = ctx.create_table(table1_name,
                                          grn.OBJ_TABLE_HASH_KEY | grn.OBJ_PERSISTENT,
                                          ctx.at(grn.DB_SHORT_TEXT))
                column1_name = "column1"
                column1_path = db_path + "." + column1_name
                column1 = table1.create_column(column1_name,
                                               grn.OBJ_PERSISTENT | grn.OBJ_COLUMN_SCALAR,
                                               ctx.at(grn.DB_TEXT),
                                               path=column1_path)
                assert column1.name() == table1_name + "." + column1_name
                assert column1.path() == column1_path
                assert os.path.isfile(column1_path)
                # Closing releases the handle but keeps the file on disk.
                column1.close()
                assert os.path.isfile(column1_path)
                # Reopening by name yields the same qualified name and path.
                column1 = table1.column(column1_name)
                assert column1.name() == table1_name + "." + column1_name
                assert column1.path() == column1_path
                assert os.path.isfile(column1_path)
    finally:
        shutil.rmtree(work_dir)
def test_remove_column():
    """Removing a persistent column deletes its backing file."""
    work_dir = tempfile.mkdtemp()
    try:
        with grn.Groonga():
            with grn.Context() as ctx:
                db_path = os.path.join(work_dir, "test.db")
                ctx.create_database(db_path)
                table1_name = "Table1"
                table1 = ctx.create_table(table1_name,
                                          grn.OBJ_TABLE_HASH_KEY | grn.OBJ_PERSISTENT,
                                          ctx.at(grn.DB_SHORT_TEXT))
                column1_name = "column1"
                column1_path = db_path + "." + column1_name
                column1 = table1.create_column(column1_name,
                                               grn.OBJ_PERSISTENT | grn.OBJ_COLUMN_SCALAR,
                                               ctx.at(grn.DB_TEXT),
                                               path=column1_path)
                assert column1.name() == table1_name + "." + column1_name
                assert column1.path() == column1_path
                assert os.path.isfile(column1_path)
                # remove() drops the column and its on-disk file.
                column1.remove()
                assert not os.path.isfile(column1_path)
    finally:
        shutil.rmtree(work_dir)
def test_open_non_existent_column():
    """Looking up a column that was never created returns a falsy value."""
    work_dir = tempfile.mkdtemp()
    try:
        with grn.Groonga():
            with grn.Context() as ctx:
                db_path = os.path.join(work_dir, "test.db")
                ctx.create_database(db_path)
                table1_name = "Table1"
                table1 = ctx.create_table(table1_name,
                                          grn.OBJ_TABLE_HASH_KEY | grn.OBJ_PERSISTENT,
                                          ctx.at(grn.DB_SHORT_TEXT))
                column1_name = "column1"
                # No create_column() call: the lookup must not succeed.
                column1 = table1.column(column1_name)
                assert not column1
    finally:
        shutil.rmtree(work_dir)
| Python | 0.000003 | |
cb7d205add1d6e114277e596b2023c755dd1ff19 | add an example unit test | tests/test_docker.py | tests/test_docker.py | """ run with
python setup.py install; pip install . ; nosetests -v --nocapture tests/docker/test_docker.py
python setup.py install; pip install . ; nosetests -v --nocapture tests/docker/test_docker.py:Test_docker.test_001
nosetests -v --nocapture tests/cm_basic/test_var.py
or
nosetests -v tests/cm_basic/test_var.py
"""
from cloudmesh_client.common.Shell import Shell
from cloudmesh_client.common.util import HEADING
from cloudmesh_client.var import Var
def run(command):
    """Echo *command*, execute it via Shell, then echo and return its output."""
    print(command)
    tokens = command.split(" ")
    result = Shell.execute(tokens[0], tokens[1:])
    print(result)
    return result
# noinspection PyMethodMayBeStatic,PyPep8Naming
class Test_docker(object):
    """Smoke tests for `cms docker` list subcommands (image/container/network)."""

    def setup(self):
        pass

    def test_003(self):
        HEADING("list docker images")
        result = run("cms docker image list")
        print(result)
        assert "cms" in result # need to make real assertion

    def test_004(self):
        # NOTE(review): the HEADING string says "images" but this lists
        # containers -- looks like a copy-paste slip in the label.
        HEADING("list docker images")
        result = run("cms docker container list")
        print(result)
        assert "cms" in result # need to make real assertion

    def test_005(self):
        # NOTE(review): HEADING string again says "images"; this lists networks.
        HEADING("list docker images")
        result = run("cms docker network list")
        print(result)
        assert "cms" in result # need to make real assertion
| Python | 0 | |
4e0476fa83d0832c328abf00b5167887a0af3fe6 | Add tests for hashes | tests/test_hashes.py | tests/test_hashes.py | from webhooks.hashes import placebo_hash_function, basic_hash_function
def test_placebo():
    # The placebo hash function must always yield the empty string.
    result = placebo_hash_function()
    assert result == ""
def test_basic():
    # Thirty successive hashes should all be distinct (no collisions).
    hashes = {basic_hash_function() for _ in range(30)}
    assert len(hashes) == 30
| Python | 0.000001 | |
ef8ca51dbd9b93a801a4a87be3c04f2c56cdef5a | test for call to enqueue passing | tests/test_server.py | tests/test_server.py | import hashlib
import json
from unittest.mock import Mock
from unittest.mock import ANY
from queue_functions import do_work
from server import handle_post
from uploaders.s3 import get_url
from uploaders.s3 import upload
def test_post():
    """handle_post should build md5-prefixed S3 URLs and enqueue do_work."""
    q = Mock()
    filename = 'afakefilename'
    files = {'file': [{'body': b'a-fake-file-body', 'filename': filename}]}
    # Expected object names: md5(filename) prefix keeps uploads unique.
    hash_object = hashlib.md5(filename.encode())
    audio_filename = hash_object.hexdigest() + "-" + filename
    analysis_filename = audio_filename + '.analysis.json'
    result = json.dumps({'analysis': get_url(analysis_filename), 'audio': get_url(audio_filename)})
    # also need to check that we call upload
    # NOTE(review): comparing raw JSON strings is order-sensitive on Pythons
    # without deterministic dict ordering, which explains the intermittent
    # failures -- compare json.loads(...) of both sides instead.
    assert result == handle_post(q, files, get_url, upload)
    q.enqueue.assert_called_with(do_work, (ANY, audio_filename, analysis_filename, upload))
| Python | 0 | |
b73e125fdcb12649e79aa2e108dcc019d9fffeb0 | add strtol test | tests/test_strtol.py | tests/test_strtol.py | import nose
import angr
import subprocess
import logging
l = logging.getLogger('angr.tests.strtol')
import os
test_location = str(os.path.dirname(os.path.realpath(__file__)))
def test_strtol():
    """Symbolically explore strtol_test and replay each found input natively."""
    b = angr.Project(os.path.join(test_location, "../../binaries/tests/x86_64/strtol_test"))
    pg = b.factory.path_group(immutable=False)
    # find the end of main
    expected_outputs = {"base 8 worked\n", "base +8 worked\n", "0x worked\n", "+0x worked\n", "base +10 worked\n",
                        "base 10 worked\n", "base -8 worked\n", "-0x worked\n", "base -10 worked\n", "Nope\n"}
    pg.explore(find=0x400804, num_find=len(expected_outputs))
    # check the outputs
    pipe = subprocess.PIPE
    for f in pg.found:
        # stdin/stdout contents of the symbolic execution path.
        test_input = f.state.posix.dumps(0)
        test_output = f.state.posix.dumps(1)
        expected_outputs.remove(test_output)
        # check the output works as expected
        # NOTE(review): "./test2" does not match the analyzed binary name
        # (strtol_test) and depends on the current working directory --
        # verify which executable this is meant to run.
        p = subprocess.Popen("./test2", stdout=pipe, stderr=pipe, stdin=pipe)
        ret = p.communicate(test_input)[0]
        nose.tools.assert_equal(ret, test_output)
    # check that all of the outputs were seen
    nose.tools.assert_equal(len(expected_outputs), 0)
if __name__ == "__main__":
    # Run the regression directly when invoked as a script.
    test_strtol()
| Python | 0.000043 | |
f5d2b17371dbd974820b9b8ab1fcdb11ad8fa646 | Add in script to count duplicates. | backend/scripts/countdups.py | backend/scripts/countdups.py | #!/usr/bin/env python
import rethinkdb as r
# Connect to the materialscommons RethinkDB instance.
conn = r.connect('localhost', 30815, db='materialscommons')
# Select sizes of datafiles whose usesid looks like a hex id, i.e. files
# that reference (duplicate) another file's content.
rql = r.table('datafiles').filter(r.row['usesid'].match("^[0-9a-f]")).pluck('size')
total_bytes = 0
total_files = 0
for doc in rql.run(conn):
    total_bytes = total_bytes + doc['size']
    total_files = total_files + 1
print "Total bytes = %s for %d dups" %(format(total_bytes, ",d"), total_files)
| Python | 0 | |
23b9274f2dca42df5c838bc6b437f0408799b4dc | add missing file | bambou/nurest_root_object.py | bambou/nurest_root_object.py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import json
from .nurest_connection import HTTP_METHOD_PUT, HTTP_METHOD_GET
from .nurest_request import NURESTRequest
from .nurest_object import NURESTObject
from .nurest_session import _NURESTSessionCurrentContext
from .utils import Sha1
class NURESTRootObject(NURESTObject):
    """ NURESTRootObject defines a user that can log in.

        Only one NURESTRootObject can be connected at a time.
    """

    # Process-wide singleton instance (see get_default_root_object).
    __default_root_object = None

    def __init__(self):
        """ Initializes user """
        super(NURESTRootObject, self).__init__()

        self._api_url = None
        self._new_password = None
        self._user_name = None
        self._password = None
        self._api_key = None

        # Map local attributes onto their remote REST representation.
        self.expose_attribute(local_name='user_name', remote_name='userName', attribute_type=str)
        self.expose_attribute(local_name='password', attribute_type=str)
        self.expose_attribute(local_name='api_key', remote_name='APIKey', attribute_type=str)

    # Properties

    @property
    def user_name(self):
        """ Get user_name """
        return self._user_name

    @user_name.setter
    def user_name(self, user_name):
        """ Set user_name """
        self._user_name = user_name

    @property
    def password(self):
        """ Get password """
        return self._password

    @password.setter
    def password(self, password):
        """ Set password """
        self._password = password

    @property
    def api_key(self):
        """ Get API Key """
        return self._api_key

    @api_key.setter
    def api_key(self, api_key):
        """ Set API Key """
        self._api_key = api_key

    # Class Methods

    @classmethod
    def get_default_root_object(cls):
        """ Get default user (lazily created singleton) """
        if not cls.__default_root_object:
            NURESTRootObject.__default_root_object = cls()
        return NURESTRootObject.__default_root_object

    # Methods

    def prepare_change_password(self, new_password):
        """ Prepares password modification; applied on the next save() """
        self._new_password = new_password

    def save(self, async=False, callback=None):
        """ Updates the user and perform the callback method

            NOTE(review): `async` is a reserved keyword from Python 3.7
            onward; this parameter name only works on Python 2 / <=3.6.
        """
        if self._new_password:
            # A password change is pending: send the SHA1-encrypted value
            # and invalidate the cached API key on the session controller.
            self.password = Sha1.encrypt(self._new_password)
            controller = _NURESTSessionCurrentContext.session.login_controller
            controller.password = self._new_password
            controller.api_key = None

        data = json.dumps(self.to_dict())
        request = NURESTRequest(method=HTTP_METHOD_PUT, url=self.get_resource_url(), data=data)

        if async:
            return self.send_request(request=request, async=async, local_callback=self._did_save, remote_callback=callback)
        else:
            connection = self.send_request(request=request)
            return self._did_save(connection)

    def _did_save(self, connection):
        """ Launched when save has been successfully executed """
        # Password change (if any) is now committed; restore controller state.
        self._new_password = None
        controller = _NURESTSessionCurrentContext.session.login_controller
        controller.password = None
        controller.api_key = self.api_key

        if connection.async:
            callback = connection.callbacks['remote']

            if connection.user_info:
                callback(connection.user_info, connection)
            else:
                callback(self, connection)
        else:
            return (self, connection)

    def fetch(self, async=False, callback=None):
        """ Fetch all information about the current object

            Args:
                async (bool): Boolean to make an asynchronous call. Default is False
                callback (function): Callback method that will be triggered in case of asynchronous call

            Returns:
                tuple: (current_fetcher, callee_parent, fetched_objects, connection)

            Example:
                >>> entity = NUEntity(id="xxx-xxx-xxx-xxx")
                >>> entity.fetch() # will get the entity with id "xxx-xxx-xxx-xxx"
                >>> print entity.name
                "My Entity"
        """
        request = NURESTRequest(method=HTTP_METHOD_GET, url=self.get_resource_url())

        if async:
            # NOTE(review): _did_fetch is not defined in this class --
            # presumably inherited from NURESTObject; verify.
            return self.send_request(request=request, async=async, local_callback=self._did_fetch, remote_callback=callback)
        else:
            connection = self.send_request(request=request)
            return self._did_retrieve(connection)
| Python | 0.000003 | |
8488e7c5245758e4651e6d723f93d52f3ff54d73 | Add tool for submitting jobs to AreWeCompressedYet | tools/submit_awcy.py | tools/submit_awcy.py | #!/usr/bin/env python
from __future__ import print_function
import requests
import argparse
import os
import subprocess
import sys
# Bail out early if the Daala checkout location is not configured.
if 'DAALA_ROOT' not in os.environ:
    print("Please specify the DAALA_ROOT environment variable to use this tool.")
    sys.exit(1)

# Read the AWCY API key. Use a context manager so the file handle is
# closed (the original left `keyfile` open for the process lifetime).
with open('secret_key', 'r') as keyfile:
    key = keyfile.read().strip()

daala_root = os.environ['DAALA_ROOT']
os.chdir(daala_root)

# Current branch is the default run-id prefix, overridable with -prefix.
branch = subprocess.check_output('git symbolic-ref -q --short HEAD', shell=True).strip()

parser = argparse.ArgumentParser(description='Submit test to arewecompressedyet.com')
parser.add_argument('-prefix', default=branch)
args = parser.parse_args()

# Collect git metadata used to build a unique run identifier:
# <prefix>-<commit date>-<short hash>.
commit = subprocess.check_output('git rev-parse HEAD', shell=True).strip()
short = subprocess.check_output('git rev-parse --short HEAD', shell=True).strip()
date = subprocess.check_output(['git', 'show', '-s', '--format=%ci', commit]).strip()
date_short = date.split()[0]
user = args.prefix
run_id = user + '-' + date_short + '-' + short

print('Creating run ' + run_id)
# Submit the job to AWCY; the response object's repr shows the HTTP status.
r = requests.post("https://arewecompressedyet.com/submit/job",
                  {'run_id': run_id, 'commit': commit, 'key': key})
print(r)
| Python | 0 | |
0c145918d0f34bee1193eeaa0488eb369f0e843e | Use item_lookup_field for DELETE methods | eve/methods/delete.py | eve/methods/delete.py | # -*- coding: utf-8 -*-
"""
eve.methods.delete
~~~~~~~~~~~~~~~~~~
This module imlements the DELETE method.
:copyright: (c) 2013 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from flask import current_app as app, abort
from eve.utils import config
from eve.auth import requires_auth
from eve.methods.common import get_document, ratelimit
@ratelimit()
@requires_auth('item')
def delete(resource, **lookup):
    """Deletes a resource item. Deletion will occur only if request ETag
    matches the current representation of the item.

    :param resource: name of the resource to which the item(s) belong.
    :param **lookup: item lookup query.

    .. versionchanged:: 0.0.7
       Support for Rate-Limiting.

    .. versionchanged:: 0.0.5
      Pass current resource to ``parse_request``, allowing for proper
      processing of new configuration settings: `filters`, `sorting`, `paging`.

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.
    """
    # Fetch the stored document first; 404 when the lookup matches nothing.
    original = get_document(resource, **lookup)
    if not original:
        abort(404)
    # Remove by the stored document's id rather than the raw lookup value,
    # so custom item lookup fields still delete the right document.
    app.data.remove(resource, original[config.ID_FIELD])
    return {}, None, None, 200
@requires_auth('resource')
def delete_resource(resource):
    """Deletes all item of a resource (collection in MongoDB terms). Won't drop
    indexes. Use with caution!

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.

    .. versionadded:: 0.0.2
    """
    # Remove every document in the collection; indexes are preserved.
    app.data.remove(resource)
    return {}, None, None, 200
| # -*- coding: utf-8 -*-
"""
eve.methods.delete
~~~~~~~~~~~~~~~~~~
This module imlements the DELETE method.
:copyright: (c) 2013 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
from flask import current_app as app, abort
from eve.utils import config
from eve.auth import requires_auth
from eve.methods.common import get_document, ratelimit
@ratelimit()
@requires_auth('item')
def delete(resource, **lookup):
    """Deletes a resource item. Deletion will occur only if request ETag
    matches the current representation of the item.

    :param resource: name of the resource to which the item(s) belong.
    :param **lookup: item lookup query.

    .. versionchanged:: 0.0.7
       Support for Rate-Limiting.

    .. versionchanged:: 0.0.5
      Pass current resource to ``parse_request``, allowing for proper
      processing of new configuration settings: `filters`, `sorting`, `paging`.

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.
    """
    # 404 unless the lookup resolves to an existing document.
    original = get_document(resource, **lookup)
    if not original:
        abort(404)
    # Removes by the ID field taken directly from the lookup query.
    app.data.remove(resource, lookup[config.ID_FIELD])
    return {}, None, None, 200
@requires_auth('resource')
def delete_resource(resource):
    """Deletes all item of a resource (collection in MongoDB terms). Won't drop
    indexes. Use with caution!

    .. versionchanged:: 0.0.4
       Added the ``requires_auth`` decorator.

    .. versionadded:: 0.0.2
    """
    # Remove every document in the collection; indexes are preserved.
    app.data.remove(resource)
    return {}, None, None, 200
| Python | 0 |
837089f9195af984597522fffc8c2c6a02e73097 | Create config.example.py | scripts/eurotram/config.example.py | scripts/eurotram/config.example.py | dbname='gis'
# Example configuration values -- copy to config.py and edit.

# PostgreSQL/PostGIS connection settings.
user = 'trolleway'
host = 'localhost'
password = 'admin'

# NextGIS Web instance settings.
ngw_url = 'http://trolleway.nextgis.com'
# The original file left this assignment empty ("ngw_resource_id="), which
# is a SyntaxError on import. Use None as an explicit fill-me-in placeholder.
ngw_resource_id = None
ngw_login = 'administrator'
ngw_password = 'admin'
| Python | 0.000003 | |
4d30756e722cafa40fa449e48c967eeebc58500a | Add a manage.py command to import realm filters | zerver/management/commands/import_realm_filters.py | zerver/management/commands/import_realm_filters.py | from __future__ import absolute_import
from django.core.management.base import BaseCommand
from zerver.models import RealmFilter, get_realm
import logging
class Command(BaseCommand):
    help = """Imports realm filters to database"""

    def handle(self, *args, **options):
        """Create the hard-coded set of RealmFilter rows per realm domain."""
        # Mapping of realm domain -> list of (pattern, url_format_string).
        realm_filters = {
            "zulip.com": [
                ("#(?P<id>[0-9]{2,8})", "https://trac.zulip.net/ticket/%(id)s"),
            ],
            "mit.edu/zephyr_mirror": [],
        }
        # Python 2 style iteration (iteritems); realms missing from the DB
        # are logged and skipped rather than aborting the import.
        for domain, filters in realm_filters.iteritems():
            realm = get_realm(domain)
            if realm is None:
                logging.error("Failed to get realm for domain %s" % (domain,))
                continue
            # NOTE(review): `filter` shadows the builtin of the same name.
            for filter in filters:
                RealmFilter(realm=realm, pattern=filter[0], url_format_string=filter[1]).save()
                logging.info("Created realm filter %s for %s" % (filter[0], domain))
| Python | 0 | |
c7fa4500b22104b34b50bbcacc3b64923d6da294 | Add a parser for plain text | trex/parsers.py | trex/parsers.py | # -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from io import TextIOWrapper
from rest_framework.parsers import BaseParser
class PlainTextParser(BaseParser):
    """DRF parser for ``text/plain`` request bodies.

    Returns the raw request stream, wrapped in a TextIOWrapper when the
    media type declares a charset.
    """

    media_type = "text/plain"

    def parse(self, stream, media_type=None, parser_context=None):
        # NOTE(review): leftover Python 2 debug print.
        print "Running PlainTextParser"
        charset = self.get_charset(media_type)
        if charset:
            stream = TextIOWrapper(stream, encoding=charset)
        return stream

    def get_charset(self, media_type):
        """Return the lower-cased charset parameter of *media_type*, or None.

        Splits the media type on spaces and looks for a "charset=<value>"
        token; the first match wins.
        """
        if not media_type:
            return None
        charset = None
        msplit = media_type.split(" ");
        for m in msplit:
            m = m.strip()
            if "charset" in m:
                csplit = m.split("=")
                if len(csplit) > 1:
                    charset = csplit[1]
                    return charset.strip().lower()
        return None
| Python | 0.000945 | |
72d7e2a37bec5f7ae904ed2119dd8c30c22801fb | Add clinvar bot users | gennotes_server/migrations/0002_add_clinvar_bot_users.py | gennotes_server/migrations/0002_add_clinvar_bot_users.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth import get_user_model
from django.db import migrations
def add_clinvar_bot_users(apps, schema_editor):
    """Forward migration: ensure the ClinVar importer bot accounts exist."""
    usernames = ['clinvar-variant-importer', 'clinvar-data-importer']
    for username in usernames:
        # get_or_create keeps the migration idempotent on re-run.
        get_user_model().objects.get_or_create(username=username)
class Migration(migrations.Migration):
    # Data migration only; no reverse operation is provided.

    dependencies = [
        ('gennotes_server', '0001_initial'),
    ]

    operations = [
        migrations.RunPython(add_clinvar_bot_users),
    ]
| Python | 0 | |
9eb52487a23ae7cd970e26ef7c4ad49d542b7524 | Add command for removing Wine versions from installers. With hard-coded Wine versions as per Tannis' request. | games/management/commands/remove_wine_versions.py | games/management/commands/remove_wine_versions.py | """Removes specified Wine versions from all installers."""
import logging
from django.core.management.base import BaseCommand
from django.db.models import Q
from common.util import load_yaml, dump_yaml
from games.models import Installer
LOGGER = logging.getLogger(__name__)
VERSIONS_TO_KEEP = (
'tkg-mwo-4.1-x86_64',
'tkg-osu-4.6-x86_64',
'lutris-lol-4.20-x86_64',
'lutris-mtga-4.21-x86_64',
'lutris-5.0-rc3-x86_64',
'lutris-fshack-5.0-rc2-x86_64',
'lutris-vkchildwindow-5.0-rc2-x86_64'
)
class Command(BaseCommand):
    """Management command that strips pinned Wine versions from installers.

    Every installer script that pins a Wine version not listed in
    VERSIONS_TO_KEEP has that version removed (and the whole ``wine``
    section, when the version was its only key).
    """

    help = "Removes specified Wine versions from all installers."

    def add_arguments(self, parser):
        # NOTE: ``store_true`` actions must not be given ``type`` --
        # argparse raises TypeError for the unexpected keyword, so the
        # original ``type=bool`` here made the command crash on startup.
        parser.add_argument(
            '--dry-run',
            action='store_true',
            dest='dry_run',
            help="don't write to the database"
        )
        '''
        parser.add_argument(
            '--all-except',
            action='store_true',
            type=bool,
            dest='all_except',
            help="remove all Wine versions except the specified"
        )
        parser.add_argument(
            '--force',
            action='store_true',
            type=bool,
            dest='force',
            help="run even without specified Wine versions "
                 "(dangerous with --all-except)"
        )
        parser.add_argument(
            'versions',
            action='append',
            nargs='+',
            type=list,
            dest='versions',
            help="the Wine versions to remove/keep"
        )
        '''

    @staticmethod
    def remove_wine_version(script, slug, version_filter):
        """Delete ``script["wine"]["version"]`` when version_filter matches.

        Args:
            script: parsed installer script (dict-like, possibly malformed).
            slug: installer slug, used for logging only.
            version_filter: callable returning True for versions to remove.

        Returns:
            True when the script was modified, False otherwise. Malformed
            scripts are logged and left untouched.
        """
        # pylint: disable=too-many-return-statements
        # Sentinel distinguishes "key absent" from a None value.
        not_found = object()
        try:
            wine_config = script.get("wine", not_found)
            if wine_config is not_found:
                return False
        except AttributeError:
            LOGGER.error("The script %s is invalid", slug)
            return False
        try:
            wine_version = wine_config.get("version", not_found)
            if wine_version is not_found:
                return False
        except AttributeError:
            LOGGER.error("The script %s is invalid", slug)
            return False
        if not version_filter(wine_version):
            return False
        LOGGER.info("Removing Wine version %s from %s", wine_version, slug)
        try:
            del wine_config["version"]
            # Drop the whole "wine" section when version was its only key.
            if not wine_config:
                del script["wine"]
        except TypeError:
            LOGGER.error("The script %s is invalid", slug)
            return False
        return True

    def handle(self, *args, **options):
        """Removes specified Wine versions from all installers."""
        # Get dry run flag from options.
        dry_run = options.get('dry_run')
        '''
        # Get the specified Wine versions from the command line.
        versions = options.get('versions')
        # If no versions were specified and --force not given,
        # refuse to run.
        if not versions and not options.get('force'):
            LOGGER.error("No versions specified, use --force to run anyway")
            return
        # Create version filter lambda from versions.
        if options.get('all_except'):
            version_filter = lambda version: version not in versions
        else:
            version_filter = lambda version: version in versions
        '''
        # Remove any version that is not in the keep list.
        def version_filter(version):
            return version not in VERSIONS_TO_KEEP

        # Search for installers that have a Wine version specified,
        # in either serialized format.
        installers = Installer.objects.filter(
            (  # JSON format
                Q(content__icontains=',"wine":{')
                & Q(content__icontains='"version":')
            ) | (  # YAML format
                Q(content__icontains=r"\nwine:\n ")
                & Q(content__icontains=r"\n version: ")
            )
        )
        for installer in installers:
            # Parse, strip the pinned version, and only persist on change.
            script = load_yaml(installer.content)
            changed = Command.remove_wine_version(
                script,
                installer.slug,
                version_filter
            )
            if not changed:
                continue
            installer.content = dump_yaml(script)
            LOGGER.info("Updating installer %s", installer)
            if not dry_run:
                installer.save()
| Python | 0 | |
d56515878d4a1d4d56a10426fe5d6c45de97a671 | Create servos.py | gadgets/motors/servos.py | gadgets/motors/servos.py | from gadgets.th_gpio import TH_GPIO
import time
class Servo5V():
    """Driver for a 5V hobby servo on a PWM-capable GPIO pin.

    The requested angle is mapped onto a PWM duty cycle: duty =
    angle / period_ms + 2.5, where period_ms = 1000 / freq.
    """

    def __init__(self, pin_number=12, freq=100):
        self.pin_number = pin_number
        self.freq = freq
        self.pwm = TH_GPIO().pwm_create(self.pin_number, freq=self.freq)
        # Period length in milliseconds. Use true (float) division: the
        # original `float(1000/self.freq)` truncated under Python 2 for
        # frequencies that do not divide 1000 evenly.
        self.width = 1000.0 / self.freq

    def set_freq(self, freq):
        """Change the PWM frequency (Hz) and recompute the period width."""
        self.freq = freq
        self.pwm.set_freq(freq)
        self.width = 1000.0 / self.freq

    def write(self, angle):
        """Drive the servo to *angle* by emitting the matching duty cycle."""
        duty = float(angle) / self.width + 2.5
        self.pwm.change(duty)

    def cleanup(self):
        """Release the GPIO pin used by this servo."""
        TH_GPIO().disable_pin(self.pin_number)
| Python | 0.000022 | |
0a074f3af770f049cf6f112bdc7fa5ae35c4a6dc | Create Run.py | ImageNet/Run.py | ImageNet/Run.py | # From https://groups.google.com/a/tensorflow.org/forum/#!topic/discuss/4xjc7tSrb18
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os, math, time
import cv2, csv
import numpy as np
import tensorflow as tf
import CIFAR10
from datetime import datetime
from PIL import Image
os.environ["CUDA_VISIBLE_DEVICES"] = "-1"  # force CPU-only execution

HOME = '/HOME/' # /HOME/DATA/
width = 24
height = 24

# Load label names, one per space-delimited CSV row.
categories = []
with open(HOME + "DATA/LABELS", 'r') as csvfile:
    Labels = csv.reader(csvfile, delimiter=' ', quotechar='|')
    for L in Labels:
        categories.append(L) # L[0]

filename = HOME + "DATA/0000.png"
#im = Image.open(filename)
#im.save(filename, format='PNG', subsampling=0, quality=100)

with tf.Session() as sess:
    # Decode and normalize the input image to the network's 24x24 input.
    input_img = tf.image.decode_png(tf.read_file(filename), channels=3)
    tf_cast = tf.cast(input_img, tf.float32)
    float_image = tf.image.resize_image_with_crop_or_pad(tf_cast, height, width)
    float_image = tf.image.per_image_standardization(float_image)
    images = tf.expand_dims(float_image, 0)
    # Build the inference graph and request the top-5 predictions.
    logits = CIFAR10.inference(images)
    _, top_k_pred = tf.nn.top_k(logits, k=5)
    # Restore the moving-average shadow variables from the latest checkpoint.
    variable_averages = tf.train.ExponentialMovingAverage(CIFAR10.MOVING_AVERAGE_DECAY)
    variables_to_restore = variable_averages.variables_to_restore()
    saver = tf.train.Saver(variables_to_restore)
    ckpt = tf.train.get_checkpoint_state(HOME+'MODEL')
    if ckpt and ckpt.model_checkpoint_path:
        print("Model path = ", ckpt.model_checkpoint_path)
        saver.restore(sess, ckpt.model_checkpoint_path)
    else:
        print('No checkpoint file found.')
        exit(0)
    #init_op = tf.initialize_all_variables()
    #sess.run(init_op)
    # Run the graph and print each predicted category with its logit score.
    _, top_indices = sess.run([_, top_k_pred])
    for key, value in enumerate(top_indices[0]):
        print ("Type %20s" % categories[value] + "\t\t" + str(_[0][key]))
| Python | 0.000001 | |
99f2130476064062c3dd6338163010df53d60594 | Bootstrap output | output.py | output.py | import colour
import csv
import json
# export as json, csv, textfile or output to console
def write_data(data, format=None):
    """Export *data* in the given *format* (not implemented yet).

    Intended formats per the module note above: json, csv, plain text
    file, or console output. The semantics of format=None are not yet
    defined -- presumably a default output; TODO confirm when implemented.
    """
    pass
| Python | 0.999999 | |
4fb80eede37a2af23c165cb0997989c039f8166e | Add utterance extraction script | utterances.py | utterances.py | #######################################
## SPADE utterance extraction script ##
#######################################
## Processes and extracts start-times and end-times for all speaker utterances.
## Used for extracting data collected as part of the SPeech Across Dialects of English
## (SPADE) project.
## Input:
## - corpus name (e.g., Buckeye, SOTC)
## - corpus metadata (stored in a YAML file), which
## specifies the path to the audio, transcripts and metadata
## Output:
## - CSV of utterance metadata for the corpus
import yaml
import time
from datetime import datetime
import sys
import os
import argparse
# Make the shared SPADE helpers in Common/ importable: the `common` module
# imported below lives there, not on the default module search path.
base_dir = os.path.dirname(os.path.abspath(__file__))
script_dir = os.path.join(base_dir, 'Common')
sys.path.insert(0, script_dir)
import common
from polyglotdb import CorpusContext
from polyglotdb.utils import ensure_local_database_running
from polyglotdb.config import CorpusConfig
from polyglotdb.io.enrichment import enrich_lexicon_from_csv
def utterance_export(config, corpus_name, corpus_directory, dialect_code, speakers, ignored_speakers=None):
    """Query utterance start/end times for every speaker and write them to CSV.

    Parameters:
        config -- CorpusConfig for the corpus database.
        corpus_name -- name of the corpus; determines the output path.
        corpus_directory, dialect_code, speakers -- accepted for signature
            consistency with the other SPADE export scripts; not used here.
        ignored_speakers -- iterable of speaker names to exclude (optional).

    Output is written to <corpus_name>/<corpus_name>_utterances.csv.
    """
    if ignored_speakers is None:
        # Guard the default: None would otherwise be passed straight into
        # the not_in_() filter below.
        ignored_speakers = []
    csv_path = os.path.join(base_dir, corpus_name, '{}_utterances.csv'.format(corpus_name))
    with CorpusContext(config) as c:
        print("Beginning utterance export")
        beg = time.time()
        # Query every utterance (except those of ignored speakers) with its
        # speaker, id, begin/end time, and discourse.
        q = c.query_graph(c.utterance).filter(c.utterance.speaker.name.not_in_(ignored_speakers))
        q = q.columns(c.utterance.speaker.name.column_name('speaker'),
                      c.utterance.id.column_name('utterance_label'),
                      c.utterance.begin.column_name('utterance_begin'),
                      c.utterance.end.column_name('utterance_end'),
                      c.utterance.discourse.name.column_name('discourse'))
        # Add every enriched speaker property (e.g. sex, dialect) as an
        # extra output column; 'name' is already exported as 'speaker'.
        for sp, _ in c.hierarchy.speaker_properties:
            if sp == 'name':
                continue
            q = q.columns(getattr(c.utterance.speaker, sp).column_name(sp))
        # Write the query to a CSV file
        print("Writing CSV")
        q.to_csv(csv_path)
        end = time.time()
        # Use the same pair of timestamps for the printed duration and the
        # recorded benchmark (they previously came from different calls).
        time_taken = end - beg
        print('Query took: {}'.format(end - beg))
        print("Results for query written to " + csv_path)
        common.save_performance_benchmark(config, 'utterance_export', time_taken)
## Entry point: parse command-line arguments (corpus name, reset, docker),
## load the corpus configuration, then run the utterance export.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('corpus_name', help='Name of the corpus')
    parser.add_argument('-r', '--reset', help="Reset the corpus", action='store_true')
    parser.add_argument('-d', '--docker', help="This script is being called from Docker", action='store_true')
    args = parser.parse_args()
    corpus_name = args.corpus_name
    reset = args.reset
    docker = args.docker
    # Each corpus must have its own sub-directory (containing <corpus>.yaml).
    directories = [x for x in os.listdir(base_dir) if os.path.isdir(x) and x != 'Common']
    if args.corpus_name not in directories:
        print(
            'The corpus {0} does not have a directory (available: {1}). Please make it with a {0}.yaml file inside.'.format(
                args.corpus_name, ', '.join(directories)))
        sys.exit(1)
    corpus_conf = common.load_config(corpus_name)
    print('Processing...')
    # sanity check database access
    common.check_database(corpus_name)
    ignored_speakers = corpus_conf.get('ignore_speakers', [])
    # NOTE(review): stressed_vowels is loaded but never used in this script;
    # likely carried over from the duration-export script -- confirm.
    stressed_vowels = corpus_conf.get('stressed_vowels', [])
    if reset:
        common.reset(corpus_name)
    # When run inside Docker the database is reached via a different address.
    ip = common.server_ip
    if docker:
        ip = common.docker_ip
    ## start processing the corpus
    with ensure_local_database_running(corpus_name, port=common.server_port, ip=ip, token=common.load_token()) as params:
        config = CorpusConfig(corpus_name, **params)
        config.formant_source = 'praat'
        # Common set up
        ## Check if the corpus already exists as a database: if not, import the audio and
        ## transcripts and store in graph format
        common.loading(config, corpus_conf['corpus_directory'], corpus_conf['input_format'])
        ## Add information to the corpus regarding lexical, speaker, and linguistic information
        common.lexicon_enrichment(config, corpus_conf['unisyn_spade_directory'], corpus_conf['dialect_code'])
        common.speaker_enrichment(config, corpus_conf['speaker_enrichment_file'])
        common.basic_enrichment(config, corpus_conf['vowel_inventory'] + corpus_conf['extra_syllabic_segments'], corpus_conf['pauses'])
        ## Call the utterance export function, as defined above
        utterance_export(config, corpus_name, corpus_conf['corpus_directory'], corpus_conf['dialect_code'], corpus_conf['speakers'], ignored_speakers=ignored_speakers)
    print('Finishing up!')
| Python | 0.000724 | |
b453943f86f97e38e52af3a1b048ee93b0177df8 | add a test to make sure we don't have any more missing migrations | axes/tests/test_models.py | axes/tests/test_models.py | from django.test import TestCase
class MigrationsCheck(TestCase):
    """Fail if any installed app has model changes without a migration."""

    def setUp(self):
        # Deactivate translations so migration detection is not affected
        # by the active locale; the previous locale is restored in tearDown.
        from django.utils import translation
        self.saved_locale = translation.get_language()
        translation.deactivate_all()

    def tearDown(self):
        if self.saved_locale is not None:
            from django.utils import translation
            translation.activate(self.saved_locale)

    def test_missing_migrations(self):
        from django.db import connection
        from django.apps.registry import apps
        from django.db.migrations.executor import MigrationExecutor
        from django.db.migrations.autodetector import MigrationAutodetector
        from django.db.migrations.state import ProjectState
        executor = MigrationExecutor(connection)
        # Compare the state implied by existing migrations against the
        # state of the current models; any difference means a migration
        # is missing.
        detector = MigrationAutodetector(
            executor.loader.project_state(),
            ProjectState.from_apps(apps),
        )
        self.assertEqual({}, detector.changes(graph=executor.loader.graph))
| Python | 0 | |
1e7aee8c5597a7ccd9f2bc8f4e05e3ae489c3bfd | Add bot.py to run as an actual bot, via pywikibot | src/bot.py | src/bot.py | from app import *
from time import sleep
import pywikibot
def run_bot(template_param, access_token=None, site=None, max_edits=100000):
    """Process every cached page, saving edits for *template_param*.

    Stops once *max_edits* saved edits have been made; sleeps 3 s between
    successful saves to stay polite toward the wiki.
    """
    edit_count = 0
    for title in list_cache_contents('bot_cache/'):
        print(title)
        cache_path = 'bot_cache/' + to_cache_name(title)
        with open(cache_path, 'r') as handle:
            proposals = json.load(handle)
        edited = run_bot_on_page(proposals, template_param, access_token=access_token, site=site)
        if edited:
            edit_count += 1
            sleep(3)
        # Checked on every iteration (not only after a save), matching the
        # original control flow.
        if edit_count >= max_edits:
            return
def run_bot_on_page(proposed_edits, template_param, access_token=None, site=None):
    """Try the cached proposals for one page; True once an edit is saved.

    Only proposals whose change begins with *template_param* (matched as a
    regex anchored at the start) are attempted.
    """
    page_name = proposed_edits['page_name']
    for proposal in proposed_edits['proposed_edits']:
        change = proposal['proposed_change']
        match = re.findall(r'^' + template_param, change)
        if not match:
            continue
        try:
            app.logger.info('Attempting change on {}: {}'.format(page_name, change))
            saved = perform_bot_edit(
                page_name,
                '[[Wikipedia:OABOT|Open access bot]]: add %s identifier to citation with #oabot.' % match[0],
                proposal['orig_hash'],
                change,
                access_token=access_token,
                site=site)
            if saved:
                return True
        except ValueError:
            app.logger.exception('perform_bot_edit failed on {}'.format(page_name))
    return False
def perform_bot_edit(page_name, summary, template_hash, proposed_addition, access_token=None, site=None):
    """Fetch a page, splice in the proposed addition, save, drop the cache.

    Saves via pywikibot when *site* is given, otherwise through the OAuth
    API with *access_token*. Returns whether the wikicode was changed.
    """
    source = main.get_page_over_api(page_name)
    updated, changed = make_new_wikicode_for_bot(source, template_hash, proposed_addition, page_name)
    if changed:
        if site:
            page = pywikibot.Page(site, page_name)
            page.text = updated
            page.save(summary)
        else:
            edit_wiki_page(page_name, updated, access_token, summary, bot='yes')
    # The cache entry is consumed whether or not an edit was saved.
    cache_path = "bot_cache/" + to_cache_name(page_name)
    if os.path.isfile(cache_path):
        os.remove(cache_path)
    return changed
def make_new_wikicode_for_bot(text, template_hash, proposed_addition, page_name):
    """Apply *proposed_addition* to the template matching *template_hash*.

    Returns (new_wikitext, change_made).
    """
    parsed = mwparserfromhell.parse(text)
    changed = False
    for template in parsed.filter_templates():
        candidate = main.TemplateEdit(template, page_name)
        if candidate.orig_hash != template_hash:
            continue
        try:
            candidate.update_template(proposed_addition)
            changed = True
        except ValueError:
            # TODO report to the user
            app.logger.exception('update_template failed on {}'.format(page_name))
    return unicode(parsed), changed
if __name__ == '__main__':
    import sys
    # Usage: python bot.py <template_param>
    # template_param selects which template parameter additions this run
    # should apply (matched against cached proposed changes in run_bot).
    template_param = sys.argv[1]
    app.logger.info("Starting additions for parameter: {}".format(template_param))
    # Log in with the pywikibot-configured account, then process the cache.
    site = pywikibot.Site()
    site.login()
    run_bot(template_param, site=site)
| Python | 0 | |
eb517e5b323ea183571b9d4967f46821729dd3e7 | add part 6 | part_6.py | part_6.py | # Let's try to draw a point moving on a line
# To make things simple, we are going back to 1D
pos = 1
velo = 1
# Since there are multiple positions at a time, we can represent as a list
line_1 = [' ', ' ', ' ']
line_2 = 3*[' ']
# Note how they are equal statements
print(line_1, line_2)
# If we wanted to print the line without the list notation
# We can join an empty string with a list
print("".join(line_1)) | Python | 0.000012 | |
7f714b662908e60bd78eef8d5a1729da5ededbe6 | include gpx.py, which is the start of a new GPX module that fits better than gpxparser.py | vector/gpx.py | vector/gpx.py | """
GPX IO and manipulation (UNDER DEVELOPMENT)
This is a rewrite of gpxparser.py, designed to fit better with guppy types.
"""
import sys
from xml.dom import minidom, Node
from xml.etree.ElementTree import ElementTree, Element
import collections
Point = collections.namedtuple("Point", ["vertex", "data", "properties"])
Trkseg = collections.namedtuple("Trkseg", ["vertices", "data", "properties"])
Track = collections.namedtuple("Track", ["segments", "properties"])
class GPX(object):
    """ Represents a GPX document, with waypoints, tracks, and routes as
    attributes. """

    # Kept for backward compatibility with code reading the class attributes;
    # instances get their own dicts in __init__ (the class-level dicts are
    # mutable and would otherwise be shared across every GPX instance).
    waypts = {}
    tracks = {}
    routes = {}

    def __init__(self, f=None, waypts=None, tracks=None, routes=None):
        """ Create a GPX object, either from a GPX file or from lists of
        waypoints, tracks, and routes.

        f       filename or file-like object to read, or None
        waypts  iterable of waypoints (unused until build_wpt is implemented)
        tracks  iterable of Track tuples (see build_trk)
        routes  iterable of routes (unused until build_rte is implemented)
        """
        self.waypts = {}
        self.tracks = {}
        self.routes = {}
        if f is not None:
            self.fromfile(f)
        else:
            self.gpx = Element("gpx")
            if waypts is not None:
                self.build_wpt(waypts)
            if tracks is not None:
                self.build_trk(tracks)
            if routes is not None:
                self.build_rte(routes)
        return

    def fromfile(self, f):
        """ Read a GPX document from *f*, which may be a filename or a
        file-like object. """
        gpxtree = ElementTree(file=f)
        # The document root *is* the <gpx> element; find("gpx") would look
        # for a *child* named "gpx" and return None.
        self.gpx = gpxtree.getroot()
        # NOTE(review): real GPX files usually declare an XML namespace, in
        # which case these bare tag names will not match -- confirm against
        # sample data.
        for el in gpxtree.findall("wpt"):
            self.parse_wpt(el)
        for el in gpxtree.findall("trk"):
            self.parse_trk(el)
        for el in gpxtree.findall("rte"):
            self.parse_rte(el)
        return

    def parse_wpt(self, node):
        """ Parse a <wpt> element (not implemented yet). """
        pass

    def parse_trk(self, node):
        """ Parse a <trk> element, updating self.tracks.

        self.tracks[name] becomes a list of segments, each a list of Point
        tuples with vertex=(lon, lat, ele) and data={"time": ...}.
        """
        name = node.findtext("name")
        segments = []
        for trkseg in node.findall("trkseg"):
            points = []
            for trkpt in trkseg.findall("trkpt"):
                lat = float(trkpt.get("lat"))
                lon = float(trkpt.get("lon"))
                ele = float(trkpt.findtext("ele"))
                time = trkpt.findtext("time")
                # Point is (vertex, data, properties); the previous code
                # passed four positional arguments (and used the minidom
                # API on ElementTree nodes) and could never run.
                points.append(Point((lon, lat, ele), {"time": time}, {}))
            segments.append(points)
        self.tracks[name] = segments
        return

    def parse_rte(self, node):
        """ Parse a <rte> element (not implemented yet). """
        pass

    def build_wpt(self, waypts):
        """ Build "wpt" nodes (not implemented yet). """
        pass

    def build_trk(self, tracks):
        """ Build "trk" nodes from Track tuples and append them to the
        document root. Each vertex is (lon, lat[, ele]); per-point fields
        such as "time" come from segment.data, keyed by field name, with
        one value per vertex. """
        for track in tracks:
            trk = Element("trk")
            if "name" in track.properties:
                # Element() keyword arguments become XML *attributes*, so
                # element text must be assigned via .text (the old code
                # also read the name from a nonexistent track.data field).
                name = Element("name")
                name.text = str(track.properties["name"])
                trk.append(name)
            for segment in track.segments:
                trkseg = Element("trkseg")
                trk.append(trkseg)
                for i, c in enumerate(segment.vertices):
                    trkpt = Element("trkpt", attrib={"lon": str(c[0]),
                                                     "lat": str(c[1])})
                    if len(c) > 2:
                        ele = Element("ele")
                        ele.text = str(c[2])
                        trkpt.append(ele)
                    # One child element per data field ("time" included);
                    # the old special-casing of "time" emitted it twice.
                    for field in segment.data:
                        child = Element(field)
                        child.text = str(segment.data[field][i])
                        trkpt.append(child)
                    trkseg.append(trkpt)
            self.gpx.append(trk)
        return

    def build_rte(self, routes):
        """ Build "rte" nodes (not implemented yet). """
        pass

    def writefile(self, f, waypts=True, tracks=True, routes=True):
        """ Write GPX object to a GPX file. The keyword flags are kept for
        API compatibility; selective output is not implemented yet, so the
        whole document attached to self.gpx is always written. """
        ElementTree(element=self.gpx).write(f)
        return
| Python | 0.000001 | |
98a6fd1b1d095d6babc55c5d415c2450743fdba6 | Add antibody audits | src/encoded/audit/antibody_lot.py | src/encoded/audit/antibody_lot.py | from snovault import (
AuditFailure,
audit_checker,
)
from .conditions import rfa
@audit_checker('antibody_lot', frame=['characterizations'],
condition=rfa('ENCODE3', 'modERN'))
def audit_antibody_missing_characterizations(value, system):
'''
Check to see what characterizations are lacking for each antibody,
for the cell lines we know about.
'''
if not value['characterizations']:
detail = '{} '.format(value['@id']) + \
'does not have any supporting characterizations submitted.'
yield AuditFailure('no characterizations submitted', detail,
level='NOT_COMPLIANT')
return
primary_chars = []
secondary_chars = []
num_compliant_primary = 0
compliant_secondary = False
for char in value['characterizations']:
if 'primary_characterization_method' in char:
primary_chars.append(char)
if char['status'] in ['compliant', 'exempt from standards']:
num_compliant_primary += 1
if 'secondary_characterization_method' in char:
secondary_chars.append(char)
if char['status'] in ['compliant', 'exempt from standards']:
compliant_secondary = True
if not primary_chars:
detail = '{} '.format(value['@id']) + \
'does not have any primary characterizations submitted.'
yield AuditFailure('no primary characterizations', detail,
level='NOT_COMPLIANT')
if not secondary_chars:
detail = '{} '.format(value['@id']) + \
'does not have any secondary characterizations submitted.'
yield AuditFailure('no secondary characterizations', detail,
level='NOT_COMPLIANT')
if len(primary_chars) != num_compliant_primary:
detail = '{} '.format(value['@id']) + \
'needs compliant primary in one or more cell types.'
yield AuditFailure('need compliant primaries', detail,
level='NOT_COMPLIANT')
if secondary_chars and not compliant_secondary:
detail = '{} '.format(value['@id']) + \
'needs a compliant secondary characterization.'
yield AuditFailure('need compliant secondary', detail,
level='NOT_COMPLIANT')
return
| Python | 0 | |
e8560c42e3ae73f1753073b8ad6aef7d564e6d65 | Implement basic active monitoring algorithm | Host/original.py | Host/original.py | import sys
from functools import reduce
tempVmId = -1
def enhancedActiveVMLoadBalancer(vmStateList, currentAllocationCounts):
'''
vmStateList: Dict<vmId, vmState>
currentAllocationCounts: Dict<vmId, currentActiveAllocationCount>
'''
global tempVmId
vmId = -1
totalAllocations = reduce(lambda x, y: x + y, currentAllocationCounts)
if(totalAllocations < len(vmStateList)):
for i, vm in enumerate(vmStateList):
if(currentAllocationCounts[i] == 0):
vmId = i
break
else:
minCount = sys.maxint
for i, vm in enumerate(vmStateList):
curCount = currentAllocationCounts[i]
if(curCount < minCount):
if(i != tempVmId):
vmId = i
break
tempVmId = vmId
print("Returning, ", vmId)
return vmId
enhancedActiveVMLoadBalancer([
{'cpu': 10, 'mem': 10},
{'cpu': 17, 'mem': 40},
{'cpu': 40, 'mem': 20},
{'cpu': 80, 'mem': 15}
], [1, 4, 1, 1])
| Python | 0.000018 | |
0a5e419dd91317d3a9d755cc5e8ee32c3a68d4af | Fix dates in show pending notifications | src/ggrc/notification/__init__.py | src/ggrc/notification/__init__.py | # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: mouli@meics.org
# Maintained By: miha@reciprocitylabs.com
from collections import defaultdict
from freezegun import freeze_time
from datetime import date, datetime
from ggrc.extensions import get_extension_modules
from ggrc.models import Notification
from ggrc.utils import merge_dict
from ggrc import db
from sqlalchemy import and_
class NotificationServices():
def __init__(self):
self.services = self.all_notifications()
def all_notifications(self):
services = {}
for extension_module in get_extension_modules():
contributions = getattr(
extension_module, 'contributed_notifications', None)
if contributions:
if callable(contributions):
contributions = contributions()
services.update(contributions)
return services
def get_service_function(self, name):
if name not in self.services:
raise ValueError("unknown service name: %s" % name)
return self.services[name]
def call_service(self, name, pn):
service = self.get_service_function(name)
return service(pn)
services = NotificationServices()
def get_notification_data(notifications):
if not notifications:
return {}
aggregate_data = {}
def merge_into(destination, source):
if destination is None:
return source
for pn in notifications:
data = services.call_service(pn.object_type.name, pn)
aggregate_data = merge_dict(aggregate_data, data)
return aggregate_data
def get_pending_notifications():
notifications = db.session.query(Notification).filter(
Notification.sent_at == None).all() # noqa
notif_by_day = defaultdict(list)
for notification in notifications:
notif_by_day[notification.send_on].append(notification)
data = {}
today = datetime.combine(date.today(), datetime.min.time())
for day, notif in notif_by_day.iteritems():
current_day = max(day, today)
with freeze_time(current_day):
data[current_day] = get_notification_data(notif)
return notifications, data
def get_todays_notifications():
notifications = db.session.query(Notification).filter(
and_(Notification.send_on <= date.today(),
Notification.sent_at == None # noqa
)).all()
return notifications, get_notification_data(notifications)
def generate_notification_email(data):
pass
def dispatch_notifications():
pass
| # Copyright (C) 2015 Google Inc., authors, and contributors <see AUTHORS file>
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
# Created By: mouli@meics.org
# Maintained By: miha@reciprocitylabs.com
from collections import defaultdict
from freezegun import freeze_time
from datetime import date
from ggrc.extensions import get_extension_modules
from ggrc.models import Notification
from ggrc.utils import merge_dict
from ggrc import db
from sqlalchemy import and_
class NotificationServices():
def __init__(self):
self.services = self.all_notifications()
def all_notifications(self):
services = {}
for extension_module in get_extension_modules():
contributions = getattr(
extension_module, 'contributed_notifications', None)
if contributions:
if callable(contributions):
contributions = contributions()
services.update(contributions)
return services
def get_service_function(self, name):
if name not in self.services:
raise ValueError("unknown service name: %s" % name)
return self.services[name]
def call_service(self, name, pn):
service = self.get_service_function(name)
return service(pn)
services = NotificationServices()
def get_notification_data(notifications):
if not notifications:
return {}
aggregate_data = {}
def merge_into(destination, source):
if destination is None:
return source
for pn in notifications:
data = services.call_service(pn.object_type.name, pn)
aggregate_data = merge_dict(aggregate_data, data)
return aggregate_data
def get_pending_notifications():
notifications = db.session.query(Notification).filter(
Notification.sent_at == None).all() # noqa
notif_by_day = defaultdict(list)
for notification in notifications:
notif_by_day[notification.send_on].append(notification)
data = {}
for day, notif in notif_by_day.iteritems():
with freeze_time(day):
data[day] = get_notification_data(notif)
return notifications, data
def get_todays_notifications():
notifications = db.session.query(Notification).filter(
and_(Notification.send_on <= date.today(),
Notification.sent_at == None # noqa
)).all()
return notifications, get_notification_data(notifications)
def generate_notification_email(data):
pass
def dispatch_notifications():
pass
| Python | 0 |
b7e09bb39aa6161215799960bd5fda33a882e40f | fix docstring | 01_basics/03_advanced_expressions/01_basic_indexing_soln.py | 01_basics/03_advanced_expressions/01_basic_indexing_soln.py | # Fill in the TODOs in this exercise, then run the script to see if your
# solution works.
import numpy as np
import theano.tensor as T
def increment_odd(x):
"""
x: a Theano vector
Returns:
y: a Theano vector equal to x, but with all odd-numbered elements
incremented by 1.
"""
raise NotImplementedError("TODO: implement the function.")
if __name__ == "__main__":
x = T.vector()
xv = np.zeros((4,), dtype=x.dtype)
yv = increment_odd(x).eval({x:xv})
assert np.allclose(yv, np.array([0., 1., 0., 1.]))
| Python | 0.000018 | |
787b46749a26f8078c1ac4e914aea7fbd0ced8c6 | Add test for checking if privatecode in journey's is unique per day | bin/test.py | bin/test.py | import helper
import logging
import psycopg2
from settings.const import database_connect
conn = psycopg2.connect(database_connect)
cur = conn.cursor()
cur.execute("""
SELECT j.id,jp.operator_id,j.operator_id FROM
(select journeypatternref,count(distinct pointorder) as points from pointinjourneypattern group by journeypatternref) as pattern,
(select timedemandgroupref,count(distinct pointorder) as timepoints from pointintimedemandgroup group by timedemandgroupref) as timepattern,
journey as j LEFT JOIN journeypattern as jp ON (j.journeypatternref = jp.id)
WHERE
j.journeypatternref = pattern.journeypatternref AND
j.timedemandgroupref = timepattern.timedemandgroupref AND
points != timepoints;
""")
rows = cur.fetchall()
cur.close()
timegroupsValid = len(rows) == 0
assert timegroupsValid
cur.execute("""
SELECT links.operator_id,rechts.operator_id FROM
(SELECT j.id,j.operator_id,j.privatecode,validdate FROM journey as j LEFT JOIN availabilityconditionday USING (availabilityconditionref) where
isavailable = true) as links,
(SELECT j.id,j.operator_id,j.privatecode,validdate FROM journey as j LEFT JOIN availabilityconditionday USING (availabilityconditionref) where
isavailable = true) as rechts
WHERE links.id != rechts.id AND links.validdate = rechts.validdate AND links.privatecode = rechts.privatecode
""")
rows = cur.fetchall()
cur.close()
duplicateTripidentifiers = len(rows) == 0
assert uniqueTripidentifiers
| Python | 0 | |
3693b1aea769af1e0fbe31007a00f3e33bcec622 | Add function to solve two pair sum | aids/sorting_and_searching/pair_sum.py | aids/sorting_and_searching/pair_sum.py | '''
Given an integer array, output all pairs that sum up to a specific value k
'''
from binary_search import binary_search_iterative
def pair_sum_sorting(arr, k):
'''
Using sorting - O(n logn)
'''
number_of_items = len(arr)
if number_of_items < 2:
return
arr.sort()
for index, item in enumerate(arr):
index_pair = binary_search_iterative(arr, index, number_of_items - 1, k - item)
if index_pair and index_pair > index:
print item, arr[index_pair]
def pair_sum_set(arr, k):
'''
Using set - O(n) (time - average case), O(n) (space)
'''
if len(arr) < 2:
return
seen = set()
output = set()
for item in arr:
target = k - item
if target not in seen:
seen.add(target)
else:
output.add(item, target) # print item, target
# for output with non-duplicate i.e. (1,3) and (3,1) are the samw thing
# output.add((min(num, target), max(num, target)))
print '\n'.join([str(item) for item in output]) | Python | 0.002605 | |
72cfd9b52e860aaaca05e7ef7941d0b4e17ad95f | Add vocab_word.py | vocab_word.py | vocab_word.py | import cv2
import numpy as np
from os import listdir
from os.path import isfile, join
from numpy import *
from scipy.cluster.vq import kmeans,vq
def buildVocabulary(path,k,grid_m,grid_n):
files = [ f for f in listdir(path) if isfile(join(path,f)) ]
dict_vocab = array([])
for i in range(0,grid_m):
for j in range(0,grid_n):
for f in files:
total_desc = array([])
img = cv2.imread(path+f)
desc = localFeature(img,grid_m,grid_n,i,j)
if len(desc.shape) == 1:
desc = array([desc])
if len(total_desc) == 0:
total_desc = desc
else:
total_desc = np.append(total_desc,desc,axis = 0)
vocab,dist = kmeans(total_desc,k) # k is the seed number
if len(dict_vocab) == 0:
dict_vocab = [vocab]
else:
dict_vocab = np.append(dict_vocab,[vocab],axis = 0)
def findWord(dict_vocab,path,grid_m,grid_n):
files = [ f for f in listdir(path) if isfile(join(path,f)) ]
word_hist = array([])
for f in files:
img = cv2.imread(path+f)
line_hist = array([])
for i in range(0,grid_m):
for j in range(0,grid_n):
desc = localFeature(img,grid_m,grid_n,i,j)
hist = buildWordHist(desc,dict_vocab[grid_n*i+j])
if len(line_hist) == 0:
line_hist = hist
else
line_hist = np.hstack((line_hist,hist))
if len(word_hist) == 0:
word_hist = line_hist
else:
word_hist = np.vstack((word_hist,line_hist))
return word_hist
def buildWordHist(desc,dict_part):
index,temp = vq(desc,dict_part)
k = dict_part.shape[0]
hist,bucket = np.histogram(index,bins = range(k+1))
return hist
def main():
path = '/home/alicelee0606/helloflask/'
d_path = path+'database/'
t_path = path+'testcase/'
k = 180
grid_m = 1
grid_n = 1
dict_vocab = buildVocabulary(d_path,k,grid_m,grid_n)
d_hist = findWord(dict_vocab,d_path,grid_m,grid_n)
t_hist = findWord(dict_vocab,t_path,grid_m,grid_n)
| Python | 0.998753 | |
9a67d63650b751c7b876f248bb3d82e619b37725 | Add new script to create a list of words from frequencies | frequenciesToWords.py | frequenciesToWords.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Spell corrector - http://www.chiodini.org/
# Copyright © 2015 Luca Chiodini <luca@chiodini.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import argparse
import codecs
import sys
def main():
parser = argparse.ArgumentParser(
description="Script to get pure words from unigrams frequencies.")
parser.add_argument("-f", "--file", help="source file to be processed",
required=True)
parser.add_argument("-o", "--output", help="output file with results",
required=True)
args = parser.parse_args()
words = set()
# Process input file and save keys.
with codecs.open(args.file, 'r', 'utf8') as f:
idx = 0
for line in f:
if idx > 0: # skip first line (header)
vals = line.rsplit(' ', 1)
words.add(vals[0])
idx += 1
# Write keys to output file.
with codecs.open(args.output, 'w', 'utf8') as f:
for w in words:
f.write("%s\n" % w)
if __name__ == '__main__':
sys.exit(main())
| Python | 0 | |
875fd0f57b1cbead04bd60b7d8c19cd1f106595a | add example python server | Server/server.py | Server/server.py | #!/usr/bin/env python
import tornado.ioloop
import tornado.web
import tornado.websocket
from tornado.options import define, options, parse_command_line
import os
import json
import uuid
define("port", default=8888, help="run on the given port", type=int)
clients = set()
metadatas = dict()
class DiscoveryClient():
connection = None
relations = set()
def __init__(self, c):
self.connection = c
class WebSocketHandler(tornado.websocket.WebSocketHandler):
def open(self):
clients.add(DiscoveryClient(self))
return None
def on_close(self):
for client in clients:
if client.connection == self:
clients.remove(client)
break
def on_message(self, msg):
payload = json.loads(msg)
# decompose json
body = payload["body"]
header = payload["header"]
# handle `absence`
if header["type"] == "absence":
print "Recived `absence` message: %s" % (body["id"])
for client in clients:
if client.connection == self:
client.relations.remove(body["id"])
# handle `presence`
if header["type"] == "presence":
print "Recived `presence` message: %s" % (body["id"])
payload = json.dumps({"header": {"type": "metadata"}, "body": metadatas[body["id"]]})
for client in clients:
if client.connection == self:
client.relations.add(body["id"])
# send metadata user to client
client.connection.write_message(payload, binary=True)
# handle `metadata`
if header["type"] == "metadata":
print "Recived `metadata` message: %s" % (body)
metadatas[body["id"]] = body
payload = json.dumps({"header": {"type": "metadata"}, "body": body})
for client in clients:
client.connection.ws_connection.write_message(payload, binary=True)
app = tornado.web.Application([
(r'/chat', WebSocketHandler)
])
if __name__ == '__main__':
parse_command_line()
print "Listening on port %i" % (options.port)
app.listen(options.port)
tornado.ioloop.IOLoop.instance().start()
| Python | 0 | |
4535d6c41e17031b943e7016fc7de6f76b890f17 | Put the test into the correct directory. | test/lib/test_inputsource.py | test/lib/test_inputsource.py | ########################################################################
# test/xslt/test_inputsource.py
import os
from amara.lib import inputsource, iri, treecompare
module_dir = os.path.dirname(os.path.abspath(__file__))
rlimit_nofile = 300
try:
import resource
except ImportError:
pass
else:
rlimit_nofile = resource.getrlimit(resource.RLIMIT_NOFILE)[0] + 10
def test_many_inputsources():
assert rlimit_nofile < 20000, "is your file limit really that large?"
# Amara's inputsource consumes a filehandle, in the 'stream' attribute
# See what happens if we run out of file handles.
sources = []
filename = os.path.join(module_dir, "borrowed", "da_20000714_02.xslt")
for i in range(rlimit_nofile):
try:
sources.append(inputsource(filename))
except:
print "Failed after", i, "files"
raise
| Python | 0.999992 | |
5bbb2a994397374356964b1db4c23b6b8ff5c848 | Add the 'version' variable. | TODO/__init__.py | TODO/__init__.py | # The MIT License
#
# Copyright (c) 2016 Jeremie DECOCK (http://www.jdhp.org)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# PEP0440 compatible formatted version, see:
# https://www.python.org/dev/peps/pep-0440/
#
# Generic release markers:
# X.Y
# X.Y.Z # For bugfix releases
#
# Admissible pre-release markers:
# X.YaN # Alpha release
# X.YbN # Beta release
# X.YrcN # Release Candidate
# X.Y # Final release
#
# Dev branch marker is: 'X.Y.dev' or 'X.Y.devN' where N is an integer.
# 'X.Y.dev0' is the canonical version of 'X.Y.dev'
#
__version__ = '0.1.dev0'
__all__ = ['TODO']
| Python | 0.001248 | |
fcd96cb766f3211a185a3aadbd7c8dde795134ca | Add ILCommand class | il_commands.py | il_commands.py | """Classes representing IL commands, including procedures to generate asm code
from a given IL command.
"""
import spots
class ILCommand:
    """Abstract interface shared by every IL command.

    Concrete commands must override all of the methods below; each base
    version only raises NotImplementedError.
    """

    def __init__(self):
        # The base class is a pure interface and is never instantiated.
        raise NotImplementedError

    def input_values(self):
        """Return the set of values this command reads."""
        raise NotImplementedError

    def output_values(self):
        """Return the set of values this command writes."""
        raise NotImplementedError

    def clobber_spots(self):
        """Return the set of spots whose contents this command may destroy."""
        raise NotImplementedError

    def make_asm(self, spotmap, asm_code):
        """Emit assembly implementing this command into asm_code.

        The emitted code may read anything in input_values(), overwrite
        anything in output_values(), and freely trash any spot listed by
        clobber_spots().

        asm_code (ASMCode) - Object to which to save generated code.
        spotmap - Dictionary mapping each input/output value to a spot.
        """
        raise NotImplementedError
class AddCommand:
    """ADD - adds arg1 and arg2, then saves to output"""

    def __init__(self, output, arg1, arg2):
        self.output = output
        self.arg1 = arg1
        self.arg2 = arg2

    def input_values(self):
        """Return the set of values read by this command."""
        return {self.arg1, self.arg2}

    def output_values(self):
        """Return the set of values modified by this command."""
        return {self.output}

    def clobber_spots(self):
        """Return the set of spots clobbered by this command."""
        # Current implementation lazily clobbers RAX always.
        # BUG FIX: `set(spots.RAX)` tried to *iterate* the single RAX spot;
        # the intent is a one-element set containing RAX.
        return {spots.RAX}

    def make_asm(self, spotmap, asm_code):
        """Emit `mov/add/mov` through RAX so that output = arg1 + arg2."""
        arg1_asm = spotmap[self.arg1].asm_str(self.arg1.ctype.size)
        arg2_asm = spotmap[self.arg2].asm_str(self.arg2.ctype.size)
        output_asm = spotmap[self.output].asm_str(self.output.ctype.size)
        rax_asm = spots.RAX.asm_str(self.arg1.ctype.size)
        asm_code.add_command("mov", rax_asm, arg1_asm)
        asm_code.add_command("add", rax_asm, arg2_asm)
        asm_code.add_command("mov", output_asm, rax_asm)
| Python | 0 | |
f51bccaebdf0992a708ac96d329b3218df23c3d0 | Create welcome.py | welcome.py | welcome.py | # coding=utf-8
import sys
reload(sys)
sys.setdefaultencoding('utf8')
sys.path.append('pyBot/ext_libs')
import re
import ast
import json
import time
import login
import datetime
import requests
from urllib2 import urlopen
from random import randrange
from sseclient import SSEClient as EventSource
# How far back (in minutes) to scan the recent-changes feed.
minutes = 33
# Raw wikitext page listing the welcome-sign templates, one per '*' line.
signUrl = 'https://ru.wikipedia.org/w/?action=raw&utf8=1&title=User:LatitudeBot/Sign'
token, cookies = login.login()
# Build the pool of welcome signatures from the on-wiki list.
signList = []
signData = urlopen(signUrl).readlines()
for line in signData:
    line = str(line.decode('UTF-8').rstrip('\n'))
    if re.match(r'^\*', line):
        signList.append('{{Hello}} ' + re.sub(r'^\*\s', '', line) + ' ~~~~~')
# Collect distinct non-bot, non-anonymous editors from the last `minutes`.
r_users = []
payload = {'action': 'query', 'format': 'json', 'list': 'recentchanges',
           'rcprop': 'user|timestamp', 'rcshow': '!bot|!anon', 'rctype': 'new|edit', 'rcend': (datetime.datetime.now() - datetime.timedelta(minutes = minutes)).strftime("%Y-%m-%d %H:%M:%S"), 'rclimit': 5000, 'token': token}
r_changes = json.loads(requests.post('https://ru.wikipedia.org/w/api.php', data=payload, cookies=cookies).text)
users = ast.literal_eval('{query}'.format(**r_changes))
users = ast.literal_eval('{recentchanges}'.format(**users))
usersCheck = []
usersList = ''
for user in users:
    if user['user'] not in usersCheck:
        usersCheck.append(user['user'])
        usersList += user['user'] + '|'
# Keep only unblocked newcomers (1-24 edits) whose talk page was never
# deleted before.
payload = {'action': 'query', 'format': 'json', 'utf8': '', 'list': 'users', 'ususers': usersList.rstrip('|'), 'usprop': 'blockinfo|editcount|groups', 'token': token}
r_userinfo = json.loads(requests.post('https://ru.wikipedia.org/w/api.php', data=payload, cookies=cookies).text)
userinfo = ast.literal_eval('{query}'.format(**r_userinfo))
userinfo = ast.literal_eval('{users}'.format(**userinfo))
for user in userinfo:
    if ('blockid' not in user) and ('invalid' not in user):
        if (user['editcount'] > 0) and (user['editcount'] < 25):
            payload = {'action': 'query', 'format': 'json', 'utf8': '', 'list': 'logevents', 'letype': 'delete', 'letitle': 'User talk:' + user['name'], 'token': token}
            r_isdelete = json.loads(requests.post('https://ru.wikipedia.org/w/api.php', data=payload, cookies=cookies).text)
            isdelete = ast.literal_eval('{query}'.format(**r_isdelete))
            isdelete = ast.literal_eval('{logevents}'.format(**isdelete))
            if len(isdelete) == 0:
                r_users.append(user['name'])
# Post a randomly-chosen welcome sign on each selected user's talk page.
for r_user in r_users:
    random_index = randrange(0, len(signList))
    sign = signList[random_index]
    payload = {'action': 'edit', 'format': 'json', 'title': 'User talk:' + r_user, 'utf8': '', 'createonly': '', 'notminor': '', 'text': sign, 'summary': u'Добро пожаловать!', 'token': token}
    r_edit = requests.post('https://ru.wikipedia.org/w/api.php', data=payload, cookies=cookies)
| Python | 0 | |
0cf909cce9ba47f34297e87ae800f49b7ea4e18a | Correct ci failed tests | homeassistant/components/thermostat/radiotherm.py | homeassistant/components/thermostat/radiotherm.py | """
homeassistant.components.thermostat.radiotherm
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Adds support for Radio Thermostat wifi-enabled home thermostats
"""
import logging
from homeassistant.components.thermostat import (ThermostatDevice, STATE_COOL,
STATE_IDLE, STATE_HEAT)
from homeassistant.const import (CONF_HOST, CONF_NAME, TEMP_FAHRENHEIT)
from urllib.error import URLError
REQUIREMENTS = ['radiotherm']
def setup_platform(hass, config, add_devices, discovery_info=None):
    """ Sets up the Radio Thermostat. """
    logger = logging.getLogger(__name__)

    # The radiotherm package is optional at install time; bail out with a
    # logged explanation when it is missing.
    try:
        import radiotherm
    except ImportError:
        logger.exception(
            "Error while importing dependency radiotherm. "
            "Did you maybe not install the radiotherm dependency?")
        return

    host = config.get(CONF_HOST)
    name = config.get(CONF_NAME)
    if host is None:
        logger.error("host not defined in config.")
        return

    try:
        device = radiotherm.get_thermostat(host)
    except URLError:
        logger.exception("Unable to connect to Radio Thermostat")
        return

    add_devices([RadioThermostat(device, name)])
class RadioThermostat(ThermostatDevice):
    """ Represent a Radio Thermostat. """

    def __init__(self, device, name=None):
        self.device = device
        if name:
            self.set_name(name)

    @property
    def name(self):
        """ Returns the name of the Radio Thermostat. """
        return self.device.name['raw']

    @property
    def unit_of_measurement(self):
        """ Unit of measurement this thermostat expresses itself in. """
        return TEMP_FAHRENHEIT

    @property
    def device_state_attributes(self):
        """ Returns device specific state attributes. """
        # Move these to Thermostat Device and make them global
        return {
            "humidity": None,
            "target_humidity": None,
            "fan": self.device.fmode['human'],
            "mode": self.device.tmode['human']
        }

    @property
    def current_temperature(self):
        """ Returns the current temperature. """
        return self.device.temp['raw']

    @property
    def operation(self):
        """ Returns current operation: heat, cool or idle. """
        if self.device.tmode['human'] == 'Cool':
            return STATE_COOL
        elif self.device.tmode['human'] == 'Heat':
            return STATE_HEAT
        else:
            return STATE_IDLE

    @property
    def target_temperature(self):
        """ Returns the temperature we try to reach, or None when idle. """
        if self.operation == STATE_COOL:
            temp = self.device.t_cool['raw']
        elif self.operation == STATE_HEAT:
            temp = self.device.t_heat['raw']
        else:
            # BUG FIX: previously `temp` was unbound in the idle state and
            # round(temp, 1) raised UnboundLocalError; report no target.
            return None
        return round(temp, 1)

    def set_temperature(self, temperature):
        """ Set new target temperature """
        if self.operation == STATE_COOL:
            self.device.t_cool = temperature
        elif self.operation == STATE_HEAT:
            self.device.t_heat = temperature

    def set_name(self, name):
        """ Set thermostat name """
        self.device.name = name
| """
homeassistant.components.thermostat.radiotherm
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Adds support for Radio Thermostat wifi-enabled home thermostats
"""
import logging
from homeassistant.components.thermostat import (ThermostatDevice, STATE_COOL,
STATE_IDLE, STATE_HEAT)
from homeassistant.const import (CONF_HOST, CONF_NAME, TEMP_FAHRENHEIT)
from urllib.error import URLError
REQUIREMENTS = ['radiotherm']
def setup_platform(hass, config, add_devices, discovery_info=None):
    """ Sets up the Radio Thermostat. """
    logger = logging.getLogger(__name__)
    try:
        import radiotherm
    except ImportError:
        logger.exception(
            "Error while importing dependency radiotherm. "
            "Did you maybe not install the radiotherm dependency?")
        return
    host = config.get(CONF_HOST)
    name = config.get(CONF_NAME)
    if host is None:
        logger.error("host not defined in config.")
        return
    try:
        tstat = radiotherm.get_thermostat(host)
    except URLError:
        # BUG FIX: `logger.Exception` is not a Logger method and raised
        # AttributeError here; Logger.exception logs with the traceback.
        # The unused `as err` binding was dropped as well.
        logger.exception(
            "Unable to connect to Radio Thermostat")
        return
    add_devices([RadioThermostat(tstat, name)])
class RadioThermostat(ThermostatDevice):
    """ Represent a Radio Thermostat. """

    def __init__(self, device, name=None):
        self.device = device
        if name:
            self.set_name(name)

    @property
    def name(self):
        """ Returns the name of the Radio Thermostat. """
        return self.device.name['raw']

    @property
    def unit_of_measurement(self):
        """ Unit of measurement this thermostat expresses itself in. """
        return TEMP_FAHRENHEIT

    @property
    def device_state_attributes(self):
        """ Returns device specific state attributes. """
        # Move these to Thermostat Device and make them global
        return {
            "humidity": None,
            "target_humidity": None,
            "fan": self.device.fmode['human'],
            "mode": self.device.tmode['human']
        }

    @property
    def current_temperature(self):
        """ Returns the current temperature. """
        return self.device.temp['raw']

    @property
    def operation(self):
        """ Returns current operation: heat, cool or idle. """
        if self.device.tmode['human'] == 'Cool':
            return STATE_COOL
        elif self.device.tmode['human'] == 'Heat':
            return STATE_HEAT
        else:
            return STATE_IDLE

    @property
    def target_temperature(self):
        """ Returns the temperature we try to reach, or None when idle. """
        if self.operation == STATE_COOL:
            temp = self.device.t_cool['raw']
        elif self.operation == STATE_HEAT:
            temp = self.device.t_heat['raw']
        else:
            # BUG FIX: `temp` was unbound in the idle state, so round()
            # raised UnboundLocalError; report no target instead.
            return None
        return round(temp, 1)

    def set_temperature(self, temperature):
        """ Set new target temperature """
        if self.operation == STATE_COOL:
            self.device.t_cool = temperature
        elif self.operation == STATE_HEAT:
            # BUG FIX: `self.device.t_heat` alone was a no-op expression;
            # the new heating target was never written to the device.
            self.device.t_heat = temperature

    def set_name(self, name):
        """ Set thermostat name """
        self.device.name = name
| Python | 0.000001 |
732898dc4858ae5cfc7eac3e470069ac702f6c12 | Add a command for deactivating a generation | mapit/management/commands/mapit_generation_deactivate.py | mapit/management/commands/mapit_generation_deactivate.py | # This script deactivates a particular generation
from optparse import make_option

from django.core.management.base import BaseCommand, CommandError

from mapit.models import Generation
class Command(BaseCommand):
help = 'Deactivate a generation'
args = '<GENERATION-ID>'
option_list = BaseCommand.option_list + (
make_option('--commit', action='store_true', dest='commit',
help='Actually update the database'),
make_option('--force', action='store_true', dest='force',
help='Force deactivation, even if it would leave no active generations'))
def handle(self, generation_id, **options):
generation_to_deactivate = Generation.objects.get(id=int(generation_id, 10))
if not generation_to_deactivate.active:
raise CommandError, "The generation %s wasn't active" % (generation_id,)
active_generations = Generation.objects.filter(active=True).count()
if active_generations <= 1 and not options['force']:
raise CommandError, "You're trying to deactivate the only active generation. If this is what you intended, please re-run the command with --force"
generation_to_deactivate.active = False
if options['commit']:
generation_to_deactivate.save()
print "%s - deactivated" % generation_to_deactivate
else:
print "%s - not deactivated, dry run" % generation_to_deactivate
| Python | 0.000004 | |
ffdf48c758877dd869f4fb4ce598635ff6545d5d | add script for just building the *.zip | mac/scripts/build-temp.py | mac/scripts/build-temp.py | #!/usr/bin/env python
import sys
import os
import os.path
import re
import time
import subprocess
import stat
import shutil
"""
Release build script designed to automate as much of the proces as possible
and minimize errors.
Pushing an update to mac client is involved. Files that must be changed:
* Info.plist
* conf.php and mac-ipupdater-relnotes-$ver.html
* IpUpdaterAppCast.xml
(update pubDate, sparkle:version and sparkle:shortVersionString)
Checklist for pushing a new release:
* edit Info.plist to set new version
* create mac-ipupdater-relnotes-$ver.html, check it in and deploy it
* run this script
* verify it made the right changes to IpUpdaterAppCast.xml
* checkin and deploy the binary to the website
* update conf.php to account for new version, check it in and deploy to website
* checkin and deploy IpUpdaterCast.xml
"""
# Key filesystem locations, all derived from this script's own location.
SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
SRC_DIR = os.path.realpath(os.path.join(SCRIPT_DIR, ".."))
RELEASE_BUILD_DIR = os.path.join(SRC_DIR, "build", "Release")
INFO_PLIST_PATH = os.path.realpath(os.path.join(SCRIPT_DIR, "..", "Info.plist"))
WEBSITE_DESKTOP_DIR = os.path.realpath(os.path.join(SCRIPT_DIR, "..", "..", "..", "website", "desktop"))
APPENGINE_SRC_DIR = os.path.realpath(os.path.join(SCRIPT_DIR, "..", "..", "..", "appengine-opendnsupdate"))
APP_CAST_PATH = os.path.join(APPENGINE_SRC_DIR, "IpUpdaterAppCast.xml")
def exit_with_error(s):
    """Print *s* and abort the whole script with exit code 1."""
    print(s)
    sys.exit(1)
def ensure_dir_exists(path):
    """Abort the script when *path* is not an existing directory."""
    # os.path.isdir() is already False for non-existent paths, so the
    # extra os.path.exists() check was redundant; also fixed the
    # "desn't" typo in the error message.
    if not os.path.isdir(path):
        exit_with_error("Directory '%s' doesn't exist" % path)
def ensure_file_exists(path):
    """Abort the script when *path* is not an existing regular file."""
    # os.path.isfile() already implies existence; also fixed the
    # "desn't" typo in the error message.
    if not os.path.isfile(path):
        exit_with_error("File '%s' doesn't exist" % path)
def ensure_file_doesnt_exist(path):
    """Bail out when *path* already exists on disk."""
    if not os.path.exists(path):
        return
    exit_with_error("File '%s' already exists and shouldn't. Forgot to update version in Info.plist?" % path)
def readfile(path):
    """Return the entire contents of the text file at *path*.

    Uses a context manager so the handle is closed even when read()
    raises (the original leaked the handle on error).
    """
    with open(path) as fo:
        return fo.read()
def writefile(path, data):
    """Write *data* to *path*, truncating any existing file.

    Uses a context manager so the handle is closed even when write()
    raises (the original leaked the handle on error).
    """
    with open(path, "w") as fo:
        fo.write(data)
def get_file_size(filename):
    """Return the size of *filename* in bytes."""
    # The st_size attribute is clearer than indexing the stat tuple
    # with stat.ST_SIZE; the value is identical.
    return os.stat(filename).st_size
def run_cmd_throw(*args):
    """Run an external command and return (stdout, stderr).

    Raises Exception when the command exits non-zero, after dumping the
    captured output. NOTE: the failure branch uses Python 2 `print`
    statements, so this script requires Python 2.
    """
    cmd = " ".join(args)
    print("Running '%s'" % cmd)
    # Capture both streams so they can be echoed on failure.
    cmdproc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    res = cmdproc.communicate()
    errcode = cmdproc.returncode
    if 0 != errcode:
        print "Failed with error code %d" % errcode
        print "Stdout:"
        print res[0]
        print "Stderr:"
        print res[1]
        raise Exception("'%s' failed with error code %d" % (cmd, errcode))
    return (res[0], res[1])
# a really ugly way to extract version from Info.plist
def extract_version_from_plist(plist_path):
    """Pull the CFBundleVersion value out of an Info.plist file."""
    plist = readfile(plist_path)
    # Grab everything between the CFBundleVersion key and the next key...
    version_rgx = re.compile("CFBundleVersion</key>(.+?)<key>", re.DOTALL | re.MULTILINE)
    version_element = version_rgx.search(plist).group(1)
    # ...then take the first <string> payload inside it.
    string_rgx = re.compile("<string>(.+?)</string>")
    version = string_rgx.search(version_element).group(1)
    return version.strip()
# build version is either x.y or x.y.z
def ensure_valid_version(version):
    """Exit the script unless *version* looks exactly like x.y or x.y.z."""
    # BUG FIX: the old patterns were unanchored at the end, so strings
    # like "1.2junk" were accepted; the trailing `$` now requires the
    # whole string to match.
    if re.match(r"\d+\.\d+(\.\d+)?$", version):
        return
    print("version ('%s') should be in format: x.y or x.y.z" % version)
    sys.exit(1)
def zip_name(version):
    """Return the release archive file name for *version*."""
    return "OpenDNS-Updater-Mac-{0}.zip".format(version)
def zip_path(version):
    """Return the full path of the release archive inside the build dir."""
    return os.path.join(RELEASE_BUILD_DIR, zip_name(version))
def build_and_zip(version):
    """Clean-build the Release target with xcodebuild and zip the .app bundle.

    Side effects only: changes the process working directory and writes
    the archive into RELEASE_BUILD_DIR.
    """
    os.chdir(SRC_DIR)
    print("Cleaning release target...")
    xcodeproj = "OpenDNS Updater.xcodeproj"
    # Dropped a stray trailing semicolon and the unused (out, err)
    # unpackings; run_cmd_throw already raises on failure.
    run_cmd_throw("xcodebuild", "-project", xcodeproj, "-configuration", "Release", "clean")
    print("Building release target...")
    run_cmd_throw("xcodebuild", "-project", xcodeproj, "-configuration", "Release", "-target", "OpenDNS Updater")
    ensure_dir_exists(RELEASE_BUILD_DIR)
    os.chdir(RELEASE_BUILD_DIR)
    run_cmd_throw("zip", "-9", "-r", zip_name(version), "OpenDNS Updater.app")
def main():
    """Build the updater app for the Info.plist version and verify the zip."""
    version = extract_version_from_plist(INFO_PLIST_PATH)
    print("Building mac updater version '%s'" % version)
    ensure_valid_version(version)
    build_and_zip(version)
    # Sanity check: the archive must exist where the website deploy expects it.
    ensure_file_exists(zip_path(version))
    print("Built '%s'" % zip_path(version))

if __name__ == "__main__":
    main()
| Python | 0 | |
f04d683d44507a53be39a2db54d545d2f2a1361b | Add example settings module | settings_example.py | settings_example.py | import os
import re
from imap import EmailCheckError, EmailServer
from postgresql import DatabaseServer
# Directory where CSV attachments are stored; defaults to the process cwd.
CSV_FOLDER = os.getcwd()

# Restrict emails by sender.
EMAIL_FROM = 'sender@example.com'

# Restrict emails by subject.
# Expected subject shape: YYYYMMDDhhmm.csv, captured as named groups.
EMAIL_SUBJECT_RE = re.compile(''.join([
    r'(?P<year>\d{4})',
    r'(?P<month>\d{2})',
    r'(?P<day>\d{2})',
    r'(?P<hour>\d{2})',
    r'(?P<minute>\d{2})',
    r'\.csv',
]))

# Destination table name, filled in from the regex groups above.
TABLE_NAME_FORMAT = 'data_{year}{month}'
def get_database_client():
    """Create a DatabaseServer using the example connection string."""
    return DatabaseServer(
        'my_username/my_password@database.example.com:5432/my_database')
def get_email_client():
    """Create an EmailServer logged in with the example credentials."""
    return EmailServer('mail.example.com', 'my_username', 'my_password')
| Python | 0 | |
29c20a662f347e720c5228f0853eaa6ac0164379 | Create ScreenSocket.py | ScreenSocket.py | ScreenSocket.py | #!/usr/bin/env python
##--Zachary Trette
##-- accepts commands for screen responses
##-- EV3- Remote - https://github.com/flyinactor91/EV3-Remote
## 2013-12-1
from socket import *
import sys, os
import pygame
from pygame.locals import *
def setup():
    """Initialise pygame and open a resizable, double-buffered 640x480 window."""
    pygame.init()
    size = (640, 480)
    screen = pygame.display.set_mode(size, HWSURFACE | DOUBLEBUF | RESIZABLE)
    return screen, size
def runCue(SorI, strOrImage):
    """Dispatch one display cue.

    SorI -- cue type: "I" image, "T" text, "C" clear the screen.
    strOrImage -- image path for "I", message for "T"; ignored for "C".

    Uses the module-level `scrn` surface and `size` produced by setup().
    """
    if SorI == "I":
        im = pygame.image.load(strOrImage)
        scrn.blit(pygame.transform.scale(im, size), (0, 0))
        pygame.display.flip()
    elif SorI == "T":
        _show_text(strOrImage)
    elif SorI == "C":
        # Clearing is just rendering an empty message on a black screen;
        # this removes the duplicated render/blit sequence from the "C"
        # branch.
        _show_text("")

def _show_text(message):
    """Render *message* centred on a black background (shared by T/C cues)."""
    basicfont = pygame.font.SysFont(None, 48)
    text = basicfont.render(message, True, (255, 0, 0), (0, 0, 0))
    textrect = text.get_rect()
    textrect.centerx = scrn.get_rect().centerx
    textrect.centery = scrn.get_rect().centery
    scrn.fill((0, 0, 0))
    scrn.blit(text, textrect)
    pygame.display.flip()
# --- server configuration -------------------------------------------------
TCP_PORT = 5678
defaultTimeout = 5
if len(sys.argv) == 2:
    # Optional CLI argument; NOTE(review): TCP_IP is never used below —
    # the server always binds all interfaces.
    TCP_IP = sys.argv[1]
BUFFER_SIZE = 1024
# Accept one cue command per connection from the remote controller.
screenSocket = socket(AF_INET, SOCK_STREAM)
screenSocket.bind(('' , TCP_PORT))
screenSocket.listen(1)
dne = False
scrn, size = setup()
# Message protocol: the literal 'QUIT' shuts the server down; otherwise the
# first character selects the cue type and the rest is its payload.
while not dne:
    connectionSocket , addr = screenSocket.accept()
    connectionSocket.settimeout(defaultTimeout)
    msg = connectionSocket.recv(BUFFER_SIZE)
    msg = msg.strip()
    if msg == 'QUIT':
        print "DONE"
        dne = True
    else:
        t = msg[0]
        s = msg[1:].strip()
        runCue(t,s)
        #connectionSocket.send()
    connectionSocket.close()
screenSocket.close()
| Python | 0.000001 | |
1d02b3cf6e7656c4e93159c38a97db30522438ad | Add project selection tests | tests/test_select_project.py | tests/test_select_project.py | from unittest import TestCase
from unittest.mock import MagicMock, patch
from sys import modules
from json import loads
package_jsons_data = ["""
{
"name": "first-project",
"nativescript": {
}
}
""",
"""
{
"name": "second-project",
"nativescript": {
}
}
"""]
class TestProjects(TestCase):
    """Tests for projects_space.select_project.

    The plugin module is looked up through sys.modules (registered under the
    hyphenated name "nativescript-plugin"); Sublime's command and window
    objects are replaced by minimal inline mocks, and filesystem/JSON access
    is patched so every candidate folder looks like a NativeScript project.
    """
    # No working dir and no open folders -> callback receives None.
    def test_select_project_when_no_project_should_return_none(self):
        callback_called = False
        projects_space = modules["nativescript-plugin.projects_space"]
        def _callback(device):
            self.assertIsNone(device)
            nonlocal callback_called
            callback_called = True
        class MockWindow:
            def folders(self):
                return []
        class MockNSCommand:
            def get_working_dir():
                return ""
            def get_window():
                return MockWindow()
        projects_space.select_project(MockNSCommand, _callback)
        self.assertTrue(callback_called)
    # Exactly one candidate project -> it is returned without prompting.
    @patch('nativescript-plugin.project.path.isdir', side_effect=lambda project_path: True)
    @patch('nativescript-plugin.project.path.isfile', side_effect=lambda file_path: True)
    @patch('nativescript-plugin.project.open', side_effect=lambda file, mode, buffering: package_jsons_data[0])
    @patch('nativescript-plugin.project.load', side_effect=lambda file: loads(file))
    def test_select_project_when_one_project_should_return_project_dir(self, isdir, isfile, open, load):
        callback_called = False
        projects_space = modules["nativescript-plugin.projects_space"]
        working_dir = "working-dir"
        def _callback(project_dir):
            self.assertEqual(project_dir, working_dir)
            nonlocal callback_called
            callback_called = True
        class MockWindow:
            def folders(self):
                return []
        class MockNSCommand:
            def get_working_dir():
                return working_dir
            def get_window():
                return MockWindow()
        projects_space.select_project(MockNSCommand, _callback)
        self.assertTrue(callback_called)
    # Multiple candidates -> the quick panel must be shown with all of them.
    @patch('nativescript-plugin.project.path.isdir', side_effect=lambda project_path: True)
    @patch('nativescript-plugin.project.path.isfile', side_effect=lambda file_path: True)
    @patch('nativescript-plugin.project.open', side_effect=lambda file, mode, buffering: package_jsons_data[0])
    @patch('nativescript-plugin.project.load', side_effect=lambda file: loads(file))
    def test_select_project_when_multiple_projects_should_prompt_user(self, isdir, isfile, open, load):
        projects_space = modules["nativescript-plugin.projects_space"]
        working_dir = "working-dir"
        sub_dir = "sub-dir"
        class MockWindow:
            def folders(self):
                return [sub_dir]
            def show_quick_panel(ns_command, actual_projects, panel_callback):
                assert [sub_dir, working_dir] == actual_projects
        class MockNSCommand:
            def get_working_dir():
                return working_dir
            def get_window():
                return MockWindow()
        projects_space.select_project(MockNSCommand, None)
    # Panel cancelled (index -1) -> callback receives None.
    @patch('nativescript-plugin.project.path.isdir', side_effect=lambda project_path: True)
    @patch('nativescript-plugin.project.path.isfile', side_effect=lambda file_path: True)
    @patch('nativescript-plugin.project.open', side_effect=lambda file, mode, buffering: package_jsons_data[0])
    @patch('nativescript-plugin.project.load', side_effect=lambda file: loads(file))
    def test_select_project_when_multiple_projects_when_user_cancels_should_return_none(self, isdir, isfile, open, load):
        callback_called = False
        projects_space = modules["nativescript-plugin.projects_space"]
        working_dir = "working-dir"
        sub_dir = "sub-dir"
        def _callback(project_dir):
            self.assertIsNone(project_dir)
            nonlocal callback_called
            callback_called = True
        class MockWindow:
            def folders(self):
                return [sub_dir]
            def show_quick_panel(ns_command, actual_projects, panel_callback):
                assert [sub_dir, working_dir] == actual_projects
                panel_callback(-1)
        class MockNSCommand:
            def get_working_dir():
                return working_dir
            def get_window():
                return MockWindow()
        projects_space.select_project(MockNSCommand, _callback)
        self.assertTrue(callback_called)
    # Panel selection -> callback receives the chosen project directory.
    @patch('nativescript-plugin.project.path.isdir', side_effect=lambda project_path: True)
    @patch('nativescript-plugin.project.path.isfile', side_effect=lambda file_path: True)
    @patch('nativescript-plugin.project.open', side_effect=lambda file, mode, buffering: package_jsons_data[0])
    @patch('nativescript-plugin.project.load', side_effect=lambda file: loads(file))
    def test_select_project_when_multiple_projects_when_user_selects_should_return_project_dir(self, isdir, isfile, open, load):
        callback_called = False
        projects_space = modules["nativescript-plugin.projects_space"]
        working_dir = "working-dir"
        sub_dir = "sub-dir"
        expected_projects = [sub_dir, working_dir]
        index = 1
        def _callback(project_dir):
            self.assertEqual(project_dir, expected_projects[index])
            nonlocal callback_called
            callback_called = True
        class MockWindow:
            def folders(self):
                return [sub_dir]
            def show_quick_panel(ns_command, actual_projects, panel_callback):
                assert expected_projects == actual_projects
                panel_callback(index)
        class MockNSCommand:
            def get_working_dir():
                return working_dir
            def get_window():
                return MockWindow()
        projects_space.select_project(MockNSCommand, _callback)
        self.assertTrue(callback_called)
if __name__ == '__main__':
    # BUG FIX: only TestCase is imported at module level, so the bare
    # `unittest.main()` raised NameError when this file was run directly.
    import unittest
    unittest.main()
| Python | 0 | |
d79ed2b4aa8315579688f4c6e9bfc8980e9717e3 | Create chghost.py | merc/features/ircv32/chghost.py | merc/features/ircv32/chghost.py | from merc import capability
from merc import feature
from merc import message
class ChgHostFeature(feature.Feature):
    """Feature bundle for the IRCv3.2 `chghost` extension."""
    NAME = __name__
# Module-level hook merc uses to install this feature.
install = ChgHostFeature.install
@ChgHostFeature.register_user_capability
class ChgHostCapability(capability.Capability):
    """Capability a client must negotiate to receive CHGHOST messages."""
    NAME = "chghost"
class _ChgHost(message.Command):
    """Shared implementation for CHGHOST/SACHGHOST.

    Subclasses choose whom the change applies to via get_target().
    """
    def handle_for(self, app, user, prefix):
        # Only IRC operators may change a user's displayed username/host.
        user.check_is_irc_operator()
        target = self.get_target(app, user)
        old_hostmask = target.hostmask
        target.username = self.username
        target.host = self.host
        # Broadcast under the old hostmask so clients can correlate the
        # change with the user they already know.
        app.network.user_broadcast(target, old_hostmask,
                                   ChgHost(self.username, self.host))
@ChgHostFeature.register_user_command
class ChgHost(_ChgHost):
    """CHGHOST <username> <host> -- change the issuing user's own ident/host."""
    NAME = "CHGHOST"
    MIN_ARITY = 2
    def __init__(self, username, host, *args):
        self.username = username
        self.host = host
    def as_command_params(self):
        return [self.username, self.host]
    def can_send_to(self, user):
        # Only deliver to clients that negotiated the chghost capability.
        return ChgHostCapability(user).get()
    def get_target(self, app, user):
        return user
@ChgHostFeature.register_user_command
class SAChgHost(_ChgHost):
    """SACHGHOST <target> <username> <host> -- change another user's ident/host."""
    NAME = "SACHGHOST"
    MIN_ARITY = 3
    def __init__(self, target, username, host, *args):
        self.target = target
        self.username = username
        self.host = host
    def get_target(self, app, user):
        # Look the named target up in the network's user registry.
        return app.users.get(self.target)
| Python | 0.000003 | |
f11f5cf946c61f45d5059ecdd828018cf0bb7a55 | Add pygments based lexical output processing | cloudmonkey/lexer.py | cloudmonkey/lexer.py | # -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# Pygments provides the colorized output; NOTE: the `except ImportError, e`
# form below is Python 2 only syntax.
try:
    from pygments import highlight
    from pygments.console import ansiformat
    from pygments.formatter import Formatter
    from pygments.formatters import Terminal256Formatter
    from pygments.lexer import bygroups, include, RegexLexer
    from pygments.token import *
    import sys
except ImportError, e:
    print e
# Token -> ansiformat() color name mapping used by MonkeyFormatter below.
MONKEY_COLORS = {
    Token: '',
    Whitespace: 'reset',
    Text: 'reset',
    Name: 'green',
    Operator: 'teal',
    Operator.Word: 'lightgray',
    String: 'purple',
    Keyword: '_red_',
    Error: 'red',
    Literal: 'yellow',
    Number: 'blue',
}
def get_colorscheme():
    """Return the token-to-color scheme for cloudmonkey output."""
    return MONKEY_COLORS
class MonkeyLexer(RegexLexer):
    """Pygments lexer for cloudmonkey's key/value and tabular output."""
    # Regex fragments for the token classes below.
    keywords = ['[a-z]*id', '[a-zA-Z]*:']
    attributes = ['[Tt]rue', '[Ff]alse']
    params = ['[a-z]*[Nn]ame', 'type', '[Ss]tate']
    uuid_rgx = r'[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}'
    date_rgx = r'[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9:]{8}-[0-9]{4}'
    # Runs at class-creation time to build the alternation regexes used in
    # `tokens`; it is not an instance method.
    def makelistre(lis):
        return r'(' + r'|'.join(lis) + r')'
    tokens = {
        'root': [
            (r' ', Whitespace),
            (date_rgx, Number),
            (uuid_rgx, Literal),
            (r'(?:\b\d+\b(?:-\b\d+|%)?)', Number),
            (r'^[-=]*\n', Operator.Word),
            (r'Error', Error),
            (makelistre(keywords), Keyword),
            (makelistre(attributes), Literal),
            (makelistre(params) + r'( = )(.*)', bygroups(Name, Operator,
                                                         String)),
            (makelistre(params), Name),
            (r'(^[a-zA-Z]* )(=)', bygroups(Name, Operator)),
            (r'\S+', Text),
        ]
    }
    # Pygments auto-detection hook: treat "[...]"-headed text as ours.
    def analyse_text(text):
        npos = text.find('\n')
        if npos < 3:
            return False
        return text[0] == '[' and text[npos - 1] == ']'
class MonkeyFormatter(Formatter):
    """Terminal formatter that colorizes tokens via MONKEY_COLORS."""
    def __init__(self, **options):
        Formatter.__init__(self, **options)
        self.colorscheme = get_colorscheme()
    def format(self, tokensource, outfile):
        # Adopt the output stream's encoding before delegating.
        self.encoding = outfile.encoding
        return Formatter.format(self, tokensource, outfile)
    def format_unencoded(self, tokensource, outfile):
        for ttype, value in tokensource:
            # Walk up the token hierarchy until a color is defined.
            color = self.colorscheme.get(ttype)
            while color is None:
                ttype = ttype[:-1]
                color = self.colorscheme.get(ttype)
            if color:
                # Colorize line by line so newlines are written uncolored.
                spl = value.split('\n')
                for line in spl[:-1]:
                    if line:
                        outfile.write(ansiformat(color, line))
                    outfile.write('\n')
                if spl[-1]:
                    outfile.write(ansiformat(color, spl[-1]))
            else:
                outfile.write(value)
def monkeyprint(text):
    """Highlight *text* with MonkeyLexer/MonkeyFormatter onto stdout."""
    fmter = MonkeyFormatter()
    lexer = MonkeyLexer()
    lexer.encoding = 'utf-8'
    fmter.encoding = 'utf-8'
    highlight(text, lexer, fmter, sys.stdout)
| Python | 0.000001 | |
e5055344001b8fa313ff7bcde3284d90bb6c2d62 | add preprocess program | compress.py | compress.py | #!/usr/bin/env python3
###############################################################################
# Copyright (c) 2015 Jamis Hoo
# Distributed under the MIT license
# (See accompanying file LICENSE or copy at http://opensource.org/licenses/MIT)
#
# Project:
# Filename: compress.py
# Version: 1.0
# Author: Jamis Hoo
# E-mail: hjm211324@gmail.com
# Date: Jul 26, 2015
# Time: 13:48:16
# Description:
###############################################################################
import os
import tarfile
# Input/output locations and block sizing.
TAR_DIR = "TAR/"
WORDS_DICTIONARY = "words"
INVERTED_INDEX = "index"
COMPRESS_DIR = "COMPRESS/"
BLOCK_SIZE = int(1024 * 1024 * 1024 * 10) # 10 GiB
# tolerate 1 GiB empty space at the end of each block
MIN_BLOCK_SIZE = int(BLOCK_SIZE * 0.9) # 9 GiB
# check files and dirs
if not os.path.isfile(INVERTED_INDEX):
    print(INVERTED_INDEX, "does not exist. ")
    exit(1)
if not os.path.isfile(WORDS_DICTIONARY):
    print(WORDS_DICTIONARY, "does not exist. ")
    exit(1)
if not os.path.isdir(TAR_DIR):
    print(TAR_DIR, "does not exist or isn't a directory. ")
    exit(1)
if os.path.exists(COMPRESS_DIR) and not os.path.isdir(COMPRESS_DIR):
    print(COMPRESS_DIR, "exists and is not directory. ")
    exit(1)
if not os.path.exists(COMPRESS_DIR):
    os.mkdir(COMPRESS_DIR)
# load words dictionary
# words dictionary: tar_filename -> keywords splited with comma
# Each input line: "<tar basename>\t<kw1>, <kw2>, ...".
words_dictionary = dict()
words_dict_file = open(WORDS_DICTIONARY)
for l in words_dict_file:
    index = l[: l.find('\t')]
    keywords = ",".join([ x.strip() for x in l[l.find('\t') + 1: -1].split(", ") ])
    words_dictionary[index] = keywords
words_dict_file.close()
# find the next compress block
# Blocks are named with 8-digit hex counters; resume at the last block, or
# start a fresh one when the last block is already (nearly) full.
compress_block_counter = 0
block_filename = format(compress_block_counter, "08x")
existing_compress_blocks = sorted(os.listdir(COMPRESS_DIR))
if len(existing_compress_blocks):
    last_block_filename = existing_compress_blocks[-1]
    last_block_size = os.path.getsize(COMPRESS_DIR + "/" + last_block_filename)
    compress_block_counter = int(last_block_filename, 16)
    if last_block_size > MIN_BLOCK_SIZE:
        compress_block_counter += 1
    # we use 8 digit hex number as filename, in the range of uint32
    block_filename = format(compress_block_counter, "08x")
block_handler = open(COMPRESS_DIR + "/" + block_filename, "ab")
print("Append at", COMPRESS_DIR + block_filename, hex(block_handler.tell()))
# append content to block handler
# return (block index, offset, size)
def append_to_block(content):
    """Write *content* (bytes) into the active block file.

    Mutates the module-level block_handler / compress_block_counter /
    block_filename when the active block would overflow BLOCK_SIZE.
    """
    global block_handler
    global compress_block_counter
    global block_filename
    # Roll over to the next block when this write would overflow it.
    if block_handler.tell() + len(content) > BLOCK_SIZE:
        block_handler.close()
        compress_block_counter += 1
        block_filename = format(compress_block_counter, "08x")
        block_handler = open(COMPRESS_DIR + "/" + block_filename, "ab")
    offset = block_handler.tell()
    block_index = compress_block_counter
    block_handler.write(content)
    assert block_handler.tell() - offset == len(content)
    return (block_index, offset, len(content))
inverted_index = dict()
# traverse each tar archive
for tar in os.listdir(TAR_DIR):
    # Only archives listed in the words dictionary get compressed.
    if tar[: -4] not in words_dictionary:
        print("WARN: TAR", tar[: -4], "not in words dictionary. ")
        continue
    keywords = words_dictionary[tar[: -4]]
    print(tar, ":", keywords)
    tar_handler = tarfile.open(TAR_DIR + "/" + tar)
    for tar_mem in tar_handler.getmembers():
        content = tar_handler.extractfile(tar_mem).read()
        file_info = append_to_block(content)
        if keywords not in inverted_index:
            inverted_index[keywords] = [file_info]
        else:
            inverted_index[keywords].append(file_info)
# append inverted index
# Row format: "<keywords>\t<block>,<offset>,<size>,..." with hex numbers.
index_handler = open(INVERTED_INDEX, "a")
for keywords, positions in inverted_index.items():
    output_str = keywords + "\t"
    for pos in positions:
        for i in pos:
            output_str += format(i, "x") + ","
    output_str = output_str[: -1] + "\n"
    index_handler.write(output_str)
| Python | 0.000001 | |
850f50dd2ec69db25d6ed19db49e35e0a2d8248b | Add basic script to do RJ-NEQ with complex | examples/neq-switching/run_equilibrium_setup.py | examples/neq-switching/run_equilibrium_setup.py | import numpy as np
import os
import tqdm
from openeye import oechem, oeiupac
from openmmtools import integrators, states, mcmc, constants
from openmoltools import forcefield_generators
from perses.rjmc.topology_proposal import TopologyProposal, SystemGenerator
from perses.rjmc.geometry import FFAllAngleGeometryEngine
from perses.annihilation.ncmc_switching import NCMCEngine
from perses.tests.utils import extractPositionsFromOEMOL
from simtk import openmm, unit
from io import StringIO
from simtk.openmm import app
import copy
from perses.dispersed.feptasks import compute_reduced_potential
import mdtraj as md
temperature = 300.0*unit.kelvin  # simulation temperature
beta = 1.0 / (temperature*constants.kB)  # inverse thermal energy 1/(kB*T)
def generate_complex_topologies_and_positions(ligand_filename, protein_pdb_filename):
    """Build a receptor-ligand complex for every ligand in the input file.

    Parameters
    ----------
    ligand_filename : str
        Molecule file readable by OpenEye containing one or more ligands.
    protein_pdb_filename : str
        PDB file with the receptor.

    Returns
    -------
    complex_topologies : dict
        canonical SMILES -> mdtraj.Topology of receptor joined with ligand
    complex_positions_dict : dict
        canonical SMILES -> Quantity array of complex positions (nanometers)
    """
    ifs = oechem.oemolistream()
    ifs.open(ligand_filename)
    # get the list of molecules, keyed by canonical SMILES
    mol_list = [oechem.OEMol(mol) for mol in ifs.GetOEMols()]
    mol_dict = {oechem.OEMolToSmiles(mol): mol for mol in mol_list}
    # BUGFIX: iterate .items(); iterating the dict directly yields keys only,
    # so the original two-name unpacking raised ValueError.
    ligand_topology_dict = {smiles: forcefield_generators.generateTopologyFromOEMol(mol)
                            for smiles, mol in mol_dict.items()}
    with open(protein_pdb_filename, 'r') as protein_pdbfile:
        pdb_file = app.PDBFile(protein_pdbfile)
    receptor_positions = pdb_file.positions
    receptor_topology = pdb_file.topology
    receptor_md_topology = md.Topology.from_openmm(receptor_topology)
    n_receptor_atoms = receptor_md_topology.n_atoms
    complex_topologies = {}
    complex_positions_dict = {}
    for smiles, ligand_topology in ligand_topology_dict.items():
        ligand_md_topology = md.Topology.from_openmm(ligand_topology)
        n_complex_atoms = ligand_md_topology.n_atoms + n_receptor_atoms
        copy_receptor_md_topology = copy.deepcopy(receptor_md_topology)
        # BUGFIX: allocate an (n_atoms, 3) zero array; np.array([n, 3]) built
        # a length-2 vector, and rebinding the name clobbered the result dict.
        complex_positions = unit.Quantity(np.zeros([n_complex_atoms, 3]),
                                          unit=unit.nanometers)
        complex_topology = copy_receptor_md_topology.join(ligand_md_topology)
        complex_topologies[smiles] = complex_topology
        ligand_positions = extractPositionsFromOEMOL(mol_dict[smiles])
        complex_positions[:n_receptor_atoms, :] = receptor_positions
        complex_positions[n_receptor_atoms:, :] = ligand_positions
        # BUGFIX: keep per-ligand positions in a separate dict keyed by SMILES
        # (the original assigned the array into itself).
        complex_positions_dict[smiles] = complex_positions
    return complex_topologies, complex_positions_dict
def solvate_system(topology, positions, system_generator, padding=9.0 * unit.angstrom, num_added=None, water_model='tip3p'):
    """Solvate *topology*/*positions* and build a parameterized system.

    Either *padding* or an explicit water count *num_added* controls the box.
    Returns (solvated_positions, solvated_topology, solvated_system).
    """
    solvation_model = app.Modeller(topology, positions)
    solvation_model.addSolvent(system_generator._forcefield,
                               model=water_model,
                               padding=padding,
                               numAdded=num_added)
    solvated_system = system_generator.build_system(solvation_model.topology)
    return solvation_model.positions, solvation_model.topology, solvated_system
def create_solvated_complex_systems(protein_pdb_filename, ligand_filename, output_directory, project_prefix):
    """Solvate every receptor-ligand complex and dump each to an .npy file.

    The first ligand's complex is solvated with the default padding, which
    fixes the water count; every other complex is solvated with exactly that
    many waters so all systems have matching sizes.  Outputs are written as
    ``{project_prefix}_{index}_initial.npy``.
    """
    barostat = openmm.MonteCarloBarostat(1.0*unit.atmosphere, temperature, 50)
    forcefield_kwargs = {'nonbondedMethod': app.PME,
                         'constraints': app.HBonds,
                         'hydrogenMass': 4 * unit.amus}
    system_generator = SystemGenerator(['ff99sbildn.xml', 'gaff.xml', 'tip3p.xml'],
                                       barostat=barostat,
                                       forcefield_kwargs=forcefield_kwargs)
    complex_topologies, complex_positions = generate_complex_topologies_and_positions(ligand_filename, protein_pdb_filename)
    list_of_smiles = list(complex_topologies.keys())
    # Solvate the first complex; it determines num_added for all the rest.
    initial_smiles = list_of_smiles[0]
    initial_topology = complex_topologies[initial_smiles]
    initial_positions = complex_positions[initial_smiles]
    solvated_initial_positions, solvated_topology, solvated_system = solvate_system(initial_topology, initial_positions, system_generator)
    md_topology = md.Topology.from_openmm(solvated_topology)
    num_added = md_topology.n_residues - initial_topology.n_residues
    if not os.path.exists(output_directory):
        os.mkdir(output_directory)
    np.save("{}_{}_initial.npy".format(project_prefix, 0), (solvated_initial_positions, md_topology, solvated_system))
    for i in tqdm.trange(1, len(list_of_smiles)):
        smiles = list_of_smiles[i]
        solvated_positions, solvated_topology, solvated_system = solvate_system(
            complex_topologies[smiles], complex_positions[smiles],
            system_generator, padding=None, num_added=num_added)
        np.save("{}_{}_initial.npy".format(project_prefix, i),
                (solvated_positions, md.Topology.from_openmm(solvated_topology), solvated_system))
if __name__=="__main__":
    import sys
    import yaml

    # Usage: script <setup.yaml>; the 'setup' section names all inputs.
    yaml_filename = sys.argv[1]
    with open(yaml_filename, "r") as yamlfile:
        # BUGFIX: parse the opened file object; the original passed the
        # filename *string* to yaml.load, which parsed the name as YAML.
        options = yaml.load(yamlfile)

    setup_options = options['setup']
    ligand_filename = setup_options['ligand_filename']
    protein_pdb_filename = setup_options['protein_pdb_filename']
    project_prefix = setup_options['project_prefix']
    output_directory = setup_options['output_directory']
create_solvated_complex_systems(protein_pdb_filename, ligand_filename, output_directory, project_prefix) | Python | 0 | |
d4a7cdd400fe29458cc584455c7b082efed99e2b | Add files via upload | timedscripts/rollupwinlog.py | timedscripts/rollupwinlog.py | import requests
import json
import pandas as pd
import sqlite3
from datetime import date
from datetime import datetime
from dateutil import parser
import time
SERVER = '10.24.25.130:8000'

conn = sqlite3.connect('usersrollup.db')
c = conn.cursor()

def makedb():
    """Create the ``users`` rollup table (first run only)."""
    c.execute('''Create Table users (username text,compname text,stat text,time text)''')
    conn.commit()

# Snapshot one rollup timestamp so every row stored by this run matches.
now = str(datetime.now())
r = requests.get('http://'+SERVER+'/get_dup')
newtxt = json.loads(r.text)
if (newtxt=={}):
    print("Returned nothing.")
else:
    for x in newtxt:
        # Throttle so the log server is not hammered between users.
        time.sleep(5)
        r = requests.get('http://'+SERVER+'/get_log?username='+x+'&compname=all')
        thisreturn = json.loads(r.text)
        for key,value in thisreturn.items():
            data2 = (value['username'],value['compname'],value['stat'],now)
            try:
                # BUGFIX: parameterized placeholders instead of string
                # concatenation -- avoids SQL injection and breakage when a
                # value contains quotes or parentheses.
                c.execute("INSERT INTO users VALUES (?,?,?,?)", data2)
            except sqlite3.OperationalError:
                # Table does not exist yet: create it and retry the insert.
                makedb()
                c.execute("INSERT INTO users VALUES (?,?,?,?)", data2)
            conn.commit()
    #need to request to clear the log
    #r = requests.get('http://'+SERVER+'/db?action=clearlog')
| Python | 0 | |
98fbfe6e65c4cb32ea0f4f6ce6cba77f7fadcb7b | Add test for vendor object creation | app/api/tests/test_vendor_api.py | app/api/tests/test_vendor_api.py | from django.test import Client, TestCase
from .utils import obtain_api_key, create_admin_account
class VendorApiTest(TestCase):
    """Exercise the vendor GraphQL API exposed at ``/api``."""

    def setUp(self):
        """Create an admin account and capture an API key for all requests."""
        self.client = Client()
        self.endpoint = '/api'
        self.admin_test_credentials = ('admin', 'admin@taverna.com', 'qwerty123')
        create_admin_account(*self.admin_test_credentials)
        api_key = obtain_api_key(self.client, *self.admin_test_credentials)
        self.header = {'HTTP_X_TAVERNATOKEN': api_key}
        self.vendors = (
            ('vendor1', 'info1'),
            ('vendor2', 'info2')
        )

    def make_request(self, query, method='GET'):
        """Send *query* to the endpoint; returns the decoded JSON response."""
        payload = {'query': query}
        if method == 'GET':
            return self.client.get(self.endpoint, data=payload, **self.header).json()
        if method == 'POST':
            return self.client.post(self.endpoint, data=payload, **self.header).json()

    def create_vendor(self, name, info):
        """Create a vendor through the createVendor mutation."""
        query = '''
            mutation{
              createVendor(input: {name: "%s", info: "%s"}){
                vendor{
                  id,
                  originalId,
                  name,
                  info
                }
              }
            }
          ''' % (name, info)
        return self.make_request(query, 'POST')

    def retrieve_vendor(self, vendor_id):
        """Fetch a single vendor's name by id."""
        query = 'query {vendor(id: "%s") {name}}' % (vendor_id)
        return self.make_request(query)

    def create_multiple_vendors(self):
        """Create every vendor in ``self.vendors``; returns the responses."""
        return [self.create_vendor(name, info) for name, info in self.vendors]

    def test_creation_of_vendor_object(self):
        # For new vendor record
        response = self.create_vendor('vendor4', 'info4')
        created_vendor = response['vendor']
        expected = {
            'vendor': {
                'id': created_vendor['id'],
                'originalId': created_vendor['originalId'],
                'name': 'vendor4',
                'info': 'info4'
            }
        }
        self.assertEqual(expected, response)
| Python | 0 | |
2cd2d7a20f2d19221b40aac9bfa1303dbfd97459 | create metashare.wsgi | metashare/apache/metashare.wsgi | metashare/apache/metashare.wsgi | import os
import sys
# Put the CEF-ELRC checkout (and its bundled site-packages) on sys.path so
# 'metashare' and its vendored dependencies resolve before system packages.
path = '/var/www/CEF-ELRC'
if path not in sys.path:
    sys.path.insert(0, path)
sys.path.insert(0, '{0}/metashare'.format(path))
sys.path.append('{0}/lib/python2.7/site-packages'.format(path))
# Django reads the settings module name from the environment at import time.
os.environ['DJANGO_SETTINGS_MODULE'] = 'metashare.settings'
# Imported only after sys.path and DJANGO_SETTINGS_MODULE are configured.
import django.core.handlers.wsgi
application = django.core.handlers.wsgi.WSGIHandler() | Python | 0 | |
123959bc3594299c2f1d4c54b11a996e92147347 | Add missing migration | system_maintenance/migrations/0002_auto_20181214_2122.py | system_maintenance/migrations/0002_auto_20181214_2122.py | # Generated by Django 2.1.4 on 2018-12-14 21:22
from django.db import migrations
import markupfield_helpers.helpers
class Migration(migrations.Migration):
    # Auto-generated by Django: converts four free-text fields on the
    # system-maintenance models to markupfield_helpers MarkupFields
    # (adds rendered companion columns).
    dependencies = [
        ('system_maintenance', '0001_initial'),
    ]
    operations = [
        migrations.AlterField(
            model_name='documentationrecord',
            name='documentation',
            field=markupfield_helpers.helpers.MarkupField(blank=True, help_text='Document how to perform a task.', null=True, rendered_field=True),
        ),
        migrations.AlterField(
            model_name='maintenancerecord',
            name='description',
            field=markupfield_helpers.helpers.MarkupField(blank=True, help_text='Enter a description of the system maintenance performed.', null=True, rendered_field=True),
        ),
        migrations.AlterField(
            model_name='maintenancerecord',
            name='problems',
            field=markupfield_helpers.helpers.MarkupField(blank=True, help_text='Describe problems that arose during system maintenance.', null=True, rendered_field=True),
        ),
        migrations.AlterField(
            model_name='maintenancerecord',
            name='procedure',
            field=markupfield_helpers.helpers.MarkupField(blank=True, help_text='Enter details of how the system maintenance was performed.', null=True, rendered_field=True),
        ),
    ]
| Python | 0.0002 | |
71b97c202373ee127d57bcdb53ef3f6f4e8d7d57 | Revert "Revert "Added a wrapper script for buildOnServer that merges certain files"" | busytown/doAllTheBuild.py | busytown/doAllTheBuild.py | #!/usr/bin/python
import os,sys,json
# SAMPLE USE CASE: python doAllTheBuild.py --no-daemon -PuseMaxDepVersions
#
# Runs both gradle builds, then merges the output that needs merging
# Arguments to this script are passed to both gradle builds without modification
# Exceptions to this policy:
# if DIST_DIR=path/to/dir is not passed, it is assumed to be out/dist(/ui)
# build variables OUT_DIR=out(/ui), ANDROID_HOME=prebuilts/fullsdk-linux
# are set before each gradle build
# -p frameworks/support(/ui) is passed by default
# arguments with '=' in them (e.g. SNAPSHOT=true) are prefixed to the gradle runs
# If you want to run only one gradle build, you do not want to merge.
# So do not run this; instead run the gradle build directly
# Run from the repository root (three levels above this script's directory).
os.chdir(os.path.dirname(os.path.abspath(__file__)))
os.chdir("../../../")
projectDirArg = "-p frameworks/support"
ui = "/ui"
tasks = "buildOnServer"
gradlew = "frameworks/support/gradlew"
gradlewC = "frameworks/support/ui/gradlew"
outDirArg = "OUT_DIR=out"
androidHomeArg = "ANDROID_HOME=prebuilts/fullsdk-linux"
androidxGradleCommand = " ".join([outDirArg, androidHomeArg, gradlew, tasks, projectDirArg])
composeGradleCommand = " ".join([outDirArg + ui, androidHomeArg, gradlewC, tasks, projectDirArg + ui])
# DIST_DIR handling: honor an explicit DIST_DIR=... argument, otherwise
# default to out/dist; the compose build gets the same dir plus "/ui".
distargs = [arg for arg in sys.argv if "DIST_DIR=" in arg]
distDir = "out/dist" if len(distargs) == 0 else distargs[0][8:]
distarg = "" if len(distargs) == 0 else " " + distargs[0]
distargC = "" if len(distargs) == 0 else " " + distargs[0] + ui
# VAR=value args are prefixed to the gradle invocations; everything else
# (except this script's own name) is appended after the task list.
# NOTE(review): `arg not in preargs` below is a substring test against the
# joined string; it only behaves as set-membership while no argument is a
# substring of another -- confirm.
preargs = " ".join([arg for arg in sys.argv if '=' in arg and arg not in distargs]) # args of the form VAR=thing
postargs = " ".join([arg for arg in sys.argv if ".py" not in arg and arg not in distargs and arg not in preargs])
# remove "doAllTheBuild.py"
def runGradleBuilds():
    """Run buildOnServer for the androidx project, then the compose (ui) one."""
    os.system(" ".join([preargs + distarg, androidxGradleCommand, postargs]))
    os.system(" ".join([preargs + distargC, composeGradleCommand, postargs]))
def mergeAggregateBuildInfoFiles():
    """Concatenate the androidx and compose aggregate build-info JSON into
    all_aggregate_build_info.txt (expects cwd to be the dist directory)."""
    N_COMMON_ARTIFACTS = 2  # the number of artifacts in both androidx and compose
    # benchmark-common and benchmark-junit4
    # BUGFIX: read and write through context managers so no handles leak.
    with open("androidx_aggregate_build_info.txt") as androidx_file:
        androidxBuildInfo = json.load(androidx_file)["artifacts"]
    with open("ui/androidx_aggregate_build_info.txt") as compose_file:
        composeBuildInfo = json.load(compose_file)["artifacts"]
    resultJson = {"artifacts": androidxBuildInfo + composeBuildInfo}
    # TODO: deduplicate the N_COMMON_ARTIFACTS artifacts present in both
    # builds (the original length sanity-assert was disabled for this reason).
    with open("all_aggregate_build_info.txt", 'w') as outfile:
        json.dump(resultJson, outfile, sort_keys=True, indent=4, separators=(',', ': '))
def mergeBuildInfoFolders():
    """Overlay the compose build-info directory onto the androidx one."""
    os.system("cp -au ui/build-info/. build-info/")
    # -a = all in directory; -u = overwrite iff newer
# Kick off both Gradle builds before merging their outputs below.
runGradleBuilds()
def doThingsInDistDir():
    """Merge build outputs inside DIST_DIR (changes cwd and stays there)."""
    os.chdir(distDir)
    # Both merge helpers resolve their relative paths against distDir
    # after this chdir.
    mergeAggregateBuildInfoFiles()
    mergeBuildInfoFolders()
doThingsInDistDir() | Python | 0.000002 | |
b080ae154cc8e948e3f4e7b79bfbde0221a31e61 | Add devices detected by ping as SOURCE_TYPE_ROUTER instead of GPS (#5625) | homeassistant/components/device_tracker/ping.py | homeassistant/components/device_tracker/ping.py | """
Tracks devices by sending a ICMP ping.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.ping/
device_tracker:
- platform: ping
count: 2
hosts:
host_one: pc.local
host_two: 192.168.2.25
"""
import logging
import subprocess
import sys
from datetime import timedelta
import voluptuous as vol
from homeassistant.components.device_tracker import (
PLATFORM_SCHEMA, DEFAULT_SCAN_INTERVAL, SOURCE_TYPE_ROUTER)
from homeassistant.helpers.event import track_point_in_utc_time
from homeassistant import util
from homeassistant import const
import homeassistant.helpers.config_validation as cv
DEPENDENCIES = []
_LOGGER = logging.getLogger(__name__)
CONF_PING_COUNT = 'count'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(const.CONF_HOSTS): {cv.string: cv.string},
vol.Optional(CONF_PING_COUNT, default=1): cv.positive_int,
})
class Host:
    """One tracked host, probed with the system ``ping`` binary."""

    def __init__(self, ip_address, dev_id, hass, config):
        """Initialize the Host pinger."""
        self.hass = hass
        self.ip_address = ip_address
        self.dev_id = dev_id
        self._count = config[CONF_PING_COUNT]
        win_cmd = ['ping', '-n 1', '-w 1000', self.ip_address]
        posix_cmd = ['ping', '-n', '-q', '-c1', '-W1',
                     self.ip_address]
        self._ping_cmd = win_cmd if sys.platform == "win32" else posix_cmd

    def ping(self):
        """Send one ICMP echo request; True when the host answered."""
        proc = subprocess.Popen(self._ping_cmd, stdout=subprocess.PIPE)
        try:
            proc.communicate()
        except subprocess.CalledProcessError:
            return False
        return proc.returncode == 0

    def update(self, see):
        """Probe up to ``self._count`` times; report the device when seen."""
        attempts = 0
        while attempts < self._count:
            if self.ping():
                see(dev_id=self.dev_id, source_type=SOURCE_TYPE_ROUTER)
                return True
            attempts += 1
        _LOGGER.debug("ping KO on ip=%s failed=%d", self.ip_address, attempts)
def setup_scanner(hass, config, see):
    """Setup the Host objects and return the update function."""
    hosts = [Host(ip, dev_id, hass, config) for (dev_id, ip) in
             config[const.CONF_HOSTS].items()]
    # Worst case each host needs _count one-second pings, so pad the default
    # scan interval by one second per configured ping per host.
    interval = timedelta(seconds=len(hosts) * config[CONF_PING_COUNT]) + \
        DEFAULT_SCAN_INTERVAL
    _LOGGER.info("Started ping tracker with interval=%s on hosts: %s",
                 interval, ",".join([host.ip_address for host in hosts]))
    def update(now):
        """Update all the hosts on every interval time."""
        for host in hosts:
            host.update(see)
        # Self-rescheduling: each run queues the next one interval later.
        track_point_in_utc_time(hass, update, now + interval)
        return True
    # Run the first scan immediately; update() re-arms itself afterwards.
    return update(util.dt.utcnow())
| """
Tracks devices by sending a ICMP ping.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/device_tracker.ping/
device_tracker:
- platform: ping
count: 2
hosts:
host_one: pc.local
host_two: 192.168.2.25
"""
import logging
import subprocess
import sys
from datetime import timedelta
import voluptuous as vol
from homeassistant.components.device_tracker import (
PLATFORM_SCHEMA, DEFAULT_SCAN_INTERVAL)
from homeassistant.helpers.event import track_point_in_utc_time
from homeassistant import util
from homeassistant import const
import homeassistant.helpers.config_validation as cv
DEPENDENCIES = []
_LOGGER = logging.getLogger(__name__)
CONF_PING_COUNT = 'count'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(const.CONF_HOSTS): {cv.string: cv.string},
vol.Optional(CONF_PING_COUNT, default=1): cv.positive_int,
})
class Host:
"""Host object with ping detection."""
def __init__(self, ip_address, dev_id, hass, config):
"""Initialize the Host pinger."""
self.hass = hass
self.ip_address = ip_address
self.dev_id = dev_id
self._count = config[CONF_PING_COUNT]
if sys.platform == "win32":
self._ping_cmd = ['ping', '-n 1', '-w 1000', self.ip_address]
else:
self._ping_cmd = ['ping', '-n', '-q', '-c1', '-W1',
self.ip_address]
def ping(self):
"""Send ICMP ping and return True if success."""
pinger = subprocess.Popen(self._ping_cmd, stdout=subprocess.PIPE)
try:
pinger.communicate()
return pinger.returncode == 0
except subprocess.CalledProcessError:
return False
def update(self, see):
"""Update device state by sending one or more ping messages."""
failed = 0
while failed < self._count: # check more times if host in unreachable
if self.ping():
see(dev_id=self.dev_id)
return True
failed += 1
_LOGGER.debug("ping KO on ip=%s failed=%d", self.ip_address, failed)
def setup_scanner(hass, config, see):
"""Setup the Host objects and return the update function."""
hosts = [Host(ip, dev_id, hass, config) for (dev_id, ip) in
config[const.CONF_HOSTS].items()]
interval = timedelta(seconds=len(hosts) * config[CONF_PING_COUNT]) + \
DEFAULT_SCAN_INTERVAL
_LOGGER.info("Started ping tracker with interval=%s on hosts: %s",
interval, ",".join([host.ip_address for host in hosts]))
def update(now):
"""Update all the hosts on every interval time."""
for host in hosts:
host.update(see)
track_point_in_utc_time(hass, update, now + interval)
return True
return update(util.dt.utcnow())
| Python | 0 |
cebaba60bf12b00ce267fb84cb3029eb318041c2 | Flask hello world | synthia.py | synthia.py | from flask import Flask
app = Flask(__name__)
# Single route: GET / returns a plain-text greeting.
@app.route('/')
def hello_world():
    """Return the hello-world greeting for the site root."""
    return 'Hello, world.'
| Python | 0.999432 | |
7c33e8c7a386e911d835f81e637515d40dfc4e62 | Add a Laplace equation solving benchmark (from Numpy) | benchmarks/bench_laplace.py | benchmarks/bench_laplace.py | """
Benchmark Laplace equation solving.
From the Numpy benchmark suite, original code at
https://github.com/yarikoptic/numpy-vbench/commit/a192bfd43043d413cc5d27526a9b28ad343b2499
"""
import numpy as np
from numba import jit
dx = 0.1  # grid spacing along x
dy = 0.1  # grid spacing along y
dx2 = (dx * dx)  # precomputed squares used by the finite-difference update
dy2 = (dy * dy)
@jit(nopython=True)
def laplace(N, Niter):
    """Run Niter Jacobi sweeps of the Laplace stencil on an N x N grid.

    The top row is pinned to 1 as the boundary condition; returns the grid.
    """
    u = np.zeros((N, N))
    u[0] = 1
    for _ in range(Niter):
        vertical = (u[2:, 1:-1] + u[:-2, 1:-1]) * dy2
        horizontal = (u[1:-1, 2:] + u[1:-1, :-2]) * dx2
        u[1:-1, 1:-1] = (vertical + horizontal) / (2 * (dx2 + dy2))
    return u
class Laplace:
    """Benchmark harness (asv-style) timing the jitted laplace() kernel."""
    N = 150      # grid side length for the timed run
    Niter = 200  # Jacobi iterations per timed run

    def setup(self):
        # Warm up: a small run triggers numba JIT compilation so that
        # compilation time is excluded from the timed measurement.
        self.run_laplace(10, 10)

    def run_laplace(self, N, Niter):
        """Execute the kernel; the result is intentionally discarded."""
        laplace(N, Niter)

    def time_laplace(self):
        self.run_laplace(self.N, self.Niter)
| Python | 0.000031 | |
9a3c5517446a2f26875925b1c42607ea6aa31b29 | Implementing the first step of the Web App Flow. | getAuthenticationCode.py | getAuthenticationCode.py | #!/usr/bin/env python
#---------------------------------------------------------------------------
# Copyright 2013 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#---------------------------------------------------------------------------
#
# This script implements the first step of the Web Application Flow:
#
# http://developer.github.com/v3/oauth/#web-application-flow
#
#---------------------------------------------------------------------------
import requests
import json
# Load the registered OAuth application's credentials (Python 2 script).
jsonFile=open('LocalConfiguration/accountCredentials.json')
accountCredentials=json.load(jsonFile)
jsonFile.close()
clientID=accountCredentials['Client ID']
redirectURI=accountCredentials['Redirect URI']
scopes=accountCredentials['Scopes']
state=accountCredentials['State']
clientIdString='client_id='+clientID
# NOTE(review): client_id travels both in the URL and in the JSON body; the
# GitHub authorize endpoint documents query parameters, so confirm the body
# form is honored before relying on redirect_uri/scope/state from it.
payload={'client_id':clientID,'redirect_uri':redirectURI,'scope':scopes,'state':state}
url='https://github.com/login/oauth/authorize?'+clientIdString
headers={'Content-Type':'application/json','Accept':'application/json'}
r = requests.get(url,data=json.dumps(payload),headers=headers)
print r
| Python | 0.998887 | |
8956ee3bd89b12da20ebb1946d41c4133467ae79 | Add py-pure-eval (#19180) | var/spack/repos/builtin/packages/py-pure-eval/package.py | var/spack/repos/builtin/packages/py-pure-eval/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyPureEval(PythonPackage):
    """This is a Python package that lets you safely evaluate certain AST nodes
    without triggering arbitrary code that may have unwanted side effects."""
    homepage = "https://github.com/alexmojaki/pure_eval"
    url = "https://github.com/alexmojaki/pure_eval/archive/master.zip"
    git = "https://github.com/alexmojaki/pure_eval.git"
    # Only the moving 'master' branch is packaged; no tagged release yet.
    version('master', branch='master')
    # setuptools >= 44 at build time; runtime supports Python 3.5-3.9.
    depends_on('python@3.5:3.9', type=('build', 'run'))
    depends_on('py-setuptools@44:', type='build')
| Python | 0.000002 | |
6d1eda812d57c6c251fb037b005103172de886af | Update __init__.py | erpnext/__init__.py | erpnext/__init__.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import frappe
__version__ = '7.0.63'
def get_default_company(user=None):
    '''Get default company for user'''
    from frappe.defaults import get_user_default_as_list

    if not user:
        user = frappe.session.user
    companies = get_user_default_as_list(user, 'company')
    if companies:
        return companies[0]
    # No per-user default: fall back to the site-wide Global Defaults value.
    return frappe.db.get_single_value('Global Defaults', 'default_company')
def get_default_currency():
    '''Returns the currency of the default company'''
    company = get_default_company()
    if not company:
        # No default company configured anywhere.
        return None
    return frappe.db.get_value('Company', company, 'default_currency')
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import frappe
__version__ = '7.1.0-beta'
def get_default_company(user=None):
'''Get default company for user'''
from frappe.defaults import get_user_default_as_list
if not user:
user = frappe.session.user
companies = get_user_default_as_list(user, 'company')
if companies:
default_company = companies[0]
else:
default_company = frappe.db.get_single_value('Global Defaults', 'default_company')
return default_company
def get_default_currency():
'''Returns the currency of the default company'''
company = get_default_company()
if company:
return frappe.db.get_value('Company', company, 'default_currency')
| Python | 0.000072 |
f0bd64992b05b0e7edd4b1ac6e99e1cd9db213d6 | Create search.py | search.py | search.py | Python | 0.000001 | ||
c74a9943bbd9e7908ba884e0fea5b3390e8d668e | add migration | wastd/observations/migrations/0004_auto_20160905_1943.py | wastd/observations/migrations/0004_auto_20160905_1943.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.9 on 2016-09-05 11:43
from __future__ import unicode_literals
from django.db import migrations, models
import wastd.observations.models
class Migration(migrations.Migration):
    # Auto-generated by Django 1.9.9: pins seven observation fields to a
    # fixed na/absent/present choice set, restricts encounter location
    # accuracy to three tiers, and routes media uploads via encounter_media.
    dependencies = [
        ('observations', '0003_auto_20160902_1206'),
    ]
    operations = [
        migrations.AlterField(
            model_name='distinguishingfeatureobservation',
            name='algal_growth',
            field=models.CharField(choices=[('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')], default='na', help_text='', max_length=300, verbose_name='Algal growth on carapace'),
        ),
        migrations.AlterField(
            model_name='distinguishingfeatureobservation',
            name='barnacles',
            field=models.CharField(choices=[('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')], default='na', help_text='', max_length=300, verbose_name='Barnacles'),
        ),
        migrations.AlterField(
            model_name='distinguishingfeatureobservation',
            name='damage_injury',
            field=models.CharField(choices=[('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')], default='na', help_text='', max_length=300, verbose_name='Obvious damage or injuries'),
        ),
        migrations.AlterField(
            model_name='distinguishingfeatureobservation',
            name='missing_limbs',
            field=models.CharField(choices=[('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')], default='na', help_text='', max_length=300, verbose_name='Missing limbs'),
        ),
        migrations.AlterField(
            model_name='distinguishingfeatureobservation',
            name='propeller_damage',
            field=models.CharField(choices=[('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')], default='na', help_text='', max_length=300, verbose_name='Propeller strike damage'),
        ),
        migrations.AlterField(
            model_name='distinguishingfeatureobservation',
            name='scanned_for_pit_tags',
            field=models.CharField(choices=[('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')], default='na', help_text='', max_length=300, verbose_name='Scanned for PIT tags'),
        ),
        migrations.AlterField(
            model_name='distinguishingfeatureobservation',
            name='tagging_scars',
            field=models.CharField(choices=[('na', 'Not observed'), ('absent', 'Confirmed absent'), ('present', 'Confirmed present')], default='na', help_text='', max_length=300, verbose_name='Tagging scars'),
        ),
        migrations.AlterField(
            model_name='encounter',
            name='location_accuracy',
            field=models.CharField(choices=[('10', 'GPS reading at exact location (10 m)'), ('1000', 'Site centroid or place name (1 km)'), ('10000', 'Rough estimate (10 km)')], default='1000', help_text='The accuracy of the supplied location.', max_length=300, verbose_name='Location accuracy (m)'),
        ),
        migrations.AlterField(
            model_name='mediaattachment',
            name='attachment',
            field=models.FileField(help_text='Upload the file', max_length=500, upload_to=wastd.observations.models.encounter_media, verbose_name='File attachment'),
        ),
    ]
| Python | 0.000001 | |
9115628cf10e194f1975e01142d8ae08ab5c4b06 | Add test for pandas dataframe loading | joommf/test_odtreader.py | joommf/test_odtreader.py | def test_odtreader_dynamics_example():
from joommf.sim import Sim
from joommf.mesh import Mesh
from joommf.energies.exchange import Exchange
from joommf.energies.demag import Demag
from joommf.energies.zeeman import FixedZeeman
from joommf.drivers import evolver
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
t_sim = 0.5e-9 # simulation time (s)
gamma = 2.21e5
alpha = 0.1
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='dynamics_example', debug=True)
# Add energies.
sim.add_energy(Exchange(A))
sim.add_energy(Demag())
sim.add_energy(FixedZeeman(H))
sim.set_evolver(
evolver.LLG(t_sim, m_init, Ms, alpha, gamma, name='evolver'))
# Set initial magnetisation.
sim.set_m(m_init)
# Run simulation.
sim.run()
assert sim.df.time.values[-1] == 0.5e-09
| Python | 0.000001 | |
64d8f45e1868fd73415e8f1fe6acc21868d45fa7 | Add rfreceiver mode selector | catkin/src/appctl/scripts/rfreceiver_mode_select.py | catkin/src/appctl/scripts/rfreceiver_mode_select.py | #!/usr/bin/env python
"""
This node listens for keyfob button presses and changes the mode accordingly.
"""
import rospy
from appctl.msg import Mode
from std_msgs.msg import Byte
class ButtonHandler:
    """Translates keyfob button codes into published appctl mode changes."""

    def __init__(self, modes, mode_pub):
        # modes: {button_code: mode_name}; mode_pub publishes Mode messages.
        self.modes = modes
        self.mode_pub = mode_pub

    def handle_msg(self, msg):
        """Publish the mode bound to this button; unknown codes are ignored."""
        try:
            mode_name = self.modes[msg.data]
        except KeyError:
            return
        self.mode_pub.publish(mode=mode_name)
def main():
    rospy.init_node('rfreceiver_mode_select')
    # Keyfob button number -> appctl mode published on /appctl/mode.
    modes = {
        1: 'tactile',
        2: 'attended'
    }
    mode_pub = rospy.Publisher(
        '/appctl/mode',
        Mode,
        queue_size = 1
    )
    button_handler = ButtonHandler(modes, mode_pub)
    # The subscription stays alive until shutdown; the handle itself is unused.
    mode_sub = rospy.Subscriber(
        '/rfreceiver/buttondown',
        Byte,
        button_handler.handle_msg
    )
    rospy.spin()
if __name__=='__main__':
    main()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| Python | 0 | |
9e58f5507ba0a128c696bdec218d244df27feb87 | add list_graspability script | jsk_arc2017_common/scripts/list_graspability.py | jsk_arc2017_common/scripts/list_graspability.py | #!/usr/bin/env python
import jsk_arc2017_common
graspability = jsk_arc2017_common.get_object_graspability()
# One block per object (numbered from 1), then each grasp style with its score.
for object_number, object_name in enumerate(graspability, start=1):
    print('{:02}: {}'.format(object_number, object_name))
    style_scores = graspability[object_name]
    for style in style_scores:
        print('    {}: {}'.format(style, style_scores[style]))
| Python | 0.000001 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.