commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
a466a89cd18252c6d90fd3b590148ca3268ff637 | Add a couple of simple tests for LPD geometry | European-XFEL/h5tools-py | karabo_data/tests/test_lpd_geometry.py | karabo_data/tests/test_lpd_geometry.py | from matplotlib.figure import Figure
import numpy as np
from karabo_data.geometry2 import LPD_1MGeometry
def test_inspect():
geom = LPD_1MGeometry.from_quad_positions([
(11.4, 299),
(-11.5, 8),
(254.5, -16),
(278.5, 275)
])
# Smoketest
fig = geom.inspect()
assert isinstance(fig, Figure)
def test_snap_assemble_data():
geom = LPD_1MGeometry.from_quad_positions([
(11.4, 299),
(-11.5, 8),
(254.5, -16),
(278.5, 275)
])
stacked_data = np.zeros((16, 256, 256))
img, centre = geom.position_modules_fast(stacked_data)
assert img.shape == (1202, 1104)
assert tuple(centre) == (604, 547)
assert np.isnan(img[0, 0])
assert img[50, 50] == 0
| bsd-3-clause | Python | |
efac5a6167e2ac437072084f0e48147ccccab793 | test read() on all examples | rcs-analytics/icy | test/test_examples.py | test/test_examples.py | import icy
import contextlib
import os
from datetime import datetime
class DummyFile(object):
def write(self): pass
if __name__ == '__main__':
print('running examples tests ...')
t0 = datetime.now()
results = [0, 0, 0, 0]
for ex in sorted(icy.examples):
t1 = datetime.now()
try:
with contextlib.redirect_stdout(DummyFile):
data = icy.read(icy.examples[ex])
n_keys = len(data.keys())
if n_keys > 0:
print('data {:<15} [SUCCESS] {:.1f}s, {} dfs, {}'.format(
ex, (datetime.now()-t1).total_seconds(), n_keys, icy.mem(data)))
results[0] += 1
else:
print('data {:<15} [NO IMPORT] {:.1f}s'.format(ex, (datetime.now()-t1).total_seconds()))
results[1] += 1
except:
print('data {:<15} [EXCEPTION] {:.1f}s'.format(ex, (datetime.now()-t1).total_seconds()))
results[2] += 1
print()
print('ran {} tests in {:.1f} seconds'.format(len(icy.examples),
(datetime.now()-t0).total_seconds()))
print('{} success / {} no import / {} exception'.format(
str(results[0]), str(results[1]), str(results[2])))
| mit | Python | |
796f000d3ba2fd2f289bb68ce817a81eb56dcb3d | Test for issue200 | armandobs14/rdflib,marma/rdflib,RDFLib/rdflib,dbs/rdflib,marma/rdflib,yingerj/rdflib,RDFLib/rdflib,dbs/rdflib,ssssam/rdflib,avorio/rdflib,yingerj/rdflib,armandobs14/rdflib,RDFLib/rdflib,dbs/rdflib,armandobs14/rdflib,armandobs14/rdflib,marma/rdflib,dbs/rdflib,avorio/rdflib,ssssam/rdflib,avorio/rdflib,RDFLib/rdflib,yingerj/rdflib,ssssam/rdflib,ssssam/rdflib,yingerj/rdflib,marma/rdflib,avorio/rdflib | test/test_issue200.py | test/test_issue200.py | #!/usr/bin/env python
import os, sys
import rdflib
import unittest
try:
from hashlib import md5
except ImportError:
from md5 import md5
if sys.platform == 'Java':
from nose import SkipTest
raise SkipTest('No os.pipe() in Jython, skipping')
# Adapted from http://icodesnip.com/snippet/python/simple-universally-unique-id-uuid-or-guid
def bnode_uuid():
"""
Generates a uuid on behalf of Python 2.4
"""
import random, time, socket
t = long( time.time() * 1000.0 )
r = long( random.random()*100000000000000000L )
try:
a = socket.gethostbyname( socket.gethostname() )
except:
# if we can't get a network address, just imagine one
a = random.random()*100000000000000000L
data = str(t)+' '+str(r)+' '+str(a)
data = md5(data).hexdigest()
yield data
class TestRandomSeedInFork(unittest.TestCase):
def test_same_bnodeid_sequence_in_fork(self):
"""Demonstrates that with os.fork(), the child process produces
the same sequence of BNode ids as does the parent process.
"""
r, w = os.pipe() # these are file descriptors, not file objects
pid = os.fork()
if pid:
pb1 = rdflib.term.BNode()
os.close(w) # use os.close() to close a file descriptor
r = os.fdopen(r) # turn r into a file object
txt = r.read()
os.waitpid(pid, 0) # make sure the child process gets cleaned up
else:
os.close(r)
w = os.fdopen(w, 'w')
cb = rdflib.term.BNode()
w.write(cb)
w.close()
os._exit(0)
assert txt == str(pb1), "Test now obsolete, random seed working"
def test_random_not_reseeded_in_fork(self):
"""Demonstrates ineffectiveness of reseeding Python's random.
"""
r, w = os.pipe() # these are file descriptors, not file objects
pid = os.fork()
if pid:
pb1 = rdflib.term.BNode()
os.close(w) # use os.close() to close a file descriptor
r = os.fdopen(r) # turn r into a file object
txt = r.read()
os.waitpid(pid, 0) # make sure the child process gets cleaned up
else:
os.close(r)
import random, time
try:
preseed = os.urandom(16)
except NotImplementedError:
preseed = ''
# Have doubts about this. random.seed will just hash the string
random.seed('%s%s%s' % (preseed, os.getpid(), time.time()))
del preseed
w = os.fdopen(w, 'w')
cb = rdflib.term.BNode()
w.write(cb)
w.close()
os._exit(0)
assert txt == str(pb1), "Reseeding worked, this test is obsolete"
def test_bnode_uuid_differs_in_fork(self):
"""Demonstrates that with os.fork(), the child process produces
a sequence of BNode ids that differs from the sequence produced
by the parent process.
"""
r, w = os.pipe() # these are file descriptors, not file objects
pid = os.fork()
if pid:
pb1 = rdflib.term.BNode(_sn_gen=bnode_uuid(), _prefix="")
os.close(w) # use os.close() to close a file descriptor
r = os.fdopen(r) # turn r into a file object
txt = r.read()
os.waitpid(pid, 0) # make sure the child process gets cleaned up
else:
os.close(r)
w = os.fdopen(w, 'w')
cb = rdflib.term.BNode(_sn_gen=bnode_uuid(), _prefix="")
w.write(cb)
w.close()
os._exit(0)
assert txt != str(pb1), "Parent process BNode id: " + \
"%s, child process BNode id: %s" % (
txt, str(pb1))
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | Python | |
defe99d80e102527140fb5742f4deedb6748f05e | add execute. | wings27/sc_spider | sc_spider/execute.py | sc_spider/execute.py | from scrapy.cmdline import execute
execute()
| apache-2.0 | Python | |
80f35ad0d3a6a1f04eb0339bb1088ebe6eb27af5 | Add result classes for update/insert/delete ops | vmalloc/mongomock,marcinbarczynski/mongomock,mdomke/mongomock,drorasaf/mongomock,magaman384/mongomock,StarfishStorage/mongomock,julianhille/mongomock | mongomock/results.py | mongomock/results.py | try:
from pymongo.results import InsertOneResult
from pymongo.results import InsertManyResult
from pymongo.results import UpdateResult
from pymongo.results import DeleteResult
except ImportError:
class _WriteResult(object):
def __init__(self, acknowledged=True):
self.__acknowledged = acknowledged
@property
def acknowledged(self):
return self.__acknowledged
class InsertOneResult(_WriteResult):
__slots__ = ('__inserted_id', '__acknowledged')
def __init__(self, inserted_id, acknowledged=True):
self.__inserted_id = inserted_id
super(InsertOneResult, self).__init__(acknowledged)
@property
def inserted_id(self):
return self.__inserted_id
class InsertManyResult(_WriteResult):
__slots__ = ('__inserted_ids', '__acknowledged')
def __init__(self, inserted_ids, acknowledged=True):
self.__inserted_ids = inserted_ids
super(InsertManyResult, self).__init__(acknowledged)
@property
def inserted_ids(self):
return self.__inserted_ids
class UpdateResult(_WriteResult):
__slots__ = ('__raw_result', '__acknowledged')
def __init__(self, raw_result, acknowledged=True):
self.__raw_result = raw_result
super(UpdateResult, self).__init__(acknowledged)
@property
def raw_result(self):
return self.__raw_result
@property
def matched_count(self):
if self.upserted_id is not None:
return 0
self.__raw_result.get('n', 0)
@property
def modified_count(self):
return self.__raw_result.get('nModified')
@property
def upserted_id(self):
return self.__raw_result.get('upserted')
class DeleteResult(_WriteResult):
__slots__ = ('__raw_result', '__acknowledged')
def __init__(self, raw_result, acknowledged=True):
self.__raw_result = raw_result
super(DeleteResult, self).__init__(acknowledged)
@property
def raw_result(self):
return self.__raw_result
@property
def deleted_count(self):
return self.__raw_result.get('n', 0)
| bsd-3-clause | Python | |
35b5ef2d39363e893796a8384209034093d8a11e | add import script for Burnley | chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_burnley.py | polling_stations/apps/data_collection/management/commands/import_burnley.py | from data_collection.management.commands import BaseXpressWebLookupCsvImporter
class Command(BaseXpressWebLookupCsvImporter):
council_id = 'E07000117'
# note: extension is TSV, but file is actually comma seperated
addresses_name = 'BurnleyPropertyPostCodePollingStationWebLookup-2017-03-10.TSV'
stations_name = 'BurnleyPropertyPostCodePollingStationWebLookup-2017-03-10.TSV'
elections = ['local.lancashire.2017-05-04']
| bsd-3-clause | Python | |
6fce34ca55d4dfaee921480077b48e0984d8fe1c | Create max_of_three.py | lcnodc/codes,lcnodc/codes | 09-revisao/practice_python/max_of_three.py | 09-revisao/practice_python/max_of_three.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Exercise 28: Max Of Three
Implement a function that takes as input three variables, and returns
the largest of the three. Do this without using the Python max()
function!
The goal of this exercise is to think about some internals that Python
normally takes care of for us. All you need is some variables and if
statements!
"""
import random
def get_max(a, b, c):
if a > b:
if a > c:
return a
elif c > a:
return c
else:
return a, c
elif b > a:
if b > c:
return b
elif c > b:
return c
else:
return b, c
else:
if a > c:
return a, b
elif c > a:
return c
else:
return a, b, c
def get_random_numbers(amount):
for number in range(amount):
yield random.randint(1, 100)
if __name__ == "__main__":
for i in range(10):
a, b, c = get_random_numbers(3)
print(
"The max of three (%i, %i, %i) is %s" %
(a, b, c, get_max(a, b, c)))
| mit | Python | |
54940c2532d90e23b919031e948e4c7b608b1866 | add import script for Wycombe | DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations | polling_stations/apps/data_collection/management/commands/import_wycombe.py | polling_stations/apps/data_collection/management/commands/import_wycombe.py | from django.contrib.gis.geos import Point
from data_collection.management.commands import BaseCsvStationsShpDistrictsImporter
from data_finder.helpers import geocode_point_only, PostcodeError
class Command(BaseCsvStationsShpDistrictsImporter):
srid = 27700
council_id = 'E07000007'
districts_name = 'Polling Districts_region'
stations_name = 'polling-stations.csv'
elections = ['local.buckinghamshire.2017-05-04']
def district_record_to_dict(self, record):
return {
'internal_council_id': str(record[0]).strip(),
'name': str(record[4]).strip(),
}
def format_address(self, address):
address_parts = address.split(',')
postcode = address_parts[-1].strip()
del(address_parts[-1])
address_text = "\n".join([a.strip() for a in address_parts])
return (address_text, postcode)
def station_record_to_dict(self, record):
codes = record.polling_district.split(', ')
address, postcode = self.format_address(record.polling_place)
try:
point = geocode_point_only(postcode)
location = Point(point['wgs84_lon'], point['wgs84_lat'], srid=4326)
except PostcodeError:
location = None
stations = []
for code in codes:
stations.append({
'internal_council_id': code,
'address' : address,
'postcode': postcode,
'polling_district_id': code,
'location': location,
})
return stations
| bsd-3-clause | Python | |
0c5c2dd033a4d586e24fc76a7f5398db52470ac6 | Add python wrapper | jarikomppa/escapi,jarikomppa/escapi,jarikomppa/escapi,jarikomppa/escapi | python/camera.py | python/camera.py | """
A simple python wrapper around escapi
Usage:
from camera import Device
device = Deveice.connect(0, 500, 500)
image = device.get_image()
"""
import os
from ctypes import *
from PIL import Image
def resolve(name):
f = os.path.join(os.path.dirname(__file__), name)
return f
class CAPTURE_PROPETIES:
CAPTURE_BRIGHTNESS = 1,
CAPTURE_CONTRAST = 2,
CAPTURE_HUE = 3,
CAPTURE_SATURATION = 4,
CAPTURE_SHARPNESS = 5,
CAPTURE_GAMMA = 6,
CAPTURE_COLORENABLE = 7,
CAPTURE_WHITEBALANCE = 8,
CAPTURE_BACKLIGHTCOMPENSATION = 9,
CAPTURE_GAIN = 10,
CAPTURE_PAN = 11,
CAPTURE_TILT = 12,
CAPTURE_ROLL = 13,
CAPTURE_ZOOM = 14,
CAPTURE_EXPOSURE = 15,
CAPTURE_IRIS = 16,
CAPTURE_FOCUS = 17,
CAPTURE_PROP_MAX = 18,
class SimpleCapParms(Structure):
_fields_ = [
("buffer", POINTER(c_int)),
("width", c_int),
("height", c_int),
]
lib = None
def init():
global lib
lib = cdll.LoadLibrary(resolve("escapi32.dll"))
lib.initCapture.argtypes = [c_int, POINTER(SimpleCapParms)]
lib.initCapture.restype = c_int
lib.initCOM()
def device_name(device):
"""
Get the device name for the given device
:param device: The number of the device
:return: The name of the device
"""
namearry = (c_char * 256)()
lib.getCaptureDeviceName(device, namearry, 256)
camearaname = namearry.value
return camearaname
def init_camera(device, width, height):
array = (width * height * c_int)()
options = SimpleCapParms()
options.width = width
options.height = height
options.buffer = array
lib.initCapture(device, byref(options))
return array
def get_image(device, width, height, array):
lib.doCapture(device)
while lib.isCaptureDone(device) == 0:
pass
img = Image.frombuffer('RGBA', (width, height), array, 'raw', 'BGRA', 0, 0)
return img
def deinit_camera(device):
lib.deinitCapture(device)
class Device():
def __init__(self, device, width, height, array):
self._device = device
self._array = array
self._width = width
self._height = height
@classmethod
def connect(cls, device, width, height):
if not lib:
init()
array = init_camera(device, width, height)
return cls(device, width, height, array)
def disconnect(self):
deinit_camera(self._device)
def get_image(self):
return get_image(self._device, self._width, self._height, self._array)
| unlicense | Python | |
a877ba0845ea868a79d89965c419ec637bc85ff1 | add python | Stymphalian/CodeSnacks,Stymphalian/CodeSnacks,Stymphalian/CodeSnacks,Stymphalian/CodeSnacks | python/python.py | python/python.py | from collections import namedtuple
Point = namedtuple('Point',['x','y'])
if __name__ == "__main__":
main()
import heapq
heap = []
heapq.heapify(heap)
heapq.heappush(heap,(1,2))
| mit | Python | |
5ba42c79d777b8afad82a6dc120afc366643e9b8 | Create test.py | tspannhw/nlp-utilities,tspannhw/nlp-utilities | test.py | test.py | import spacy
nlp = spacy.load('en')
doc5 = nlp(u"Timothy Spann is studying at Princeton University in New Jersey.")
# Named Entity Recognizer (NER)
for ent in doc5.ents:
print ent, ent.label, ent.label_
| apache-2.0 | Python | |
e5f22aaf4de371df0b52a275dbebbd4cd6c1d980 | add test file | j-salazar/mchacks15,j-salazar/mchacks15,j-salazar/mchacks15 | test.py | test.py | import os
import logging
import redis
import gevent
from flask import Flask, render_template
from flask_sockets import Sockets
| mit | Python | |
50c3e9ffeacf7db5c002186e178ca24a6c14bf22 | Add py-watchdog (#19167) | iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-watchdog/package.py | var/spack/repos/builtin/packages/py-watchdog/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyWatchdog(PythonPackage):
"""Python library and shell utilities to monitor filesystem events."""
homepage = "https://github.com/gorakhargosh/watchdog"
url = "https://github.com/gorakhargosh/watchdog/archive/v0.10.3.tar.gz"
version('0.10.3', sha256='f7e5040b483cc9a8618a4e72c11a527b42434a0766ea96dce633e8b44b1369de')
version('0.10.2', sha256='4dba861f5e6960c8063ad68137772ff35f1516ea47d64b53042dabd2d8f8dbdc')
version('0.10.1', sha256='972c6050eb5d275b582c75e6ff71ef562a6c3be6361d6d9b37110e0ba718994d')
version('0.10.0', sha256='39e2966b8c9596e45b463815668989b87c7d205e47c7e6e4a7db9a58354b99ff')
version('0.9.0', sha256='e8a32701dff43a4e671a40acf92f55a8fffd8ca74333b75436d63a94d104faef')
version('0.8.3', sha256='e9a27d0ab121fc86217ab833e778c76880aad4ecdb623796f4b7aee9925394ed')
version('0.8.2', sha256='386e882c8bc7df8a3c4c8803e6fcf2f7cf836c9a8867ff9d91b4af19d262d023')
version('0.8.1', sha256='05e5d8acd5061aff789359cd8f25a85ba4c848a8feded3fc68f9c57f0b181373')
version('0.8.0', sha256='5abac06e63ad8d5b001626a16bfdd1b918637aa8d85cf933e68de2e627b56053')
version('0.7.1', sha256='d795fa85ce9252eeb2294a5182c99013433aeb736cc7a1fc9e14e1e2a1a19690')
depends_on('python@2.7,3.4:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-pyyaml', type=('build', 'run'))
| lgpl-2.1 | Python | |
c151f843964408481a5f6569718537a786ff5722 | range excluding script, still lacks some error checking | bpcox/range-exclude | range-exclude.py | range-exclude.py | #! /usr/bin/env python3.4
import ipaddress
import math
supernet = False
subnet = False
while not supernet:
inputRange = input('Input the IP range you would like remove a subrange from: ')
try:
supernet =ipaddress.ip_network(inputRange)
except ValueError:
print('Invalid input, try again')
while not subnet:
inputRange = input('Input the IP range you would like to remove: ')
try:
subnet = ipaddress.ip_network(inputRange)
except ValueError:
print('Invalid input, try again')
if (supernet.version == subnet.version):
result =supernet.address_exclude(subnet)
for IPrange in result:
print(IPrange)
else:
print('Both IP ranges must be of the same type (IPv4 or IPv6)')
| mit | Python | |
e639cea1f5264870e3f5f19fbd88345a23ef61a9 | add timestamps to debug logging | dashpay/sentinel,ivansib/sentinel,ivansib/sentinel,thelazier/sentinel,thelazier/sentinel,dashpay/sentinel | lib/misc.py | lib/misc.py | import time
import re
import sys, os
sentinel_options = []
def is_numeric(strin):
import decimal
# Decimal allows spaces in input, but we don't
if strin.strip() != strin:
return False
try:
value = decimal.Decimal(strin)
except decimal.InvalidOperation as e:
return False
return True
def printdbg(str):
ts = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime(now()))
logstr = "{} {}".format(ts, str)
if os.environ.get('SENTINEL_DEBUG', None):
print(logstr)
def is_hash(s):
m = re.match('^[a-f0-9]{64}$', s)
if m: return True
return False
def now():
return int(time.time())
def add_sentinel_option(param):
sentinel_options.append(param)
## check parameters from the user
def completer(text, state):
options = [i for i in commands if i.startswith(text)]
options.extend(sentinel_options)
if state < len(options):
return options[state]
else:
return None
def startup():
# python startup file
import readline
import rlcompleter
import atexit
import os
# tab completion
readline.parse_and_bind('tab: complete')
readline.set_completer(completer)
# do not use - as delimiter
old_delims = readline.get_completer_delims() # <-
readline.set_completer_delims(old_delims.replace('-', '')) # <-
# history file
histfile = os.path.join(os.environ.get('HOME'), '.pythonhistory')
try:
readline.read_history_file(histfile)
except IOError:
pass
atexit.register(readline.write_history_file, histfile)
del os, histfile, readline, rlcompleter
import readline
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
def get(self, name):
return self.__dict__.get(name, None)
| import time
import re
import sys, os
sentinel_options = []
def is_numeric(strin):
import decimal
# Decimal allows spaces in input, but we don't
if strin.strip() != strin:
return False
try:
value = decimal.Decimal(strin)
except decimal.InvalidOperation as e:
return False
return True
def printdbg(str):
if os.environ.get('SENTINEL_DEBUG', None):
print(str)
def is_hash(s):
m = re.match('^[a-f0-9]{64}$', s)
if m: return True
return False
def now():
return int(time.time())
def add_sentinel_option(param):
sentinel_options.append(param)
## check parameters from the user
def completer(text, state):
options = [i for i in commands if i.startswith(text)]
options.extend(sentinel_options)
if state < len(options):
return options[state]
else:
return None
def startup():
# python startup file
import readline
import rlcompleter
import atexit
import os
# tab completion
readline.parse_and_bind('tab: complete')
readline.set_completer(completer)
# do not use - as delimiter
old_delims = readline.get_completer_delims() # <-
readline.set_completer_delims(old_delims.replace('-', '')) # <-
# history file
histfile = os.path.join(os.environ.get('HOME'), '.pythonhistory')
try:
readline.read_history_file(histfile)
except IOError:
pass
atexit.register(readline.write_history_file, histfile)
del os, histfile, readline, rlcompleter
import readline
class Bunch(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)
def get(self, name):
return self.__dict__.get(name, None)
| mit | Python |
9d44e4eb4c8d2c2f10152894f7c53d9feaae528c | Add ip-restriction plugin to declare ip whitelists/blacklists and restrict api access | menecio/django-api-bouncer | api_bouncer/middlewares/ip_restriction.py | api_bouncer/middlewares/ip_restriction.py | import ipaddress
from django.http import JsonResponse
from ..models import Plugin
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[0]
else:
ip = request.META.get('REMOTE_ADDR')
return ip
class IpRestrictionMiddleware(object):
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
host = request.META.get('HTTP_HOST')
consumer_id = request.META.get('HTTP_CONSUMER_ID')
plugin_conf = Plugin.objects.filter(
api__hosts__contains=[host],
name='ip-restriction'
).first()
if (
plugin_conf and (
not plugin_conf.config.get('consumer_id') or
plugin_conf.config.get('consumer_id') == consumer_id
)
):
config = plugin_conf.config
whitelist = config['whitelist']
blacklist = config['blacklist']
client_ip = get_client_ip(request)
if not self.check_ip_address(client_ip, blacklist, whitelist):
return JsonResponse({'errors': 'Forbidden'}, status=403)
response = self.get_response(request)
return response
def check_ip_address(self, client_ip, blacklist, whitelist):
client_ip = ipaddress.ip_address(client_ip)
for ip in blacklist:
if client_ip in ipaddress.ip_network(ip):
return False
if (
whitelist and
not any([
client_ip in
ipaddress.ip_network(ip) for ip in whitelist
])
):
return False
return True
| apache-2.0 | Python | |
b62256d47ebf5df9404441f5ac450b780b4937ae | add twinkle effect | ethanacm/the_snake_lights | test.py | test.py | # NeoPixel library strandtest example
# Author: Tony DiCola (tony@tonydicola.com)
#
# Direct port of the Arduino NeoPixel library strandtest example. Showcases
# various animations on a strip of NeoPixels.
import time
from neopixel import *
# LED strip configuration:
LED_COUNT = 300 # Number of LED pixels.
LED_PIN = 18 # GPIO pin connected to the pixels (18 uses PWM!).
# LED_PIN = 10 # GPIO pin connected to the pixels (10 uses SPI /dev/spidev0.0).
LED_FREQ_HZ = 800000 # LED signal frequency in hertz (usually 800khz)
LED_DMA = 5 # DMA channel to use for generating signal (try 5)
LED_BRIGHTNESS = 255 # Set to 0 for darkest and 255 for brightest
LED_INVERT = False # True to invert the signal (when using NPN transistor level shift)
LED_CHANNEL = 0 # set to '1' for GPIOs 13, 19, 41, 45 or 53
LED_STRIP = ws.WS2811_STRIP_GRB # Strip type and colour ordering
# Define functions which animate LEDs in various ways.
def twinkle(strip, color, wait_ms=100):
while True:
for i in range(strip.numPixels / 30):
strip.setPixelColor(i, color)
strip.show()
time.sleep(wait_ms / 1000)
def colorWipe(strip, color, wait_ms=50):
"""Wipe color across display a pixel at a time."""
for i in range(strip.numPixels()):
strip.setPixelColor(i, color)
strip.show()
time.sleep(wait_ms / 1000.0)
def theaterChase(strip, color, wait_ms=50, iterations=10):
"""Movie theater light style chaser animation."""
for j in range(iterations):
for q in range(3):
for i in range(0, strip.numPixels(), 3):
strip.setPixelColor(i + q, color)
strip.show()
time.sleep(wait_ms / 1000.0)
for i in range(0, strip.numPixels(), 3):
strip.setPixelColor(i + q, 0)
def wheel(pos):
"""Generate rainbow colors across 0-255 positions."""
if pos < 85:
return Color(pos * 3, 255 - pos * 3, 0)
elif pos < 170:
pos -= 85
return Color(255 - pos * 3, 0, pos * 3)
else:
pos -= 170
return Color(0, pos * 3, 255 - pos * 3)
def rainbow(strip, wait_ms=20, iterations=1):
"""Draw rainbow that fades across all pixels at once."""
for j in range(256 * iterations):
for i in range(strip.numPixels()):
strip.setPixelColor(i, wheel((i + j) & 255))
strip.show()
time.sleep(wait_ms / 1000.0)
def rainbowCycle(strip, wait_ms=20, iterations=5):
"""Draw rainbow that uniformly distributes itself across all pixels."""
for j in range(256 * iterations):
for i in range(strip.numPixels()):
strip.setPixelColor(i, wheel((int(i * 256 / strip.numPixels()) + j) & 255))
strip.show()
time.sleep(wait_ms / 1000.0)
def theaterChaseRainbow(strip, wait_ms=50):
"""Rainbow movie theater light style chaser animation."""
for j in range(256):
for q in range(3):
for i in range(0, strip.numPixels(), 3):
strip.setPixelColor(i + q, wheel((i + j) % 255))
strip.show()
time.sleep(wait_ms / 1000.0)
for i in range(0, strip.numPixels(), 3):
strip.setPixelColor(i + q, 0)
# Main program logic follows:
if __name__ == '__main__':
# Create NeoPixel object with appropriate configuration.
strip = Adafruit_NeoPixel(LED_COUNT, LED_PIN, LED_FREQ_HZ, LED_DMA, LED_INVERT, LED_BRIGHTNESS, LED_CHANNEL,
LED_STRIP)
# Intialize the library (must be called once before other functions).
strip.begin()
print('Press Ctrl-C to quit.')
twinkle(strip, Color(127,127,127,))
| mit | Python | |
db692a253a606923096a2a12359bad5302e53e55 | Create test.py | okraus/DeepLoc,okraus/DeepLoc | test.py | test.py | print "test.py"
| bsd-3-clause | Python | |
5606c7ad952b45c0bbb77b605dede737d65d7a17 | Create thor.py | nbar1/thor | thor.py | thor.py | # thor
#
# Process files in a given rootFolder to meet specific naming criteria
#
# @created 5/4/2013
# @modified 5/18/2013
# @author Nick Barone
import os, re, string
# folder to process
rootFolder = "/home/dlna01/stream/videos/"
# processFile
# return root folder
#
# @param string path
# @return string root path
def processFileForRootFolder(path):
root = filter(None, path.replace(rootFolder,"").split("/"))
return root[0]
# cleanDirs
# Changes directory names based on predetermined rules
def cleanDirs():
rootDirs = []
for dirname, dirnames, filenames in os.walk(rootFolder):
for filename in filenames:
rootDirs.append(processFileForRootFolder(os.path.join(dirname, filename)))
# filter files from dirs
dirContents = list(set(filter(None, rootDirs)))
for videoDir in list(set(filter(None, rootDirs))):
if not os.path.isdir(rootFolder + videoDir):
dirContents.remove(videoDir)
# clean up dir names
for vDir in dirContents:
vDirOld = vDir
# replace common inaccuracies
vDir = vDir.replace(".", " ")
vDir = vDir.replace("_", " ")
rpMap = [ "'" , "\"" , "/" , "\\" ]
for rpMapItem in rpMap:
vDir = vDir.replace(rpMapItem, "")
#remove anything contained in brackets [ ]
vDir = re.sub(re.compile('\[.+?\]', re.DOTALL), "", vDir)
#remove and year stamps and everything after them
vDir = re.sub(re.compile('\(*\d{4}\)*.+', re.DOTALL), "", vDir)
#remove common includes
rpMap = [ "hdtv.+", "x264.+", "1080i.+", "1080p.+", "720p.+", "480p.+", "dvdrip.+" ]
for rpMapItem in rpMap:
vDir = re.sub(re.compile(rpMapItem, re.IGNORECASE | re.DOTALL), "", vDir)
# replace crowded dash
vDir = vDir.replace("-", " - ")
# strip extra spaces
vDir = re.sub(re.compile(' +'), " ", vDir).strip()
if vDirOld == vDir:
print "GOOD: " + vDirOld + " === " + vDir
else:
print "RENAME: " + vDirOld + " >>> " + vDir
os.rename(rootFolder+vDirOld, rootFolder+vDir)
# cleanFiles
# Changes filenames based on predetermined rules
def cleanFiles():
rootDirs = []
for dirname, dirnames, filenames in os.walk(rootFolder):
for filename in filenames:
rootDirs.append(os.path.join(dirname, filename))
# filter dirs from files
dirContents = list(set(filter(None, rootDirs)))
# clean up dir names
for vDir in dirContents:
vDirOld = vDir
splitFile = vDir.split("/", -1)
vFileName = splitFile.pop().split(".")
vFileDir = "/".join(splitFile) + "/"
vFileExt = vFileName.pop()
vFileName = ".".join(vFileName)
vFileNameOld = vFileName + "." + vFileExt
if(vFileExt.lower() in [ "jpg", "jpeg", "png", "txt", "srt", "ds_store", "nfo", "yify", "part", "ass", "doc", "docx", "7z" ]):
# delete file
os.remove(vDirOld)
continue
if "sample" in vFileName.lower():
# delete file
os.remove(vDirOld)
continue
# replace common inaccuracies
vFileName = vFileName.replace(".", " ")
vFileName = vFileName.replace("_", " ")
rpMap = [ "'" , "\"" , "/" , "\\" ]
for rpMapItem in rpMap:
vFileName = vFileName.replace(rpMapItem, "")
#remove anything contained in brackets [ ]
vFileName = re.sub(re.compile('\[.+?\]', re.DOTALL), "", vFileName)
#remove and year stamps and everything after them
vFileName = re.sub(re.compile('\(*\d{4}\)*.+', re.DOTALL), "", vFileName)
#remove common includes
rpMap = [ "hdtv.+", "x264.+", "1080i.+", "1080p.+", "720p.+", "dvdrip.+" ]
for rpMapItem in rpMap:
vFileName = re.sub(re.compile(rpMapItem, re.IGNORECASE | re.DOTALL), "", vFileName)
# replace crowded dash
vFileName = vFileName.replace("-", " - ")
# strip extra spaces
vFileName = re.sub(re.compile(' +'), " ", vFileName).strip()
vFileName = vFileName + "." + vFileExt
if vFileNameOld == vFileName:
print "GOOD: " + vFileNameOld + " === " + vFileName
else:
print "RENAME: " + vFileNameOld + " >>> " + vFileName
os.rename(vDirOld, vFileDir + vFileName)
# cleanEmptyDirs
# Deleted any directories that are empty after cleaning is complete
#
# @param string path
def cleanEmptyDirs(path):
if not os.path.isdir(path):
return
# remove empty subfolders
files = os.listdir(path)
if len(files):
for f in files:
fullpath = os.path.join(path, f)
if os.path.isdir(fullpath):
cleanEmptyDirs(fullpath)
# if folder empty, delete it
files = os.listdir(path)
if len(files) == 0:
print "REMOVING:" + path
os.rmdir(path)
# run
# Run set of commands for proper execution
def run():
cleanDirs()
cleanFiles()
cleanEmptyDirs(rootFolder)
# run
run()
| mit | Python | |
9f8e7ff3220c81e22fde17a85989653b45602f28 | Create 6kyu_n-centered_array.py | Orange9000/Codewars,Orange9000/Codewars | Solutions/6kyu/6kyu_n-centered_array.py | Solutions/6kyu/6kyu_n-centered_array.py | from math import ceil
def is_centered(arr,n):
return any(sum(arr[i:-i])==n for i in range(ceil(len(arr)/2))) or sum(arr)==n
| mit | Python | |
46ebd88e56300d6f64421acaa2a3eb42db7acc81 | Add simple CFR implementation | JakubPetriska/poker-cfr,JakubPetriska/poker-cfr | kuhn_poker/train_cfr.py | kuhn_poker/train_cfr.py | import random
NUM_ACTIONS = 2


class Node:
    """An information-set node: cumulative regrets plus cumulative
    (realization-weighted) strategy sums for regret matching."""

    def __init__(self, info_set):
        super().__init__()
        self.info_set = info_set
        self.regret_sum = [0] * NUM_ACTIONS
        self.strategy = [0] * NUM_ACTIONS
        self.strategy_sum = [0] * NUM_ACTIONS

    def get_strategy(self, realization_weight):
        """Return the current regret-matching strategy and accumulate it,
        weighted by *realization_weight*, into the strategy sum."""
        positive_regrets = [r if r > 0 else 0 for r in self.regret_sum]
        total = sum(positive_regrets)
        # Normalize positive regrets; fall back to uniform when none exist.
        if total > 0:
            self.strategy[:] = [r / total for r in positive_regrets]
        else:
            self.strategy[:] = [1.0 / NUM_ACTIONS] * NUM_ACTIONS
        for action, prob in enumerate(self.strategy):
            self.strategy_sum[action] += realization_weight * prob
        return self.strategy

    def get_average_strategy(self):
        """Return the strategy averaged over all iterations so far."""
        total = sum(self.strategy_sum)
        if total > 0:
            return [s / total for s in self.strategy_sum]
        return [1.0 / NUM_ACTIONS] * NUM_ACTIONS

    def __str__(self):
        return '%s: %s' % (self.info_set, self.get_average_strategy())
# Map from information-set string (private card + betting history) to Node.
nodeMap = {}
def cfr(cards, history, p0, p1):
    """One pass of counterfactual regret minimization over Kuhn poker.

    cards   -- shuffled deck; cards[0]/cards[1] are the players' cards.
    history -- betting history so far, a string of 'p' (pass) / 'b' (bet).
    p0, p1  -- reach probabilities of player 0 and player 1.

    Returns the expected utility of the current player at this node and,
    as a side effect, accumulates regrets into the shared nodeMap.
    """
    plays = len(history)
    player = plays % 2
    opponent = 1 - player
    # Terminal-state detection (only possible after at least two plays).
    if plays > 1:
        terminal_pass = history[plays - 1] == 'p'
        double_bet = history[plays - 2:plays] == 'bb'
        is_player_card_higher = cards[player] > cards[opponent]
        if terminal_pass:
            if history == 'pp':
                # Both checked: showdown for the antes.
                return 1 if is_player_card_higher else -1
            else:
                # Opponent folded to a bet.
                return 1
        elif double_bet:
            # Bet and call: showdown for the doubled pot.
            return 2 if is_player_card_higher else -2
    info_set = str(cards[player]) + history
    # Fetch or lazily create the node for this information set.
    if info_set in nodeMap:
        node = nodeMap[info_set]
    else:
        node = Node(info_set)
        # NOTE(review): redundant -- the constructor already stores the
        # info set (as node.info_set); this adds a second attribute.
        node.infoSet = info_set
        nodeMap[info_set] = node
    strategy = node.get_strategy(p0 if player == 0 else p1)
    util = [0] * NUM_ACTIONS
    node_util = 0
    # Recurse on each action; utilities are negated because the value is
    # always from the perspective of the player to act.
    for a in range(NUM_ACTIONS):
        next_history = history + ('p' if a == 0 else 'b')
        if player == 0:
            util[a] = -cfr(cards, next_history, p0 * strategy[a], p1)
        else:
            util[a] = -cfr(cards, next_history, p0, p1 * strategy[a])
        node_util += strategy[a] * util[a]
    # Accumulate counterfactual regrets, weighted by the opponent's reach.
    for a in range(NUM_ACTIONS):
        regret = util[a] - node_util
        node.regret_sum[a] += (p1 if player == 0 else p0) * regret
    return node_util
def train(iterations):
    """Run *iterations* chance-sampled CFR passes over Kuhn poker, then
    print the average game value and every learned average strategy."""
    cards = [1, 2, 3]
    util = 0
    for i in range(iterations):
        # In-place Fisher-Yates shuffle of the three-card deck.
        for card_index_1 in range(len(cards) - 1, -1, -1):
            card_index_2 = random.randint(0, card_index_1)
            tmp = cards[card_index_1]
            cards[card_index_1] = cards[card_index_2]
            cards[card_index_2] = tmp
        util += cfr(cards, '', 1, 1)
    print('Average game value: %s' % (util / iterations))
    for key, node in nodeMap.items():
        print(str(node))
if __name__ == "__main__":
    iterations = 1000
    train(iterations)
| mit | Python | |
bf0be80bdc2ec691fb0f0ac1bcfe3337f070c199 | Add some tests | spreadflow/spreadflow-core,znerol/spreadflow-core | spreadflow_core/test/test_jobqueue.py | spreadflow_core/test/test_jobqueue.py | # -*- coding: utf-8 -*-
"""Tests for cooperative job queue.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from twisted.internet import defer
from twisted.trial import unittest
from spreadflow_core.jobqueue import JobQueue
class JobQueueTestCase(unittest.TestCase):

    def test_returns_deferred_when_empty(self):
        """
        :meth:`spreadflow_core.jobqueue.JobQueue.next` hands out a
        :class:`twisted.internet.defer.Deferred` while the queue is empty.
        """
        jq = JobQueue()

        ready_first = jq.next()
        self.assertIsInstance(ready_first, defer.Deferred)

        ready_second = jq.next()
        self.assertIsInstance(ready_second, defer.Deferred)

        # Polling an empty queue repeatedly yields the very same deferred,
        # and it must not have fired yet.
        self.assertEqual(ready_first, ready_second)
        self.assertFalse(ready_first.called)

    def test_fires_deferred_when_ready(self):
        """
        A deferred handed out by :meth:`spreadflow_core.jobqueue.JobQueue.next`
        fires as soon as a new job lands in the queue.
        """
        channel = object()
        jq = JobQueue()

        ready = jq.next()
        self.assertIsInstance(ready, defer.Deferred)
        self.assertFalse(ready.called)

        completed = jq.put(channel, lambda: 'Bazinga!')
        completed.addCallback(lambda result: self.assertEqual(result, 'Bazinga!'))

        # Enqueueing a job must fire the "queue ready" deferred ...
        self.assertTrue(ready.called)
        # ... but must not run the job synchronously.
        self.assertFalse(completed.called)

        # Iterating the queue executes the pending job.
        ready_after_run = jq.next()
        self.assertTrue(completed.called)

        # Returning None (instead of a deferred) tells the cooperator task
        # that the queue wants to be polled again immediately.
        self.assertIsNone(ready_after_run)

        # Once drained, a brand-new deferred is handed out.
        ready_empty_again = jq.next()
        self.assertNotEqual(ready, ready_empty_again)
        self.assertFalse(ready_empty_again.called)

    def test_iterates_in_fifo_order(self):
        """
        Jobs are executed in FIFO order.
        """
        calls = []

        def record(*args, **kwds):
            calls.append((args, kwds))

        channel = object()
        jq = JobQueue()

        self.assertEqual(len(calls), 0)

        jq.put(channel, record, 'first', arg='one')
        jq.put(channel, record, 'second')
        jq.put(channel, record)
        jq.put(channel, record, 'fourth', 'additional', 'positional', plus='some', key='words')

        # Each call to next() runs exactly one job, in submission order.
        for done_so_far in range(4):
            self.assertEqual(len(calls), done_so_far)
            self.assertIsNone(jq.next())

        self.assertEqual(calls, [
            (('first',), {'arg': 'one'}),
            (('second',), {}),
            ((), {}),
            (('fourth', 'additional', 'positional'), {'plus': 'some', 'key': 'words'})
        ])

        # The queue is empty again, so next() returns a deferred.
        self.assertIsInstance(jq.next(), defer.Deferred)
7d9aed1bed273b1b83f22436e5c24cde3cc93586 | add an rnn model | longjie/chainer-char-rnn,yusuketomoto/chainer-char-rnn,yanweifu/chainer-char-rnn,longjie/chainer-char-rnn,kylemcdonald/chainer-char-rnn,kl-ing-one/chainer-char-rnn | CharRNN.py | CharRNN.py | import numpy as np
from chainer import Variable, FunctionSet
import chainer.functions as F
class CharRNN(FunctionSet):
    """Two-layer LSTM character-level language model (Chainer v1 API).

    ``embed`` maps vocabulary ids to ``n_units``-dimensional vectors; each
    LSTM layer is expressed as a pair of linear maps (``l*_x`` on the input,
    ``l*_h`` on the recurrent state) producing the ``4*n_units`` gate
    pre-activations consumed by ``F.lstm``; ``l3`` projects back to
    vocabulary logits.
    """
    def __init__(self, n_vocab, n_units):
        super(CharRNN, self).__init__(
            embed = F.EmbedID(n_vocab, n_units),
            l1_x = F.Linear(n_units, 4*n_units),
            l1_h = F.Linear(n_units, 4*n_units),
            l2_h = F.Linear(n_units, 4*n_units),
            l2_x = F.Linear(n_units, 4*n_units),
            l3 = F.Linear(n_units, n_vocab),
        )
        # Small uniform initialization for every weight and bias.
        for param in self.parameters:
            param[:] = np.random.uniform(-0.08, 0.08, param.shape)

    def forward_one_step(self, x_data, y_data, state, train=True, dropout_ratio=0.5):
        """Advance the network one time step.

        Returns ``(new_state, loss)`` when ``train`` is true, otherwise
        ``(new_state, softmax_probabilities)``.
        """
        x = Variable(x_data, volatile=not train)
        t = Variable(y_data, volatile=not train)
        h0 = self.embed(x)
        # Dropout is applied on the inputs to each layer, not on the
        # recurrent connections.
        h1_in = self.l1_x(F.dropout(h0, ratio=dropout_ratio, train=train)) + self.l1_h(state['h1'])
        c1, h1 = F.lstm(state['c1'], h1_in)
        h2_in = self.l2_x(F.dropout(h1, ratio=dropout_ratio, train=train)) + self.l2_h(state['h2'])
        c2, h2 = F.lstm(state['c2'], h2_in)
        y = self.l3(F.dropout(h2, ratio=dropout_ratio, train=train))
        state = {'c1': c1, 'h1': h1, 'c2': c2, 'h2': h2}
        if train:
            return state, F.softmax_cross_entropy(y, t)
        else:
            return state, F.softmax(y)
def make_initial_state(n_units, batchsize=50, train=True):
    """Return a fresh all-zero LSTM state dict with keys c1/h1/c2/h2."""
    state = {}
    for name in ('c1', 'h1', 'c2', 'h2'):
        zeros = np.zeros((batchsize, n_units), dtype=np.float32)
        state[name] = Variable(zeros, volatile=not train)
    return state
| mit | Python | |
b291408936aeff869aa8bb9754687b2949bfc2a4 | Add laupdate, a script to update list of admins | PersianWikipedia/fawikibot,PersianWikipedia/fawikibot | laupdate.py | laupdate.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Credit: Amir Sarabadani <ladsgroup@gmail.com>
import pywikibot
import time
from pywikibot.data.api import Request
import re
site = pywikibot.Site('fa', fam='wikipedia')
print "Fetching admins list"
# Pull the full sysop list from the Persian Wikipedia API.
data = Request(site=site, action="query", list="allusers", augroup="sysop", aulimit=500).submit()
adminsac = []            # admins considered "active"
adminbots = ["Dexbot"]   # bot accounts excluded from the report
adminsdiac = {}          # inactive admin -> date of last logged action
for admin in data["query"]["allusers"]:
    admin = admin["name"]
    if admin in adminbots:
        continue
    acaction = []   # actions within the last ~90 days
    dcaction = []   # older actions
    actions = "block, protect, rights, delete, upload, import, renameuser".split(
        ", ")
    # Gather every admin log action and bucket it by age.
    for adminaction in actions:
        data1 = Request(site=site, action="query", list="logevents",
                        leuser=admin, letype=adminaction).submit()
        for action in data1["query"]["logevents"]:
            times = action["timestamp"].split("T")[0].split("-")
            today = time.strftime('%Y/%m/%d').split("/")
            # Rough day difference (365-day years, 30-day months).
            diff = ((int(today[0]) - int(times[0])) * 365) + (
                (int(today[1]) - int(times[1])) * 30) + (int(today[2]) - int(times[2]))
            if diff < 90:
                acaction.append(
                    action["timestamp"].split("T")[0].replace("-", ""))
            else:
                dcaction.append(
                    action["timestamp"].split("T")[0].replace("-", ""))
    # Compute the date exactly three months ago (crude calendar arithmetic).
    thmag = {"y": int(time.strftime('%Y')), "m": int(
        time.strftime('%m')), "d": int(time.strftime('%d'))}
    if (int(thmag["m"]) - 3) <= 0:
        thmag["y"] = thmag["y"] - 1
        thmag["m"] = thmag["m"] + 9
    else:
        thmag["m"] = thmag["m"] - 3
    if thmag["m"] < 10:
        thmag["m"] = "0" + str(thmag["m"])
    if thmag["d"] < 10:
        thmag["d"] = "0" + str(thmag["d"])
    thmag1 = [str(thmag["y"]), str(thmag["m"]), str(thmag["d"])]
    # MediaWiki-namespace (ns 8) edits in the last three months also count
    # as admin activity.
    data2 = Request(site=site, action="query", list="usercontribs", ucuser=admin,
                    ucnamespace=8, ucend="%sT00:00:00Z" % "-".join(thmag1)).submit()
    for actionmw in data2["query"]["usercontribs"]:
        acaction.append(actionmw["timestamp"].split("T")[0].replace("-", ""))
    # Ten or more recent actions => active.  The "!!!..." prefix forces
    # Persian-script names to sort before Latin ones; it is stripped later.
    if len(acaction) >= 10:
        if re.search(ur"[ابپتثجچحخدذرزژسشصضطظعغفقکگلمنوهیآ]", admin[0]):
            adminsac.append(u"!!!!!!!!!!!!!!!!!!!!!!!!!!!" + admin)
        else:
            adminsac.append(admin)
    else:
        acaction.sort()
        dcaction.sort()
        if re.search(ur"[ابپتثجچحخدذرزژسشصضطظعغفقکگلمنوهیآ]", admin[0]):
            admin = u"!!!!!!!!!!!!!!!!!!!!!!!!!!!" + admin
        # NOTE(review): bare except -- falls back to the newest old action
        # when there is no recent one; raises IndexError if both lists are
        # empty, which the bare except would also (wrongly) swallow.
        try:
            adminsdiac[admin] = acaction[-1]
        except:
            adminsdiac[admin] = dcaction[-1]
    pywikibot.output(admin)
adminsac.sort()
# Build the wikitext for active admins ("/سطرف" row template).
activetext = u"\n{{ویکی‌پدیا:فهرست مدیران/سطرف|" + \
    u"}}\n{{ویکی‌پدیا:فهرست مدیران/سطرف|".join(adminsac) + u"}}"
deactivetext = u"\n"
activetext = activetext.replace(u"!!!!!!!!!!!!!!!!!!!!!!!!!!!", u"")
ak = adminsdiac.keys()
ak.sort()
# Build the wikitext for inactive admins ("/سطرغ" rows with last-seen date).
for admin in ak:
    deactivetext = deactivetext + \
        u"{{ویکی‌پدیا:فهرست مدیران/سطرغ|" + admin + \
        u"|" + adminsdiac[admin] + u"}}\n"
deactivetext = deactivetext.replace(u"!!!!!!!!!!!!!!!!!!!!!!!!!!!", u"")
page = pywikibot.Page(site, u"ویکی‌پدیا:فهرست مدیران")
text = page.get()
pywikibot.output(deactivetext)
# Replace the marked sections of the on-wiki list page and save.
new_text = text.replace(text.split(u"<!-- Active -->")[1], activetext + u"\n")
new_text = new_text.replace(u"<!-- Deactive -->" + text.split(
    u"<!-- Deactive -->")[1], u"<!-- Deactive -->" + deactivetext + u"\n")
page.put(new_text, u"ربات: بروزرسانی فهرست")
| mit | Python | |
31cc4f9c90ce231c3cb75318db31a711b325947a | add pixiv downloader. | stefco/dotfiles,stefco/dotfiles,stefco/dotfiles | bin/pixiv_dl.py | bin/pixiv_dl.py | # coding: utf-8
import os
import logging
from netrc import netrc
from random import random
from time import sleep
import pixivpy3
logging.basicConfig(level=logging.DEBUG)
LOG = logging.getLogger(__name__)
# Acceptable image sizes; get_urls() prefers 'original' over 'large'.
SIZES = {'large', 'original'}
# Download target directory, created at import time if missing.
DIRNAME = os.path.expanduser(os.path.join("~", "Downloads", "pixiv"))
if not os.path.isdir(DIRNAME):
    os.makedirs(DIRNAME)
# Pixiv numeric user id whose bookmarks are mirrored.
USERID = 38913802
def api_factory():
    """Return a zero-argument function that yields a logged-in pixiv API
    instance.

    The first call creates an ``AppPixivAPI`` and logs in with the
    ``pixiv.net`` credentials from ``~/.netrc``; subsequent calls return
    the same instance so we never log in twice.  To refresh credentials,
    run ``api().login(username, password)`` again.
    """
    api_closure = []

    def api_wrapper():
        if api_closure:
            return api_closure[0]
        LOG.debug("Creating new AppPixivAPI instance and logging in.")
        api_closure.append(pixivpy3.AppPixivAPI())
        username, _, password = netrc().authenticators("pixiv.net")
        api_closure[0].login(username, password)
        return api_closure[0]

    # BUG FIX: this previously returned the not-yet-bound module global
    # ``api`` instead of the wrapper, raising NameError at import time.
    return api_wrapper


api = api_factory()  # pylint: disable=invalid-name
def bookmark_gen(userid):
    """Iterate through bookmarks, fetching the next page as necessary.

    Yields one bookmark (illust) dict at a time for *userid*, following
    the API's ``next_url`` pagination until it is exhausted.
    """
    # get the first 30
    LOG.info("Getting the first bookmarks page.")
    count = 0
    bookmarks_json = api().user_bookmarks_illust(userid)
    for bookmark in bookmarks_json['illusts']:
        count += 1
        yield bookmark
    # get the next page and keep going
    while bookmarks_json.next_url is not None:
        LOG.info("Fetching another bookmarks page; %s bookmarks done.",
                 count)
        # parse_qs turns next_url back into keyword args for the API call.
        next_qs = api().parse_qs(bookmarks_json.next_url)
        bookmarks_json = api().user_bookmarks_illust(**next_qs)
        for bookmark in bookmarks_json['illusts']:
            count += 1
            yield bookmark
def get_urls(bookmark):
    """Return the best image URL ('original', else 'large') for each page
    of *bookmark*; single-page works stand in as their own page list."""
    pages = bookmark['meta_pages'] or [bookmark]
    urls = []
    for page in pages:
        image_urls = page['image_urls']
        if SIZES.intersection(image_urls):
            urls.append(image_urls.get('original', image_urls['large']))
    return urls
def download_bookmarks(userid, force_all=False):
    """Download all bookmarks for this userid. Stop iterating through
    URLs once we encounter a file that has been downloaded unless
    `force_all` is True. Returns the last processed URL."""
    # NOTE(review): if the user has zero bookmarks/URLs, the final
    # ``return url`` raises NameError -- url is never bound.
    for bookmark in bookmark_gen(userid):
        for url in get_urls(bookmark):
            name = os.path.basename(url)
            path = os.path.join(DIRNAME, name)
            # Incremental mode: an existing file marks where we left off.
            if os.path.exists(path) and not force_all:
                return url
            LOG.info("Downloading %s to %s then sleeping", url, path)
            api().download(url, path=DIRNAME, name=name)
            # Random 0.15-0.45s pause between downloads (rate limiting).
            sleep(0.3*random() + 0.15)
    return url
def main():
    """Download bookmarks."""
    # Incremental by default: stops at the first already-downloaded file.
    download_bookmarks(USERID)
if __name__ == "__main__":
    main()
| mit | Python | |
bd68dd06ba24806d1c216d3fc818d5597dc469d2 | add leetcode Merge Two Sorted Lists | Fity/2code,Fity/2code,Fity/2code,Fity/2code,Fity/2code,Fity/2code | leetcode/MergeTwoSortedLists/solution.py | leetcode/MergeTwoSortedLists/solution.py | # -*- coding:utf-8 -*-
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
    # @param two ListNodes
    # @return a ListNode
    def mergeTwoLists(self, l1, l2):
        """Merge two ascending linked lists into one, reusing the nodes.

        Ties favour nodes from l1, keeping the merge stable.
        """
        # Guard clauses: merging with an empty list is the other list.
        if not l1:
            return l2
        if not l2:
            return l1
        # Choose the smaller head to start the merged list.
        if l2.val < l1.val:
            head = l2
            l2 = l2.next
        else:
            head = l1
            l1 = l1.next
        tail = head
        # Repeatedly splice the smaller front node onto the tail.
        while l1 and l2:
            if l2.val < l1.val:
                tail.next = l2
                l2 = l2.next
            else:
                tail.next = l1
                l1 = l1.next
            tail = tail.next
        # Attach whichever list still has nodes left (possibly None).
        tail.next = l1 if l1 else l2
        return head
| mit | Python | |
94d8adf9d48c6118a3467947ad8b1ae0b6dd3d63 | Fix - add missed migrations | fidals/refarm-site,fidals/refarm-site,fidals/refarm-site | blog/migrations/0006_auto_20160513_1634.py | blog/migrations/0006_auto_20160513_1634.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-05-13 13:34
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds SEO/meta fields to ``Post``
    # and widens the ``type`` choices.
    # NOTE(review): the field name 'data_published' looks like a typo for
    # 'date_published' -- confirm against the model before renaming.
    dependencies = [
        ('blog', '0005_auto_20160422_1256'),
    ]
    operations = [
        migrations.AddField(
            model_name='post',
            name='content',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='post',
            name='data_published',
            field=models.DateField(auto_now_add=True, null=True),
        ),
        migrations.AddField(
            model_name='post',
            name='description',
            field=models.TextField(blank=True, null=True),
        ),
        migrations.AddField(
            model_name='post',
            name='h1',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AddField(
            model_name='post',
            name='is_active',
            field=models.BooleanField(default=True),
        ),
        migrations.AddField(
            model_name='post',
            name='keywords',
            field=models.CharField(blank=True, max_length=255, null=True),
        ),
        migrations.AddField(
            model_name='post',
            name='title',
            field=models.CharField(blank=True, max_length=100, null=True),
        ),
        migrations.AlterField(
            model_name='post',
            name='type',
            field=models.CharField(choices=[('article', 'Articles'), ('navigation', 'Navigation'), ('news', 'Daily news')], default='article', max_length=100),
        ),
    ]
| mit | Python | |
17c9cd8fcd597f5ecef00ee6cf494d3b3dc77e6b | add nonlinear fv advection operator | michaellaier/pymor,michaellaier/pymor,michaellaier/pymor,michaellaier/pymor | src/pymor/operators/fv.py | src/pymor/operators/fv.py | # -*- coding: utf-8 -*-
# This file is part of the pyMor project (http://www.pymor.org).
# Copyright Holders: Felix Albrecht, Rene Milk, Stephan Rave
# License: BSD 2-Clause License (http://opensource.org/licenses/BSD-2-Clause)
from __future__ import absolute_import, division, print_function
import numpy as np
from scipy.sparse import coo_matrix, csr_matrix
from pymor.la import NumpyVectorArray
from pymor.grids.referenceelements import triangle, line
from pymor.operators.interfaces import OperatorInterface
from pymor.operators.numpy import NumpyLinearOperator
from pymor.tools.inplace import iadd_masked, isub_masked
class NonlinearAdvectionLaxFriedrichs(OperatorInterface):
    '''Nonlinear Finite Volume Advection operator using Lax-Friedrichs-Flux.

    Currently we assume Dirichlet-Zero on the whole boundary.
    '''

    type_source = type_range = NumpyVectorArray

    def __init__(self, grid, flux, lmbda=1.0, name=None):
        # grid  -- FV grid providing entity/superentity/volume queries
        # flux  -- callable flux function f(U, mu)
        # lmbda -- Lax-Friedrichs stabilization parameter (1/lmbda scales
        #          the jump/diffusion term)
        super(NonlinearAdvectionLaxFriedrichs, self).__init__()
        self.grid = grid
        self.flux = flux
        self.lmbda = lmbda
        self.name = name
        self.build_parameter_type(inherits={'flux': flux})
        self.dim_source = self.dim_range = grid.size(0)

    def apply(self, U, ind=None, mu=None):
        """Evaluate the LF advection operator on the vectors of U selected
        by *ind* (all of them when ind is None)."""
        assert isinstance(U, NumpyVectorArray)
        assert U.dim == self.dim_source
        # NOTE(review): xrange is Python 2 only.
        ind = xrange(len(U)) if ind is None else ind
        U = U._array
        R = np.zeros((len(ind), self.dim_source))
        grid = self.grid
        # NOTE(review): N is computed but never used below.
        N = grid.neighbours(0, 0)
        # Edge -> (inner, outer) cell maps and local indices.
        SUPE = grid.superentities(1, 0)
        SUPI = grid.superentity_indices(1, 0)
        assert SUPE.ndim == 2
        VOLS = grid.volumes(1)
        for i, j in enumerate(ind):
            Ui = U[j]
            Ri = R[i]
            F = self.flux(Ui, self.map_parameter(mu, 'flux'))
            # Flux averaged over the two cells adjacent to each edge; -1
            # marks the (Dirichlet-zero) boundary side.
            F_edge = F[SUPE]
            F_edge[SUPE == -1] = 0
            F_edge = np.sum(np.sum(F_edge, axis=1) * grid.unit_outer_normals()[SUPE[:,0], SUPI[:,0]], axis=1)
            # Lax-Friedrichs jump term across each edge.
            U_edge = Ui[SUPE]
            U_edge[SUPE == -1] = 0
            U_edge = (U_edge[:,0] - U_edge[:,1]) * (1. / self.lmbda)
            TOT_edge = F_edge + U_edge
            TOT_edge *= 0.5 * VOLS
            # for k in xrange(len(TOT_edge)):
            #     Ri[SUPE[k,0]] += TOT_edge[k]
            #     Ri[SUPE[k,1]] -= TOT_edge[k]
            # Ri[SUPE[:,0]] += TOT_edge
            # Ri[SUPE[:,1]] -= TOT_edge
            # Scatter-add with duplicate-index handling (fancy-index +=
            # would drop repeated contributions, hence the helpers).
            iadd_masked(Ri, TOT_edge, SUPE[:,0])
            isub_masked(Ri, TOT_edge, SUPE[:,1])
        R /= grid.volumes(0)
        return NumpyVectorArray(R)
| bsd-2-clause | Python | |
70c1772f339b3638e1ed29a56c2cab9aa9a29c1e | Create Battleship.py | THEMVFFINMAN/Python-Games,THEMVFFINMAN/PyttleShip | Battleship.py | Battleship.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import curses
import time
screen = curses.initscr()
curses.noecho()
curses.curs_set(2)
screen.keypad(1)
def createBoards():
    """Build and return (enemyBoard, userBoard).

    Each board is a 12x12 grid of single-character strings: a frame of
    '+', '=' and '|' characters surrounding a 10x10 field of 'O' (open
    water) cells.  Every row is a distinct list, so cells can be mutated
    independently on either board.
    """
    def makeBoard():
        # Top border, ten water rows, bottom border.
        board = [['+'] + ['='] * 10 + ['+']]
        for _ in range(10):
            board.append(['|'] + ['O'] * 10 + ['|'])
        board.append(['+'] + ['='] * 10 + ['+'])
        return board

    return (makeBoard(), makeBoard())
def mainMenu(screen):
    """Draw the title menu and block until the player picks an option:
    1 starts the game (returns), 2 shows credits, 3 quits."""
    # I know it looks bad but curses was giving me issues
    Menu = " +======================+\n"
    Menu = Menu + " |                      |\n"
    Menu = Menu + " |      PyttleShip      |\n"
    Menu = Menu + " |                      |\n"
    Menu = Menu + " +======================+\n\n"
    Menu = Menu + "\t1. Start Game \n"
    Menu = Menu + "\t2. Credits \n"
    Menu = Menu + "\t3. Quit\n\n"
    Menu = Menu + " >> "
    screen.addstr(Menu)
    screen.refresh()
    # Poll keypresses until a valid menu choice is made.
    while True:
        event = screen.getch()
        if event == ord(str(1)):
            break
        if event == ord(str(2)):
            screen.clear()
            # NOTE(review): credits() calls back into mainMenu(), so each
            # menu<->credits round trip grows the call stack.
            credits(screen)
        if event == ord(str(3)):
            curses.endwin()
            quit(1)
def credits(screen):
    """Show the credits screen; 1 returns to the main menu, 2 quits."""
    Credits = "\n Author: JJ Lowe\n Email: joshuajordanlowe@gmail.com\n Github: THEMVFFINMAN\n\n"
    Credits = Credits + " 1. Main Menu \n"
    Credits = Credits + " 2. Quit\n\n"
    Credits = Credits + " >> "
    screen.addstr(Credits)
    screen.refresh()
    while True:
        event = screen.getch()
        if event == ord(str(1)):
            screen.clear()
            # NOTE(review): recurses back into mainMenu rather than
            # returning -- see note there about stack growth.
            mainMenu(screen)
        if event == ord(str(2)):
            curses.endwin()
            quit(1)
quit(1)
mainMenu(screen)
# NOTE(review): the string literal below is disabled board-drawing code; it
# also contains the y/x cursor initialisation the loop after it relies on.
'''
for i in range(0,12):
    screen.addstr(' ')
    for j in range(0,12):
        screen.addstr(enemyBoard[i][j] + ' ')
    screen.addstr('\n')
y = 1
x = 3
screen.move(y, x)
screen.refresh()
'''
# NOTE(review): because y and x are only assigned inside the docstring
# above, pressing w/a/s/d here raises NameError; re-enable the setup block
# before this WASD cursor loop can work.  'q' quits.
while True:
    event = screen.getch()
    if event == ord("w"):
        if (y != 1):
            y = y - 1
            screen.move(y, x)
            screen.refresh()
    if event == ord("a"):
        if (x != 3):
            x = x - 2
            screen.move(y, x)
            screen.refresh()
    if event == ord("s"):
        if (y != 10):
            y = y + 1
            screen.move(y, x)
            screen.refresh()
    if event == ord("d"):
        if (x != 21):
            x = x + 2
            screen.move(y, x)
            screen.refresh()
    if event == ord("q"): break
| mit | Python | |
a9c0b9f026ac10ca058afc2f7ad311e7431407f8 | add modules to follow | sdpython/pymyinstall,sdpython/pymyinstall,sdpython/pymyinstall,sdpython/pymyinstall | src/pymyinstall/packaged/packaged_config_C_tofollow.py | src/pymyinstall/packaged/packaged_config_C_tofollow.py | #-*- coding: utf-8 -*-
"""
@file
@brief Defines a set of modules to try
"""
import sys
from ..installhelper.module_install import ModuleInstall
def follow_set():
    """
    Return the list of web-scraping related modules to follow
    (each entry is a :class:`ModuleInstall` descriptor).
    """
    mod = [
        ModuleInstall(
            "langdetect", "pip", usage="WEB",
            purpose="Language detection library ported from Google's language-detection."),
        ModuleInstall(
            "justext", "pip", usage="WEB",
            purpose="Program jusText is a tool for removing boilerplate content, such as navigation links, headers, and footers from HTML pages. It is designed to preserve mainly text containing full sentences and it is therefore well suited for creating linguistic resources such as Web corpora."),
        ModuleInstall(
            "tldextract", "pip", usage="WEB",
            purpose="Accurately separate the TLD from the registered domain and subdomains of a URL, using the Public Suffix List."),
        ModuleInstall(
            "cchardet", "wheel_xd", usage="WEB",
            purpose="Universal encoding detector. This library is faster than chardet."),
        ModuleInstall(
            "aiohttp", "wheel_xd", usage="WEB",
            purpose="http client/server for asyncio"),
        ModuleInstall(
            "sky", "pip", usage="WEB",
            purpose="sky is a web scraping framework, implemented with the latest python versions in mind (3.4+). It uses the asynchronous asyncio framework, as well as many popular modules and extensions."),
    ]
    # Entries may be conditionally replaced by None (e.g. per platform);
    # filter those out before returning.
    return [_ for _ in mod if _ is not None]
| mit | Python | |
1bb393531771f877e9d4e9c2a0b99446f8873fc9 | Create gatherThreatIntel_ISC_IPs.py | ahhh/SPSE,thedarkcoder/SPSE | gatherThreatIntel_ISC_IPs.py | gatherThreatIntel_ISC_IPs.py | #!/bin/python
# Inspired by: https://bitbucket.org/whyJoseph/spse-whyjoseph/src/a9cb102ae93826cd7155ff77c2c4a24f584ff0a3/SANSTopXIP.py
from optparse import OptionParser
import logging
import urllib
import re
import sys
# Uses BeautifulSoup4
from bs4 import BeautifulSoup
def threatIPs(quantity):
    """Scrape isc.sans.edu/sources.html and return up to *quantity*
    attacker IP addresses (the link text of anchors whose markup
    mentions 'ipinfo')."""
    page = urllib.urlopen("http://isc.sans.edu/sources.html")
    parsed = BeautifulSoup(page.read(), "html.parser")
    counter = 0
    results = []
    for link in parsed.find_all('a'):
        # Only ipinfo links hold source IPs; stop once quantity reached.
        if (re.search('ipinfo', str(link)) and (counter < int(quantity))):
            results.append(link.string)
            counter += 1
    return results
def main():
    """Parse command line options and print the fetched threat-feed IPs."""
    # Setup the command line arguments.
    optp = OptionParser()
    # Output verbosity options
    # NOTE(review): opts.loglevel is parsed but never passed to
    # logging.basicConfig, so these flags currently have no effect.
    optp.add_option('-q', '--quiet', help='set logging to ERROR',
                    action='store_const', dest='loglevel',
                    const=logging.ERROR, default=logging.INFO)
    optp.add_option('-d', '--debug', help='set logging to DEBUG',
                    action='store_const', dest='loglevel',
                    const=logging.DEBUG, default=logging.INFO)
    optp.add_option('-v', '--verbose', help='set logging to COMM',
                    action='store_const', dest='loglevel',
                    const=5, default=logging.INFO)
    optp.add_option("-n", "--quantity", dest="quantity",
                    help="The quantity of IP addresses to fetch from the threat feed")
    opts, args = optp.parse_args()
    # Default to the top 100 sources when -n is not given.
    if opts.quantity is None:
        opts.quantity = 100
    results = threatIPs(opts.quantity)
    for result in results:
        print result
| mit | Python | |
75ce58aa8073e29fcc54d45b8f3dec24cacd05a4 | Add missed needs_update attribute, which is referenced from the RTPproxy client code. | sippy/b2bua,AVOXI/b2bua,AVOXI/b2bua,sippy/b2bua | sippy/SdpMediaDescription.py | sippy/SdpMediaDescription.py |
from SdpConnecton import SdpConnecton
from SdpMedia import SdpMedia
from SdpGeneric import SdpGeneric
# Parser class for each SDP media-section header type.
f_types = {'m':SdpMedia, 'i':SdpGeneric, 'c':SdpConnecton, 'b':SdpGeneric, \
  'k':SdpGeneric}
class SdpMediaDescription(object):
    """One SDP media description: the m=/i=/c=/b=/k= headers plus any
    number of a= attribute lines, serialized in that order."""
    m_header = None
    i_header = None
    c_header = None
    b_header = None
    k_header = None
    a_headers = None
    all_headers = ('m', 'i', 'c', 'b', 'k')
    # Marks the section as needing (RTP proxy) post-processing.
    needs_update = True

    def __init__(self, cself = None):
        # Copy constructor when cself is given; empty section otherwise.
        if cself != None:
            for header_name in [x + '_header' for x in self.all_headers]:
                try:
                    setattr(self, header_name, getattr(cself, header_name).getCopy())
                except AttributeError:
                    # Header absent on the source object -- leave it None.
                    pass
            self.a_headers = [x for x in cself.a_headers]
            return
        self.a_headers = []

    def __str__(self):
        """Serialize the section back to SDP wire format."""
        s = ''
        for name in self.all_headers:
            header = getattr(self, name + '_header')
            if header != None:
                s += '%s=%s\r\n' % (name, str(header))
        for header in self.a_headers:
            s += 'a=%s\r\n' % str(header)
        return s

    def noCStr(self):
        """Serialize like __str__ but omit the c= (connection) header."""
        s = ''
        for name in self.all_headers:
            if name == 'c':
                continue
            header = getattr(self, name + '_header')
            if header != None:
                s += '%s=%s\r\n' % (name, str(header))
        for header in self.a_headers:
            s += 'a=%s\r\n' % str(header)
        return s

    def __iadd__(self, other):
        # Supports ``section += 'x=value'`` while parsing an SDP body.
        self.addHeader(*other.strip().split('=', 1))
        return self

    def getCopy(self):
        return SdpMediaDescription(cself = self)

    def addHeader(self, name, header):
        """Parse and store one 'name=header' line of this media section."""
        if name == 'a':
            self.a_headers.append(header)
        else:
            setattr(self, name + '_header', f_types[name](header))
|
from SdpConnecton import SdpConnecton
from SdpMedia import SdpMedia
from SdpGeneric import SdpGeneric
# NOTE(review): this is the pre-change duplicate of the class above (the
# commit adds the needs_update attribute); kept as-is for history.
f_types = {'m':SdpMedia, 'i':SdpGeneric, 'c':SdpConnecton, 'b':SdpGeneric, \
  'k':SdpGeneric}
class SdpMediaDescription(object):
    """One SDP media description: the m=/i=/c=/b=/k= headers plus any
    number of a= attribute lines, serialized in that order."""
    m_header = None
    i_header = None
    c_header = None
    b_header = None
    k_header = None
    a_headers = None
    all_headers = ('m', 'i', 'c', 'b', 'k')

    def __init__(self, cself = None):
        # Copy constructor when cself is given; empty section otherwise.
        if cself != None:
            for header_name in [x + '_header' for x in self.all_headers]:
                try:
                    setattr(self, header_name, getattr(cself, header_name).getCopy())
                except AttributeError:
                    pass
            self.a_headers = [x for x in cself.a_headers]
            return
        self.a_headers = []

    def __str__(self):
        """Serialize the section back to SDP wire format."""
        s = ''
        for name in self.all_headers:
            header = getattr(self, name + '_header')
            if header != None:
                s += '%s=%s\r\n' % (name, str(header))
        for header in self.a_headers:
            s += 'a=%s\r\n' % str(header)
        return s

    def noCStr(self):
        """Serialize like __str__ but omit the c= (connection) header."""
        s = ''
        for name in self.all_headers:
            if name == 'c':
                continue
            header = getattr(self, name + '_header')
            if header != None:
                s += '%s=%s\r\n' % (name, str(header))
        for header in self.a_headers:
            s += 'a=%s\r\n' % str(header)
        return s

    def __iadd__(self, other):
        self.addHeader(*other.strip().split('=', 1))
        return self

    def getCopy(self):
        return SdpMediaDescription(cself = self)

    def addHeader(self, name, header):
        """Parse and store one 'name=header' line of this media section."""
        if name == 'a':
            self.a_headers.append(header)
        else:
            setattr(self, name + '_header', f_types[name](header))
| bsd-2-clause | Python |
86861db1c67ac4c5b069e0035ed33c18100d582e | Add a default tokenizer | mozilla/spicedham,mozilla/spicedham | spicedham/split_tokenizer.py | spicedham/split_tokenizer.py | from re import split
from spicedham.tokenizer import BaseTokenizer
class SplitTokenizer(BaseTokenizer):
def tokenize(self, text):
"""
Split the text on punctuation and newlines, lowercase everything, and
filter the empty strings
"""
text = split('[ ,.?!\n\r]', text)
is_not_blank = lambda x: x != ''
text = filter(is_not_blank, text)
lower_case = lambda x: x.lower()
text = map(lower_case, text)
return text
| mpl-2.0 | Python | |
40692d016b19c852364c879b7adb2672b023fe8e | add compile_commands.json gyp generator | nodejs/node-gyp,nodejs/node-gyp,nodegit/node-gyp,nodegit/node-gyp,nodegit/node-gyp,nodegit/node-gyp,nodejs/node-gyp,nodejs/node-gyp,nodegit/node-gyp,nodejs/node-gyp | tools/gyp/pylib/gyp/generator/compile_commands_json.py | tools/gyp/pylib/gyp/generator/compile_commands_json.py | # Copyright (c) 2016 Ben Noordhuis <info@bnoordhuis.nl>. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import gyp.common
import gyp.xcode_emulation
import json
import os
# Hooks/flags gyp inspects on every generator module; this generator needs
# none of the optional behaviors.
generator_additional_non_configuration_keys = []
generator_additional_path_sections = []
generator_extra_sources_for_rules = []
generator_filelist_paths = None
generator_supports_multiple_toolsets = True
generator_wants_sorted_dependencies = False
# Lifted from make.py. The actual values don't matter much.
generator_default_variables = {
  'CONFIGURATION_NAME': '$(BUILDTYPE)',
  'EXECUTABLE_PREFIX': '',
  'EXECUTABLE_SUFFIX': '',
  'INTERMEDIATE_DIR': '$(obj).$(TOOLSET)/$(TARGET)/geni',
  'PRODUCT_DIR': '$(builddir)',
  'RULE_INPUT_DIRNAME': '%(INPUT_DIRNAME)s',
  'RULE_INPUT_EXT': '$(suffix $<)',
  'RULE_INPUT_NAME': '$(notdir $<)',
  'RULE_INPUT_PATH': '$(abspath $<)',
  'RULE_INPUT_ROOT': '%(INPUT_ROOT)s',
  'SHARED_INTERMEDIATE_DIR': '$(obj)/gen',
  'SHARED_LIB_PREFIX': 'lib',
  'STATIC_LIB_PREFIX': 'lib',
  'STATIC_LIB_SUFFIX': '.a',
}
def IsMac(params):
  # True when gyp is generating for the 'mac' flavor (enables the Xcode
  # settings emulation path below).
  return 'mac' == gyp.common.GetFlavor(params)
def CalculateVariables(default_variables, params):
  # Generator hook: only OS is needed, defaulting to the detected flavor.
  default_variables.setdefault('OS', gyp.common.GetFlavor(params))
def AddCommandsForTarget(cwd, target, params, per_config_commands):
  """Append one compile-command record per C/C++ source of *target* to
  per_config_commands[configuration_name] for every configuration."""
  output_dir = params['generator_flags']['output_dir']
  for configuration_name, configuration in target['configurations'].iteritems():
    # NOTE(review): builddir_name is computed but never used.
    builddir_name = os.path.join(output_dir, configuration_name)
    # On mac the flags live in Xcode settings, elsewhere in plain cflags.
    if IsMac(params):
      xcode_settings = gyp.xcode_emulation.XcodeSettings(target)
      cflags = xcode_settings.GetCflags(configuration_name)
      cflags_c = xcode_settings.GetCflagsC(configuration_name)
      cflags_cc = xcode_settings.GetCflagsCC(configuration_name)
    else:
      cflags = configuration.get('cflags', [])
      cflags_c = configuration.get('cflags_c', [])
      cflags_cc = configuration.get('cflags_cc', [])
    # Common flags are prepended to the language-specific ones.
    cflags_c = cflags + cflags_c
    cflags_cc = cflags + cflags_cc
    defines = configuration.get('defines', [])
    defines = ['-D' + s for s in defines]
    # TODO(bnoordhuis) Handle generated source files.
    sources = target.get('sources', [])
    sources = [s for s in sources if s.endswith('.c') or s.endswith('.cc')]
    def resolve(filename):
      # Source paths are relative to the .gyp file's directory.
      return os.path.abspath(os.path.join(cwd, filename))
    # TODO(bnoordhuis) Handle generated header files.
    include_dirs = configuration.get('include_dirs', [])
    include_dirs = [s for s in include_dirs if not s.startswith('$(obj)')]
    includes = ['-I' + resolve(s) for s in include_dirs]
    # Shell-quote each flag group once, outside the per-source loop.
    defines = gyp.common.EncodePOSIXShellList(defines)
    includes = gyp.common.EncodePOSIXShellList(includes)
    cflags_c = gyp.common.EncodePOSIXShellList(cflags_c)
    cflags_cc = gyp.common.EncodePOSIXShellList(cflags_cc)
    commands = per_config_commands.setdefault(configuration_name, [])
    for source in sources:
      file = resolve(source)
      isc = source.endswith('.c')
      cc = 'cc' if isc else 'c++'
      cflags = cflags_c if isc else cflags_cc
      command = ' '.join((cc, defines, includes, cflags,
                          '-c', gyp.common.EncodePOSIXShellArgument(file)))
      commands.append(dict(command=command, directory=output_dir, file=file))
def GenerateOutput(target_list, target_dicts, data, params):
  """Entry point called by gyp.

  Collects per-source compile commands for every target and dumps them to
  <output_dir>/<configuration>/compile_commands.json.
  """
  per_config_commands = {}
  for qualified_target, target in target_dicts.iteritems():
    build_file, target_name, toolset = (
        gyp.common.ParseQualifiedTarget(qualified_target))
    if IsMac(params):
      settings = data[build_file]
      gyp.xcode_emulation.MergeGlobalXcodeSettingsToSpec(settings, target)
    cwd = os.path.dirname(build_file)
    AddCommandsForTarget(cwd, target, params, per_config_commands)

  output_dir = params['generator_flags']['output_dir']
  for configuration_name, commands in per_config_commands.iteritems():
    filename = os.path.join(output_dir,
                            configuration_name,
                            'compile_commands.json')
    gyp.common.EnsureDirExists(filename)
    # Use a context manager so the file is flushed and closed even on
    # error; the previous code leaked the file handle.
    with open(filename, 'w') as fp:
      json.dump(commands, fp=fp, indent=0, check_circular=False)
def PerformBuild(data, configurations, params):
pass
| mit | Python | |
dbe697ba4c74f2552678775ff25cb266d52cd312 | Update build script. | qiuwch/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv,unrealcv/unrealcv,qiuwch/unrealcv | build-plugin.py | build-plugin.py | import os
UnrealEnginePath='~/workspace/UnrealEngine/Engine'
UATScript = os.path.join(UnrealEnginePath, 'Build/BatchFiles/RunUAT.sh')
if not os.path.isfile(UATScript):
print('Can not find Automation Script of UE4 %s' % UATScript)
print('Please set UnrealEnginePath correctly first')
else:
FullPluginFile = os.path.abspath('UnrealCV.uplugin')
cmd = '%s BuildPlugin -plugin=%s' % (UATScript, FullPluginFile)
print(cmd)
os.system(cmd)
| import os
UnrealEnginePath='~/workspace/UnrealEngine/Engine'
UATScript = os.path.join(UnrealEnginePath, 'Build/BatchFiles/RunUAT.sh')
if not os.path.isfile(UATScript):
print('Can not find Automation Script of UE4 %s' % UATScript)
print('Please set UnrealEnginePath correctly first')
FullPluginFile = os.path.abspath('UnrealCV.uplugin')
os.system('%s BuildPlugin -plugin=%s' % (UATScript, FullPluginFile))
| mit | Python |
50c8258f657837f5be36e1d449702da58ee22531 | add an incredibly basic test script | dmc2015/census,sunlightlabs/census,joehand/census,UDST/census | census/tests.py | census/tests.py | import os
import unittest
from core import Census, UnsupportedYearException
KEY = os.environ.get('CENSUS_KEY', '')
class TestUnsupportedYears(unittest.TestCase):
def setUp(self):
self.client = Census(KEY, year=2008)
def test_acs(self):
self.assertRaises(UnsupportedYearException,
self.client.acs.state, ('NAME', '06'))
def test_sf1(self):
self.assertRaises(UnsupportedYearException,
self.client.sf1.state, ('NAME', '06'))
def test_sf3(self):
self.assertRaises(UnsupportedYearException,
self.client.sf3.state, ('NAME', '06'))
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python | |
aec48ebe3b0431fe39e3f59dc92b8e66eaddb318 | Add languagebar.py | phuang/ibus,Keruspe/ibus,j717273419/ibus,ueno/ibus,ueno/ibus,ibus/ibus-cros,ibus/ibus-cros,j717273419/ibus,Keruspe/ibus,luoxsbupt/ibus,ibus/ibus,luoxsbupt/ibus,fujiwarat/ibus,luoxsbupt/ibus,phuang/ibus,luoxsbupt/ibus,Keruspe/ibus,ueno/ibus,fujiwarat/ibus,fujiwarat/ibus,luoxsbupt/ibus,j717273419/ibus,Keruspe/ibus,ibus/ibus-cros,j717273419/ibus,ibus/ibus,fujiwarat/ibus,ueno/ibus,ibus/ibus,phuang/ibus,ueno/ibus,phuang/ibus,ibus/ibus,ibus/ibus-cros | panel/languagebar.py | panel/languagebar.py | import gtk
import gtk.gdk as gdk
import gobject
from image import Image
from handle import Handle
class LanguageBar (gtk.Toolbar):
def __init__ (self):
gtk.Toolbar.__init__ (self)
self.set_property ("icon-size", gtk.ICON_SIZE_MENU)
# self.set_orientation (gtk.ORIENTATION_VERTICAL)
self._create_items ()
def insert (self, toolitem, pos):
gtk.Toolbar.insert (self, toolitem, pos)
self.check_resize ()
def _add_items (self):
btn = gtk.ToolButton (gtk.STOCK_NEW)
btn.connect ("clicked", lambda x: self._add_items ())
self.insert (btn, -1)
self.insert (gtk.ToolButton (gtk.STOCK_APPLY), -1)
self.insert (gtk.SeparatorToolItem (), -1)
self.show_all ()
def _create_items (self):
handle = Handle ()
item = gtk.ToolItem ()
item.add (handle)
self.insert (item, -1)
self._add_items ()
def do_realize (self):
gtk.Toolbar.do_realize (self)
self.check_resize ()
def do_check_resize (self):
width = 0
for item in self:
w, h = item.size_request ()
width += w
self.set_size_request (width + 2, -1)
gobject.type_register (LanguageBar, "IBusLanguageBar")
class LanguageBarWindow (gtk.Window):
def __init__ (self):
gtk.Window.__init__ (self, gtk.WINDOW_POPUP)
self._language_bar = LanguageBar ()
self._language_bar.connect ("size-request", self._size_request_cb)
self.add (self._language_bar)
self.show_all ()
def _size_request_cb (self, widget, size):
self.resize (size.width, size.height)
def do_size_allocate (self, allocation):
gtk.Window.do_size_allocate (self, allocation)
root = gdk.get_default_root_window ()
workarea = root.property_get ("_NET_WORKAREA")[2]
x, y = workarea[2] - allocation.width - 40, workarea[1] + workarea[3] - allocation.height
self.move (x, y)
def do_destroy (self):
gtk.main_quit ()
gtk.Window.do_destroy (self)
gobject.type_register (LanguageBarWindow, "IBusLanguageBarWindow")
| lgpl-2.1 | Python | |
4b356690500c7374acb166f8055edc8eb7e57509 | Create wget.py | shahsaifi/handystuff | wget.py | wget.py | import urllib
import sys
w = sys.argv[1]
def wget(w):
return urllib.urlopen(w).read()
def getbase(w):
return w.split("/")[-1]
def write(filename):
f = open((filename),"w")
f.write(wget(w))
f.close()
def main():
if w.endswith("/"):
write("index.html")
print "saving %s as index.html" % w
else:
write(getbase(w))
print "saving %s as %s" % (w, getbase(w))
if __name__ == "__main__":
main()
| unlicense | Python | |
c261ae123a3967af0e0bf5640d115f623ff6b446 | Add conpaas.core.agent.BaseAgent which all service agents can extend | mihaisoloi/conpaas,mihaisoloi/conpaas,mihaisoloi/conpaas,mihaisoloi/conpaas,mihaisoloi/conpaas | conpaas-services/src/conpaas/core/agent.py | conpaas-services/src/conpaas/core/agent.py | """
Copyright (c) 2010-2013, Contrail consortium.
All rights reserved.
Redistribution and use in source and binary forms,
with or without modification, are permitted provided
that the following conditions are met:
1. Redistributions of source code must retain the
above copyright notice, this list of conditions
and the following disclaimer.
2. Redistributions in binary form must reproduce
the above copyright notice, this list of
conditions and the following disclaimer in the
documentation and/or other materials provided
with the distribution.
3. Neither the name of the Contrail consortium nor the
names of its contributors may be used to endorse
or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""
from conpaas.core.expose import expose
from conpaas.core.log import create_logger
from conpaas.core.https.server import HttpJsonResponse
from conpaas.core.https.server import HttpErrorResponse
class BaseAgent(object):
"""Agent class with the following exposed methods:
check_agent_process() -- GET
"""
def __init__(self, config_parser, **kwargs):
self.logger = create_logger(__name__)
self.state = 'INIT'
service_type = config_parser.get('agent', 'TYPE')
user_id = config_parser.get('agent', 'USER_ID')
service_id = config_parser.get('agent', 'SERVICE_ID')
self.logger.info("'%s' agent started (uid=%s, sid=%s)" % (
service_type, user_id, service_id))
@expose('GET')
def check_agent_process(self, kwargs):
"""Check if agent process started - just return an empty response"""
if len(kwargs) != 0:
return HttpErrorResponse('ERROR: Arguments unexpected')
return HttpJsonResponse()
class AgentException(Exception):
E_CONFIG_NOT_EXIST = 0
E_CONFIG_EXISTS = 1
E_CONFIG_READ_FAILED = 2
E_CONFIG_CORRUPT = 3
E_CONFIG_COMMIT_FAILED = 4
E_ARGS_INVALID = 5
E_ARGS_UNEXPECTED = 6
E_ARGS_MISSING = 7
E_UNKNOWN = 8
E_STRINGS = [
'No configuration exists',
'Configuration already exists',
'Failed to read configuration state of %s from %s', # 2 params
'Configuration is corrupted',
'Failed to commit configuration',
'Invalid arguments',
'Unexpected arguments %s', # 1 param (a list)
'Missing argument "%s"', # 1 param
'Unknown error',
]
def __init__(self, code, *args, **kwargs):
self.code = code
self.args = args
if 'detail' in kwargs:
self.message = '%s DETAIL:%s' % ((self.E_STRINGS[code] % args),
str(kwargs['detail']))
else:
self.message = self.E_STRINGS[code] % args
| bsd-3-clause | Python | |
b420c9e24372d5c7057f7f3102cf7b364023f62c | Add files via upload | noxterrain/codex | SimpleGUI.py | SimpleGUI.py | """"
Creates a simple GUI for summing two numbers.
"""
import tkinter
from tkinter import ttk
class Adder(ttk.Frame):
"""The adders gui and functions."""
def __init__(self, parent, *args, **kwargs):
ttk.Frame.__init__(self, parent, *args, **kwargs)
self.root = parent
self.init_gui()
def on_quit(self):
"""Exits program."""
quit()
def calculate(self):
"""Calculates the sum of the two inputted numbers."""
num1 = int(self.num1_entry.get())
num2 = int(self.num2_entry.get())
num3 = num1 + num2
self.answer_label['text'] = num3
def init_gui(self):
"""Builds GUI."""
self.root.title('Number Adder')
self.root.option_add('*tearOff', 'FALSE')
self.grid(column=0, row=0, sticky='nsew')
self.menubar = tkinter.Menu(self.root)
self.menu_file = tkinter.Menu(self.menubar)
self.menu_file.add_command(label='Exit', command=self.on_quit)
self.menu_edit = tkinter.Menu(self.menubar)
self.menubar.add_cascade(menu=self.menu_file, label='File')
self.menubar.add_cascade(menu=self.menu_edit, label='Edit')
self.root.config(menu=self.menubar)
self.num1_entry = ttk.Entry(self, width=5)
self.num1_entry.grid(column=1, row = 2)
self.num2_entry = ttk.Entry(self, width=5)
self.num2_entry.grid(column=3, row=2)
self.calc_button = ttk.Button(self, text='Calculate',
command=self.calculate)
self.calc_button.grid(column=0, row=3, columnspan=4)
self.answer_frame = ttk.LabelFrame(self, text='Answer',
height=100)
self.answer_frame.grid(column=0, row=4, columnspan=4, sticky='nesw')
self.answer_label = ttk.Label(self.answer_frame, text='')
self.answer_label.grid(column=0, row=0)
# Labels that remain constant throughout execution.
ttk.Label(self, text='Number Adder').grid(column=0, row=0,
columnspan=4)
ttk.Label(self, text='Number one').grid(column=0, row=2,
sticky='w')
ttk.Label(self, text='Number two').grid(column=2, row=2,
sticky='w')
ttk.Separator(self, orient='horizontal').grid(column=0,
row=1, columnspan=4, sticky='ew')
for child in self.winfo_children():
child.grid_configure(padx=5, pady=5)
if __name__ == '__main__':
root = tkinter.Tk()
Adder(root) | mit | Python | |
34e2e605cbecbf9bcd7fee652095636a84e685b5 | Add : first attempt to autodetection. | xorpaul/shinken,staute/shinken_deb,Simage/shinken,xorpaul/shinken,savoirfairelinux/shinken,xorpaul/shinken,rednach/krill,staute/shinken_package,savoirfairelinux/shinken,naparuba/shinken,h4wkmoon/shinken,kaji-project/shinken,dfranco/shinken,xorpaul/shinken,fpeyre/shinken,geektophe/shinken,rednach/krill,mohierf/shinken,mohierf/shinken,peeyush-tm/shinken,naparuba/shinken,KerkhoffTechnologies/shinken,baloo/shinken,fpeyre/shinken,rednach/krill,Simage/shinken,tal-nino/shinken,ddurieux/alignak,mohierf/shinken,peeyush-tm/shinken,lets-software/shinken,dfranco/shinken,Aimage/shinken,baloo/shinken,titilambert/alignak,kaji-project/shinken,staute/shinken_deb,rledisez/shinken,ddurieux/alignak,fpeyre/shinken,peeyush-tm/shinken,claneys/shinken,Simage/shinken,peeyush-tm/shinken,tal-nino/shinken,claneys/shinken,kaji-project/shinken,Aimage/shinken,h4wkmoon/shinken,titilambert/alignak,Simage/shinken,xorpaul/shinken,Aimage/shinken,tal-nino/shinken,kaji-project/shinken,h4wkmoon/shinken,xorpaul/shinken,peeyush-tm/shinken,KerkhoffTechnologies/shinken,naparuba/shinken,savoirfairelinux/shinken,lets-software/shinken,dfranco/shinken,ddurieux/alignak,KerkhoffTechnologies/shinken,savoirfairelinux/shinken,xorpaul/shinken,fpeyre/shinken,tal-nino/shinken,claneys/shinken,dfranco/shinken,titilambert/alignak,KerkhoffTechnologies/shinken,staute/shinken_package,peeyush-tm/shinken,Aimage/shinken,h4wkmoon/shinken,Simage/shinken,savoirfairelinux/shinken,staute/shinken_deb,rledisez/shinken,rednach/krill,dfranco/shinken,staute/shinken_deb,staute/shinken_package,geektophe/shinken,staute/shinken_deb,claneys/shinken,gst/alignak,xorpaul/shinken,KerkhoffTechnologies/shinken,savoirfairelinux/shinken,kaji-project/shinken,lets-software/shinken,kaji-project/shinken,h4wkmoon/shinken,gst/alignak,geektophe/shinken,titilambert/alignak,ddurieux/alignak,lets-software/shinken,rednach/krill,baloo/shinken,lets-software/shinken,Aimage/shinken,rl
edisez/shinken,Alignak-monitoring/alignak,mohierf/shinken,rledisez/shinken,h4wkmoon/shinken,mohierf/shinken,h4wkmoon/shinken,kaji-project/shinken,fpeyre/shinken,mohierf/shinken,geektophe/shinken,naparuba/shinken,Alignak-monitoring/alignak,tal-nino/shinken,geektophe/shinken,naparuba/shinken,gst/alignak,fpeyre/shinken,claneys/shinken,staute/shinken_deb,rledisez/shinken,staute/shinken_package,dfranco/shinken,rledisez/shinken,lets-software/shinken,baloo/shinken,geektophe/shinken,Aimage/shinken,baloo/shinken,rednach/krill,tal-nino/shinken,KerkhoffTechnologies/shinken,baloo/shinken,h4wkmoon/shinken,staute/shinken_package,staute/shinken_package,ddurieux/alignak,claneys/shinken,naparuba/shinken,ddurieux/alignak,Simage/shinken,gst/alignak | libexec/show_nmap.py | libexec/show_nmap.py | from xml.etree.ElementTree import ElementTree
tree = ElementTree()
tree.parse("local.xml")
#tree.parse("www.google.com.xml")
p = tree.findall('host')
print "Number of host", len(p)
# Say if a host is up or not
def is_up(h):
status = h.find('status')
state = status.attrib['state']
return state == 'up'
class DetectedHost:
def __init__(self):
self.ip = ''
self.mac_vendor = ''
self.host_name = ''
self.os_possibilities = []
self.os = ('', '')
self.open_ports = []
# Keep the first name we got
def set_host_name(self, name):
if self.host_name == '':
self.host_name = name
# Fill the different os possibilities
def add_os_possibility(self, os, osgen, accuracy):
self.os_possibilities.append( (os, osgen, accuracy) )
# Look at ours oses and see which one is the better
def compute_os(self):
# bailout if we got no os :(
if len(self.os_possibilities) == 0:
return
max_accuracy = 0
for (os, osgen, accuracy) in self.os_possibilities:
if accuracy > max_accuracy:
max_accuracy = accuracy
# now get the entry with the max value
for (os, osgen, accuracy) in self.os_possibilities:
if accuracy == max_accuracy:
self.os = (os, osgen)
for h in p:
# Bypass non up hosts
if not is_up(h):
continue
dh = DetectedHost()
# Now we get the ipaddr and the mac vendor
# for future VMWare matching
#print h.__dict__
addrs = h.findall('address')
for addr in addrs:
#print "Address", addr.__dict__
addrtype = addr.attrib['addrtype']
if addrtype == 'ipv4':
dh.ip = addr.attrib['addr']
if addrtype == "mac":
dh.mac_vendor = addr.attrib['vendor']
# Now we got the hostnames
host_names = h.findall('hostnames')
for h_name in host_names:
h_names = h_name.findall('hostname')
for h_n in h_names:
#print 'hname', h_n.__dict__
#print 'Host name', h_n.attrib['name']
dh.set_host_name(h_n.attrib['name'])
# Now print the traceroute
traces = h.findall('trace')
for trace in traces:
hops = trace.findall('hop')
#for hop in hops:
# print hop.__dict__
# Now the OS detection
os = h.find('os')
#print os.__dict__
cls = os.findall('osclass')
for c in cls:
#print "Class", c.__dict__
family = c.attrib['osfamily']
accuracy = c.attrib['accuracy']
if 'osgen' in c.attrib:
osgen = c.attrib['osgen']
else:
osgen = None
#print "Type:", family, osgen, accuracy
dh.add_os_possibility(family, osgen, accuracy)
# Ok we can compute our OS now :)
dh.compute_os()
# Now the ports :)
allports = h.findall('ports')
for ap in allports:
ports = ap.findall('port')
for p in ports:
#print "Port", p.__dict__
p_id = p.attrib['portid']
s = p.find('state')
#print s.__dict__
state = s.attrib['state']
if state == 'open':
dh.open_ports.append(int(p_id))
print dh.__dict__
print "\n\n"
| agpl-3.0 | Python | |
79804cf72d096483d8c2483c0d02a21b715ddd5f | Add files via upload | Scaravex/clue-hackathon,Scaravex/clue-hackathon,adrinjalali/clue-hackathon,adrinjalali/clue-hackathon | ideas_v0.py | ideas_v0.py | # -*- coding: utf-8 -*-
"""
Created on Tue Mar 15 23:23:24 2017
@author: Flaminia
"""
import pandas as pd
import random
import numpy as np
import seaborn as sb
sample = pd.read_csv("data/data_sample.csv") # the first sample, one single file
# List users
udser_id = list(pd.unique(sample.user_id))
# density is a kind of pre processing the data
density = pd.DataFrame()
# For each user
for u_id in udser_id:
single_user = sample[sample.user_id == u_id]
# Day of ovulation (assuming that the luteal phase lasts 14 days)
# luteal lenght uniformly distributed around 14 days ±1
# (We can try gaussian distribution)
ov_day = single_user.cycle_length - (14 + random.randrange(-1, 1, 1))
# Ov_day_mean becomes the day 0 for each user
ov_day_mean = int(np.mean(ov_day))
cycle_mean = int(np.mean(single_user.cycle_length))
cycle_std = int(np.std(single_user.cycle_length))
# select "energy" in id1
user_energy = single_user[single_user.category == 'energy']
symptom_keys = list(set(user_energy.symptom))
sk = pd.DataFrame(symptom_keys)
days = list(set(single_user.day_in_cycle))
colors = {'exhausted': 'black', 'low_energy': 'blue', 'energized': 'red', 'high_energy': 'purple'}
color = sk[0].apply(lambda x: colors[x]) # links each symptom with the relative color in colors
s = 80
fig1, ax1 = sb.plt.subplots(figsize=(12, 6))
# for each user predictions are made with respect to the day of ovulation, which becomes the day 0
# the day of ovulation represents a fixed point in a cycle length of each user
for day in days:
i = 1
for key in symptom_keys:
c = color[i-1]
points = user_energy.loc[(user_energy['day_in_cycle'] == day) &
(user_energy['symptom'] == key)]
diary = pd.DataFrame({'user_id': u_id, 'day_to_from_ov':day-ov_day_mean, 'symptom': key,
'occurrence': points.shape[0]})
density = density.append(diary)
# %%
# print(day,key,len(points))
ax1.scatter(day-ov_day_mean, points.shape[0], s, c, label=key)
ax1.scatter(0, 0, color='r', s=100, marker='^', alpha=.4, label='ovulation')
ax1.set_title('User ID:' + str(u_id))
props1 = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
textstr1 = 'number of cycles= %.0f' % (len(single_user.cycle_length))
ax1.text(0.55, 0.95, textstr1, transform=ax1.transAxes, fontsize=14, verticalalignment='top', bbox=props1)
i += 1
# handles, labels = ax1.get_legend_handles_labels()
# ax1.legend([handle for i, handle in enumerate(handles) if i == 0],
# [label for i, label in enumerate(labels) if i == 0],
# loc="upper left", bbox_to_anchor=[0, 1], ncol=1, shadow=True, title="Legend", fancybox=True)
# plt.legend((lo, ll, l, a, h, hh, ho),
# ('Low Outlier', 'LoLo', 'Lo', 'Average', 'Hi', 'HiHi', 'High Outlier'),
# scatterpoints=1,
# loc='lower left',
# ncol=3,
# fontsize=8)
sb.plt.show() | apache-2.0 | Python | |
b8f90d2f2ab7fb439ddf912a47b7856dcb040a39 | Add move subcommand for label handling | Anaconda-Platform/anaconda-client,Anaconda-Platform/anaconda-client,Anaconda-Platform/anaconda-client | binstar_client/commands/move.py | binstar_client/commands/move.py | # -*- coding: utf-8 -*-
"""
Move packages between labels.
"""
# Standard library imports
from __future__ import unicode_literals, print_function
import logging
# Local imports
from binstar_client import errors
from binstar_client.utils import get_server_api, parse_specs
logger = logging.getLogger('binstar.move')
def main(args):
aserver_api = get_server_api(args.token, args.site)
spec = args.spec
channels = aserver_api.list_channels(spec.user)
label_text = 'label' if (args.from_label and args.to_label) else 'channel'
from_label = args.from_label.lower()
to_label = args.to_label.lower()
if from_label not in channels:
raise errors.UserError(
"{} {} does not exist\n\tplease choose from: {}".format(
label_text.title(),
from_label,
', '.join(channels)
))
if from_label == to_label:
raise errors.UserError('--from-label and --to-label must be different')
# Add files to to_label
try:
aserver_api.add_channel(
to_label,
spec.user,
package=spec.package,
version=spec._version,
filename=spec._basename,
)
except Exception:
pass
# Remove files from from_label
try:
aserver_api.remove_channel(
from_label,
spec.user,
package=spec.package,
version=spec._version,
filename=spec._basename,
)
except Exception:
pass
# for binstar_file in files:
# print("Copied file: %(basename)s" % binstar_file)
# if files:
# logger.info("Copied %i files" % len(files))
# else:
# logger.warning("Did not copy any files. Please check your inputs "
# "with \n\n\tanaconda show %s" % spec)
def add_parser(subparsers):
parser = subparsers.add_parser(
'move',
help='Move packages between labels',
description=__doc__,
)
parser.add_argument(
'spec',
help='Package - written as user/package/version[/filename] '
'If filename is not given, move all files in the version',
type=parse_specs,
)
# TODO: To be implemented later on
# parser.add_argument(
# '--to-owner',
# help='User account to move package to (default: your account)',
# )
_from = parser.add_mutually_exclusive_group()
_to = parser.add_mutually_exclusive_group()
_from.add_argument(
'--from-label',
help='Label to move packages from',
default='main',
)
_to.add_argument(
'--to-label',
help='Label to move packages to',
default='main',
)
parser.set_defaults(main=main)
| bsd-3-clause | Python | |
50e2cfbcb2444fd34a77d42589afbf8929c1cf00 | add module that may be useful | biokit/biokit,biokit/biokit | biokit/rtools/nbstreamreader.py | biokit/rtools/nbstreamreader.py | # http://eyalarubas.com/python-subproc-nonblock.html
from threading import Thread
try:
from Queue import Queue, Empty
except ImportError:
from queue import Queue, Empty
class NonBlockingStreamReader:
def __init__(self, stream):
"""
stream: the stream to read from.
Usually a process' stdout or stderr.
"""
self._s = stream
self._q = Queue()
def _populateQueue(stream, queue):
"""
Collect lines from 'stream' and put them in 'quque'.
"""
while True:
line = stream.readline()
if line:
queue.put(line)
else:
raise UnexpectedEndOfStream
self._t = Thread(target = _populateQueue,
args = (self._s, self._q))
self._t.daemon = True
self._t.start() #start collecting lines from the stream
def readline(self, timeout = None):
try:
return self._q.get(block = timeout is not None,
timeout = timeout)
except Empty:
return None
class UnexpectedEndOfStream(Exception): pass
| bsd-2-clause | Python | |
7523a3212a79228b3cffe82d50a7383a5d101aec | Add watcher(pagerduty) bot | forter/boten | boten/bots_available/watcher.py | boten/bots_available/watcher.py | from __future__ import absolute_import
from boten import core
import pygerduty
import datetime
from boten.config import watcher as config
class Bot(core.BaseBot):
def __init__(self):
super(Bot, self).__init__()
self.pager = pygerduty.PagerDuty(config.ORG_NAME, config.PAGERDUTY_KEY)
def command_who(self):
now = datetime.datetime.now()
tomorrow = now + datetime.timedelta(days=1)
bufferstr = ""
for schedule in self.pager.schedules.list():
bufferstr += "for *{}* the watcher is:\n".format(schedule.name)
entries = self.pager.schedules.show(schedule.id)
entry = next(entries.entries.list(since=now.isoformat(), until=tomorrow.isoformat()))
info = entry.to_json()
bufferstr += "> "
bufferstr += info['user']['name']
bufferstr += "\n"
yield bufferstr
| apache-2.0 | Python | |
023ca9fac2963d5dc998c1ba501e5705ec7dfe26 | Create elasticsearch_storage.py | jmlong1027/multiscanner,jmlong1027/multiscanner,awest1339/multiscanner,mitre/multiscanner,awest1339/multiscanner,awest1339/multiscanner,mitre/multiscanner,jmlong1027/multiscanner,mitre/multiscanner,MITRECND/multiscanner,MITRECND/multiscanner,awest1339/multiscanner,jmlong1027/multiscanner | storage/elasticsearch_storage.py | storage/elasticsearch_storage.py | '''
Storage module that will interact with elasticsearch.
'''
from uuid import uuid4
from elasticsearch import Elasticsearch, helpers
import storage
class ElasticSearchStorage(storage.Storage):
'''
Subclass of Storage.
'''
DEFAULTCONF = {
'ENABLED': True,
'host': 'localhost',
'port': 9200,
'index': 'multiscanner_reports',
'doc_type': 'reports',
}
def setup(self):
self.host = self.config['host']
self.port = self.config['port']
self.index = self.config['index']
self.doc_type = self.config['doc_type']
self.es = Elasticsearch(
host=self.host,
port=self.port
)
return True
def store(self, report):
report_id_list = []
report_list = []
for filename in report:
report[filename]['filename'] = filename
try:
report_id = report[filename]['SHA256']
except KeyError:
report_id = uuid4()
report_id_list.append(report_id)
report_list.append(
{
'_index': self.index,
'_type': self.doc_type,
'_id': report_id,
'_source': report[filename]
}
)
result = helpers.bulk(self.es, report_list)
return report_id_list
def get_report(self, report_id):
try:
result = self.es.get(
index=self.index, doc_type=self.doc_type,
id=report_id
)
return result['_source']
except:
return None
def delete(self, report_id):
try:
self.es.delete(
index=self.index, doc_type=self.doc_type,
id=report_id
)
return True
except:
return False
def teardown(self):
pass
| mpl-2.0 | Python | |
4d5b94ac4eded101148e21b4faa0ca7dafe74770 | fix the pylint "Class 'Position' has no 'objects' member" error | yohanboniface/memopol-core,stfp/memopol2,stfp/memopol2,yohanboniface/memopol-core,yohanboniface/memopol-core,stfp/memopol2 | memopol2/main/models.py | memopol2/main/models.py | from django.db import models
from memopol2 import settings
from couchdbkit import *
class Mep(dict):
"""
Our Mep pseudo model. Currently we use couchdbkit as a glorified http client and json parser,
the objets we work with are just dicts. This is here to wrap things a little bit, and do our
fixups (which should be moved to the migration scripts anyway).
FIXME - this is kind of fugly
"""
def __init__(self, *args):
dict.__init__(self, *args)
self.fixup()
def fixup(self):
# fixup email.addr.text
try:
node = self["contact"]["email"]
if not(type(node) is dict and node.has_key("text")):
self["contact"]["email"] = { "text": node }
except Exception:
raise
@staticmethod
def get(key):
couch = Server(settings.COUCHDB)
return Mep(couch["meps"].get(key))
class Position(models.Model):
objects = models.Manager() # fix for pylint warnings
mep_id = models.CharField(max_length=128)
subject = models.CharField(max_length=128)
content = models.CharField(max_length=512)
submitter_username = models.CharField(max_length=30)
submitter_ip = models.IPAddressField()
submit_datetime = models.DateTimeField()
moderated = models.BooleanField()
moderated_by = models.CharField(max_length=30)
visible = models.BooleanField()
def __json__(self):
return {"mep_id": self.mep_id, "content": self.content}
def __unicode__(self):
return "<Position for mep id='%s'>" % (self.mep_id)
| from django.db import models
from memopol2 import settings
from couchdbkit import *
class Mep(dict):
"""
Our Mep pseudo model. Currently we use couchdbkit as a glorified http client and json parser,
the objets we work with are just dicts. This is here to wrap things a little bit, and do our
fixups (which should be moved to the migration scripts anyway).
FIXME - this is kind of fugly
"""
def __init__(self, *args):
dict.__init__(self, *args)
self.fixup()
def fixup(self):
# fixup email.addr.text
try:
node = self["contact"]["email"]
if not(type(node) is dict and node.has_key("text")):
self["contact"]["email"] = { "text": node }
except Exception:
raise
@staticmethod
def get(key):
couch = Server(settings.COUCHDB)
return Mep(couch["meps"].get(key))
class Position(models.Model):
mep_id = models.CharField(max_length=128)
subject = models.CharField(max_length=128)
content = models.CharField(max_length=512)
submitter_username = models.CharField(max_length=30)
submitter_ip = models.IPAddressField()
submit_datetime = models.DateTimeField()
moderated = models.BooleanField()
moderated_by = models.CharField(max_length=30)
visible = models.BooleanField()
def __json__(self):
return {"mep_id": self.mep_id, "content": self.content}
def __unicode__(self):
return "<Position for mep id='%s'>" % (self.mep_id)
| agpl-3.0 | Python |
4681ee081f5600cebf7540862efc60dbf1d190d7 | Rename test module and add test for login page content | mkiterian/bucket-list-app,mkiterian/bucket-list-app,mkiterian/bucket-list-app | test_app.py | test_app.py | import unittest
from unittest import TestCase
from user import User
from bucketlist import BucketList
from flask import url_for
from app import app
class BucketListTest(TestCase):
def setUp(self):
# creates a test client
self.client = app.test_client()
self.client.testing = True
def test_success(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/login')
self.assertEqual(result.status_code, 200)
def test_failure(self):
# sends HTTP GET request to the application
# on the specified path
result = self.client.get('/nonexistant.html')
self.assertEqual(result.status_code, 404)
def test_login_page_loads(self):
# assert login page loads correctly
result = self.client.get('/login')
self.assertTrue(b'The best way to keep track of your dreams and goals' in result.data)
'''
def test_signup(self):
# register a new account
response = self.client.post(url_for('/signup'), data={
'username': 'hermano',
'email': 'herm@email.com',
'password': 'hard',
'confirm_password': 'hard'
})
self.assertTrue(response.status_code == 302)
'''
if __name__ == '__main__':
unittest.main()
| mit | Python | |
f6c64846fc066403d39d7cb60ce0bcc455aff2d5 | Add conversions for MIDI to hertz and hertz to MIDI | TheUnderscores/midi-beeper-orchestra | src/server/convert.py | src/server/convert.py | # midi-beeper-orchestra - program to create an orchestra from PC speakers
# Copyright (C) 2015 The Underscores
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import math
def MIDItoHz(MIDIval):
"""
Converts a MIDI note, MIDIval, value to the equivalent hertz value
"""
return 69 + 12 * math.log((MIDIval/440), 2)
def hzToMIDI(hz):
"""
Converts hertz, hz, to MIDI note equivalent
"""
midi = 2**((hz-69)/12) * 440
return int(midi + 0.5)
| agpl-3.0 | Python | |
57abe384393d9ec14e8c066d674e49742d42d12c | Return result code. | tnotstar/pycalcstats,tnotstar/pycalcstats,bmcculley/pycalcstats,bmcculley/pycalcstats | src/stats/__main__.py | src/stats/__main__.py | #!/usr/bin/env python3
## Copyright (c) 2011 Steven D'Aprano.
## See the file __init__.py for the licence terms for this software.
"""
Run the stats package as if it were an executable module.
Usage:
$ python3 -m stats [options]
Options:
-h --help Print this help text.
-V --version Print the version number.
-v --verbose Run tests verbosely.
-q --quiet Don't print anything on success.
With no options, perform a self-test of the stats package by running all
doctests in the package. By default, failed tests will be printed. If all
tests pass, a count of how many tests were performed is printed.
To print details of all tests regardless of whether they succeed or fail,
pass the verbose flag after the package name:
$ python3 -m stats -v
To suppress output if all tests pass, pass the quiet flag:
$ python3 -m stats -q
"""
import sys
def process_options():
    """Parse command-line flags and return the ``(verbose, quiet)`` pair.

    Exits the process directly for --help and --version, and with status 1
    when --verbose and --quiet are combined.
    """
    flags = set(sys.argv[1:])
    if flags & {'-h', '--help'}:
        print(__doc__)
        sys.exit(0)
    verbose = bool(flags & {'-v', '--verbose'})
    quiet = bool(flags & {'-q', '--quiet'})
    if verbose and quiet:
        print('cannot be both quiet and verbose', file=sys.stderr)
        sys.exit(1)
    if flags & {'-V', '--version'}:
        import stats
        print(stats.__version__)
        sys.exit(0)
    return verbose, quiet
def self_test(verbose, quiet):
    """Run every doctest in the stats package; return the failure count."""
    assert not (verbose and quiet)
    import doctest
    import stats, stats.co, stats.multivar, stats.order, \
            stats.univar, stats.utils
    modules = (stats, stats.co, stats.multivar, stats.order,
               stats.univar, stats.utils)
    failed = tried = 0
    for module in modules:
        outcome = doctest.testmod(module, verbose=verbose)
        failed += outcome[0]
        tried += outcome[1]
    if not failed and not quiet:
        print("Successfully run %d doctests from %d files."
              % (tried, len(modules)))
    return failed
# Entry point for ``python3 -m stats``: parse the flags, run the doctest
# suite and propagate the failure count as the process exit status.
if __name__ == '__main__' and __package__ is not None:
    verbose, quiet = process_options()
    sys.exit(self_test(verbose, quiet))
| #!/usr/bin/env python3
## Copyright (c) 2011 Steven D'Aprano.
## See the file __init__.py for the licence terms for this software.
"""
Run the stats package as if it were an executable module.
Usage:
$ python3 -m stats [options]
Options:
-h --help Print this help text.
-V --version Print the version number.
-v --verbose Run tests verbosely.
-q --quiet Don't print anything on success.
With no options, perform a self-test of the stats package by running all
doctests in the package. By default, failed tests will be printed. If all
tests pass, a count of how many tests were performed is printed.
To print details of all tests regardless of whether they succeed or fail,
pass the verbose flag after the package name:
$ python3 -m stats -v
To suppress output if all tests pass, pass the quiet flag:
$ python3 -m stats -q
"""
def process_options():
    """Parse command-line flags and return the ``(verbose, quiet)`` pair.

    Exits the process directly for --help and --version, and with status 1
    when --verbose and --quiet are combined.
    """
    import sys
    flags = set(sys.argv[1:])
    if flags & {'-h', '--help'}:
        print(__doc__)
        sys.exit(0)
    verbose = bool(flags & {'-v', '--verbose'})
    quiet = bool(flags & {'-q', '--quiet'})
    if verbose and quiet:
        print('cannot be both quiet and verbose', file=sys.stderr)
        sys.exit(1)
    if flags & {'-V', '--version'}:
        import stats
        print(stats.__version__)
        sys.exit(0)
    return verbose, quiet
def self_test(verbose, quiet):
    """Run every doctest in the stats package; return the failure count.

    Previously the failure count was computed but never returned, so
    callers could not use it as a process result code.
    """
    assert not (verbose and quiet)
    import doctest
    import stats, stats.co, stats.multivar, stats.order, \
            stats.univar, stats.utils
    modules = (stats, stats.co, stats.multivar, stats.order,
               stats.univar, stats.utils
               )
    failed = tried = 0
    for module in modules:
        a, b = doctest.testmod(module, verbose=verbose)
        failed += a
        tried += b
    if failed == 0 and not quiet:
        print("Successfully run %d doctests from %d files."
              % (tried, len(modules)))
    # Report the failure count so callers (e.g. sys.exit) can use it.
    return failed
# Entry point for ``python3 -m stats``.
# NOTE(review): self_test's result is discarded here, so the process always
# exits with status 0 even when doctests fail.
if __name__ == '__main__' and __package__ is not None:
    verbose, quiet = process_options()
    self_test(verbose, quiet)
| mit | Python |
a26b56085598bed7afe7cfef43fd1e2547b3831d | Add script to generate TF-TRT model to be used for testing | tensorflow/tensorflow-pywrap_tf_optimizer,jhseu/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,sarvex/tensorflow,gunan/tensorflow,cxxgtxy/tensorflow,freedomtan/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,petewarden/tensorflow,davidzchen/tensorflow,aam-at/tensorflow,freedomtan/tensorflow,yongtang/tensorflow,frreiss/tensorflow-fred,aldian/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,karllessard/tensorflow,sarvex/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,jhseu/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,gautam1858/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,tensorflow/tensorflow-pywrap_saved_model,Intel-Corporation/tensorflow,gunan/tensorflow,karllessard/tensorflow,freedomtan/tensorflow,sarvex/tensorflow,annarev/tensorflow,frreiss/tensorflow-fred,gautam1858/tensorflow,gautam1858/tensorflow,gautam1858/tensorflow,aam-at/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gunan/tensorflow,tensorflow/tensorflow,renyi533/tensorflow,xzturn/tensorflow,xzturn/tensorflow,paolodedios/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-pywrap_saved_model,petewarden/tensorflow,cxxgtxy/tensorflow,gunan/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,gunan/tensorflow,davidzchen/tensorflow,frreiss/tensorflow-fred,renyi533/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,jhseu/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,jhseu/tensorflow,Intel-tensorflow/tensorflow,freedomtan/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,xzturn/tensorflow,Intel-Corporation/tensorflow
,tensorflow/tensorflow-experimental_link_static_libraries_once,renyi533/tensorflow,xzturn/tensorflow,aam-at/tensorflow,gautam1858/tensorflow,freedomtan/tensorflow,aldian/tensorflow,aldian/tensorflow,aldian/tensorflow,frreiss/tensorflow-fred,frreiss/tensorflow-fred,jhseu/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,renyi533/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,aldian/tensorflow,jhseu/tensorflow,karllessard/tensorflow,gunan/tensorflow,sarvex/tensorflow,Intel-Corporation/tensorflow,annarev/tensorflow,gunan/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,petewarden/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,annarev/tensorflow,cxxgtxy/tensorflow,jhseu/tensorflow,annarev/tensorflow,aam-at/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,cxxgtxy/tensorflow,jhseu/tensorflow,petewarden/tensorflow,xzturn/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,aam-at/tensorflow,annarev/tensorflow,gunan/tensorflow,sarvex/tensorflow,annarev/tensorflow,gunan/tensorflow,davidzchen/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,gunan/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,annarev/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aam-at/tensorflow,davidzchen/tensorflow,xzturn/tensorflow,petewarden/tensorflow,Intel-Corporation/tensorflow,petewarden/tensorflow,frreiss/tensorflow-fred,yongtang/tensorflow,davidzchen/tensorflow,karllessard/tensorflow,davidzchen/tensorflow,gautam1858/tensorflow,renyi533/tensorflow,Intel-tensorflow/tensorflow,aam-at/tensorflow,Intel-tensorflow/tensorflow,paolodedios/tensorflow,petewarden/tensorflow,renyi533/tensorflow,xzturn/tensorflow,tensorflow/tensorflow,petewarden/tensorflow,yongtang/tensorflow,tensorf
low/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,jhseu/tensorflow,gautam1858/tensorflow,davidzchen/tensorflow,annarev/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,aam-at/tensorflow,frreiss/tensorflow-fred,aam-at/tensorflow,jhseu/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,freedomtan/tensorflow,karllessard/tensorflow,annarev/tensorflow,davidzchen/tensorflow,gunan/tensorflow,freedomtan/tensorflow,petewarden/tensorflow,freedomtan/tensorflow,aam-at/tensorflow,paolodedios/tensorflow,davidzchen/tensorflow,Intel-Corporation/tensorflow,petewarden/tensorflow,paolodedios/tensorflow,paolodedios/tensorflow,petewarden/tensorflow,gautam1858/tensorflow,xzturn/tensorflow,yongtang/tensorflow,paolodedios/tensorflow,sarvex/tensorflow,karllessard/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-pywrap_saved_model,davidzchen/tensorflow,xzturn/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,cxxgtxy/tensorflow,tensorflow/tensorflow-pywrap_saved_model,frreiss/tensorflow-fred,sarvex/tensorflow,Intel-tensorflow/tensorflow,Intel-Corporation/tensorflow,karllessard/tensorflow,aam-at/tensorflow,Intel-Corporation/tensorflow,aam-at/tensorflow,tensorflow/tensorflow,xzturn/tensorflow,tensorflow/tensorflow-pywrap_saved_model,yongtang/tensorflow,renyi533/tensorflow,renyi533/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow,Intel-Corporation/tensorflow,gunan/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,freedomtan/tensorflow,davidzchen/tensorflow,aldian/tensorflow,petewarden/tensorflow,cxxgtxy/tensorflow,freedomtan/tensorflow,annarev/tensorflow,aldian/tensorflow,paolodedios/tensorflow,jhseu/tensorflow,renyi533/tensorflow,tensorflow/tensorflow,sarvex/tensorflow,cxxgtxy/tensorflow,xzturn/tensorflow,xztu
rn/tensorflow,tensorflow/tensorflow-pywrap_saved_model,davidzchen/tensorflow,cxxgtxy/tensorflow,aldian/tensorflow,renyi533/tensorflow,gautam1858/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once | tensorflow/python/compiler/tensorrt/test/testdata/gen_tftrt_model.py | tensorflow/python/compiler/tensorrt/test/testdata/gen_tftrt_model.py | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Saves a SavedModel after TensorRT conversion.
The saved model is loaded and executed by tests to ensure backward
compatibility across TF versions.
The script may not work in TF1.x.
Instructions on how to use this script:
- Execute the script as follows:
python gen_tftrt_model
- Rename tftrt_saved_model to what makes sense for your test.
- Delete directory tf_saved_model unless you want to use it.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.training.tracking import tracking
from tensorflow.python.eager import def_function
from tensorflow.python.framework import tensor_spec
from tensorflow.python.framework import dtypes
from tensorflow.python.ops import variables
from tensorflow.python.ops import array_ops
from tensorflow.python.saved_model import save
from tensorflow.python.compiler.tensorrt import trt_convert
def GetGraph(inp1, inp2, var):
    """Build the small test graph: ((inp1*(inp1+var)) + (inp1+var)) + inp2."""
    base = inp1 + var
    total = inp1 * base
    total = total + base
    total = total + inp2
    return array_ops.identity(total, name="output")
class SimpleModel(tracking.AutoTrackable):
    """Define model with a TF function."""
    def __init__(self):
        # Variable is created lazily on the first trace of ``run`` (see
        # below), so it starts out unset.
        self.v = None
    @def_function.function(input_signature=[
        tensor_spec.TensorSpec(shape=[None, 1, 1], dtype=dtypes.float32),
        tensor_spec.TensorSpec(shape=[None, 1, 1], dtype=dtypes.float32)
    ])
    def run(self, inp1, inp2):
        # Create the variable inside the tf.function on first call so it is
        # captured by the traced concrete function.
        if self.v is None:
            self.v = variables.Variable([[[1.0]]], dtype=dtypes.float32)
        return GetGraph(inp1, inp2, self.v)
# Build the model, save it as a plain TF SavedModel, then convert that
# SavedModel with TF-TRT and save the converted version.
root = SimpleModel()
input_saved_model_dir = "tf_saved_model"
output_saved_model_dir = "tftrt_saved_model"
_SAVED_MODEL_SIGNATURE_KEY = "tftrt_test_predict"
# Saved TF model
save.save(root, input_saved_model_dir,
          {_SAVED_MODEL_SIGNATURE_KEY: root.run})
# Convert TF model to TensorRT
converter = trt_convert.TrtGraphConverterV2(
    input_saved_model_dir=input_saved_model_dir,
    input_saved_model_signature_key=_SAVED_MODEL_SIGNATURE_KEY)
converter.convert()
def my_input_fn():
    # One batch of random inputs matching the [None, 1, 1] signature, used
    # by ``converter.build`` to pre-build TensorRT engines.
    np_input1 = np.random.random_sample([4, 1, 1]).astype(np.float32)
    np_input2 = np.random.random_sample([4, 1, 1]).astype(np.float32)
    yield np_input1, np_input2,
converter.build(input_fn=my_input_fn)
# Convert TensorRT model
converter.save(output_saved_model_dir)
| apache-2.0 | Python | |
82761fcdce049ec0d8205a68c0899b8f03f0a38c | Add FileTree | coala-analyzer/coala-gui | source/support/FileTree.py | source/support/FileTree.py | import os
from gi.repository import Gtk
from gi.repository.GdkPixbuf import Pixbuf
class FileTree(Gtk.TreeStore):
    """Lazy directory tree backed by a Gtk.TreeStore.

    Columns are (display name, icon pixbuf, full path).  Directory rows get
    a placeholder child so the expander arrow is shown; real contents are
    loaded on row expansion and discarded again on collapse.
    """
    def __init__(self, path):
        Gtk.TreeStore.__init__(self, str, Pixbuf, str)
        self.path = path
        self.populateFileTree(self.path)
        # View with an icon cell followed by a text cell in one column.
        self.fileTreeView = Gtk.TreeView(self)
        treeviewcol = Gtk.TreeViewColumn("File")
        colcelltext = Gtk.CellRendererText()
        colcellimg = Gtk.CellRendererPixbuf()
        treeviewcol.pack_start(colcellimg, False)
        treeviewcol.pack_start(colcelltext, True)
        treeviewcol.add_attribute(colcelltext, "text", 0)
        treeviewcol.add_attribute(colcellimg, "pixbuf", 1)
        self.fileTreeView.append_column(treeviewcol)
        self.fileTreeView.connect("row-expanded", self.onRowExpanded)
        self.fileTreeView.connect("row-collapsed", self.onRowCollapsed)
        self.fileTreeView.set_headers_visible(False)
        self.fileTreeView.set_visible(True)
    def populateFileTree(self, path, parent=None):
        """Append one row per entry of *path* under *parent* (lazily)."""
        itemCounter = 0
        for item in os.listdir(path):
            itemFullname = os.path.join(path, item)
            itemIsFolder = os.path.isdir(itemFullname)
            itemIcon = Gtk.IconTheme.get_default().load_icon(
                "folder" if itemIsFolder else "text-x-generic-symbolic", 22, 0)
            currentIter = self.append(parent, [item, itemIcon, itemFullname])
            if itemIsFolder:
                # Placeholder child; makes the directory row expandable.
                self.append(currentIter, [None, None, None])
            itemCounter += 1
        if itemCounter < 1:
            # Empty directory: keep a placeholder row under the parent.
            self.append(parent, [None, None, None])
    def onRowExpanded(self, treeView, treeIter, treePath):
        # Load the real children, then drop the placeholder, which is the
        # first child because it was appended before the new rows.
        newPath = self.get_value(treeIter, 2)
        self.populateFileTree(newPath, treeIter)
        self.remove(self.iter_children(treeIter))
    def onRowCollapsed(self, treeView, treeIter, treePath):
        # Discard all children and restore a single placeholder so the
        # directory is re-read (and stays expandable) on the next expand.
        currentChildIter = self.iter_children(treeIter)
        while currentChildIter:
            self.remove(currentChildIter)
            currentChildIter = self.iter_children(treeIter)
        self.append(treeIter, [None, None, None])
f1e600ed5dfa8955af93c6ecd05d27cd8c63b2e0 | Add migration | softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat | fat/migrations/0058_auto_20160808_1007.py | fat/migrations/0058_auto_20160808_1007.py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-08-08 10:07
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds two advertising-consent boolean
    # flags to the Event model, both defaulting to False.

    dependencies = [
        ('fat', '0057_blog_reviewer'),
    ]

    operations = [
        migrations.AddField(
            model_name='event',
            name='can_be_advertise_after',
            field=models.BooleanField(default=False),
        ),
        migrations.AddField(
            model_name='event',
            name='can_be_advertise_before',
            field=models.BooleanField(default=False),
        ),
    ]
| bsd-3-clause | Python | |
fe3e90794c2f6ad2657dc1b02d71806d2d7dbfd0 | Add base feed collection models | jasonthomas/zamboni,ingenioustechie/zamboni,jamesthechamp/zamboni,Hitechverma/zamboni,diox/zamboni,mozilla/zamboni,shahbaz17/zamboni,ddurst/zamboni,elysium001/zamboni,mudithkr/zamboni,Jobava/zamboni,clouserw/zamboni,Jobava/zamboni,shahbaz17/zamboni,shahbaz17/zamboni,ayushagrawal288/zamboni,jamesthechamp/zamboni,kumar303/zamboni,ngokevin/zamboni,tsl143/zamboni,andymckay/zamboni,tsl143/zamboni,mudithkr/zamboni,ddurst/zamboni,andymckay/zamboni,clouserw/zamboni,Hitechverma/zamboni,luckylavish/zamboni,washort/zamboni,kumar303/zamboni,mudithkr/zamboni,mstriemer/zamboni,luckylavish/zamboni,eviljeff/zamboni,jamesthechamp/zamboni,diox/zamboni,clouserw/zamboni,mstriemer/zamboni,kumar303/zamboni,elysium001/zamboni,mozilla/zamboni,washort/zamboni,ddurst/zamboni,elysium001/zamboni,jasonthomas/zamboni,luckylavish/zamboni,eviljeff/zamboni,eviljeff/zamboni,tsl143/zamboni,clouserw/zamboni,eviljeff/zamboni,ayushagrawal288/zamboni,ddurst/zamboni,tsl143/zamboni,mozilla/zamboni,Jobava/zamboni,andymckay/zamboni,ayushagrawal288/zamboni,mudithkr/zamboni,Hitechverma/zamboni,washort/zamboni,jasonthomas/zamboni,washort/zamboni,Jobava/zamboni,ingenioustechie/zamboni,shahbaz17/zamboni,mstriemer/zamboni,diox/zamboni,ngokevin/zamboni,jasonthomas/zamboni,Hitechverma/zamboni,ayushagrawal288/zamboni,ingenioustechie/zamboni,mozilla/zamboni,ingenioustechie/zamboni,kumar303/zamboni,diox/zamboni,elysium001/zamboni,mstriemer/zamboni,ngokevin/zamboni,jamesthechamp/zamboni,luckylavish/zamboni | mkt/feed/models_base.py | mkt/feed/models_base.py | from django.db import models
import amo.models
from amo.decorators import use_master
from amo.models import SlugField
from addons.models import clean_slug
from mkt.webapps.models import Webapp
from mkt.webapps.tasks import index_webapps
class BaseFeedCollectionMembership(amo.models.ModelBase):
    """
    Abstract through-model base linking an app to a feed collection.

    Subclasses must define an ``obj`` ForeignKey to the concrete collection
    model; the ``obj = None`` below is only a placeholder referenced by the
    Meta options.
    """
    app = models.ForeignKey(Webapp)
    # Position of the app within the collection (lower comes first).
    order = models.SmallIntegerField(null=True)
    obj = None

    class Meta:
        abstract = True
        ordering = ('order',)
        unique_together = ('obj', 'app',)
class BaseFeedCollection(amo.models.ModelBase):
    """
    On the feed, there are a number of types of feed items that share a similar
    structure: a slug and an ordered set of member apps.
    This is a base class for those feed items, including:
    - Editorial Brands: `FeedBrand`
    - Collections: `FeedCollection`
    - Operator Shelves: `FeedOperatorShelf`
    Subclasses must do a few things:
    - Define an M2M field named `_apps` with a custom through model that
      inherits from `BaseFeedCollectionMembership`.
    - Set the `membership_class` class property to the custom through model
      used by `_apps`.
    - Set the `membership_relation` class property to the name of the relation
      on the model.
    """
    _apps = None
    slug = SlugField(blank=True, max_length=30,
                     help_text='Used in collection URLs.')

    membership_class = None
    membership_relation = None

    objects = amo.models.ManagerBase()

    class Meta:
        abstract = True
        ordering = ('-id',)

    def save(self, **kw):
        # Ensure a clean, unique slug before persisting.
        self.clean_slug()
        return super(BaseFeedCollection, self).save(**kw)

    @use_master
    def clean_slug(self):
        """Generate/validate the slug, always reading from the master DB."""
        clean_slug(self, 'slug')

    def apps(self):
        """
        Public apps on the collection, ordered by their position in the
        membership model.
        Use this method every time you want to display apps for a collection
        to a user.
        """
        filters = {
            'disabled_by_user': False,
            'status': amo.STATUS_PUBLIC
        }
        return self._apps.filter(**filters).order_by(self.membership_relation)

    def add_app(self, app, order=None):
        """
        Add an app to this collection. If specified, the app will be created
        with the specified `order`. If not, it will be added to the end of the
        collection.
        """
        qs = self.membership_class.objects.filter(obj=self)
        if order is None:
            # Append: one past the current highest order (0 when empty).
            aggregate = qs.aggregate(models.Max('order'))['order__max']
            order = aggregate + 1 if aggregate is not None else 0
        rval = self.membership_class.objects.create(obj=self, app=app,
                                                    order=order)
        # Help django-cache-machine: it doesn't like many 2 many relations,
        # the cache is never invalidated properly when adding a new object.
        self.membership_class.objects.invalidate(*qs)
        # Keep the app's search index in sync with its membership.
        index_webapps.delay([app.pk])
        return rval

    def remove_app(self, app):
        """
        Remove the passed app from this collection, returning a boolean
        indicating whether a successful deletion took place.
        """
        try:
            membership = self.membership_class.objects.get(obj=self, app=app)
        except self.membership_class.DoesNotExist:
            return False
        else:
            membership.delete()
            index_webapps.delay([app.pk])
            return True

    def reorder(self, new_order):
        """
        Passed a list of app IDs, e.g.
        [18, 24, 9]
        will change the order of each item in the collection to match the
        passed order. A ValueError will be raised if each app in the
        collection is not included in the list.
        """
        existing_pks = self.apps().no_cache().values_list('pk', flat=True)
        if set(existing_pks) != set(new_order):
            raise ValueError('Not all apps included')
        for order, pk in enumerate(new_order):
            member = self.membership_class.objects.get(obj=self, app_id=pk)
            member.update(order=order)
        index_webapps.delay(new_order)
| bsd-3-clause | Python | |
4ffad6ba3804238b288be59c7e0fc33206f79ca4 | Create ReaderAuthenticated.py | stoeps13/ibmcnx2,stoeps13/ibmcnx2 | ibmcnx/config/j2ee/ReaderAuthenticated.py | ibmcnx/config/j2ee/ReaderAuthenticated.py | ######
# Set Reader Roles to Restricted
# no anonymous access possible
#
# Author: Christoph Stoettner
# Blog: http://www.stoeps.de
# E-Mail:
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-07-16
#
# License: Apache 2.0
#
# Description:
# Script is tested with IBM Connections 5
#
# History:
# 20131124 Christoph Stoettner Update with loop and try/except to handle errors, added group support
# 20140324 Christoph Stoettner Changed all reader roles to "All authenticated", Ajax proxy security is configured with this role!
# 20140716 Christoph Stoettner Set the script to only change reader role
# Iterate over every installed application and restrict its "reader" J2EE
# role to "All authenticated" (Everyone=No, AllAuthenticated=Yes, no
# explicit users/groups), removing anonymous access.
apps = AdminApp.list()
appsList = apps.splitlines()
for app in appsList:
    print "Setting Reader Role to Authenticated for %s" % app.upper()
    try:
        AdminApp.edit( app, '[-MapRolesToUsers [["reader" No Yes "" ""] ]]' )
        AdminConfig.save()
    except:
        # NOTE(review): bare except kept deliberately -- under wsadmin
        # (Jython), scripting errors raised from Java may not derive from
        # Exception; apps without a "reader" role land here.
        print "No Reader Role in %s" % app.upper()
| apache-2.0 | Python | |
d335496951e5979d9d8442369f47ca7c99f805e7 | Introduce a very simple vim modeline parser. | pathawks/pygments,pathawks/pygments,pathawks/pygments,pathawks/pygments,pathawks/pygments,pathawks/pygments,pathawks/pygments,pathawks/pygments,pathawks/pygments,pathawks/pygments,pathawks/pygments,pathawks/pygments,pathawks/pygments,pathawks/pygments,pathawks/pygments,pathawks/pygments | pygments/modeline.py | pygments/modeline.py | # -*- coding: utf-8 -*-
"""
pygments.modeline
~~~~~~~~~~~~~~~~~
A simple modeline parser (based on pymodeline).
:copyright: Copyright 2006-2012 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import re
__all__ = ['get_filetype_from_buffer']
modeline_re = re.compile(r'''
(?: vi | vim | ex ) (?: [<=>]? \d* )? :
.* (?: ft | filetype | syn | syntax ) = ( [^:\s]+ )
''', re.VERBOSE)
def get_filetype_from_line(l):
    """Return the filetype named in a modeline on *l*, or None."""
    match = modeline_re.search(l)
    return match.group(1) if match else None
def get_filetype_from_buffer(buf, max_lines = 5):
    """
    Scan the buffer for modelines and return filetype if one is found.

    Only the last `max_lines` lines (scanned first, bottom-up) and the
    first `max_lines + 1` lines (scanned from index `max_lines` down to the
    start) are examined, mirroring where editors accept modelines.
    """
    lines = buf.splitlines()
    for l in lines[-1:-max_lines-1:-1]:
        ret = get_filetype_from_line(l)
        if ret:
            return ret
    # Was ``lines[max_lines:0:-1]``, which stopped before index 0 and so
    # never examined the very first line of the buffer.
    for l in lines[max_lines::-1]:
        ret = get_filetype_from_line(l)
        if ret:
            return ret
    return None
| bsd-2-clause | Python | |
27568f245f76aaf85f4a3db5b2a3486b37afd047 | add scripts that converts smiles to graph object | snap-stanford/ogb | examples/graphproppred/mol/smiles2graph.py | examples/graphproppred/mol/smiles2graph.py | from ogb.utils.features import (allowable_features, atom_to_feature_vector,
bond_to_feature_vector, atom_feature_vector_to_dict, bond_feature_vector_to_dict)
from rdkit import Chem
import numpy as np
def mol_to_data_obj(smile_string):
"""
Converts SMILE string to graph Data object
:input: SMILE string (str)
:return: graph object
"""
mol = Chem.MolFromSmiles(smile_string)
# atoms
atom_features_list = []
for atom in mol.GetAtoms():
atom_features_list.append(atom_to_feature_vector(atom))
x = np.array(atom_features_list, dtype = np.int64)
# bonds
num_bond_features = 3 # bond type, bond stereo, is_conjugated
if len(mol.GetBonds()) > 0: # mol has bonds
edges_list = []
edge_features_list = []
for bond in mol.GetBonds():
i = bond.GetBeginAtomIdx()
j = bond.GetEndAtomIdx()
edge_feature = bond_to_feature_vector(bond)
# add edges in both directions
edges_list.append((i, j))
edge_features_list.append(edge_feature)
edges_list.append((j, i))
edge_features_list.append(edge_feature)
# data.edge_index: Graph connectivity in COO format with shape [2, num_edges]
edge_index = np.array(edges_list, dtype = np.int64).T
# data.edge_attr: Edge feature matrix with shape [num_edges, num_edge_features]
edge_attr = np.array(edge_features_list, dtype = np.int64)
else: # mol has no bonds
edge_index = np.empty((2, 0), dtype = np.int64)
edge_attr = np.empty((0, num_bond_features), dtype = np.int64)
graph = dict()
graph['edge_index'] = edge_index
graph['edge_feat'] = edge_attr
graph['node_feat'] = x
graph['num_nodes'] = len(x)
return graph
if __name__ == '__main__':
graph = mol_to_data_obj('O1C=C[C@H]([C@H]1O2)c3c2cc(OC)c4c3OC(=O)C5=C4CCC(=O)5')
print(graph)
| mit | Python | |
d967163ee3bdd2f86041cace5b6e3d859d471f4f | add sample length test | Turan-no/Turan,Turan-no/Turan,Turan-no/Turan,Turan-no/Turan | tests/samplelength.py | tests/samplelength.py | from turan.models import *
eds = Exercise.objects.get(pk=3723).exercisedetail_set.all()
a = {}
previous = eds[0].time
for e in eds:
time_d = (e.time - previous).seconds
if not time_d in a:
a[time_d] = 0
a[time_d] += 1
previous = e.time
| agpl-3.0 | Python | |
ee0b02841427b8251d1ecb39a8b91c3bc033a400 | Add first version of vertical dispersion fitting | lnls-fac/apsuite | generic_scripts/fit_vertical_dispersion.py | generic_scripts/fit_vertical_dispersion.py | """Script to fit vertical dispersion with skew-quadrupoles.""""
import numpy as np
from mathphys.functions import load_pickle
import pyaccel as pa
from pymodels import si
import siriuspy.clientconfigdb as servconf
from apsuite.orbcorr import OrbRespmat
from apsuite.optics_analysis.tune_correction import TuneCorr
def get_dispersion_from_orm(orm, alpha, rf_freq):
    """Dispersion from the RF (last) column of an orbit response matrix."""
    scale = -alpha * rf_freq
    return scale * orm[:, -1]
def calc_model_dispersion(mod, bpmidx):
    """Stack [etax; etay] at the BPM indices from the model twiss."""
    twiss, _ = pa.optics.calc_twiss(mod)
    eta_x = twiss.etax[bpmidx]
    eta_y = twiss.etay[bpmidx]
    return np.concatenate((eta_x, eta_y))
def calc_rms(vec):
    """Root-mean-square of *vec*."""
    return np.sqrt(np.mean(np.square(vec)))
def get_orm_data(name):
    """Load a measurement setup pickle, unwrapping its 'data' entry if any."""
    setup = load_pickle(name)
    return setup['data'] if 'data' in setup else setup
def proces_data(setup, find_best_alpha=True):
    """Compare nominal-model dispersion with the measured ORM dispersion.

    NOTE(review): the function name has a typo (kept for compatibility) and
    the final ``disp_meas`` is computed but never returned -- presumably a
    ``return disp_meas, disp_nominal`` is missing; confirm with callers.
    """
    # Get nominal model
    simod = si.create_accelerator()
    simod.cavity_on = True
    simod.radiation_on = False
    simod = adjust_tunes(simod, setup)
    # Get nominal orbit matrix and dispersion
    matrix_nominal = OrbRespmat(simod, 'SI', '6d').get_respm()
    alpha0 = pa.optics.get_mcf(simod)
    print('momentum compaction: {:e}'.format(alpha0))
    idx = pa.lattice.find_indices(simod, 'pass_method', 'cavity_pass')[0]
    rf_freq = simod[idx].frequency
    disp_nominal = get_dispersion_from_orm(matrix_nominal, alpha0, rf_freq)
    # Get measured orbit matrix from configuration server
    client = servconf.ConfigDBClient(config_type='si_orbcorr_respm')
    orbmat_name = setup['orbmat_name']
    orbmat_meas = np.array(client.get_config_value(name=orbmat_name))
    orbmat_meas = np.reshape(orbmat_meas, (320, 281))
    # Scale the RF column by 1e-6 (presumably um -> m; confirm units).
    orbmat_meas[:, -1] *= 1e-6
    rf_freq_meas = setup['rf_frequency']
    alpha_meas = alpha0
    if find_best_alpha:
        # Scan +-10% around the nominal momentum compaction and keep the
        # value minimizing the rms difference to the nominal dispersion.
        alphas = (1 + np.linspace(-10, 10, 10001)/100)*alpha0
        errs = []
        for al in alphas:
            disp_meas = get_dispersion_from_orm(orbmat_meas, al, rf_freq_meas)
            err = disp_meas - disp_nominal
            err = np.sqrt(np.mean(err*err))
            errs.append(err)
        alpha_meas = alphas[np.argmin(errs)]
        print('Factor needed for momentum compaction:')
        print(alphas[np.argmin(errs)]/alpha0)
    disp_meas = get_dispersion_from_orm(orbmat_meas, alpha_meas, rf_freq_meas)
def adjust_tunes(simod, setup):
    """Correct the model tunes to the measured values stored in *setup*.

    The measured values are the fractional parts on top of the nominal
    integer tunes (49, 14).  Returns the corrected model.
    """
    # Adjust tunes to match measured ones
    tunex_goal = 49 + setup['tunex']
    tuney_goal = 14 + setup['tuney']
    print('--- correcting si tunes...')
    tunecorr = TuneCorr(
        simod, 'SI', method='Proportional', grouping='TwoKnobs')
    tunecorr.get_tunes(simod)
    print('    tunes init  : ', tunecorr.get_tunes(simod))
    tunemat = tunecorr.calc_jacobian_matrix()
    tunecorr.correct_parameters(
        model=simod,
        goal_parameters=np.array([tunex_goal, tuney_goal]),
        jacobian_matrix=tunemat)
    print('    tunes final : ', tunecorr.get_tunes(simod))
    return simod
def calc_dispmat(simod, dksl=1e-6):
    """Dispersion response matrix w.r.t. the chromatic skew quads' KsL.

    Central finite difference with step *dksl*; returns a
    (2 * nr_bpms) x (nr_skew_quads) matrix.
    """
    fam = si.get_family_data(simod)
    qsidx = np.array(fam['QS']['index']).ravel()
    # get only chromatic skew quads
    chrom = []
    for qs in qsidx:
        if '0' not in simod[qs].fam_name:
            chrom.append(qs)
    qsidx = np.array(chrom).ravel()
    bpmidx = np.array(fam['BPM']['index']).ravel()
    # eta0 = calc_model_dispersion(simod, bpmidx)
    eta_mat = np.zeros((2*bpmidx.size, qsidx.size))
    for idx, qs in enumerate(qsidx):
        mod = simod[:]
        mod[qs].KsL += dksl/2
        etap = calc_model_dispersion(mod, bpmidx)
        mod[qs].KsL -= dksl
        etan = calc_model_dispersion(mod, bpmidx)
        eta_mat[:, idx] = (etap-etan)/dksl
        # NOTE(review): after +dksl/2, -dksl, +dksl the element ends at
        # +dksl/2 from where it started; this is only harmless if
        # ``simod[:]`` deep-copies the elements -- confirm pyaccel slicing
        # semantics.
        mod[qs].KsL += dksl
    return eta_mat
def fit_dispersion(
        simod, eta_mat, disp_meas, bpmidx, qsidx, svals=35, niter=10):
    """Fit the vertical dispersion with skew quadrupoles.

    Iteratively applies the pseudo-inverse of *eta_mat* (truncated to
    *svals* singular values) to the dispersion error, updating the KsL of
    the skew quads in *qsidx* for *niter* iterations.  Returns the
    corrected model.
    """
    umat, smat, vhmat = np.linalg.svd(eta_mat, full_matrices=False)
    ismat = 1/smat
    # Keep only the requested number of singular values.  (A previous
    # version re-assigned ``svals = 35`` here, silently ignoring the
    # parameter.)
    ismat[svals:] = 0
    imat = vhmat.T @ np.diag(ismat) @ umat.T
    modcorr = simod[:]
    for _ in range(niter):
        eta = calc_model_dispersion(modcorr, bpmidx)
        diff = disp_meas - eta
        # minimize error of vertical dispersion: zero the horizontal-plane
        # entries (first 160, one per BPM) so only etay is corrected
        diff[:160] *= 0
        print(calc_rms(diff)*1e3)
        stren = imat @ diff
        for idx, qs in enumerate(qsidx):
            modcorr[qs].KsL += stren[idx]
    return modcorr
| mit | Python | |
17cc4de3c0d6b7e3c843085a1f7f6694930a7e84 | Add Graham's Scan in Python | Deepak345/al-go-rithms,manikTharaka/al-go-rithms,Cnidarias/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,Cnidarias/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,manikTharaka/al-go-rithms,Cnidarias/al-go-rithms,manikTharaka/al-go-rithms,Cnidarias/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,Deepak345/al-go-rithms,Cnidarias/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,manikTharaka/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,Deepak345/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,Deepak345/al-go-rithms,manikTharaka/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,manikTharaka/al-go-rithms,Cnidarias/al-go-rithms,Cnidarias/al-go-rithms | geometry/graham_scan/python/graham_scan.py | geometry/graham_scan/python/graham_scan.py | #!/usr/bin/env python
import Tkinter as tk
from random import random
def make_a_right_turn(a, b, c):
    """Going from a to b to c involves a right turn?"""
    ux, uy = c[0] - b[0], c[1] - b[1]
    vx, vy = a[0] - b[0], a[1] - b[1]
    return ux * vy - uy * vx < 0


def graham_scan(points):
    """Calculate convex hull using Graham's Scan"""
    points.sort()

    def half_hull(pts):
        # Build one monotone chain, popping while the turn is not right.
        chain = list(pts[:2])
        for point in pts[2:]:
            while len(chain) > 1 and not make_a_right_turn(
                    chain[-2], chain[-1], point):
                chain.pop()
            chain.append(point)
        return chain

    upper = half_hull(points)
    lower = half_hull(points[::-1])
    # Drop the duplicated endpoints of the lower chain.
    return upper + lower[1:-1]
def test(n, width, height):
    """Test Graham's Scan algorithm with random points and draw the result"""
    points = [(random() * width, random() * height) for _ in range(n)]
    convex_hull = graham_scan(points)
    # UI stuff
    master = tk.Tk()
    canvas = tk.Canvas(master, width=width, height=height)
    canvas.pack()
    # Outline the hull polygon (flatten the vertex tuples for Tk).
    canvas.create_polygon(*[coord for point in convex_hull for coord in point], outline='blue', width=2, fill='')
    # Draw every input point as a small dot.
    for (x, y) in points:
        canvas.create_oval(x-1, y-1, x+1, y+1, fill='black')
    tk.mainloop()
if __name__ == "__main__":
    test(50, 640, 480)
| cc0-1.0 | Python | |
a9515ca8738520a9b963b74c509b3c32761528df | test convert | embali/imgpy | tests/test_convert.py | tests/test_convert.py | from tempfile import TemporaryFile
import pytest
from imgpy import Img
@pytest.mark.parametrize('image', ({
    'sub': 'anima/bordered.gif',
    'res': ('P', 38)
}, {
    'sub': 'anima/clear.gif',
    'res': ('P', 12)
}, {
    'sub': 'fixed/bordered.jpg',
    'res': ('L', 1)
}, {
    'sub': 'fixed/clear.jpg',
    'res': ('L', 1)
}, ))
def test_convert(path, image):
    """Convert to 'L', save/reload, and check the resulting (mode, n_frames)
    against the expected ``res`` for each fixture image."""
    with Img(fp=path(image['sub'])) as src:
        src.convert('L')
        with TemporaryFile() as tf:
            src.save(fp=tf)
            tf.seek(0)
            with Img(fp=tf) as dest:
                res = (dest.mode, dest.n_frames)
                assert res == image['res']
| mit | Python | |
c4de706925684bac2b540bef0b54f72821d0fdc8 | Add test that would have caught #135 | mwclient/mwclient,ubibene/mwclient | tests/test_listing.py | tests/test_listing.py | # encoding=utf-8
from __future__ import print_function
import unittest
import pytest
import logging
import requests
import responses
import mock
import mwclient
from mwclient.listing import List, GeneratorList
try:
import json
except ImportError:
import simplejson as json
if __name__ == "__main__":
print()
print("Note: Running in stand-alone mode. Consult the README")
print(" (section 'Contributing') for advice on running tests.")
print()
class TestList(unittest.TestCase):
def setUp(self):
pass
def setupDummyResponses(self, mock_site, result_member, ns=None):
if ns is None:
ns = [0, 0, 0]
mock_site.api.side_effect = [
{
'continue': {
'apcontinue': 'Kre_Mbaye',
'continue': '-||'
},
'query': {
result_member: [
{
"pageid": 19839654,
"ns": ns[0],
"title": "Kre'fey",
},
{
"pageid": 19839654,
"ns": ns[1],
"title": "Kre-O",
}
]
}
},
{
'query': {
result_member: [
{
"pageid": 30955295,
"ns": ns[2],
"title": "Kre-O Transformers",
}
]
}
},
]
@mock.patch('mwclient.client.Site')
def test_list_continuation(self, mock_site):
# Test that the list fetches all three responses
# and yields dicts when return_values not set
lst = List(mock_site, 'allpages', 'ap', limit=2)
self.setupDummyResponses(mock_site, 'allpages')
vals = [x for x in lst]
assert len(vals) == 3
assert type(vals[0]) == dict
@mock.patch('mwclient.client.Site')
def test_list_with_str_return_value(self, mock_site):
# Test that the List yields strings when return_values is string
lst = List(mock_site, 'allpages', 'ap', limit=2, return_values='title')
self.setupDummyResponses(mock_site, 'allpages')
vals = [x for x in lst]
assert len(vals) == 3
assert type(vals[0]) == str
@mock.patch('mwclient.client.Site')
def test_list_with_tuple_return_value(self, mock_site):
# Test that the List yields tuples when return_values is tuple
lst = List(mock_site, 'allpages', 'ap', limit=2,
return_values=('title', 'ns'))
self.setupDummyResponses(mock_site, 'allpages')
vals = [x for x in lst]
assert len(vals) == 3
assert type(vals[0]) == tuple
@mock.patch('mwclient.client.Site')
def test_generator_list(self, mock_site):
# Test that the GeneratorList yields Page objects
lst = GeneratorList(mock_site, 'pages', 'p')
self.setupDummyResponses(mock_site, 'pages', ns=[0, 6, 14])
vals = [x for x in lst]
assert len(vals) == 3
assert type(vals[0]) == mwclient.page.Page
assert type(vals[1]) == mwclient.image.Image
assert type(vals[2]) == mwclient.listing.Category
if __name__ == '__main__':
unittest.main()
| mit | Python | |
7ff804bb10b945d38e8ce98f176abcbe4255f65c | Test subdict | nvander1/skrt | tests/test_subdict.py | tests/test_subdict.py | from nose.tools import raises
from skrt.utils import subdict
@raises(KeyError)
def test_missing_key():
    """subdict must raise KeyError when a requested key is absent."""
    dict_ = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
    # Result is unused; the @raises decorator only checks the call raises.
    subdict_ = subdict(['e'], dict_)
def test_subdict():
    """subdict picks exactly the requested keys out of the mapping."""
    dict_ = {'a': 1, 'b': 2, 'c': 3, 'd': 4}
    assert subdict(['a', 'c'], dict_) == {'c': 3, 'a': 1}
| mit | Python | |
d9780fc7151a719b9cbd88622b6c2868eaa64d34 | Add tasks to manage system services | vmalavolta/fabix | fabix/system.py | fabix/system.py | from cuisine import upstart_ensure
from fabric.api import sudo
from fabric.decorators import task
@task
def restart_service(service, force_start=True):
    """Restart a system service.

    Args:
        service: name of the service (upstart job / init script).
        force_start: when True, use cuisine's ``upstart_ensure``, which
            also starts the service if it is not running; otherwise run
            a plain ``service <name> restart``.
    """
    if force_start:
        upstart_ensure(service)
    else:
        # BUG FIX: original read ``sudo('service {0} restart').format(service))``
        # — unbalanced parentheses (SyntaxError) and .format applied to
        # sudo()'s return value instead of the command string.
        sudo('service {0} restart'.format(service))
@task
def reload_service(service):
    """Reload a service's configuration via ``service <name> reload``."""
    sudo('service {0} reload'.format(service))
| mit | Python | |
8e687ab4f85561e31e4da06d89e9505179abc127 | Modify the example of vgpu white_list set | openstack/nova,mikalstill/nova,mahak/nova,openstack/nova,klmitch/nova,gooddata/openstack-nova,klmitch/nova,mikalstill/nova,gooddata/openstack-nova,phenoxim/nova,rahulunair/nova,klmitch/nova,phenoxim/nova,mikalstill/nova,openstack/nova,mahak/nova,rahulunair/nova,rahulunair/nova,gooddata/openstack-nova,klmitch/nova,mahak/nova,gooddata/openstack-nova | nova/conf/devices.py | nova/conf/devices.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
devices_group = cfg.OptGroup(
name='devices',
title='physical or virtual device options')
vgpu_opts = [
cfg.ListOpt('enabled_vgpu_types',
default=[],
help="""
A list of the vGPU types enabled in the compute node.
Some pGPUs (e.g. NVIDIA GRID K1) support different vGPU types. User can use
this option to specify a list of enabled vGPU types that may be assigned to a
guest instance. But please note that Nova only supports a single type in the
Queens release. If more than one vGPU type is specified (as a comma-separated
list), only the first one will be used. An example is as the following:
[devices]
enabled_vgpu_types = GRID K100,Intel GVT-g,MxGPU.2,nvidia-11
""")
]
def register_opts(conf):
    """Register the [devices] option group and its options on *conf*."""
    # The group must exist before options can be registered under it.
    conf.register_group(devices_group)
    conf.register_opts(vgpu_opts, group=devices_group)
def list_opts():
    """Expose this module's options, keyed by group, for config generation."""
    return {devices_group: vgpu_opts}
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
devices_group = cfg.OptGroup(
name='devices',
title='physical or virtual device options')
vgpu_opts = [
cfg.ListOpt('enabled_vgpu_types',
default=[],
help="""
A list of the vGPU types enabled in the compute node.
Some pGPUs (e.g. NVIDIA GRID K1) support different vGPU types. User can use
this option to specify a list of enabled vGPU types that may be assigned to a
guest instance. An example is as the following:
[devices]
enabled_vgpu_types = ['GRID K100', 'Intel GVT-g', 'MxGPU.2', 'nvidia-11']
""")
]
def register_opts(conf):
conf.register_group(devices_group)
conf.register_opts(vgpu_opts, group=devices_group)
def list_opts():
return {devices_group: vgpu_opts}
| apache-2.0 | Python |
215bef9d71097a18ad5ee6f20232a08ba88e9983 | Create example1.py | intelidomo/rpi_snippets | stepper_motor/example1.py | stepper_motor/example1.py | from stepper_motor import StepperMotor
# Instantiate the motor, supplying the GPIO ports it is connected to.
motor = StepperMotor(12,16,20,21)
# Go 100 steps forward (original Spanish comment: "to the right").
motor.goForward(100)
# Go 150 steps backwards (original Spanish comment: "to the left").
motor.goBackwards(150)
| mit | Python | |
1d311f7e53ac1081d801e902d8cb1d9a0ad8d1ec | Add test for iteration loop bytecode generation | ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang | tests/compiler/test_loop_compilation.py | tests/compiler/test_loop_compilation.py | from tests.compiler import compile_local, LST_ID, IMPLICIT_ITERATOR_ID, IMPLICIT_ITERATION_ID
from thinglang.compiler.opcodes import OpcodePushLocal, OpcodeCallInternal, OpcodePopLocal, OpcodeJumpConditional, \
OpcodeJump
from thinglang.foundation.definitions import INTERNAL_TYPE_ORDERING
from thinglang.lexer.values.identifier import Identifier
def test_access_in_method_args():
    """Bytecode generated for a ``for .. in`` iteration loop.

    NOTE(review): the function name looks copy-pasted from another test
    module — it actually verifies iteration-loop codegen, not method args.
    """
    # Removed leftover debug statement that printed the compilation of a
    # near-duplicate snippet ('for number a in lst') before the assertion.
    assert compile_local('for number x in lst') == [
        OpcodePushLocal(LST_ID),
        OpcodeCallInternal(INTERNAL_TYPE_ORDERING[Identifier("list")], 4),  # Create iterator
        OpcodePopLocal(IMPLICIT_ITERATOR_ID),  # Insert it into the frame
        OpcodePushLocal(IMPLICIT_ITERATOR_ID),  # TODO: is this optimal?
        OpcodeCallInternal(INTERNAL_TYPE_ORDERING[Identifier('iterator')], 1),  # Call has_next
        OpcodeJumpConditional(23),  # Jump outside if not
        OpcodePushLocal(IMPLICIT_ITERATOR_ID),
        OpcodeCallInternal(INTERNAL_TYPE_ORDERING[Identifier('iterator')], 2),  # Call next
        OpcodePopLocal(IMPLICIT_ITERATION_ID),  # Insert into frame
        OpcodeJump(16)
    ]
| mit | Python | |
bd2e55e3e8d4c7c2e9af3aa43dff209a98fbf3d5 | test pabot suiten names io | mkorpela/pabot,mkorpela/pabot | tests/test_pabotsuitenames_io.py | tests/test_pabotsuitenames_io.py | import os
import shutil
import stat
import subprocess
import sys
import tempfile
import textwrap
import unittest
from pabot import pabot
class TestPabotSuiteNamesIO(unittest.TestCase):
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
robot_file = open('{}/test.robot'.format(self.tmpdir), 'w')
robot_file.write(textwrap.dedent(
'''
*** Variables ***
${LETSFAIL} PASS
*** Test Cases ***
Test 1
Should Not Be Equal ${LETSFAIL} FAIL
Log something
Set Suite Variable ${LETSFAIL} FAIL
Test 2
Should Not Be Equal ${LETSFAIL} FAIL
Log something too
Set Suite Variable ${LETSFAIL} FAIL
Test 3
Should Not Be Equal ${LETSFAIL} FAIL
Log something three
Set Suite Variable ${LETSFAIL} FAIL
'''))
robot_file.close()
def broken_store(hashes, suite_names):
raise IOError()
self.original = pabot.store_suite_names
pabot.store_suite_names = broken_store
self.original_curdir = os.getcwd()
os.chdir(self.tmpdir)
def test_unable_to_write_pabotsuitenames(self):
names = pabot.solve_suite_names('outs', [self.tmpdir], {}, {'testlevelsplit':True})
self.assertEqual([n.name[len(self.tmpdir):] for n in names], ['.Test 1', '.Test 2', '.Test 3'])
def tearDown(self):
shutil.rmtree(self.tmpdir)
pabot.store_suite_names = self.original
os.chdir(self.original_curdir)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python | |
0222cfe6555074cee63fd30dbd419efbfd0274b3 | add prof perf test | JensTimmerman/radical.pilot,JensTimmerman/radical.pilot,JensTimmerman/radical.pilot,JensTimmerman/radical.pilot | tests/test_profile_writer.py | tests/test_profile_writer.py | #!/usr/bin/env python
import os
import csv
import time
import threading
AGENT_THREADS = 'threads'
AGENT_MODE = AGENT_THREADS
profile_agent = True
# ------------------------------------------------------------------------------
#
timestamp_zero = float(os.environ.get('TIME_ZERO', time.time()))
def timestamp_now():
    """Seconds elapsed since ``timestamp_zero`` (TIME_ZERO / module load)."""
    # relative timestamp seconds since TIME_ZERO (start)
    return float(time.time()) - timestamp_zero
# ------------------------------------------------------------------------------
#
csvfile = open('test_profile_writer.csv', 'wb')
csvwriter = csv.writer (csvfile, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL)
def prof_csv(etype, uid="", msg="", logger=None):
    """Append one profiling record to the shared CSV writer.

    etype/uid/msg are free-form event fields; *logger*, when given, also
    receives the event.  Records are skipped when ``profile_agent`` is off.
    """
    if logger:
        logger("%s (%10s) : %s", etype, msg, uid)
    if not profile_agent:
        return
    now = timestamp_now()
    # TODO: Layer violation?
    # NOTE(review): AGENT_PROCESSES is not defined in this module — that
    # branch would raise NameError; only AGENT_THREADS exists above. Confirm.
    if AGENT_MODE == AGENT_THREADS : tid = threading.current_thread().name
    elif AGENT_MODE == AGENT_PROCESSES: tid = os.getpid()
    else: raise Exception('Unknown Agent Mode')
    csvwriter.writerow([now, tid, uid, etype, msg])
# ------------------------------------------------------------------------------
#
profile_handle = open('test_profile_writer.prof', 'a')
def prof_write(etype, uid="", msg="", logger=None):
    """Append one fixed-width profiling line to ``profile_handle``.

    Same contract as prof_csv, but writes a human-readable text format;
    used here to benchmark the two output styles against each other.
    """
    if logger:
        logger("%s (%10s) : %s", etype, msg, uid)
    if not profile_agent:
        return
    now = timestamp_now()
    # TODO: Layer violation?
    # NOTE(review): AGENT_PROCESSES is not defined in this module — that
    # branch would raise NameError; only AGENT_THREADS exists above. Confirm.
    if AGENT_MODE == AGENT_THREADS : tid = threading.current_thread().name
    elif AGENT_MODE == AGENT_PROCESSES: tid = os.getpid()
    else: raise Exception('Unknown Agent Mode')
    profile_handle.write(" %12.4f : %-17s : %-24s : %-40s : %s\n" \
        % (now, tid, uid, etype, msg))
# ------------------------------------------------------------------------------
#
NUM = 1000 * 1000
start = time.time()
for i in range(NUM):
prof_write (str(i), str(i*i))
stop = time.time()
print 'write: %f' % (stop-start)
start = time.time()
for i in range(NUM):
prof_csv (str(i), str(i*i))
stop = time.time()
print 'write: %f' % (stop-start)
os.system ('rm -f test_profile_writer.prof')
os.system ('rm -f test_profile_writer.csv')
| mit | Python | |
7c609188df1ef457440543beb9dc4dbf286abd87 | Add some source cache tests. | girder/large_image,girder/large_image,girder/large_image | test/test_cache_source.py | test/test_cache_source.py | import pytest
import large_image
from large_image.cache_util import cachesClear
from .datastore import datastore
@pytest.mark.singular
def testCacheSourceStyle():
    """Styled and unstyled sources over the same file must keep serving
    tiles across cache clears and after sibling sources are deleted."""
    cachesClear()
    imagePath = datastore.fetch('sample_image.ptif')
    ts1 = large_image.open(imagePath)
    ts2 = large_image.open(imagePath, style={'max': 128})
    ts3 = large_image.open(imagePath, style={'max': 160})
    # Baseline tile from the unstyled source.
    tile1 = ts1.getTile(0, 0, 4)
    assert ts1.getTile(0, 0, 4) is not None
    assert ts2.getTile(0, 0, 4) is not None
    assert ts3.getTile(0, 0, 4) is not None
    # Clearing caches must not break sources that are still referenced.
    cachesClear()
    assert ts1.getTile(0, 0, 4) == tile1
    # Dropping the unstyled source must not invalidate the styled one.
    del ts1
    assert ts2.getTile(1, 0, 4) is not None
    cachesClear()
    assert ts2.getTile(2, 0, 4) is not None
    # A freshly opened unstyled source still yields the original tile.
    ts1 = large_image.open(imagePath)
    assert ts1.getTile(0, 0, 4) == tile1
@pytest.mark.singular
def testCacheSourceStyleFirst():
    """Same as testCacheSourceStyle, but the styled source is opened
    before the unstyled one, exercising the opposite cache-creation order."""
    cachesClear()
    imagePath = datastore.fetch('sample_image.ptif')
    ts2 = large_image.open(imagePath, style={'max': 128})
    ts1 = large_image.open(imagePath)
    # Baseline tile from the unstyled source.
    tile1 = ts1.getTile(0, 0, 4)
    assert ts1.getTile(0, 0, 4) is not None
    assert ts2.getTile(0, 0, 4) is not None
    # Deleting the unstyled source must not invalidate the styled one.
    del ts1
    assert ts2.getTile(1, 0, 4) is not None
    cachesClear()
    assert ts2.getTile(2, 0, 4) is not None
    # A freshly opened unstyled source still yields the original tile.
    ts1 = large_image.open(imagePath)
    assert ts1.getTile(0, 0, 4) == tile1
| apache-2.0 | Python | |
5d58788f75a7334def3dc5a2471c9e0ed2893589 | Test ConfigItem created in __init__ goes to parent | lhupfeldt/multiconf | test/item_in_init_test.py | test/item_in_init_test.py | # Copyright (c) 2012 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from multiconf import mc_config, ConfigItem
from multiconf.envs import EnvFactory
ef = EnvFactory()
prod = ef.Env('prod')
def test_item_in_init_goes_to_parent():
    """A ConfigItem created inside a subclass __init__ attaches to the
    *parent* of the item under construction, not to the item itself;
    the attribute assigned afterwards is only a reference to it."""
    # Captures Y's contained_in at construction time for later comparison.
    parent = [None]
    class X(ConfigItem):
        def __init__(self, aa=1):
            super(X, self).__init__()
            self.aa = aa
    class Y(X):
        def __init__(self, aa=37):
            parent[0] = self.contained_in
            bb = X()  # X is created in parent and ref assigned to bb
            super(Y, self).__init__(aa)
            self.bb = bb
            self.cc = None
    @mc_config(ef)
    def _(_):
        with ConfigItem():
            with ConfigItem():
                Y()
    # The innermost ConfigItem is the parent both X and Y ended up under.
    it = ef.config(prod).ConfigItem.ConfigItem
    assert it == parent[0]
    assert it.X.aa == 1
    assert it.Y.aa == 37
    # Y.bb points at the very X instance that was hoisted to the parent.
    assert it.Y.bb == it.X
    assert it.Y.cc is None
| bsd-3-clause | Python | |
2b8b1f80febc621e64c5bfcd0adcc6e5e0d5fa07 | add rss module base | p22co/edaemon,p22co/edaemon,paulsnar/edaemon,paulsnar/edaemon,p22co/edaemon,paulsnar/edaemon | vendor/edaemon/rss.py | vendor/edaemon/rss.py | from flask import Blueprint, Response
bp = Blueprint('rss', __name__)
| bsd-3-clause | Python | |
54844ebf6903cdbac108cda01e2bd5e3962edaa4 | Add registration unit test | wjchen84/lfd,rll/lfd,wjchen84/lfd,rll/lfd,wjchen84/lfd,rll/lfd | test/test_registration.py | test/test_registration.py | #!/usr/bin/env python
from __future__ import division
import numpy as np
from core.demonstration import Demonstration, SceneState
from registration.registration import TpsRpmRegistrationFactory
from registration import solver, solver_gpu
from tempfile import mkdtemp
import sys, time
import unittest
class TestRegistration(unittest.TestCase):
def setUp(self):
np.random.seed(0)
def generate_cloud(x_center_pert=0, max_noise=0.02):
# generates 40 cm by 60 cm cloud with optional pertubation along the x-axis
grid = np.array(np.meshgrid(np.linspace(-.2,.2,21), np.linspace(-.3,.3,31))).T.reshape((-1,2))
grid = np.c_[grid, np.zeros(len(grid))]
cloud = grid + x_center_pert * np.c_[(0.3 - np.abs(grid[:,1]-0))/0.3, np.zeros((len(grid),2))] + (np.random.random((len(grid), 3)) - 0.5) * 2 * max_noise
return cloud
self.demos = {}
for x_center_pert in np.arange(-0.1, 0.6, 0.1):
demo_name = "demo_{}".format(x_center_pert)
demo_cloud = generate_cloud(x_center_pert=x_center_pert)
demo_scene_state = SceneState(demo_cloud, downsample_size=0.025)
demo = Demonstration(demo_name, demo_scene_state, None)
self.demos[demo_name] = demo
test_cloud = generate_cloud(x_center_pert=0.2)
self.test_scene_state = SceneState(test_cloud, downsample_size=0.025)
def test_tps_rpm_solvers(self):
tmp_cachedir = mkdtemp()
reg_factory = TpsRpmRegistrationFactory(self.demos, f_solver_factory=None)
sys.stdout.write("computing costs: no solver... ")
sys.stdout.flush()
start_time = time.time()
costs = reg_factory.batch_cost(self.test_scene_state)
print "done in {}s".format(time.time() - start_time)
reg_factory_solver = TpsRpmRegistrationFactory(self.demos, f_solver_factory=solver.TpsSolverFactory(cachedir=tmp_cachedir))
sys.stdout.write("computing costs: solver... ")
sys.stdout.flush()
start_time = time.time()
costs_solver = reg_factory_solver.batch_cost(self.test_scene_state)
print "done in {}s".format(time.time() - start_time)
sys.stdout.write("computing costs: cached solver... ")
sys.stdout.flush()
start_time = time.time()
costs_solver_cached = reg_factory_solver.batch_cost(self.test_scene_state)
print "done in {}s".format(time.time() - start_time)
reg_factory_gpu = TpsRpmRegistrationFactory(self.demos, f_solver_factory=solver_gpu.TpsGpuSolverFactory(cachedir=tmp_cachedir))
sys.stdout.write("computing costs: gpu solver... ")
sys.stdout.flush()
start_time = time.time()
costs_gpu = reg_factory_gpu.batch_cost(self.test_scene_state)
print "done in {}s".format(time.time() - start_time)
sys.stdout.write("computing costs: cached gpu solver... ")
sys.stdout.flush()
start_time = time.time()
costs_gpu_cached = reg_factory_gpu.batch_cost(self.test_scene_state)
print "done in {}s".format(time.time() - start_time)
for demo_name in self.demos.keys():
self.assertTrue(np.allclose(costs[demo_name], costs_solver[demo_name]))
self.assertTrue(np.allclose(costs[demo_name], costs_solver_cached[demo_name]))
self.assertTrue(np.allclose(costs[demo_name], costs_gpu[demo_name]))
self.assertTrue(np.allclose(costs[demo_name], costs_gpu_cached[demo_name]))
if __name__ == '__main__':
unittest.main()
| bsd-2-clause | Python | |
12616c6cf3957d9d7b3e20fa20d7d597d5b3cc6e | Add validator to import_series | v17al/Flexget,xfouloux/Flexget,oxc/Flexget,LynxyssCZ/Flexget,X-dark/Flexget,Flexget/Flexget,X-dark/Flexget,jawilson/Flexget,dsemi/Flexget,lildadou/Flexget,tvcsantos/Flexget,malkavi/Flexget,qvazzler/Flexget,ratoaq2/Flexget,cvium/Flexget,Flexget/Flexget,qvazzler/Flexget,tsnoam/Flexget,X-dark/Flexget,patsissons/Flexget,tobinjt/Flexget,jacobmetrick/Flexget,crawln45/Flexget,gazpachoking/Flexget,sean797/Flexget,v17al/Flexget,drwyrm/Flexget,spencerjanssen/Flexget,poulpito/Flexget,qk4l/Flexget,ibrahimkarahan/Flexget,tarzasai/Flexget,drwyrm/Flexget,crawln45/Flexget,Danfocus/Flexget,thalamus/Flexget,tobinjt/Flexget,offbyone/Flexget,drwyrm/Flexget,voriux/Flexget,grrr2/Flexget,lildadou/Flexget,Flexget/Flexget,qk4l/Flexget,dsemi/Flexget,jacobmetrick/Flexget,oxc/Flexget,thalamus/Flexget,asm0dey/Flexget,ZefQ/Flexget,Danfocus/Flexget,gazpachoking/Flexget,JorisDeRieck/Flexget,ratoaq2/Flexget,grrr2/Flexget,ianstalk/Flexget,crawln45/Flexget,spencerjanssen/Flexget,v17al/Flexget,offbyone/Flexget,tarzasai/Flexget,patsissons/Flexget,Flexget/Flexget,JorisDeRieck/Flexget,LynxyssCZ/Flexget,OmgOhnoes/Flexget,OmgOhnoes/Flexget,jawilson/Flexget,vfrc2/Flexget,offbyone/Flexget,spencerjanssen/Flexget,ZefQ/Flexget,tsnoam/Flexget,vfrc2/Flexget,poulpito/Flexget,ibrahimkarahan/Flexget,ianstalk/Flexget,Pretagonist/Flexget,asm0dey/Flexget,patsissons/Flexget,jawilson/Flexget,antivirtel/Flexget,tarzasai/Flexget,sean797/Flexget,dsemi/Flexget,xfouloux/Flexget,JorisDeRieck/Flexget,malkavi/Flexget,Pretagonist/Flexget,JorisDeRieck/Flexget,malkavi/Flexget,Pretagonist/Flexget,tvcsantos/Flexget,tobinjt/Flexget,qvazzler/Flexget,cvium/Flexget,ratoaq2/Flexget,camon/Flexget,vfrc2/Flexget,ianstalk/Flexget,malkavi/Flexget,ibrahimkarahan/Flexget,camon/Flexget,LynxyssCZ/Flexget,lildadou/Flexget,Danfocus/Flexget,crawln45/Flexget,asm0dey/Flexget,ZefQ/Flexget,xfouloux/Flexget,LynxyssCZ/Flexget,sean797/Flexget,oxc/Flexget,qk4l/Flexget,Danfocus/Flexge
t,tobinjt/Flexget,OmgOhnoes/Flexget,cvium/Flexget,tsnoam/Flexget,antivirtel/Flexget,antivirtel/Flexget,thalamus/Flexget,jacobmetrick/Flexget,jawilson/Flexget,grrr2/Flexget,voriux/Flexget,poulpito/Flexget | flexget/plugins/plugin_import_series.py | flexget/plugins/plugin_import_series.py | from flexget.plugin import register_plugin, get_plugin_by_name, get_plugins_by_event, PluginError
from flexget.plugins.filter_series import FilterSeriesBase
import logging
log = logging.getLogger('imp_series')
class ImportSeries(FilterSeriesBase):
"""Generates series configuration from any input (supporting API version 2, soon all)
Configuration:
import_series:
[settings]:
# same configuration as series plugin
from:
[input plugin]: <configuration>
Example:
import_series:
settings:
quality: 720p
from:
listdir:
- /media/series
"""
def validator(self):
from flexget import validator
root = validator.factory('dict')
self.build_options_validator(root.accept('dict', key='settings'))
from_section = root.accept('dict', key='from', required=True)
# Get a list of apiv2 input plugins
valid_inputs = [plugin for plugin in get_plugins_by_event('input') if plugin.api_ver > 1]
# Build a dict validator that accepts the available input plugins and their settings
for plugin in valid_inputs:
if hasattr(plugin.instance, 'validator'):
from_section.valid[plugin.name] = [plugin.instance.validator()]
else:
from_section.valid[plugin.name] = [validator.factory('any')]
return root
def on_feed_start(self, feed, config):
series = set()
for input_name, input_config in config.get('from', {}).iteritems():
input = get_plugin_by_name(input_name)
if input.api_ver == 1:
raise PluginError('Plugin %s does not support API v2' % input_name)
method = input.event_handlers['on_feed_input']
result = method(feed, input_config)
if not result:
log.warning('Input %s did not return anything' % input_name)
series_names = [x['title'] for x in result]
series = set.union(series, set(series_names))
if not series:
log.info('Did not get any series to generate series configuration')
return
series_config = {}
if 'settings' in config:
series_config.setdefault('settings', {})
series_config['settings'].setdefault('generated_series', config['settings'])
series_config.setdefault('generated_series', list(series))
self.merge_config(feed, series_config)
register_plugin(ImportSeries, 'import_series', api_ver=2)
| from flexget.plugin import register_plugin, get_plugin_by_name, PluginError
from flexget.plugins.filter_series import FilterSeriesBase
import logging
log = logging.getLogger('imp_series')
class ImportSeries(FilterSeriesBase):
"""Generates series configuration from any input (supporting API version 2, soon all)
Configuration:
import_series:
[settings]:
# same configuration as series plugin
from:
[input plugin]: <configuration>
Example:
import_series:
settings:
quality: 720p
from:
listdir:
- /media/series
"""
def on_feed_start(self, feed, config):
series = set()
for input_name, input_config in config.get('from', {}).iteritems():
input = get_plugin_by_name(input_name)
if input.api_ver == 1:
raise PluginError('Plugin %s does not support API v2' % input_name)
method = input.event_handlers['on_feed_input']
result = method(feed, input_config)
if not result:
log.warning('Input %s did not return anything' % input_name)
series_names = [x['title'] for x in result]
series = set.union(series, set(series_names))
if not series:
log.info('Did not get any series to generate series configuration')
return
series_config = {}
if 'settings' in config:
series_config.setdefault('settings', {})
series_config['settings'].setdefault('generated_series', config['settings'])
series_config.setdefault('generated_series', list(series))
self.merge_config(feed, series_config)
register_plugin(ImportSeries, 'import_series', api_ver=2)
| mit | Python |
8618c68046487d475c077cb30070c9080cc4fbc7 | Test prototype for WOA from a netCDF file. | castelao/oceansdb,castelao/pyWOA | tests/test_WOA_from_nc.py | tests/test_WOA_from_nc.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
"""
from datetime import datetime
from WOA.woa import WOA
def test_import():
# A shortcut
from WOA import WOA
db = WOA()
def test_available_vars():
db = WOA()
for v in ['TEMP', 'PSAL']:
assert v in db.keys()
def test_get_profile():
db = WOA()
db['TEMP'].get_profile(var='mn', doy=datetime.now(),
depth=0, lat=10, lon=330)
db['TEMP'].get_profile(var='mn', doy=datetime.now(),
depth=[0,10], lat=10, lon=330)
db['TEMP'].get_profile(doy=datetime.now(),
depth=[0,10], lat=10, lon=330)
| bsd-3-clause | Python | |
2c8a867a810b0e7af6e14be7ed3a70a8fb134252 | Add the py-vine package (#13857) | LLNL/spack,iulian787/spack,iulian787/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack,iulian787/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-vine/package.py | var/spack/repos/builtin/packages/py-vine/package.py | # Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyVine(PythonPackage):
"""Promises, promises, promises."""
homepage = "https://pypi.org/project/vine/"
url = "https://pypi.io/packages/source/v/vine/vine-1.2.0.tar.gz"
version('1.3.0', sha256='133ee6d7a9016f177ddeaf191c1f58421a1dcc6ee9a42c58b34bed40e1d2cd87')
version('1.2.0', sha256='ee4813e915d0e1a54e5c1963fde0855337f82655678540a6bc5996bca4165f76')
depends_on('py-setuptools', type='build')
| lgpl-2.1 | Python | |
a746c674747fe25bed4af2d56738cf5075d475ad | add integration test | alessandrod/twiggy,alessandrod/twiggy | tests/test_integration.py | tests/test_integration.py | import unittest
import twiggy
import StringIO
import time
from . import when
def fake_gmtime():
return when
class IntegrationTestCase(unittest.TestCase):
def setUp(self):
twiggy._populate_globals()
twiggy.log._fields['time'] = fake_gmtime
def tearDown(self):
twiggy._del_globals()
def test_integration(self):
out1 = twiggy.outputs.StreamOutput(stream=StringIO.StringIO(), format=twiggy.formats.line_format)
out2 = twiggy.outputs.StreamOutput(stream=StringIO.StringIO(), format=twiggy.formats.line_format)
twiggy.addEmitters(('first', twiggy.levels.INFO, None, out1),
('second', twiggy.levels.DEBUG, twiggy.filters.glob_names('second.*'), out2),
('first-filter', twiggy.levels.DEBUG, ".*pants.*", out1))
def something():
return "something cool"
twiggy.log.debug("oh hi")
twiggy.log.name("second").info("do you like cheese?")
twiggy.log.name("second.child").fields(america="hate").warning("No")
twiggy.log.name("first").error("Can you do {}", something)
twiggy.log.name("bob").debug("I wear pants")
try:
raise RuntimeError("Oh Noes!")
except:
twiggy.log.trace().critical("Went boom")
print "********************************************"
print out1.stream.getvalue(),
print "********************************************"
print out2.stream.getvalue(),
print "********************************************"
# XXX this should really be done with a regex, but I'm feeling lazy
assert out1.stream.getvalue().startswith( \
"""2010-10-28T02:15:57Z:INFO:second:do you like cheese?
2010-10-28T02:15:57Z:ERROR:first:Can you do something cool
2010-10-28T02:15:57Z:DEBUG:bob:I wear pants
2010-10-28T02:15:57Z:CRITICAL:Went boom
TRACE Traceback (most recent call last):
""")
#"""TRACE File "/home/pfein/Projects/python-twiggy/tests/test_integration.py", line 39, in test_integration
assert out1.stream.getvalue().endswith( \
"""TRACE raise RuntimeError("Oh Noes!")
TRACE RuntimeError: Oh Noes!
""")
assert out2.stream.getvalue() == \
"""2010-10-28T02:15:57Z:WARNING:second.child:america=hate:No
"""
| bsd-3-clause | Python | |
82e186c25971008444e33c6b4924232d690b6d15 | Add git diff python tool that outputs file:line in front of diff output. | dushu1203/chromium.src,anirudhSK/chromium,M4sse/chromium.src,hujiajie/pa-chromium,Chilledheart/chromium,timopulkkinen/BubbleFish,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,rogerwang/chromium,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,zcbenz/cefode-chromium,zcbenz/cefode-chromium,ondra-novak/chromium.src,rogerwang/chromium,bright-sparks/chromium-spacewalk,M4sse/chromium.src,pozdnyakov/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,timopulkkinen/BubbleFish,zcbenz/cefode-chromium,Chilledheart/chromium,rogerwang/chromium,Pluto-tv/chromium-crosswalk,dednal/chromium.src,patrickm/chromium.src,dednal/chromium.src,timopulkkinen/BubbleFish,ondra-novak/chromium.src,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,nacl-webkit/chrome_deps,M4sse/chromium.src,junmin-zhu/chromium-rivertrail,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,chuan9/chromium-crosswalk,Just-D/chromium-1,Jonekee/chromium.src,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,Fireblend/chromium-crosswalk,dednal/chromium.src,rogerwang/chromium,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,axinging/chromium-crosswalk,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,patrickm/chromium.src,ChromiumWebApps/chromium,nacl-webkit/chrome_deps,bright-sparks/chromium-spacewalk,anirudhSK/chromium,zcbenz/cefode-chromium,Chilledheart/chromium,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,hujiajie/pa-chromium,zcbenz/cefode-chromium,Just-D/chromium-1,hujiajie/pa-chromium,dushu1203/chromium.src,hujiajie/pa-chromium,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,rogerwang/chromium,zcbenz/cefode-chromium,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,krieger-od/n
wjs_chromium.src,keishi/chromium,dushu1203/chromium.src,fujunwei/chromium-crosswalk,jaruba/chromium.src,ChromiumWebApps/chromium,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,ondra-novak/chromium.src,ltilve/chromium,nacl-webkit/chrome_deps,crosswalk-project/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,junmin-zhu/chromium-rivertrail,mogoweb/chromium-crosswalk,junmin-zhu/chromium-rivertrail,ltilve/chromium,Fireblend/chromium-crosswalk,timopulkkinen/BubbleFish,pozdnyakov/chromium-crosswalk,ChromiumWebApps/chromium,littlstar/chromium.src,axinging/chromium-crosswalk,junmin-zhu/chromium-rivertrail,jaruba/chromium.src,ltilve/chromium,axinging/chromium-crosswalk,dednal/chromium.src,dednal/chromium.src,jaruba/chromium.src,krieger-od/nwjs_chromium.src,axinging/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,M4sse/chromium.src,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,markYoungH/chromium.src,jaruba/chromium.src,ondra-novak/chromium.src,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,krieger-od/nwjs_chromium.src,Jonekee/chromium.src,rogerwang/chromium,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,hujiajie/pa-chromium,robclark/chromium,mohamed--abdel-maksoud/chromium.src,nacl-webkit/chrome_deps,patrickm/chromium.src,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,littlstar/chromium.src,ChromiumWebApps/chromium,Just-D/chromium-1,krieger-od/nwjs_chromium.src,rogerwang/chromium,chuan9/chromium-crosswalk,jaruba/chromium.src,hujiajie/pa-chromium,dushu1203/chromium.src,chuan9/chromium-crosswalk,Pluto-tv/chromium-crosswalk,pozdnyakov/chromium-crosswalk
,robclark/chromium,robclark/chromium,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,chuan9/chromium-crosswalk,keishi/chromium,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,nacl-webkit/chrome_deps,dushu1203/chromium.src,chuan9/chromium-crosswalk,anirudhSK/chromium,jaruba/chromium.src,timopulkkinen/BubbleFish,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,keishi/chromium,chuan9/chromium-crosswalk,rogerwang/chromium,dushu1203/chromium.src,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,littlstar/chromium.src,nacl-webkit/chrome_deps,anirudhSK/chromium,Jonekee/chromium.src,markYoungH/chromium.src,Jonekee/chromium.src,bright-sparks/chromium-spacewalk,anirudhSK/chromium,junmin-zhu/chromium-rivertrail,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,hgl888/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,krieger-od/nwjs_chromium.src,zcbenz/cefode-chromium,mogoweb/chromium-crosswalk,keishi/chromium,patrickm/chromium.src,anirudhSK/chromium,robclark/chromium,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,anirudhSK/chromium,ondra-novak/chromium.src,timopulkkinen/BubbleFish,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,M4sse/chromium.src,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,keishi/chromium,bright-sparks/chromium-spacewalk,robclark/chromium,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hujiajie/pa-chromium,hgl888/chromium-crosswalk,Jonekee/chromium.src,mogoweb/chromium-crosswalk,jaruba/chromium.src,junmin-zhu/chromium-rivertrail,ltilve/chromium,fujunwei/chromium-crosswalk,hujiajie/pa-chromium,anirudhSK/chromium,keishi/chromium,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,Fireblend/chromi
um-crosswalk,ChromiumWebApps/chromium,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,bright-sparks/chromium-spacewalk,timopulkkinen/BubbleFish,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,robclark/chromium,TheTypoMaster/chromium-crosswalk,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,ltilve/chromium,M4sse/chromium.src,nacl-webkit/chrome_deps,Just-D/chromium-1,mogoweb/chromium-crosswalk,Chilledheart/chromium,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,dednal/chromium.src,chuan9/chromium-crosswalk,ltilve/chromium,markYoungH/chromium.src,rogerwang/chromium,PeterWangIntel/chromium-crosswalk,Just-D/chromium-1,anirudhSK/chromium,patrickm/chromium.src,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium,TheTypoMaster/chromium-crosswalk,patrickm/chromium.src,Just-D/chromium-1,littlstar/chromium.src,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,M4sse/chromium.src,mogoweb/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,Chilledheart/chromium,markYoungH/chromium.src,Just-D/chromium-1,nacl-webkit/chrome_deps,hgl888/chromium-crosswalk-efl,timopulkkinen/BubbleFish,keishi/chromium,keishi/chromium,dednal/chromium.src,timopulkkinen/BubbleFish,junmin-zhu/chromium-rivertrail,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,junmin-zhu/chromium-rivertrail,hgl888/chromium-crosswalk,markYoungH/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,fujunwei/chromium-crosswalk,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,pozdnyakov/chromium-crosswalk,junmin-zhu/chromium-rivertrail,jaruba/chromium.src,patrickm/chromium.src,robclark/chromium,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,nacl-webkit/chrome_deps,zcbenz/cefode-chromium,robclark/chromium,patrickm/chromium.src,PeterWangI
ntel/chromium-crosswalk,dednal/chromium.src,keishi/chromium,markYoungH/chromium.src,littlstar/chromium.src,Chilledheart/chromium,Jonekee/chromium.src,ondra-novak/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,dednal/chromium.src,Jonekee/chromium.src,zcbenz/cefode-chromium,hgl888/chromium-crosswalk,littlstar/chromium.src,Chilledheart/chromium,keishi/chromium,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,robclark/chromium,hgl888/chromium-crosswalk,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,zcbenz/cefode-chromium,littlstar/chromium.src,timopulkkinen/BubbleFish,pozdnyakov/chromium-crosswalk,jaruba/chromium.src,dushu1203/chromium.src,markYoungH/chromium.src,markYoungH/chromium.src,bright-sparks/chromium-spacewalk,anirudhSK/chromium,ondra-novak/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,keishi/chromium,mohamed--abdel-maksoud/chromium.src,pozdnyakov/chromium-crosswalk,timopulkkinen/BubbleFish,mogoweb/chromium-crosswalk,nacl-webkit/chrome_deps,Chilledheart/chromium,fujunwei/chromium-crosswalk,hujiajie/pa-chromium,Jonekee/chromium.src,Chilledheart/chromium,junmin-zhu/chromium-rivertrail,ltilve/chromium,rogerwang/chromium,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,robclark/chromium,nacl-webkit/chrome_deps,Just-D/chromium-1,bright-sparks/chromium-spacewalk,zcbenz/cefode-chromium,ChromiumWebApps/chromium,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,dushu1203/chromium.src | tools/git/git-diff-ide.py | tools/git/git-diff-ide.py | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Invokes git diff [args...] and inserts file:line in front of each line of diff
output where possible.
This is useful from an IDE that allows you to double-click lines that begin
with file:line to open and jump to that point in the file.
Synopsis:
%prog [git diff args...]
Examples:
%prog
%prog HEAD
"""
import subprocess
import sys
def GitShell(args, ignore_return=False):
    """A shell invocation suitable for communicating with git. Returns
    output as list of lines, raises exception on error.
    """
    # shell=True: `args` is a single command string parsed by the shell,
    # so pipes and quoting inside it behave as they would interactively.
    job = subprocess.Popen(args,
                           shell=True,
                           stdout=subprocess.PIPE,
                           stderr=subprocess.STDOUT)
    (out, err) = job.communicate()
    # `err` is always None here because stderr is merged into stdout above.
    if job.returncode != 0 and not ignore_return:
        # Echo the captured output before failing so the caller can see why
        # the git command died.
        print out
        raise Exception("Error %d running command %s" % (
            job.returncode, args))
    return out.split('\n')
def PrintGitDiff(extra_args):
    """Outputs git diff extra_args with file:line inserted into relevant lines."""
    # current_file tracks the file named by the most recent '+++ ' header;
    # line_num is the line number (in the new version of that file) that the
    # next hunk line corresponds to.
    current_file = '';
    line_num = 0;
    lines = GitShell('git diff %s' % ' '.join(extra_args))
    for line in lines:
        # Pass-through lines:
        #   diff --git a/file.c b/file.c
        #   index 0e38c2d..8cd69ae 100644
        #   --- a/file.c
        if (line.startswith('diff ') or
            line.startswith('index ') or
            line.startswith('--- ')):
            print line
            continue

        # Get the filename from the +++ line:
        #   +++ b/file.c
        if line.startswith('+++ '):
            # Filename might be /dev/null or a/file or b/file.
            # Skip the first two characters unless it starts with /.
            current_file = line[4:] if line[4] == '/' else line[6:]
            print line
            continue

        # Update line number from the @@ lines:
        #   @@ -41,9 +41,9 @@ def MyFunc():
        #           ^^
        if line.startswith('@@ '):
            _, old_nr, new_nr, _ = line.split(' ', 3)
            line_num = int(new_nr.split(',')[0])
            print line
            continue
        # Everything else is a context/added/removed line: prefix it with
        # file:line so IDEs can jump to it.
        print current_file + ':' + repr(line_num) + ':' + line

        # Increment line number for lines that start with ' ' or '+':
        #   @@ -41,4 +41,4 @@ def MyFunc():
        #   file.c:41: // existing code
        #   file.c:42: // existing code
        #   file.c:43:-// deleted code
        #   file.c:43:-// deleted code
        #   file.c:43:+// inserted code
        #   file.c:44:+// inserted code
        if line.startswith(' ') or line.startswith('+'):
            line_num += 1
def main():
    """Entry point: forward all CLI arguments straight to `git diff`."""
    PrintGitDiff(sys.argv[1:])

if __name__ == '__main__':
    main()
| bsd-3-clause | Python | |
7593923070766f53a35d3c404523199f68accd3e | Implement tests for a new _config_file fixture | hackebrot/pytest-cookies | tests/test_user_config.py | tests/test_user_config.py | # -*- coding: utf-8 -*-
def test_config(testdir):
    """Make sure that pytest accepts the `cookies` fixture."""

    # create a temporary pytest test module; the embedded tests run in a
    # pytest subprocess and exercise the plugin's `_config_file` fixture
    # (its location under the tmpdir base and the JSON config it contains)
    testdir.makepyfile("""
        import json

        def test_user_dir(tmpdir_factory, _config_file):
            basetemp = tmpdir_factory.getbasetemp()
            assert _config_file.basename == 'config'
            user_dir = _config_file.dirpath()
            assert user_dir.fnmatch('user_dir?')
            assert user_dir.dirpath() == basetemp

        def test_valid_cookiecutter_config(_config_file):
            config_text = _config_file.read()
            config = json.loads(config_text)
            user_dir = _config_file.dirpath()
            expected = {
                'cookiecutters_dir': str(user_dir.join('cookiecutters')),
                'replay_dir': str(user_dir.join('cookiecutter_replay')),
                'default_context': {}
            }
            assert config == expected
    """)

    # run pytest with the following cmd args
    result = testdir.runpytest('-vv')

    # fnmatch_lines does an assertion internally
    result.stdout.fnmatch_lines([
        '*::test_user_dir PASSED',
        '*::test_valid_cookiecutter_config PASSED',
    ])

    # make sure that that we get a '0' exit code for the testsuite
    assert result.ret == 0
| mit | Python | |
4973cf7fda38168c8189d77ced2ee2a2c89cadfa | Add py solution for 605. Can Place Flowers | ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode | py/can-place-flowers.py | py/can-place-flowers.py | from itertools import groupby
class Solution(object):
    def canPlaceFlowers(self, flowerbed, n):
        """
        :type flowerbed: List[int]
        :type n: int
        :rtype: bool

        Greedy single pass: plant in every empty plot whose neighbours are
        both empty. Planting as early as possible is never worse, so one
        left-to-right sweep suffices. Mutates `flowerbed` in place.
        """
        size = len(flowerbed)
        left_occupied = None  # state of the plot to the left (None before start)
        for idx, plot in enumerate(flowerbed):
            if plot == 0 and not left_occupied:
                right_is_free = idx >= size - 1 or flowerbed[idx + 1] == 0
                if right_is_free:
                    flowerbed[idx] = 1
                    n -= 1
            left_occupied = flowerbed[idx]
        return n <= 0
| apache-2.0 | Python | |
5dc2ad1bf129ba2b4f77602678f8e62d26d132a9 | Add new utility script to add sample feeds as files | flacerdk/smoke-signal,flacerdk/smoke-signal,flacerdk/smoke-signal | utils/add_sample_feeds.py | utils/add_sample_feeds.py | from smoke_signal import app, init_db
from smoke_signal.database.helpers import add_feed
from utils.generate_feed import SampleFeed
from os import walk
# Directory that holds the generated sample feed XML files.
feeds_dir = app.root_path + "/test_resources/feeds/"
# Point the app at a throwaway sqlite database used only for these samples.
app.config['DATABASE_PATH'] = 'sqlite:///smoke_signal/test_resources/posts.db'
def create_sample_feed_files(num_feeds, num_items):
    """Generate ``num_feeds`` sample feeds of ``num_items`` items each and
    write every feed to an XML file under ``feeds_dir``."""
    for feed_index in range(num_feeds):
        sample = SampleFeed("Test feed {}".format(feed_index))
        for _ in range(num_items):
            sample.add_item()
        target = feeds_dir + "feed{}.xml".format(feed_index)
        with open(target, "w+") as xml_file:
            xml_file.write(str(sample))
def add_feeds_to_db():
    """Initialise the database and register every feed file in ``feeds_dir``."""
    _dirpath, _dirnames, filenames = next(walk(feeds_dir))
    with app.app_context():
        init_db()
        for name in filenames:
            add_feed("file://" + feeds_dir + name)
| mit | Python | |
d8968cd7697fd165194e6692e9322e7992564323 | Add xor_gate | yukihirai0505/tutorial-program,yukihirai0505/tutorial-program,yukihirai0505/tutorial-program,yukihirai0505/tutorial-program,yukihirai0505/tutorial-program,yukihirai0505/tutorial-program,yukihirai0505/tutorial-program,yukihirai0505/tutorial-program,yukihirai0505/tutorial-program | python/ch02/xor_gate.py | python/ch02/xor_gate.py | from ch02.and_gate02 import AND
from ch02.nand_gate import NAND
from ch02.or_gate import OR
def XOR(x1, x2):
    """Exclusive OR composed from the NAND, OR and AND primitive gates."""
    return AND(NAND(x1, x2), OR(x1, x2))


if __name__ == '__main__':
    # Print the full truth table when run as a script.
    for inputs in [(0, 0), (1, 0), (0, 1), (1, 1)]:
        result = XOR(inputs[0], inputs[1])
        print(str(inputs) + " -> " + str(result))
| mit | Python | |
399568bbb0c88b2aa3919ac3552483a9dd8f01ab | Add an example that uses the interators | Vector35/binaryninja-api,Vector35/binaryninja-api,joshwatson/binaryninja-api,Vector35/binaryninja-api,Vector35/binaryninja-api,joshwatson/binaryninja-api,joshwatson/binaryninja-api,joshwatson/binaryninja-api,Vector35/binaryninja-api,joshwatson/binaryninja-api,Vector35/binaryninja-api,Vector35/binaryninja-api | python/examples/instruction-iterator.py | python/examples/instruction-iterator.py | #!/usr/bin/env python
import sys
try:
import binaryninja
except ImportError:
sys.path.append("/Applications/Binary Ninja.app/Contents/Resources/python/")
import binaryninja
import time
# Pick the container format matching the host OS; the default target below
# (/bin/ls) is an ELF binary on Linux and a Mach-O binary on OS X.
if sys.platform.lower().startswith("linux"):
    bintype="ELF"
elif sys.platform.lower() == "darwin":
    bintype="Mach-O"
else:
    raise Exception, "%s is not supported on this plugin" % sys.platform

# Analyse the binary named on the command line, defaulting to /bin/ls.
if len(sys.argv) > 1:
    target = sys.argv[1]
else:
    target = "/bin/ls"

bv = binaryninja.BinaryViewType[bintype].open(target)
bv.update_analysis()
"""Until update_analysis_and_wait is complete, sleep is necessary as the analysis is multi-threaded."""
time.sleep(1)

# Basic facts about the loaded view.
print "-------- %s --------" % target
print "START: 0x%x" % bv.start
print "ENTRY: 0x%x" % bv.entry_point
print "ARCH: %s" % bv.arch.name
print "\n-------- Function List --------"

""" print all the functions, their basic blocks, and their il instructions """
# Iterating func.low_level_il yields LLIL basic blocks; iterating a block
# yields its LLIL instructions.
for func in bv.functions:
    print repr(func)
    for block in func.low_level_il:
        print "\t{0}".format(block)
        for insn in block:
            print "\t\t{0}".format(insn)

""" print all the functions, their basic blocks, and their mc instructions """
# Iterating the function directly yields machine-code basic blocks instead.
for func in bv.functions:
    print repr(func)
    for block in func:
        print "\t{0}".format(block)
        for insn in block:
            print "\t\t{0}".format(insn)
| mit | Python | |
3474dd5d406b47c6d29e296b3c2f0fe622e4d7ba | Create new package. (#6595) | mfherbst/spack,EmreAtes/spack,iulian787/spack,mfherbst/spack,matthiasdiener/spack,tmerrick1/spack,mfherbst/spack,LLNL/spack,krafczyk/spack,EmreAtes/spack,mfherbst/spack,matthiasdiener/spack,tmerrick1/spack,tmerrick1/spack,matthiasdiener/spack,LLNL/spack,LLNL/spack,iulian787/spack,tmerrick1/spack,LLNL/spack,mfherbst/spack,EmreAtes/spack,krafczyk/spack,EmreAtes/spack,krafczyk/spack,tmerrick1/spack,matthiasdiener/spack,krafczyk/spack,LLNL/spack,iulian787/spack,matthiasdiener/spack,krafczyk/spack,iulian787/spack,EmreAtes/spack,iulian787/spack | var/spack/repos/builtin/packages/r-tidycensus/package.py | var/spack/repos/builtin/packages/r-tidycensus/package.py | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class RTidycensus(RPackage):
    """An integrated R interface to the decennial US Census and American
    Community Survey APIs and the US Census Bureau's geographic boundary
    files. Allows R users to return Census and ACS data as tidyverse-ready
    data frames, and optionally returns a list-column with feature
    geometry for many geographies."""

    homepage = "https://cran.r-project.org/package=tidycensus"
    url = "https://cran.rstudio.com/src/contrib/tidycensus_0.3.1.tar.gz"
    # Archive listing lets Spack discover releases other than the latest.
    list_url = "https://cran.rstudio.com/src/contrib/Archive/tidycensus"

    version('0.3.1', '420d046b5a408d321e775c3d410e7699')

    # R package dependencies (presumably mirroring the CRAN DESCRIPTION
    # file -- verify against upstream when bumping versions).
    depends_on('r-httr', type=('build', 'run'))
    depends_on('r-sf', type=('build', 'run'))
    depends_on('r-dplyr', type=('build', 'run'))
    depends_on('r-tigris', type=('build', 'run'))
    depends_on('r-stringr', type=('build', 'run'))
    depends_on('r-jsonlite', type=('build', 'run'))
    depends_on('r-purrr', type=('build', 'run'))
    depends_on('r-rvest', type=('build', 'run'))
    depends_on('r-tidyr', type=('build', 'run'))
    depends_on('r-rappdirs', type=('build', 'run'))
    depends_on('r-readr', type=('build', 'run'))
    depends_on('r-xml2', type=('build', 'run'))
    depends_on('r-units', type=('build', 'run'))
| lgpl-2.1 | Python | |
2d1d7630cb0d74e18fd9ca359569069a841acae6 | Create intent_bifuz.py | Android-leak/bifuz,fuzzing/bifuz,Android-leak/bifuz,fuzzing/bifuz,fuzzing/bifuz,Android-leak/bifuz,Android-leak/bifuz,fuzzing/bifuz | intent_bifuz.py | intent_bifuz.py | #!/usr/bin/env python
# Intent bifuz.
#
# Copyright (C) 2015 Intel Corporation
# Author: Andreea Brindusa Proca <andreea.brindusa.proca@intel.com>
# Author: Razvan-Costin Ionescu <razvan.ionescu@intel.com>
#
# Licensed under the MIT license, see COPYING.MIT for details
import os, sys
import re
import pprint
import random, string
import multiprocessing
from common import *
# Top-level domains used when fabricating random URIs for fuzzing.
domains = [".com", ".org", ".net", ".int", ".gov", ".mil"]


def string_generator(size=8, chars=string.ascii_uppercase + string.digits):
    """Return a random string of ``size`` characters drawn from ``chars``."""
    return ''.join(random.choice(chars) for _ in range(size))


def generate_random_uri():
    """Build a random http(s) URI with a random host name and TLD."""
    scheme = random.choice(["http", "https"])
    host = str(string_generator(random.randint(10, 100)))
    return scheme + "://" + host + random.choice(domains)
| mit | Python | |
467ffbc0b15a290eb20f5971ba0fca6aea0dc6f4 | add handler for application v43 with preliminary fields | funginstitute/patentprocessor,nikken1/patentprocessor,nikken1/patentprocessor,funginstitute/patentprocessor,yngcan/patentprocessor,yngcan/patentprocessor,funginstitute/patentprocessor,nikken1/patentprocessor,yngcan/patentprocessor | lib/handlers/application_handler_v43.py | lib/handlers/application_handler_v43.py | #!/usr/bin/env python
"""
Uses the extended ContentHandler from xml_driver to extract the needed fields
from patent grant documents
"""
from cStringIO import StringIO
from datetime import datetime
from unidecode import unidecode
from handler import Patobj, PatentHandler
import re
import uuid
import xml.sax
import xml_util
import xml_driver
claim_num_regex = re.compile(r'^\d+\. *') # removes claim number from claim text


class Patent(PatentHandler):
    """Extracts publication-level fields from a USPTO patent-application
    XML document (v4.3 handler) into the ``self.app`` dict.
    """

    def __init__(self, xml_string, is_string=False):
        """Parse ``xml_string`` (a file path, or raw XML when ``is_string``
        is True) and populate ``self.app`` with id, type, number, country,
        date, abstract, title, kind and claim count.
        """
        # Wire up the SAX parser with the project's extended content handler.
        # External general entities are disabled, so no DTD fetching occurs.
        xh = xml_driver.XMLHandler()
        parser = xml_driver.make_parser()
        parser.setContentHandler(xh)
        parser.setFeature(xml_driver.handler.feature_external_ges, False)
        l = xml.sax.xmlreader.Locator()
        xh.setDocumentLocator(l)
        if is_string:
            parser.parse(StringIO(xml_string))
        else:
            parser.parse(xml_string)
        self.attributes = ['app','application']
        self.xml = xh.root.us_patent_application
        # Document identity comes from the publication-reference element.
        self.country = self.xml.publication_reference.contents_of('country', upper=False)[0]
        self.application = xml_util.normalize_document_identifier(self.xml.publication_reference.contents_of('doc_number')[0])
        self.kind = self.xml.publication_reference.contents_of('kind')[0]
        self.date_app = self.xml.publication_reference.contents_of('date')[0]
        # 'appl-type' attribute of the application reference, when present.
        if self.xml.application_reference:
            self.pat_type = self.xml.application_reference[0].get_attribute('appl-type', upper=False)
        else:
            self.pat_type = None
        self.clm_num = len(self.xml.claims.claim)
        self.abstract = self.xml.abstract.contents_of('p', '', as_string=True, upper=False)
        self.invention_title = self._invention_title()
        self.app = {
            "id": self.application,
            "type": self.pat_type,
            "number": self.application,
            "country": self.country,
            "date": self._fix_date(self.date_app),
            "abstract": self.abstract,
            "title": self.invention_title,
            "kind": self.kind,
            "num_claims": self.clm_num
        }
        # Re-key the id as "<year>/<number>" (first four chars of the
        # stringified date are the year).
        self.app["id"] = str(self.app["date"])[:4] + "/" + self.app["number"]
        print(self.app)

    def _invention_title(self):
        """Return the invention title, joining multi-part titles into one string."""
        original = self.xml.contents_of('invention_title', upper=False)[0]
        if isinstance(original, list):
            original = ''.join(original)
        return original

    def _name_helper(self, tag_root):
        """
        Returns tuple of (firstname, lastname) with prefix associated
        with lastname (via xml_util.associate_prefix)
        """
        firstname = tag_root.contents_of('first_name', as_string=True, upper=False)
        lastname = tag_root.contents_of('last_name', as_string=True, upper=False)
        return xml_util.associate_prefix(firstname, lastname)

    def _name_helper_dict(self, tag_root):
        """
        Returns dictionary of firstname, lastname with prefix associated
        with lastname
        """
        firstname = tag_root.contents_of('first_name', as_string=True, upper=False)
        lastname = tag_root.contents_of('last_name', as_string=True, upper=False)
        firstname, lastname = xml_util.associate_prefix(firstname, lastname)
        return {'name_first': firstname, 'name_last': lastname}

    def _fix_date(self, datestring):
        """
        Converts a number representing YY/MM to a Date; returns None for
        missing, pre-1900 or unparseable dates
        """
        if not datestring:
            return None
        elif datestring[:4] < "1900":
            return None
        # default to first of month in absence of day
        # NOTE(review): the first branch replaces a '00' *month* with '01'
        # (positions -4:-2 of YYYYMMDD) -- confirm that is intended.
        if datestring[-4:-2] == '00':
            datestring = datestring[:-4] + '01' + datestring[-2:]
        if datestring[-2:] == '00':
            datestring = datestring[:6] + '01'
        try:
            datestring = datetime.strptime(datestring, '%Y%m%d')
            return datestring
        except Exception as inst:
            print inst, datestring
            return None
| bsd-2-clause | Python | |
6b2a84f1199615b09e1a14210302a814730869ed | Solve quicksort2 | rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank | python/quicksort2.py | python/quicksort2.py | def print_arr(arr):
    print(" ".join(map(str, arr)))


def divide(arr, pivot_i=0):
    """Recursive quicksort that prints each combined sub-array as it is
    produced (the intermediate output this puzzle expects).

    NOTE(review): elements equal to the pivot, other than the pivot itself,
    are dropped by the partition below -- assumes all values are distinct.
    """
    if len(arr) <= 1:
        return arr
    pivot = arr[pivot_i]
    left = []
    right = []
    # Partition around the pivot value.
    for i in arr:
        if i < pivot:
            left.append(i)
        elif i > pivot:
            right.append(i)
    sorted_arr = divide(left) + [pivot] + divide(right)
    print_arr(sorted_arr)
    return sorted_arr
# Read the element count (only consumed, not used afterwards) and the
# space-separated array from stdin, then run the printing quicksort.
n = int(input().strip())
arr = list(map(int, input().strip().split(" ")))
divide(arr)
| mit | Python | |
d4f3e8a4c18fa21cb00af85dac3955f79ae5f886 | Add gcps command | brendan-ward/rasterio,brendan-ward/rasterio,brendan-ward/rasterio | rasterio/rio/gcps.py | rasterio/rio/gcps.py | """Command access to dataset metadata, stats, and more."""
import json
import click
from cligj import (
compact_opt, use_rs_opt, geojson_type_collection_opt,
geojson_type_feature_opt, projection_geographic_opt,
projection_projected_opt, precision_opt, indent_opt)
import rasterio
import rasterio.crs
from rasterio.rio import options
from rasterio.warp import transform_geom
# Feature collection or feature sequence switch, defaulting to the
# latter, the opposite of cligj's default.
sequence_opt = click.option(
    '--sequence/--no-sequence',
    default=True,
    help="Write a LF-delimited sequence of texts containing individual "
         "objects or write a single JSON text containing a feature "
         "collection object (the default).")


@click.command(short_help="Print ground control points as GeoJSON.")
@options.file_in_arg
@geojson_type_collection_opt()
@geojson_type_feature_opt(default=True)
@projection_geographic_opt
@projection_projected_opt
@precision_opt
@use_rs_opt
@indent_opt
@compact_opt
@click.pass_context
def gcps(ctx, input, geojson_type, projection, precision, use_rs, indent,
         compact):
    """Print GeoJSON representations of a dataset's control points.

    Each ground control point is represented as a GeoJSON feature. The
    'properties' member of each feature contains a JSON representation
    of the control point with the following items:

    \b
    row, col:
        row (or line) and col (or pixel) coordinates.
    x, y, z:
        x, y, and z spatial coordinates.
    crs:
        The coordinate reference system for x, y, and z.
    id:
        A unique (within the dataset) identifier for the control
        point.
    info:
        A brief description of the control point.
    """
    # Handle the invalid combinations of args.
    if geojson_type == 'feature' and indent and not use_rs:
        raise click.BadParameter(
            "Pretty-printing a sequence of Features requires the --rs option")

    with ctx.obj['env'], rasterio.open(input) as src:
        gcps, crs = src.gcps
        # Prefer a bare EPSG code (e.g. "EPSG:4326") over the full
        # "+init=epsg:..." proj string when that is what to_string() yields.
        proj = crs.to_string()
        proj = proj.split('=')[1].upper() if proj.startswith('+init=epsg') else proj

        def update_props(data, **kwds):
            # Merge extra members (here: the crs) into a feature's properties.
            data['properties'].update(**kwds)
            return data

        def transform(feat):
            # Reproject the feature geometry: to WGS84 for --geographic,
            # otherwise keep the dataset CRS (still applying --precision).
            dst_crs = 'epsg:4326' if projection == 'geographic' else crs
            geom = transform_geom(crs, dst_crs, feat['geometry'],
                                  precision=precision)
            feat['geometry'] = geom
            return feat

        # Specifying --collection overrides --sequence.
        if geojson_type == 'collection':
            if projection == 'geographic' or precision >= 0:
                features = [transform(update_props(p.__geo_interface__, crs=proj)) for p in gcps]
            else:
                features = [update_props(p.__geo_interface__, crs=proj) for p in gcps]
            click.echo(json.dumps(
                {'type': 'FeatureCollection', 'features': features},
                separators=(',', ':') if compact else None,
                indent=indent))
        else:
            # Sequence mode: one JSON text per control point, optionally
            # preceded by an RS (0x1e) separator for RFC 7464 streams.
            for p in gcps:
                if use_rs:
                    click.echo(u'\x1e', nl=False)
                if projection == 'geographic' or precision >= 0:
                    feat = transform(update_props(p.__geo_interface__, crs=proj))
                else:
                    feat = update_props(p.__geo_interface__, crs=proj)
                click.echo(json.dumps(
                    feat,
                    separators=(',', ':') if compact else None,
                    indent=indent))
| bsd-3-clause | Python | |
fdc403cb477d6dbadd4b7b744f5107465c8ce697 | Add _compat module | Xion/recursely | recursely/_compat.py | recursely/_compat.py | """
Compatibility shims for different Python versions and platforms.
"""
import sys
# True when running under Python 3.x (checks the major version only).
IS_PY3 = sys.version_info[0] == 3
class metaclass(object):
    """Decorator for creating a class through a metaclass.

    Unlike ``__metaclass__`` attribute from Python 2, or ``metaclass=`` keyword
    argument from Python 3, the ``@metaclass`` decorator works with both
    versions of the language.

    Example::

        @metaclass(MyMetaclass)
        class MyClass(object):
            pass
    """
    def __init__(self, meta):
        if not issubclass(meta, type):
            raise TypeError("expected a metaclass, got %s" % type(meta))
        self.metaclass = meta

    def __call__(self, cls):
        """Apply the decorator: rebuild *cls* through the stored metaclass."""
        # Technique adapted from Django (see six.add_metaclass):
        # Copyright (c) Django Software Foundation and individual contributors.
        # All rights reserved.
        namespace = dict(cls.__dict__)
        # These are recreated automatically by ``type``; copying them over
        # would conflict with the new class object.
        for implicit in ('__dict__', '__weakref__'):
            namespace.pop(implicit, None)
        declared_slots = namespace.get('__slots__')
        if declared_slots is not None:
            # Slot values are member descriptors tied to the old class and
            # must not be carried into the rebuilt namespace.
            slot_names = [declared_slots] if isinstance(declared_slots, str) else declared_slots
            for slot_name in slot_names:
                namespace.pop(slot_name)
        return self.metaclass(cls.__name__, cls.__bases__, namespace)
| bsd-2-clause | Python | |
e868f47852ee814975861c61167b82243dc310ad | add script for extracting a list of translators from the .po files | gogobook/wagtail,gasman/wagtail,rsalmaso/wagtail,wagtail/wagtail,nealtodd/wagtail,chrxr/wagtail,rsalmaso/wagtail,quru/wagtail,rsalmaso/wagtail,nilnvoid/wagtail,davecranwell/wagtail,quru/wagtail,kurtrwall/wagtail,nealtodd/wagtail,davecranwell/wagtail,mixxorz/wagtail,timorieber/wagtail,wagtail/wagtail,mikedingjan/wagtail,jnns/wagtail,timorieber/wagtail,takeflight/wagtail,thenewguy/wagtail,kurtw/wagtail,chrxr/wagtail,gasman/wagtail,hanpama/wagtail,kaedroho/wagtail,timorieber/wagtail,nutztherookie/wagtail,mixxorz/wagtail,gogobook/wagtail,JoshBarr/wagtail,takeflight/wagtail,hamsterbacke23/wagtail,wagtail/wagtail,zerolab/wagtail,hamsterbacke23/wagtail,Toshakins/wagtail,Toshakins/wagtail,serzans/wagtail,Toshakins/wagtail,nutztherookie/wagtail,Tivix/wagtail,FlipperPA/wagtail,inonit/wagtail,jnns/wagtail,mikedingjan/wagtail,zerolab/wagtail,Tivix/wagtail,inonit/wagtail,wagtail/wagtail,davecranwell/wagtail,iansprice/wagtail,jnns/wagtail,inonit/wagtail,mikedingjan/wagtail,kaedroho/wagtail,nimasmi/wagtail,Tivix/wagtail,JoshBarr/wagtail,kurtrwall/wagtail,thenewguy/wagtail,rsalmaso/wagtail,iansprice/wagtail,jnns/wagtail,kurtw/wagtail,kaedroho/wagtail,davecranwell/wagtail,hamsterbacke23/wagtail,mikedingjan/wagtail,nilnvoid/wagtail,zerolab/wagtail,gasman/wagtail,mixxorz/wagtail,thenewguy/wagtail,hamsterbacke23/wagtail,nealtodd/wagtail,serzans/wagtail,zerolab/wagtail,wagtail/wagtail,mixxorz/wagtail,thenewguy/wagtail,torchbox/wagtail,nutztherookie/wagtail,nilnvoid/wagtail,torchbox/wagtail,gasman/wagtail,takeflight/wagtail,Tivix/wagtail,JoshBarr/wagtail,gogobook/wagtail,kurtw/wagtail,kurtrwall/wagtail,nealtodd/wagtail,thenewguy/wagtail,iansprice/wagtail,torchbox/wagtail,hanpama/wagtail,nimasmi/wagtail,nimasmi/wagtail,rsalmaso/wagtail,nimasmi/wagtail,hanpama/wagtail,inonit/wagtail,iansprice/wagtail,gasman/wagtail,mixxorz/wagtail,chrxr/wagtail,zerolab/wagtail,gogobook/wagtail,Fl
ipperPA/wagtail,nutztherookie/wagtail,serzans/wagtail,chrxr/wagtail,kaedroho/wagtail,FlipperPA/wagtail,Toshakins/wagtail,quru/wagtail,FlipperPA/wagtail,quru/wagtail,timorieber/wagtail,takeflight/wagtail,hanpama/wagtail,nilnvoid/wagtail,kaedroho/wagtail,JoshBarr/wagtail,kurtw/wagtail,serzans/wagtail,torchbox/wagtail,kurtrwall/wagtail | scripts/get-translator-credits.py | scripts/get-translator-credits.py | import subprocess
import re
from collections import defaultdict
# locale code -> set of translator names harvested from the .po headers.
authors_by_locale = defaultdict(set)

# Locate every gettext catalogue in the source tree.
# NOTE(review): this script is Python 2 only -- it uses the `file` builtin
# below and treats Popen.stdout lines as str.
file_listing = subprocess.Popen('find ../wagtail -iname *.po', shell=True, stdout=subprocess.PIPE)

for file_listing_line in file_listing.stdout:
    filename = file_listing_line.strip()

    # extract locale string from filename
    locale = re.search(r'locale/(\w+)/LC_MESSAGES', filename).group(1)
    if locale == 'en':
        continue

    # read author list from each file
    with file(filename) as f:
        has_found_translators_heading = False
        for line in f:
            line = line.strip()
            if line.startswith('#'):
                if has_found_translators_heading:
                    # Credit lines match "# <name>, <year(s)>" -- keep the
                    # part before the trailing year range.
                    author = re.match(r'\# (.*), [\d\-]+', line).group(1)
                    authors_by_locale[locale].add(author)
                elif line.startswith('# Translators:'):
                    has_found_translators_heading = True
            else:
                # First non-comment line ends the header block.
                if has_found_translators_heading:
                    break
                else:
                    raise Exception("No 'Translators:' heading found in %s" % filename)

# Print the credits grouped by locale, both levels sorted for stable output.
locales = sorted(authors_by_locale.keys())
for locale in locales:
    print(locale)
    print("-----")
    for author in sorted(authors_by_locale[locale]):
        print(author)
    print('')
| bsd-3-clause | Python | |
214c81265db7dc23a805717126fad6f97d391fe8 | Add marker for 105 error | Facenapalm/NapalmBot | scripts/markers/mark_error_105.py | scripts/markers/mark_error_105.py | """Marks all fixed errors #105 on ruwiki's CheckWikipedia."""
import re
import pywikibot
from checkwiki import load_page_list, mark_error_done, log
# CheckWikipedia error number handled by this marker script.
NUMBER = "105"


def main():
    """Main script function: scan each listed page and mark the error fixed
    when every heading's trailing '=' run is matched at the line start."""
    site = pywikibot.Site()
    for pagename in load_page_list(NUMBER):
        page = pywikibot.Page(site, pagename)
        unbalanced = False
        for text_line in page.text.split("\n"):
            trailing = re.search(r"==+$", text_line)
            if trailing is None:
                continue
            if not text_line.startswith(trailing.group(0)):
                unbalanced = True
                break
        if unbalanced:
            log(pagename, success=False)
        else:
            mark_error_done(NUMBER, page.title())
            log(pagename, success=True)


if __name__ == "__main__":
    main()
| mit | Python | |
2ea6920a498dcc2dee4c1b7d7dc5e454f8071aa8 | Add DeliveryBox table tests. | LegionXI/pydarkstar,AdamGagorik/pydarkstar | tests/tables/delivery_box.py | tests/tables/delivery_box.py | """
.. moduleauthor:: Adam Gagorik <adam.gagorik@gmail.com>
"""
import unittest
import pydarkstar.logutils
import pydarkstar.tables.delivery_box
# Enable debug-level logging for the whole test run.
pydarkstar.logutils.setDebug()


class TestDeliveryBox(unittest.TestCase):
    """Smoke tests for the DeliveryBox table mapping."""

    def test_init(self):
        # Constructing the mapped object must not raise.
        pydarkstar.tables.delivery_box.DeliveryBox()


if __name__ == '__main__':
    unittest.main()
7a669705a4870f04dfee620f24fb98d60b31b901 | add initial skeleton for carpenter tests | IanDCarroll/xox | tests/test_carpenter_shop.py | tests/test_carpenter_shop.py | import unittest
from source.carpenter_shop import *
class CarpenterTestCase(unittest.TestCase):
    """Skeleton test case for the carpenter shop module (no assertions yet)."""

    def setUp(self):
        # No fixtures required yet.
        pass

    def test_something(self):
        # Placeholder test; intentionally empty.
        pass
| mit | Python | |
052a176e0c3b38dc9390edc66b7ca3125106bead | add a new test case TestImport_01 | alphatwirl/alphatwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl | tests/unit/test_Import_01.py | tests/unit/test_Import_01.py | import AlphaTwirl
import unittest
import inspect
##__________________________________________________________________||
# Optional-dependency probes: the pandas/ROOT-specific tests below are
# skipped (via skipUnless) when the corresponding import fails.
hasPandas = False
try:
    import pandas
    hasPandas = True
except ImportError:
    pass

##__________________________________________________________________||
hasROOT = False
try:
    import ROOT
    hasROOT = True
except ImportError:
    pass

##__________________________________________________________________||
class TestImport_01(unittest.TestCase):
    """Smoke-test that the AlphaTwirl package exposes its public API.

    Each test only verifies that the expected attribute exists and is of
    the expected kind (function vs. class).  Names that need pandas or
    ROOT are checked only when those optional dependencies import cleanly.
    """

    def _assert_functions(self, objs):
        """Assert that every object in *objs* is a plain function."""
        for obj in objs:
            self.assertTrue(inspect.isfunction(obj))

    def _assert_classes(self, objs):
        """Assert that every object in *objs* is a class."""
        for obj in objs:
            self.assertTrue(inspect.isclass(obj))

    @unittest.skipUnless(hasPandas, "has no pandas")
    def test_with_pandas(self):
        self._assert_functions([
            AlphaTwirl.Aggregate.combine_MC_yields_in_datasets_into_xsec_in_processes,
            AlphaTwirl.Aggregate.stack_counts_categories,
            AlphaTwirl.Aggregate.sumOverCategories,
            AlphaTwirl.buildBinningFromTbl,
        ])
        self._assert_classes([
            AlphaTwirl.CombineIntoPandasDataFrame,
            AlphaTwirl.WritePandasDataFrameToFile,
        ])

    @unittest.skipUnless(hasROOT, "has no ROOT")
    def test_with_ROOT(self):
        self._assert_classes([
            AlphaTwirl.HeppyResult.BEventBuilder,
            AlphaTwirl.HeppyResult.EventBuilder,
        ])

    def test_functions(self):
        self._assert_functions([AlphaTwirl.mkdir_p])

    def test_classes(self):
        self._assert_classes([
            AlphaTwirl.EventReader.Collector,
            AlphaTwirl.AlphaTwirl,
            AlphaTwirl.Binning.Binning,
            AlphaTwirl.Binning.Echo,
            AlphaTwirl.Binning.Round,
            AlphaTwirl.Binning.RoundLog,
            AlphaTwirl.Combine,
            AlphaTwirl.CombineIntoList,
            AlphaTwirl.Counter.Counter,
            AlphaTwirl.Counter.CounterFactory,
            AlphaTwirl.Counter.Counts,
            AlphaTwirl.Counter.GenericKeyComposer,
            AlphaTwirl.Counter.GenericKeyComposerB,
            AlphaTwirl.Counter.NextKeyComposer,
            AlphaTwirl.EventReader.Collector,
            AlphaTwirl.EventReader.EventLoop,
            AlphaTwirl.EventReader.EventLoopProgressReportWriter,
            AlphaTwirl.EventReader.EventLoopRunner,
            AlphaTwirl.EventReader.EventReaderBundle,
            AlphaTwirl.EventReader.EventReaderCollectorAssociator,
            AlphaTwirl.EventReader.EventReaderCollectorAssociatorComposite,
            AlphaTwirl.EventReader.EventReaderComposite,
            AlphaTwirl.EventReader.MPEventLoopRunner,
            AlphaTwirl.EventReader.NullProgressMonitor,
            AlphaTwirl.Events.BEvents,
            AlphaTwirl.Events.Branch,
            AlphaTwirl.Events.BranchAddressManager,
            AlphaTwirl.Events.Events,
            AlphaTwirl.HeppyResult.Analyzer,
            AlphaTwirl.HeppyResult.Component,
            AlphaTwirl.HeppyResult.ComponentLoop,
            AlphaTwirl.HeppyResult.ComponentReaderComposite,
            AlphaTwirl.HeppyResult.HeppyResult,
            AlphaTwirl.HeppyResult.ReadComponentConfig,
            AlphaTwirl.HeppyResult.ReadCounter,
            AlphaTwirl.HeppyResult.ReadVersionInfo,
            AlphaTwirl.HeppyResult.TblCounter,
            AlphaTwirl.HeppyResult.TblXsec,
            AlphaTwirl.ProgressBar.ProgressBar,
            AlphaTwirl.ProgressBar.ProgressMonitor,
            AlphaTwirl.ProgressBar.ProgressReport,
            AlphaTwirl.ProgressBar.ProgressReporter,
            AlphaTwirl.WriteListToFile,
        ])
##__________________________________________________________________||
| bsd-3-clause | Python | |
d438f1e54743b64eb0d6518df0f90956711568a6 | Add trace api module. | jagguli/intellij-community,fitermay/intellij-community,fitermay/intellij-community,mglukhikh/intellij-community,vvv1559/intellij-community,slisson/intellij-community,apixandru/intellij-community,fengbaicanhe/intellij-community,kdwink/intellij-community,ThiagoGarciaAlves/intellij-community,mglukhikh/intellij-community,nicolargo/intellij-community,petteyg/intellij-community,SerCeMan/intellij-community,diorcety/intellij-community,lucafavatella/intellij-community,apixandru/intellij-community,gnuhub/intellij-community,tmpgit/intellij-community,fengbaicanhe/intellij-community,hurricup/intellij-community,kool79/intellij-community,salguarnieri/intellij-community,orekyuu/intellij-community,pwoodworth/intellij-community,wreckJ/intellij-community,izonder/intellij-community,ryano144/intellij-community,caot/intellij-community,asedunov/intellij-community,asedunov/intellij-community,Distrotech/intellij-community,FHannes/intellij-community,idea4bsd/idea4bsd,MER-GROUP/intellij-community,SerCeMan/intellij-community,idea4bsd/idea4bsd,supersven/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,fitermay/intellij-community,muntasirsyed/intellij-community,MER-GROUP/intellij-community,signed/intellij-community,mglukhikh/intellij-community,fengbaicanhe/intellij-community,xfournet/intellij-community,Distrotech/intellij-community,ibinti/intellij-community,fitermay/intellij-community,jagguli/intellij-community,hurricup/intellij-community,dslomov/intellij-community,orekyuu/intellij-community,FHannes/intellij-community,da1z/intellij-community,amith01994/intellij-community,ahb0327/intellij-community,orekyuu/intellij-community,da1z/intellij-community,amith01994/intellij-community,pwoodworth/intellij-community,youdonghai/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,samthor/intellij-community,jagguli/intellij-community,xfournet/intellij-community,ThiagoGarciaAlves/intel
lij-community,lucafavatella/intellij-community,da1z/intellij-community,akosyakov/intellij-community,ftomassetti/intellij-community,apixandru/intellij-community,suncycheng/intellij-community,amith01994/intellij-community,apixandru/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,fengbaicanhe/intellij-community,holmes/intellij-community,FHannes/intellij-community,ThiagoGarciaAlves/intellij-community,lucafavatella/intellij-community,diorcety/intellij-community,supersven/intellij-community,vladmm/intellij-community,vvv1559/intellij-community,FHannes/intellij-community,SerCeMan/intellij-community,salguarnieri/intellij-community,signed/intellij-community,gnuhub/intellij-community,adedayo/intellij-community,retomerz/intellij-community,jagguli/intellij-community,akosyakov/intellij-community,pwoodworth/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,TangHao1987/intellij-community,vladmm/intellij-community,apixandru/intellij-community,akosyakov/intellij-community,michaelgallacher/intellij-community,robovm/robovm-studio,ivan-fedorov/intellij-community,izonder/intellij-community,orekyuu/intellij-community,fitermay/intellij-community,MER-GROUP/intellij-community,izonder/intellij-community,jagguli/intellij-community,alphafoobar/intellij-community,vvv1559/intellij-community,tmpgit/intellij-community,ThiagoGarciaAlves/intellij-community,ahb0327/intellij-community,diorcety/intellij-community,semonte/intellij-community,xfournet/intellij-community,muntasirsyed/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,dslomov/intellij-community,MER-GROUP/intellij-community,TangHao1987/intellij-community,izonder/intellij-community,da1z/intellij-community,ibinti/intellij-community,gnuhub/intellij-community,alphafoobar/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,fnouama/intellij-community,wreckJ/intellij-community,youdonghai/intellij-community,petteyg/intellij-commu
nity,fnouama/intellij-community,allotria/intellij-community,slisson/intellij-community,tmpgit/intellij-community,ahb0327/intellij-community,pwoodworth/intellij-community,ivan-fedorov/intellij-community,MichaelNedzelsky/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,robovm/robovm-studio,pwoodworth/intellij-community,slisson/intellij-community,asedunov/intellij-community,TangHao1987/intellij-community,ThiagoGarciaAlves/intellij-community,tmpgit/intellij-community,nicolargo/intellij-community,ftomassetti/intellij-community,SerCeMan/intellij-community,MichaelNedzelsky/intellij-community,pwoodworth/intellij-community,signed/intellij-community,retomerz/intellij-community,ryano144/intellij-community,ivan-fedorov/intellij-community,gnuhub/intellij-community,ivan-fedorov/intellij-community,fnouama/intellij-community,ThiagoGarciaAlves/intellij-community,pwoodworth/intellij-community,samthor/intellij-community,robovm/robovm-studio,kdwink/intellij-community,kdwink/intellij-community,pwoodworth/intellij-community,adedayo/intellij-community,idea4bsd/idea4bsd,amith01994/intellij-community,retomerz/intellij-community,ol-loginov/intellij-community,semonte/intellij-community,ftomassetti/intellij-community,nicolargo/intellij-community,nicolargo/intellij-community,orekyuu/intellij-community,supersven/intellij-community,diorcety/intellij-community,lucafavatella/intellij-community,kdwink/intellij-community,Lekanich/intellij-community,hurricup/intellij-community,supersven/intellij-community,samthor/intellij-community,TangHao1987/intellij-community,ahb0327/intellij-community,muntasirsyed/intellij-community,SerCeMan/intellij-community,dslomov/intellij-community,robovm/robovm-studio,wreckJ/intellij-community,hurricup/intellij-community,asedunov/intellij-community,diorcety/intellij-community,blademainer/intellij-community,robovm/robovm-studio,Lekanich/intellij-community,izonder/intellij-community,fitermay/intellij-community,ibinti/intellij-community,xfournet/intellij-communit
y,akosyakov/intellij-community,blademainer/intellij-community,retomerz/intellij-community,supersven/intellij-community,michaelgallacher/intellij-community,apixandru/intellij-community,signed/intellij-community,suncycheng/intellij-community,hurricup/intellij-community,vvv1559/intellij-community,blademainer/intellij-community,amith01994/intellij-community,asedunov/intellij-community,vladmm/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,fnouama/intellij-community,youdonghai/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,FHannes/intellij-community,diorcety/intellij-community,supersven/intellij-community,allotria/intellij-community,ivan-fedorov/intellij-community,blademainer/intellij-community,vladmm/intellij-community,allotria/intellij-community,apixandru/intellij-community,adedayo/intellij-community,ol-loginov/intellij-community,fitermay/intellij-community,vladmm/intellij-community,tmpgit/intellij-community,suncycheng/intellij-community,kdwink/intellij-community,retomerz/intellij-community,da1z/intellij-community,youdonghai/intellij-community,allotria/intellij-community,semonte/intellij-community,supersven/intellij-community,SerCeMan/intellij-community,orekyuu/intellij-community,salguarnieri/intellij-community,dslomov/intellij-community,Distrotech/intellij-community,retomerz/intellij-community,clumsy/intellij-community,holmes/intellij-community,dslomov/intellij-community,signed/intellij-community,mglukhikh/intellij-community,izonder/intellij-community,Lekanich/intellij-community,MER-GROUP/intellij-community,kdwink/intellij-community,gnuhub/intellij-community,blademainer/intellij-community,kool79/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,ThiagoGarciaAlves/intellij-community,ibinti/intellij-community,FHannes/intellij-community,robovm/robovm-studio,asedunov/intellij-community,asedunov/intellij-community,allotria/intell
ij-community,alphafoobar/intellij-community,kool79/intellij-community,robovm/robovm-studio,samthor/intellij-community,salguarnieri/intellij-community,semonte/intellij-community,Distrotech/intellij-community,caot/intellij-community,idea4bsd/idea4bsd,kool79/intellij-community,ftomassetti/intellij-community,robovm/robovm-studio,SerCeMan/intellij-community,slisson/intellij-community,MER-GROUP/intellij-community,MER-GROUP/intellij-community,retomerz/intellij-community,samthor/intellij-community,ahb0327/intellij-community,MichaelNedzelsky/intellij-community,petteyg/intellij-community,clumsy/intellij-community,youdonghai/intellij-community,ol-loginov/intellij-community,alphafoobar/intellij-community,TangHao1987/intellij-community,wreckJ/intellij-community,fnouama/intellij-community,apixandru/intellij-community,caot/intellij-community,vvv1559/intellij-community,michaelgallacher/intellij-community,MichaelNedzelsky/intellij-community,alphafoobar/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,gnuhub/intellij-community,idea4bsd/idea4bsd,ThiagoGarciaAlves/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,muntasirsyed/intellij-community,semonte/intellij-community,slisson/intellij-community,holmes/intellij-community,xfournet/intellij-community,asedunov/intellij-community,petteyg/intellij-community,caot/intellij-community,ol-loginov/intellij-community,kdwink/intellij-community,samthor/intellij-community,apixandru/intellij-community,da1z/intellij-community,adedayo/intellij-community,hurricup/intellij-community,slisson/intellij-community,fengbaicanhe/intellij-community,clumsy/intellij-community,ibinti/intellij-community,semonte/intellij-community,clumsy/intellij-community,ahb0327/intellij-community,ryano144/intellij-community,holmes/intellij-community,ibinti/intellij-community,xfournet/intellij-community,vladmm/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,Lekanich/in
tellij-community,blademainer/intellij-community,asedunov/intellij-community,Distrotech/intellij-community,muntasirsyed/intellij-community,ryano144/intellij-community,kool79/intellij-community,fengbaicanhe/intellij-community,michaelgallacher/intellij-community,ThiagoGarciaAlves/intellij-community,SerCeMan/intellij-community,xfournet/intellij-community,vvv1559/intellij-community,kool79/intellij-community,vvv1559/intellij-community,signed/intellij-community,signed/intellij-community,ahb0327/intellij-community,semonte/intellij-community,nicolargo/intellij-community,suncycheng/intellij-community,samthor/intellij-community,MichaelNedzelsky/intellij-community,da1z/intellij-community,ol-loginov/intellij-community,orekyuu/intellij-community,asedunov/intellij-community,Distrotech/intellij-community,jagguli/intellij-community,signed/intellij-community,allotria/intellij-community,michaelgallacher/intellij-community,hurricup/intellij-community,pwoodworth/intellij-community,Distrotech/intellij-community,akosyakov/intellij-community,slisson/intellij-community,lucafavatella/intellij-community,hurricup/intellij-community,ivan-fedorov/intellij-community,holmes/intellij-community,amith01994/intellij-community,ibinti/intellij-community,vvv1559/intellij-community,fengbaicanhe/intellij-community,kool79/intellij-community,mglukhikh/intellij-community,supersven/intellij-community,hurricup/intellij-community,ryano144/intellij-community,youdonghai/intellij-community,slisson/intellij-community,petteyg/intellij-community,ahb0327/intellij-community,semonte/intellij-community,petteyg/intellij-community,jagguli/intellij-community,michaelgallacher/intellij-community,tmpgit/intellij-community,xfournet/intellij-community,salguarnieri/intellij-community,robovm/robovm-studio,adedayo/intellij-community,jagguli/intellij-community,clumsy/intellij-community,idea4bsd/idea4bsd,fnouama/intellij-community,suncycheng/intellij-community,xfournet/intellij-community,lucafavatella/intellij-community,jagguli/intell
ij-community,petteyg/intellij-community,mglukhikh/intellij-community,wreckJ/intellij-community,tmpgit/intellij-community,MichaelNedzelsky/intellij-community,TangHao1987/intellij-community,kool79/intellij-community,ThiagoGarciaAlves/intellij-community,Distrotech/intellij-community,ivan-fedorov/intellij-community,salguarnieri/intellij-community,youdonghai/intellij-community,fitermay/intellij-community,holmes/intellij-community,allotria/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,retomerz/intellij-community,allotria/intellij-community,fengbaicanhe/intellij-community,ibinti/intellij-community,xfournet/intellij-community,kdwink/intellij-community,akosyakov/intellij-community,muntasirsyed/intellij-community,suncycheng/intellij-community,semonte/intellij-community,petteyg/intellij-community,fengbaicanhe/intellij-community,apixandru/intellij-community,izonder/intellij-community,jagguli/intellij-community,ftomassetti/intellij-community,orekyuu/intellij-community,dslomov/intellij-community,fitermay/intellij-community,samthor/intellij-community,ahb0327/intellij-community,kdwink/intellij-community,nicolargo/intellij-community,fengbaicanhe/intellij-community,youdonghai/intellij-community,wreckJ/intellij-community,apixandru/intellij-community,da1z/intellij-community,TangHao1987/intellij-community,ftomassetti/intellij-community,slisson/intellij-community,michaelgallacher/intellij-community,holmes/intellij-community,ol-loginov/intellij-community,idea4bsd/idea4bsd,jagguli/intellij-community,kdwink/intellij-community,signed/intellij-community,FHannes/intellij-community,clumsy/intellij-community,adedayo/intellij-community,dslomov/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,suncycheng/intellij-community,FHannes/intellij-community,izonder/intellij-community,signed/intellij-community,akosyakov/intellij-community,lucafavatella/intellij-community,ftomassetti/intellij-community,ryano144/intellij-community,MER-GROUP/intellij-commu
nity,vladmm/intellij-community,Distrotech/intellij-community,petteyg/intellij-community,caot/intellij-community,orekyuu/intellij-community,ftomassetti/intellij-community,allotria/intellij-community,MichaelNedzelsky/intellij-community,caot/intellij-community,hurricup/intellij-community,gnuhub/intellij-community,Distrotech/intellij-community,semonte/intellij-community,diorcety/intellij-community,idea4bsd/idea4bsd,asedunov/intellij-community,robovm/robovm-studio,dslomov/intellij-community,robovm/robovm-studio,FHannes/intellij-community,suncycheng/intellij-community,wreckJ/intellij-community,akosyakov/intellij-community,amith01994/intellij-community,muntasirsyed/intellij-community,ol-loginov/intellij-community,holmes/intellij-community,FHannes/intellij-community,orekyuu/intellij-community,caot/intellij-community,robovm/robovm-studio,FHannes/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,gnuhub/intellij-community,ryano144/intellij-community,signed/intellij-community,ryano144/intellij-community,suncycheng/intellij-community,retomerz/intellij-community,hurricup/intellij-community,Distrotech/intellij-community,ol-loginov/intellij-community,ivan-fedorov/intellij-community,michaelgallacher/intellij-community,samthor/intellij-community,lucafavatella/intellij-community,gnuhub/intellij-community,vladmm/intellij-community,ahb0327/intellij-community,asedunov/intellij-community,retomerz/intellij-community,tmpgit/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,mglukhikh/intellij-community,clumsy/intellij-community,signed/intellij-community,suncycheng/intellij-community,idea4bsd/idea4bsd,alphafoobar/intellij-community,blademainer/intellij-community,fnouama/intellij-community,ivan-fedorov/intellij-community,adedayo/intellij-community,adedayo/intellij-community,ryano144/intellij-community,TangHao1987/intellij-community,muntasirsyed/intellij-community,holmes/intellij-community,Distrotech/intellij-community,kool79/intellij
-community,da1z/intellij-community,FHannes/intellij-community,ibinti/intellij-community,tmpgit/intellij-community,vladmm/intellij-community,clumsy/intellij-community,nicolargo/intellij-community,kool79/intellij-community,izonder/intellij-community,petteyg/intellij-community,ThiagoGarciaAlves/intellij-community,ivan-fedorov/intellij-community,amith01994/intellij-community,ibinti/intellij-community,alphafoobar/intellij-community,fnouama/intellij-community,wreckJ/intellij-community,MichaelNedzelsky/intellij-community,amith01994/intellij-community,amith01994/intellij-community,idea4bsd/idea4bsd,ibinti/intellij-community,clumsy/intellij-community,samthor/intellij-community,dslomov/intellij-community,SerCeMan/intellij-community,semonte/intellij-community,slisson/intellij-community,vladmm/intellij-community,blademainer/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,youdonghai/intellij-community,gnuhub/intellij-community,caot/intellij-community,Lekanich/intellij-community,jagguli/intellij-community,vvv1559/intellij-community,nicolargo/intellij-community,fitermay/intellij-community,dslomov/intellij-community,semonte/intellij-community,SerCeMan/intellij-community,kdwink/intellij-community,idea4bsd/idea4bsd,caot/intellij-community,slisson/intellij-community,salguarnieri/intellij-community,salguarnieri/intellij-community,muntasirsyed/intellij-community,retomerz/intellij-community,ryano144/intellij-community,orekyuu/intellij-community,apixandru/intellij-community,caot/intellij-community,ryano144/intellij-community,allotria/intellij-community,TangHao1987/intellij-community,fitermay/intellij-community,orekyuu/intellij-community,holmes/intellij-community,clumsy/intellij-community,wreckJ/intellij-community,mglukhikh/intellij-community,salguarnieri/intellij-community,Lekanich/intellij-community,nicolargo/intellij-community,tmpgit/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,asedunov/intellij-community,izonder/intellij-
community,fitermay/intellij-community,youdonghai/intellij-community,salguarnieri/intellij-community,blademainer/intellij-community,da1z/intellij-community,vladmm/intellij-community,akosyakov/intellij-community,ryano144/intellij-community,hurricup/intellij-community,fnouama/intellij-community,vvv1559/intellij-community,alphafoobar/intellij-community,diorcety/intellij-community,youdonghai/intellij-community,alphafoobar/intellij-community,Lekanich/intellij-community,MichaelNedzelsky/intellij-community,tmpgit/intellij-community,blademainer/intellij-community,xfournet/intellij-community,Lekanich/intellij-community,lucafavatella/intellij-community,adedayo/intellij-community,pwoodworth/intellij-community,izonder/intellij-community,caot/intellij-community,amith01994/intellij-community,michaelgallacher/intellij-community,ol-loginov/intellij-community,ftomassetti/intellij-community,mglukhikh/intellij-community,lucafavatella/intellij-community,petteyg/intellij-community,petteyg/intellij-community,MER-GROUP/intellij-community,vvv1559/intellij-community,mglukhikh/intellij-community,alphafoobar/intellij-community,wreckJ/intellij-community,SerCeMan/intellij-community,Lekanich/intellij-community,fnouama/intellij-community,muntasirsyed/intellij-community,supersven/intellij-community,fengbaicanhe/intellij-community,ftomassetti/intellij-community,da1z/intellij-community,blademainer/intellij-community,akosyakov/intellij-community,ftomassetti/intellij-community,dslomov/intellij-community,MichaelNedzelsky/intellij-community,ahb0327/intellij-community,supersven/intellij-community,da1z/intellij-community,idea4bsd/idea4bsd,holmes/intellij-community,diorcety/intellij-community,pwoodworth/intellij-community,da1z/intellij-community,supersven/intellij-community,supersven/intellij-community,kool79/intellij-community,hurricup/intellij-community,kool79/intellij-community,gnuhub/intellij-community,signed/intellij-community,clumsy/intellij-community,diorcety/intellij-community,slisson/intellij-commu
nity,gnuhub/intellij-community,Lekanich/intellij-community,fengbaicanhe/intellij-community,ol-loginov/intellij-community,TangHao1987/intellij-community,samthor/intellij-community,vladmm/intellij-community,MER-GROUP/intellij-community,wreckJ/intellij-community,clumsy/intellij-community,semonte/intellij-community,ibinti/intellij-community,allotria/intellij-community,suncycheng/intellij-community,nicolargo/intellij-community,TangHao1987/intellij-community,fnouama/intellij-community,allotria/intellij-community,ThiagoGarciaAlves/intellij-community,apixandru/intellij-community,lucafavatella/intellij-community,fnouama/intellij-community,apixandru/intellij-community,tmpgit/intellij-community,dslomov/intellij-community,ibinti/intellij-community,michaelgallacher/intellij-community,allotria/intellij-community,lucafavatella/intellij-community,retomerz/intellij-community,nicolargo/intellij-community,xfournet/intellij-community,retomerz/intellij-community,fitermay/intellij-community,izonder/intellij-community,youdonghai/intellij-community,ivan-fedorov/intellij-community,caot/intellij-community,diorcety/intellij-community,adedayo/intellij-community,mglukhikh/intellij-community,TangHao1987/intellij-community,SerCeMan/intellij-community,muntasirsyed/intellij-community | python/helpers/pydev/pydevd_trace_api.py | python/helpers/pydev/pydevd_trace_api.py | def add_line_breakpoint(plugin, pydb, type, file, line, condition, expression, func_name):
return None
def add_exception_breakpoint(plugin, pydb, type, exception):
return False
def remove_exception_breakpoint(plugin, pydb, type, exception):
return False
def get_breakpoints(plugin, pydb):
return None
def can_not_skip(plugin, pydb, pydb_frame, frame):
return False
def has_exception_breaks(plugin, pydb):
return False
def cmd_step_into(plugin, pydb, frame, event, args, stop_info):
return False
def cmd_step_over(plugin, pydb, frame, event, args, stop_info):
return False
def stop(plugin, pydb, frame, event, args, stop_info, arg, step_cmd):
return False
def get_breakpoint(plugin, pydb, pydb_frame, frame, event, args):
return None
def suspend(plugin, pydb, thread, frame):
return None
def exception_break(plugin, pydb, pydb_frame, frame, args, arg):
return None | apache-2.0 | Python | |
ac39bd6abfaf715ed30f5836cc3021fa87dccae7 | Add tests for dtypes.py | aleju/imgaug,aleju/ImageAugmenter,aleju/imgaug | test/test_dtypes.py | test/test_dtypes.py | from __future__ import print_function, division, absolute_import
import time
import matplotlib
matplotlib.use('Agg') # fix execution of tests involving matplotlib on travis
import numpy as np
from imgaug import dtypes as iadt
def main():
time_start = time.time()
test_copy_dtypes_for_restore()
time_end = time.time()
print("<%s> Finished without errors in %.4fs." % (__file__, time_end - time_start,))
def test_copy_dtypes_for_restore():
# TODO using dtype=np.bool is causing this to fail as it ends up being <type bool> instead of
# <type 'numpy.bool_'>. Any problems from that for the library?
images = [
np.zeros((1, 1, 3), dtype=np.uint8),
np.zeros((10, 16, 3), dtype=np.float32),
np.zeros((20, 10, 6), dtype=np.int32)
]
dtypes_copy = iadt.copy_dtypes_for_restore(images, force_list=False)
assert all([dtype_i.type == dtype_j for dtype_i, dtype_j in zip(dtypes_copy, [np.uint8, np.float32, np.int32])])
dts = [np.uint8, np.float32, np.int32]
for dt in dts:
images = np.zeros((10, 16, 32, 3), dtype=dt)
dtypes_copy = iadt.copy_dtypes_for_restore(images)
assert isinstance(dtypes_copy, np.dtype)
assert dtypes_copy.type == dt
dtypes_copy = iadt.copy_dtypes_for_restore(images, force_list=True)
assert isinstance(dtypes_copy, list)
assert all([dtype_i.type == dt for dtype_i in dtypes_copy])
# TODO remove these tests once a similar test for restore_dtypes_() was added
"""
def test_restore_augmented_image_dtype_():
image = np.zeros((16, 32, 3), dtype=np.uint8)
image_result = iaa.restore_augmented_image_dtype_(image, np.int32)
assert image_result.dtype.type == np.int32
def test_restore_augmented_image_dtype():
image = np.zeros((16, 32, 3), dtype=np.uint8)
image_result = iaa.restore_augmented_image_dtype(image, np.int32)
assert image_result.dtype.type == np.int32
def test_restore_augmented_images_dtypes_():
images = np.zeros((10, 16, 32, 3), dtype=np.int32)
dtypes = iaa.copy_dtypes_for_restore(images)
images = images.astype(np.uint8)
assert images.dtype.type == np.uint8
images_result = iaa.restore_augmented_images_dtypes_(images, dtypes)
assert images_result.dtype.type == np.int32
images = [np.zeros((16, 32, 3), dtype=np.int32) for _ in sm.xrange(10)]
dtypes = iaa.copy_dtypes_for_restore(images)
images = [image.astype(np.uint8) for image in images]
assert all([image.dtype.type == np.uint8 for image in images])
images_result = iaa.restore_augmented_images_dtypes_(images, dtypes)
assert all([image_result.dtype.type == np.int32 for image_result in images_result])
def test_restore_augmented_images_dtypes():
images = np.zeros((10, 16, 32, 3), dtype=np.int32)
dtypes = iaa.copy_dtypes_for_restore(images)
images = images.astype(np.uint8)
assert images.dtype.type == np.uint8
images_restored = iaa.restore_augmented_images_dtypes(images, dtypes)
assert images_restored.dtype.type == np.int32
images = [np.zeros((16, 32, 3), dtype=np.int32) for _ in sm.xrange(10)]
dtypes = iaa.copy_dtypes_for_restore(images)
images = [image.astype(np.uint8) for image in images]
assert all([image.dtype.type == np.uint8 for image in images])
images_restored = iaa.restore_augmented_images_dtypes(images, dtypes)
assert all([image_restored.dtype.type == np.int32 for image_restored in images_restored])
"""
# Allow running the dtype tests directly as a script.
if __name__ == "__main__":
    main()
| mit | Python | |
579b73cbeeddbba66e5fa0d9026a8a94036de0b0 | Make sure that CacheDir() works even when timestamp signatures are used. | azatoth/scons,azatoth/scons,azatoth/scons,azatoth/scons,azatoth/scons | test/CacheDir/timestamp.py | test/CacheDir/timestamp.py | #!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify that CacheDir() works even when using timestamp signatures.
"""
import TestSCons
test = TestSCons.TestSCons()
# Scratch project: copy file.in -> file.out through a derived-file cache,
# with timestamp-based source signatures and content-based target signatures.
test.write('SConstruct', """
SourceSignatures('timestamp')
TargetSignatures('content')
CacheDir('cache')
Command('file.out', 'file.in', Copy('$TARGET', '$SOURCE'))
""")
test.write('file.in', "file.in\n")
# The build must succeed and produce the copied content; this would fail
# if CacheDir bookkeeping broke under timestamp source signatures.
test.run()
test.must_match('file.out', "file.in\n")
test.pass_test()
| mit | Python | |
ad96a3c1269f5615ad22b02620e1da7dfc02b9e3 | Add filtering by CIV equivalent width | sbird/fake_spectra,sbird/fake_spectra,sbird/fake_spectra | civspectra.py | civspectra.py | # -*- coding: utf-8 -*-
"""Class to gather and analyse various metal line statistics"""
import numpy as np
import hdfsim
import spectra
class CIVSpectra(spectra.Spectra):
    """Generate CIV metal-line spectra along random sightlines through a snapshot.

    Sightlines are drawn at random positions (all cast along the y axis) and,
    when thresh > 0, re-drawn until every sightline has a CIV 1550 equivalent
    width above thresh.
    """
    def __init__(self,num, base, numlos=5000, res = 1., cdir = None, thresh=0.05, savefile="civ_spectra_DLA.hdf5", savedir=None):
        """num/base: snapshot number and base directory; numlos: sightlines wanted;
        res: spectral resolution; thresh: minimum CIV 1550 equivalent width
        (<= 0 disables filtering); savefile/savedir: output location."""
        #Read the box size from the snapshot header before drawing sightlines.
        f = hdfsim.get_file(num, base, 0)
        self.box = f["Header"].attrs["BoxSize"]
        f.close()
        self.NumLos = numlos
        #All sightlines are cast along the y axis.
        axis = np.ones(self.NumLos)
        #Sightlines at random positions
        #Re-seed for repeatability
        np.random.seed(23)
        cofm = self.get_cofm()
        spectra.Spectra.__init__(self,num, base, cofm, axis, res, cdir, savefile=savefile,savedir=savedir,reload_file=True)
        #thresh <= 0 keeps the raw random sightlines without filtering.
        if thresh > 0:
            self.replace_not_DLA(thresh)
            print "Found DLAs"
    def get_cofm(self, num = None):
        """Return num random sightline positions (defaults to self.NumLos).

        May be overridden by child classes to bias the placement."""
        if num == None:
            num = self.NumLos
        cofm = self.box*np.random.random_sample((num,3))
        return cofm
    def replace_not_DLA(self, thresh=0.05):
        """
        Replace those sightlines which do not contain CIV with eq. width > thresh
        with freshly drawn sightlines, repeating until self.NumLos qualifying
        sightlines have been collected.  Must implement get_cofm for this to
        work; self.discarded counts the candidates rejected along the way.
        """
        #found/wanted track how many qualifying sightlines we have so far.
        found = 0
        wanted = self.NumLos
        cofm_DLA = np.empty_like(self.cofm)
        #Keep only sightlines above the equivalent-width threshold.
        eqw = self.equivalent_width("C",4,1550)
        #Discard the cached tau for this line so it is recomputed for new sightlines.
        del self.tau[("C",4,1550)]
        ind = np.where(eqw > thresh)
        #Copy the survivors (at most 'wanted' of them) into the output array.
        top = np.min([wanted, found+np.size(ind)])
        cofm_DLA[found:top] = self.cofm[ind][:top,:]
        found += np.size(ind)
        self.discarded = wanted-np.size(ind)
        print "Discarded: ",self.discarded
        while found < wanted:
            #Draw a fresh batch of candidate sightlines and filter them again.
            self.cofm = self.get_cofm()
            eqw = self.equivalent_width("C",4,1550)
            ind = np.where(eqw > thresh)
            del self.tau[("C",4,1550)]
            #Append as many survivors as still fit below 'wanted'.
            top = np.min([wanted, found+np.size(ind)])
            cofm_DLA[found:top] = self.cofm[ind][:top-found,:]
            found += np.size(ind)
            self.discarded += wanted-np.size(ind)
            print "Discarded: ",self.discarded
        #Install the filtered sightlines and mark the set as final.
        self.cofm=cofm_DLA
        #Finalise the cofm array
        self.cofm_final = True
| mit | Python | |
857b962e4a0e2209ff50508783b6856b6dd689d6 | Test object loader | alexandermendes/pybossa-analyst,LibCrowds/libcrowds-analyst,alexandermendes/pybossa-analyst,alexandermendes/pybossa-analyst | test/test_object_loader.py | test/test_object_loader.py | # -*- coding: utf8 -*-
"""Test the object loader module for libcrowds-analyst."""
from libcrowds_analyst import object_loader
class TestObjectLoader(object):
    """Tests for libcrowds_analyst.object_loader.load()."""

    def test_correct_objects_are_loaded(self, result):
        """Test that objects are loaded."""
        # PEP 8 (E731): use a def, not a lambda assigned to a name.
        def loader(**kwargs):
            return [result]
        object_list = object_loader.load(loader)
        assert object_list == [result]

    def test_all_objects_are_loaded(self, result):
        """Test that all objects are loaded."""
        # NOTE(review): this assumes load() pages in batches of 100, so a
        # loader that always returns 100 objects is queried twice (200
        # objects total) -- TODO confirm against object_loader.load().
        long_list = [result] * 100
        def loader(**kwargs):
            return long_list
        object_list = object_loader.load(loader)
        assert len(object_list) == 200
| unknown | Python | |
c8fc95cfe4dc8d8d5956b4e1cce994c3350eca8b | add plugins_redis | yixuanzi/blackmoon | plugins/exploit/plugin_redis.py | plugins/exploit/plugin_redis.py | #!/usr/bin/env python
# -*- coding:utf-8 -*-
import socket
import urlparse
from bmplugin import *
# Plugin metadata consumed by the framework: human-readable description,
# associated CVE id (none assigned here) and a reference link.
info={'desc':"redis unauth vulns and get a shell",
      'cve':'',
      'link':"https://www.sebug.net/vuldb/ssvid-89715"}
def init_plugin(main):
    """Register the 'redis' command with the framework's active handler."""
    main.maintive.regcommand('redis',redis_act,"redis vuln use",__file__)
class redis:
    """Helper wrapping checks against a (possibly unauthenticated) redis server."""
    def __init__(self,host=None,port=6379):
        # host may also be supplied later via setaddress(); port defaults
        # to the standard redis port.
        if host:
            self.host=host
        self.port=port
    def setaddress(self,host=None,port=None):
        """Update the target host and/or port, leaving unset values unchanged."""
        if host:
            self.host=host
        if port:
            self.port=port
    def verify_unauth(self):
        """Probe the target by issuing INFO without authenticating.

        Returns a dict describing the vulnerability if the server answers
        with its info block, or an empty dict otherwise.
        """
        result = {}
        # Raw RESP encoding of the INFO command: "*1\r\n$4\r\ninfo\r\n".
        payload = '\x2a\x31\x0d\x0a\x24\x34\x0d\x0a\x69\x6e\x66\x6f\x0d\x0a'
        s = socket.socket()
        # Bug fix: the original called socket.setdefaulttimeout(10) AFTER
        # creating s, which leaves this socket without a timeout (the
        # default only applies to sockets created later) and mutates
        # process-global state.  Set the timeout on this socket directly.
        s.settimeout(10)
        try:
            if not lib_func.isip(self.host):
                # Strip scheme/path from a URL-style target.
                self.host = urlparse.urlparse(self.host).netloc
            s.connect((self.host, self.port))
            s.send(payload)
            recvdata = s.recv(1024)
            if recvdata and 'redis_version' in recvdata:
                result['vulns']="unauth vuln"
                result['VerifyInfo'] = {}
                result['VerifyInfo']['URL'] = self.host
                result['VerifyInfo']['Port'] = self.port
        except Exception:
            # Best-effort probe: any network failure just means "not
            # verified".  Unlike a bare except, this does not swallow
            # SystemExit/KeyboardInterrupt.
            pass
        finally:
            s.close()
        return result
# Module-level probe instance shared by the command handler below.
red=redis()

def redis_act(paras):
    """redis [-h host] [-p port] [-v]"""
    try:
        pd=lib_func.getparasdict(paras,"vh:p:")
        if (not pd):
            # Fixed typo in the user-facing message ("vaild" -> "valid").
            lib_func.printstr("You should input the valid parameters",1)
            lib_func.printstr(redis_act.__doc__)
            return
    except Exception:
        # Option parsing failed: show usage and bail out.
        lib_func.printstr(redis_act.__doc__,1)
        return
    # dict.has_key() is deprecated (removed in Python 3); the 'in'
    # operator is identical on Python 2 and portable.
    if 'h' in pd:
        red.setaddress(host=pd['h'])
    if 'p' in pd:
        red.setaddress(port=int(pd['p']))
    if 'v' in pd:
        rs=red.verify_unauth()
        # print(x) with a single argument behaves identically on
        # Python 2 and 3.
        print(rs)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.