commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
659af8d29ec1839217c74522d8fa5bcf61e48451 | FIX category | linkitspa/l10n-italy,alessandrocamilli/l10n-italy,maxhome1/l10n-italy,linkitspa/l10n-italy,andrea4ever/l10n-italy,odoo-isa/l10n-italy,abstract-open-solutions/l10n-italy,yvaucher/l10n-italy,scigghia/l10n-italy,OpenCode/l10n-italy,luca-vercelli/l10n-italy,hurrinico/l10n-italy,ApuliaSoftware/l10n-italy,linkitspa/l10n-italy | l10n_it_CEE_balance/__openerp__.py | l10n_it_CEE_balance/__openerp__.py | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 Italian OpenERP Community (<http://www.openerp-italia.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "Italy - Generic Chart of Accounts",
"version" : "0.1",
"depends" : ['l10n_it',],
"author" : "OpenERP Italian Community",
"description": """
Riclassificazione IV normativa UE per un piano dei conti italiano di un'impresa generica (compreso in l10n_it)
""",
"license": "AGPL-3",
"category" : "Localisation/Italy",
'website': 'http://www.openerp-italia.org/',
'init_xml': [
],
'update_xml': [
'data/account.account.type.csv',
'data/account.account.csv',
'account_view.xml',
],
'demo_xml': [
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2010 Italian OpenERP Community (<http://www.openerp-italia.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
"name" : "Italy - Generic Chart of Accounts",
"version" : "0.1",
"depends" : ['l10n_it',],
"author" : "OpenERP Italian Community",
"description": """
Riclassificazione IV normativa UE per un piano dei conti italiano di un'impresa generica (compreso in l10n_it)
""",
"license": "AGPL-3",
"category" : "Localisation",
'website': 'http://www.openerp-italia.org/',
'init_xml': [
],
'update_xml': [
'data/account.account.type.csv',
'data/account.account.csv',
'account_view.xml',
],
'demo_xml': [
],
'installable': True,
'active': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | Python |
e920d0dc413dacd2b1084c2cdcacb4bae3bc3d8b | Create problem10.py | Amapolita/MITx--6.00.1x- | W2/L4/problem10.py | W2/L4/problem10.py | #L4 PROBLEM 10
def isVowel(char):
if char == 'a' or char == 'e' or char == 'i' or char == 'o' or char == 'u' or char == 'A' or char == 'E' or char == 'I' or char == 'O' or char == 'U':
return True
else:
return False
| unlicense | Python | |
799599e1b841db8c9eff9de4cdf7dbc5cb396c37 | Create dml.py | sheabrown/faraday_complexity | dml.py | dml.py | dml
| mit | Python | |
349e63385ea71d1a69e094aeee1268c44e951b53 | Update code funny | Murillo/Hackerrank-Algorithms | Strings/funny-string.py | Strings/funny-string.py | # Funny String
# Developer: Murillo Grubler
# Link: https://www.hackerrank.com/challenges/funny-string/problem
import string
# ca - xz
# xc - cx
# zx - ac
#acxz - 3
#acxza
def funnyString(s):
str_inverted = s[::-1]
for i in range(len(s) - 1):
print(s[i])
return ""
q = int(input().strip())
for a0 in range(q):
print(funnyString(input().strip()))
| mit | Python | |
4091fbf0051d022730b11da6d6786a25c38a6dd3 | Add a simple audio recorder. Note that this would normally not work on a raspberry pi as it has no audio in. You'll have to add an usb microphone for this to work. | bramvonk/blind-dialer | src/util/audiorecorder.py | src/util/audiorecorder.py | # simple audio recorder... source: http://stackoverflow.com/questions/892199/detect-record-audio-in-python
# you need pyaudio, install with python -m pip install pyaudio
from sys import byteorder
from array import array
from struct import pack
import pyaudio
import wave
THRESHOLD = 500
CHUNK_SIZE = 1024
FORMAT = pyaudio.paInt16
RATE = 44100
def is_silent(snd_data):
"Returns 'True' if below the 'silent' threshold"
return max(snd_data) < THRESHOLD
def normalize(snd_data):
"Average the volume out"
MAXIMUM = 16384
times = float(MAXIMUM)/max(abs(i) for i in snd_data)
r = array('h')
for i in snd_data:
r.append(int(i*times))
return r
def trim(snd_data):
"Trim the blank spots at the start and end"
def _trim(snd_data):
snd_started = False
r = array('h')
for i in snd_data:
if not snd_started and abs(i)>THRESHOLD:
snd_started = True
r.append(i)
elif snd_started:
r.append(i)
return r
# Trim to the left
snd_data = _trim(snd_data)
# Trim to the right
snd_data.reverse()
snd_data = _trim(snd_data)
snd_data.reverse()
return snd_data
def add_silence(snd_data, seconds):
"Add silence to the start and end of 'snd_data' of length 'seconds' (float)"
r = array('h', [0 for i in xrange(int(seconds*RATE))])
r.extend(snd_data)
r.extend([0 for i in xrange(int(seconds*RATE))])
return r
def record():
"""
Record a word or words from the microphone and
return the data as an array of signed shorts.
Normalizes the audio, trims silence from the
start and end, and pads with 0.5 seconds of
blank sound to make sure VLC et al can play
it without getting chopped off.
"""
p = pyaudio.PyAudio()
stream = p.open(format=FORMAT, channels=1, rate=RATE,
input=True, output=True,
frames_per_buffer=CHUNK_SIZE)
num_silent = 0
snd_started = False
r = array('h')
while 1:
# little endian, signed short
snd_data = array('h', stream.read(CHUNK_SIZE))
if byteorder == 'big':
snd_data.byteswap()
r.extend(snd_data)
silent = is_silent(snd_data)
if silent and snd_started:
num_silent += 1
elif not silent and not snd_started:
snd_started = True
if snd_started and num_silent > 30:
break
sample_width = p.get_sample_size(FORMAT)
stream.stop_stream()
stream.close()
p.terminate()
r = normalize(r)
r = trim(r)
r = add_silence(r, 0.5)
return sample_width, r
def record_to_file(path):
"Records from the microphone and outputs the resulting data to 'path'"
sample_width, data = record()
data = pack('<' + ('h'*len(data)), *data)
wf = wave.open(path, 'wb')
wf.setnchannels(1)
wf.setsampwidth(sample_width)
wf.setframerate(RATE)
wf.writeframes(data)
wf.close()
if __name__ == '__main__':
print("please speak a word into the microphone")
record_to_file('demo.wav')
print("done - result written to demo.wav") | mit | Python | |
45b0e958aa377afed2c62bf1e6f7c4933ccde39b | Add a test for main | robbie-c/git-lang-guesser | test/test_main.py | test/test_main.py | from git_lang_guesser import main
from git_lang_guesser import git_requester
LANGUAGE = "language"
test_username = "TestUser"
example_data = [
{LANGUAGE: "HTML"},
{LANGUAGE: "Java"},
{LANGUAGE: "Python"},
{LANGUAGE: "Python"},
{LANGUAGE: "C"},
]
expected_count = {
"HTML": 1,
"Java": 1,
"Python": 2,
"C": 1,
}
expected_favourite = "Python"
class TestDoGuess(object):
def test_basic(self, monkeypatch, capsys):
"""Test that basic usage works"""
def mock_request(username):
assert(username == test_username)
return example_data
monkeypatch.setattr(git_requester, "get_public_repos_for_user", mock_request)
main.do_guess(username=test_username, list_all=False)
out, err = capsys.readouterr()
assert(out.strip() == expected_favourite)
| mit | Python | |
2dc94daed7c0475e84f829d48d97afc63f2e803d | add type_vec4 test | mackst/glm | test/type_vec4.py | test/type_vec4.py | # -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2014 mack stone
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import unittest
from glm.type_vec4 import Vec4
class TestVec4(unittest.TestCase):
def test_initialization(self):
# none argument init
v = Vec4()
msg = 'none argument initialzation failure'
self.assertEqual(v.x, 0.0, msg)
self.assertEqual(v.y, 0.0, msg)
self.assertEqual(v.z, 0.0, msg)
self.assertEqual(v.w, 0.0, msg)
# init with key words
v = Vec4(x=1.)
msg = 'key word initialzation failure'
self.assertEqual(v.x, 1.0, msg)
self.assertEqual(v.y, 0.0, msg)
self.assertEqual(v.z, 0.0, msg)
self.assertEqual(v.w, 0.0, msg)
v = Vec4(y=5., x=2.)
self.assertEqual(v.x, 2., msg)
self.assertEqual(v.y, 5., msg)
self.assertEqual(v.z, 0.0, msg)
self.assertEqual(v.w, 0.0, msg)
v = Vec4(y=3., z=2., x=5.)
self.assertEqual(v.x, 5., msg)
self.assertEqual(v.y, 3., msg)
self.assertEqual(v.z, 2.0, msg)
self.assertEqual(v.w, 0.0, msg)
v = Vec4(x=1., y=5., z=2., w=.5)
self.assertEqual(v.x, 1., msg)
self.assertEqual(v.y, 5., msg)
self.assertEqual(v.z, 2.0, msg)
self.assertEqual(v.w, .5, msg)
# init with float argument
v = Vec4(1.)
self.assertEqual((v.x, v.y, v.z, v.w), (1., 1., 1., 1.), 'float argument init failure')
# init with list or tuple
v = Vec4([1., 5.])
msg = 'list init failure'
self.assertEqual(v.x, 1., msg)
self.assertEqual(v.y, 5., msg)
self.assertEqual(v.z, 0.0, msg)
self.assertEqual(v.w, 0.0, msg)
v = Vec4([1., 5., -2.])
self.assertEqual(v.x, 1., msg)
self.assertEqual(v.y, 5., msg)
self.assertEqual(v.z, -2.0, msg)
self.assertEqual(v.w, 0.0, msg)
v = Vec4([1., 5., -2., .5])
self.assertEqual(v.x, 1., msg)
self.assertEqual(v.y, 5., msg)
self.assertEqual(v.z, -2.0, msg)
self.assertEqual(v.w, 0.5, msg)
v = Vec4((1., 5.))
msg = 'tuple init failure'
self.assertEqual(v.x, 1., msg)
self.assertEqual(v.y, 5., msg)
self.assertEqual(v.z, 0.0, msg)
self.assertEqual(v.w, 0.0, msg)
v = Vec4((1., 5., -2.))
self.assertEqual(v.x, 1., msg)
self.assertEqual(v.y, 5., msg)
self.assertEqual(v.z, -2.0, msg)
self.assertEqual(v.w, 0.0, msg)
v = Vec4((1., 5., -2., .5))
self.assertEqual(v.x, 1., msg)
self.assertEqual(v.y, 5., msg)
self.assertEqual(v.z, -2.0, msg)
self.assertEqual(v.w, 0.5, msg)
msg = 'Vec4 init failure'
bv = Vec4(1., 5., -2., .5)
v = Vec4(bv)
self.assertEqual(v.x, 1., msg)
self.assertEqual(v.y, 5., msg)
self.assertEqual(v.z, -2.0, msg)
self.assertEqual(v.w, 0.5, msg)
if __name__ == '__main__':
unittest.main()
| mit | Python | |
c80a015151fb6648aa34e7b79cd29f4cd2c97560 | add a example | mongodb/mongo-disco,johntut/MongoDisco,sajal/MongoDisco,10genNYUITP/MongoDisco | test_disco_job.py | test_disco_job.py | from DiscoJob import DiscoJob
import logging
config = {
"split_size": 1, #MB
"input_uri": "mongodb://localhost/test.modforty",
"create_input_splits": True,
"split_key": {'_id' : 1},
"output_uri":"mongodb://localhost/test.out",
#"job_output_key":"I am the key",
"job_output_value":"I ame the value",
"job_wait":True
}
def map(record, params):
yield record.get('name', "NoName"), 1
def reduce(iter, params):
from disco.util import kvgroup
for word, counts in kvgroup(sorted(iter)):
yield word, sum(counts)
if __name__ == '__main__':
'''
job = Job().run(
#input=["mongodb://localhost/test.modforty"],
input= do_split(config),
map=map,
reduce=reduce,
map_input_stream = mongodb_input_stream,
reduce_output_stream=mongodb_output_stream)
job.wait(show=True)
'''
DiscoJob(config = config,map = map,reduce = reduce).run()
| apache-2.0 | Python | |
ee78590d2a6f0a509b08bf1b59b3f27560375524 | add conftest | CamDavidsonPilon/lifetimes,statwonk/lifetimes,aprotopopov/lifetimes,luke14free/lifetimes | tests/conftest.py | tests/conftest.py | from __future__ import print_function
import numpy as np
def pytest_runtest_setup(item):
seed = np.random.randint(1000)
print("Seed used in np.random.seed(): %d" % seed)
np.random.seed(seed)
| mit | Python | |
e8853997b7ba28da48e1620fd1466fbe8ca1d0c0 | Add setup.py | plajjan/pyarrfs | setup.py | setup.py | #!/usr/bin/env python
from distutils.core import setup
from pyarrfs import pyarrfs
ver = pyarrfs.__version__
long_desc = open("README").read()
short_desc = long_desc.split('\n')[0].split(' - ')[1].strip()
setup(
name = 'pyarrfs',
version = pyarrfs.__version__,
description = short_desc,
long_description = long_desc,
author = pyarrfs.__author__,
license = pyarrfs.__license__,
author_email = pyarrfs.__author_email__,
url = pyarrfs.__url__,
scripts = ['pyarrfs'],
keywords = ['rar', 'fuse'],
classifiers = [
'Development Status :: 4 - Beta',
'Intended Audience :: End Users/Desktop',
'Intended Audience :: System Administrators',
'License :: OSI Approved :: MIT License',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python :: 2',
'Topic :: System :: Archiving :: Compression',
'Topic :: System :: Filesystems'
]
)
| mit | Python | |
b0ada080d8c8890152a57168e6b7b449a5588f10 | Add setup.py for PyPI | kyleconroy/clocktower | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
setup(
name='clocktower',
version='0.1.0',
author='Kyle Conroy',
author_email='kyle@twilio.com',
url='https://github.com/derferman/clocktower',
description='Download websites from Wayback Machine',
install_requires=['lxml'],
data_files=[],
classifiers=[
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules",
],
entry_points={
'console_scripts': [
'clocktower = clocktower:main']
},
)
| mit | Python | |
9c6ba2717fb71755d31d8c7b7066730171be8b20 | bump version, add missing dependency - Django | bradleyg/django-ajaximage,subhaoi/kioskuser,subhaoi/kioskuser,bradleyg/django-ajaximage,bradleyg/django-ajaximage,subhaoi/kioskuser | setup.py | setup.py | import os
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
setup(
name='django-ajaximage',
version='0.1.16-rohanza',
description='Add ajax image upload functionality with a progress bar to file input fields within Django admin. Images are optionally resized.',
long_description=readme,
author="Bradley Griffiths",
author_email='bradley.griffiths@gmail.com',
url='https://github.com/bradleyg/django-ajaximage',
packages=['ajaximage'],
include_package_data=True,
install_requires=[
'Django',
'Pillow',
],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
| import os
from setuptools import setup
f = open(os.path.join(os.path.dirname(__file__), 'README.md'))
readme = f.read()
f.close()
setup(
name='django-ajaximage',
version='0.1.16',
description='Add ajax image upload functionality with a progress bar to file input fields within Django admin. Images are optionally resized.',
long_description=readme,
author="Bradley Griffiths",
author_email='bradley.griffiths@gmail.com',
url='https://github.com/bradleyg/django-ajaximage',
packages=['ajaximage'],
include_package_data=True,
install_requires=['setuptools', 'pillow'],
zip_safe=False,
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
)
| mit | Python |
fac7c76ea056179653574dffd297f927f76daa42 | add setuptools routine | bertmcmeyer/opti_ssr,fietew/opti_ssr | setup.py | setup.py | import os
from setuptools import setup
setup(
name = "opti_ssr",
version = "0.0.4",
py_modules=['opti_ssr', 'ssr_network', 'opti_network'],
author = "Felix Immohr, Fiete Winter",
author_email = "test@te.st, fiete.winter@gmail.com",
description = ("Using the OptiTrack system for different applications "
"of the SoundScape Renderer"),
license = "MIT",
keywords = "optitrack motive natnet ssr soundscaperenderer".split(),
url = "",
long_description=open('README').read(),
platforms='any',
classifiers=[
"Development Status :: 3 - Alpha",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Scientific/Engineering",
],
)
| mit | Python | |
230cae4f6cce8e064b5b74f87ec09181e41f57c2 | Add MDR setup file | EpistasisLab/scikit-mdr,EpistasisLab/scikit-mdr | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
def calculate_version():
initpy = open('mdr/_version.py').read().split('\n')
version = list(filter(lambda x: '__version__' in x, initpy))[0].split('\'')[1]
return version
package_version = calculate_version()
setup(
name='scikit-MDR',
version=package_version,
author='Randal S. Olson',
author_email='rso@randalolson.com',
packages=find_packages(),
url='https://github.com/rhiever/scikit-mdr',
license='License :: OSI Approved :: MIT License',
#entry_points={'console_scripts': ['mdr=mdr:main', ]},
description=('A sklearn-compatible Python implementation of Multifactor Dimensionality Reduction (MDR) for feature construction.'),
long_description='''
A sklearn-compatible Python implementation of Multifactor Dimensionality Reduction (MDR) for feature construction.
Contact
=============
If you have any questions or comments about scikit-MDR, please feel free to contact me via:
E-mail: rso@randalolson.com
or Twitter: https://twitter.com/randal_olson
This project is hosted at https://github.com/rhiever/scikit-mdr
''',
zip_safe=True,
install_requires=['numpy', 'scipy', 'pandas', 'scikit-learn'],
classifiers=[
'Intended Audience :: Science/Research',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords=['bioinformatics', 'GWAS', 'feature construction', 'single nucleotide polymorphisms', 'epistasis', 'dimesionality reduction'],
)
| mit | Python | |
612517b15a40af29fa57d3a5507ba778ff32fa51 | Add setup.py | HearSys/pattern_finder_gpu,HearSys/pattern_finder_gpu | setup.py | setup.py | from setuptools import setup
setup(name='pattern_finder_gpu',
version='1.0',
description='Brute force OpenCL based pattern localization in images that supports masking and weighting.',
url='https://github.com/HearSys/pattern_finder_gpu',
author='Samuel John (HörSys GmbH)',
author_email='john.samuel@hoersys.de',
license='MIT',
packages=['pattern_finder_gpu'],
install_requires=['pyopencl', 'numpy', 'scipy', 'matplotlib', 'skimage'],
zip_safe=False)
| mit | Python | |
380fb1e3b5b8fbde868f7fffbe0a6f22fc037e55 | Remove download_url from setup.py | bsipocz/astropy-helpers,bsipocz/astropy-helpers,larrybradley/astropy-helpers,Cadair/astropy-helpers,bsipocz/astropy-helpers,larrybradley/astropy-helpers,astropy/astropy-helpers,larrybradley/astropy-helpers,dpshelio/astropy-helpers,dpshelio/astropy-helpers,astropy/astropy-helpers,Cadair/astropy-helpers | setup.py | setup.py | #!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import ah_bootstrap
import pkg_resources
from setuptools import setup
from astropy_helpers.setup_helpers import register_commands, get_package_info
from astropy_helpers.version_helpers import generate_version_py
NAME = 'astropy_helpers'
VERSION = '2.0.dev'
RELEASE = 'dev' not in VERSION
generate_version_py(NAME, VERSION, RELEASE, False, uses_git=not RELEASE)
# Use the updated version including the git rev count
from astropy_helpers.version import version as VERSION
cmdclass = register_commands(NAME, VERSION, RELEASE)
# This package actually doesn't use the Astropy test command
del cmdclass['test']
setup(
name=pkg_resources.safe_name(NAME), # astropy_helpers -> astropy-helpers
version=VERSION,
description='Utilities for building and installing Astropy, Astropy '
'affiliated packages, and their respective documentation.',
author='The Astropy Developers',
author_email='astropy.team@gmail.com',
license='BSD',
url=' https://github.com/astropy/astropy-helpers',
long_description=open('README.rst').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Framework :: Setuptools Plugin',
'Framework :: Sphinx :: Extension',
'Framework :: Sphinx :: Theme',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Archiving :: Packaging'
],
cmdclass=cmdclass,
zip_safe=False,
**get_package_info(exclude=['astropy_helpers.tests'])
)
| #!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import ah_bootstrap
import pkg_resources
from setuptools import setup
from astropy_helpers.setup_helpers import register_commands, get_package_info
from astropy_helpers.version_helpers import generate_version_py
NAME = 'astropy_helpers'
VERSION = '2.0.dev'
RELEASE = 'dev' not in VERSION
DOWNLOAD_BASE_URL = 'http://pypi.io/packages/source/a/astropy-helpers'
generate_version_py(NAME, VERSION, RELEASE, False, uses_git=not RELEASE)
# Use the updated version including the git rev count
from astropy_helpers.version import version as VERSION
cmdclass = register_commands(NAME, VERSION, RELEASE)
# This package actually doesn't use the Astropy test command
del cmdclass['test']
setup(
name=pkg_resources.safe_name(NAME), # astropy_helpers -> astropy-helpers
version=VERSION,
description='Utilities for building and installing Astropy, Astropy '
'affiliated packages, and their respective documentation.',
author='The Astropy Developers',
author_email='astropy.team@gmail.com',
license='BSD',
url=' https://github.com/astropy/astropy-helpers',
long_description=open('README.rst').read(),
download_url='{0}/astropy-helpers-{1}.tar.gz'.format(DOWNLOAD_BASE_URL,
VERSION),
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Framework :: Setuptools Plugin',
'Framework :: Sphinx :: Extension',
'Framework :: Sphinx :: Theme',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Topic :: Software Development :: Build Tools',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: System :: Archiving :: Packaging'
],
cmdclass=cmdclass,
zip_safe=False,
**get_package_info(exclude=['astropy_helpers.tests'])
)
| bsd-3-clause | Python |
a9f0d310e967bea276f01cd558f48fc102ea24fc | add setup file for installation as module | JamesJeffryes/MINE-Database | setup.py | setup.py | from setuptools import setup
setup(name='minedatabase',
version='0.1',
description='Metabolic In silico Network Expansions',
url='http://github.com/JamesJeffryes/mine-database',
author='James Jeffryes',
author_email='jamesgjeffryes@gmail.com',
license='MIT',
packages=['minedatabase',
'minedatabase.NP_Score'],
install_requires=['pymongo'],
extras_require={},
)
| mit | Python | |
77700d265fbf3fafa21b335585b8060a0b025143 | add setup.py | indygreg/lua-protobuf,mickem/lua-protobuf | setup.py | setup.py | # Copyright 2010 Gregory Szorc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup, find_packages
setup(
name = 'lua-protobuf',
version = '0.0.1',
packages = [ 'lua_protobuf' ],
scripts = ['protoc-gen-lua'],
install_requires = [ 'protobuf>=2.3.0' ],
author = 'Gregory Szorc',
author_email = 'gregory.szorc@gmail.com',
description = 'Lua protocol buffer code generator',
license = 'Apache 2.0',
url = 'http://github.com/indygreg/lua-protobuf'
)
| apache-2.0 | Python | |
092bf8bc2e558420ca51384a3dd1019ab1115ad2 | Fix conditional dependencies when using wheels | grigno/djangocms-link,brente/djangocms-link,addgene/djangocms-link,garmoncheg/djangocms-link,yakky/djangocms-link,yakky/djangocms-link,brente/djangocms-link,addgene/djangocms-link,grigno/djangocms-link,garmoncheg/djangocms-link | setup.py | setup.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
from djangocms_link import __version__
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Communications',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
]
setup(
name='djangocms-link',
version=__version__,
description='Link Plugin for django CMS',
author='Divio AG',
author_email='info@divio.ch',
url='https://github.com/divio/djangocms-link',
packages=['djangocms_link', 'djangocms_link.migrations', 'djangocms_link.migrations_django'],
install_requires=[],
extras_require={
":python_version=='3.3'": ['django-select2-py3'],
":python_version=='3.4'": ['django-select2-py3'],
":python_version=='2.6'": ['django-select2'],
":python_version=='2.7'": ['django-select2'],
},
license='LICENSE.txt',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
long_description=open('README.md').read(),
include_package_data=True,
zip_safe=False
)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
from setuptools import setup
from djangocms_link import __version__
INSTALL_REQUIRES = [
#'Django-Select2',
]
CLASSIFIERS = [
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Communications',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content :: Message Boards',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
]
setup(
name='djangocms-link',
version=__version__,
description='Link Plugin for django CMS',
author='Divio AG',
author_email='info@divio.ch',
url='https://github.com/divio/djangocms-link',
packages=['djangocms_link', 'djangocms_link.migrations', 'djangocms_link.migrations_django'],
install_requires=INSTALL_REQUIRES,
license='LICENSE.txt',
platforms=['OS Independent'],
classifiers=CLASSIFIERS,
long_description=open('README.md').read(),
include_package_data=True,
zip_safe=False
)
| bsd-3-clause | Python |
a9e2e225e083575c66c33986db06b496c1596449 | Create setup.py | Bachmann1234/weight-watchers-sync | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name='wwsync',
version='1.0',
author='Matt Bachmann',
url='https://github.com/Bachmann1234/weight-watchers-sync',
description='Syncs Weight Watcher food log to Fitbit',
license='MIT',
packages=find_packages(),
install_requires=['requests==2.9.1']
)
| mit | Python | |
265e1b6177552e13f332b1f39885433789f27d94 | add setup.py | brysontyrrell/Jook | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup
import re
with open('jook/__init__.py', 'r') as fobj:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]',
fobj.read(), re.MULTILINE).group(1)
with open('README.rst', 'r') as fobj:
long_description = fobj.read()
setup(
name='Jook',
version=version,
description='A Jamf Pro webhook simulator',
long_description=long_description,
url='https://github.com/brysontyrrell/Jook',
author='Bryson Tyrrell',
author_email='bryson.tyrrell@gmail.com',
license='MIT',
classifiers=[
'Development Status :: 1 - Planning',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7'
],
keywords='jamf webhooks testing',
packages=['jook'],
install_requires=[
'dicttoxml>=1.7',
'requests>=2.11'
],
zip_safe=False
)
| mit | Python | |
90faa98cf8d0a11e8cfd1ff1b91b505a43f956c5 | Add setup.py | Jwuthri/Mozinor | setup.py | setup.py | """Module to prepare you python environment."""
import importlib
def run_package(package):
"""Try to run the package just installed."""
try:
globals()[package] = importlib.import_module(package)
print("================================")
except Exception:
print("We can't install {}".format(package))
print("================================")
def install_and_import(package):
"""Install all requiered packages."""
print("checking for {}".format(str(package)))
try:
importlib.import_module(package)
print("{} is already installed".format(package))
except ImportError:
print("We'll install {} before continuing".format(package))
import pip
pip.main(['--trusted-host', 'pypi.python.org', 'install', package])
print("installing {}...".format(package))
finally:
run_package(package)
def get_all_packages():
""""Get all packages in requirement.txt."""
lst_packages = list()
with open('requirement.txt') as fp:
for line in fp:
lst_packages.append(line.split("=")[0].lower())
return lst_packages
if __name__ == '__main__':
lst_install_requires = get_all_packages()
for module in lst_install_requires:
install_and_import(module)
print('You are ready to use the module')
| mit | Python | |
1bf649207f850b21e99eb6a9479ecdb1cb03a93d | Update version to 0.7.1 | Dreamsolution/django-auth-policy,mcella/django-auth-policy,Dreamsolution/django-auth-policy,mcella/django-auth-policy | setup.py | setup.py | #!/usr/bin/env python
from setuptools import setup

# Base runtime dependency; conditionally extended below.
install_requires = ['Django>=1.5']

try:
    from collections import OrderedDict
except ImportError:
    # collections.OrderedDict only exists from Python 2.7 on; older
    # interpreters need the standalone backport package.
    install_requires.append('ordereddict>=1.1')

setup(
    name='django-auth-policy',
    version='0.7.1',
    description='Enforces a couple of common authentication policies for the '
                'Django web framework.',
    author='Fox-IT B.V.',
    author_email='fox@fox-it.com',
    maintainer='Rudolph Froger',
    maintainer_email='rudolphfroger@estrate.nl',
    url='https://github.com/rudolphfroger/django-auth-policy',
    license='BSD',
    packages=['django_auth_policy'],
    # Ship compiled and source translation catalogs with the package.
    package_data={'django_auth_policy': ['locale/*/LC_MESSAGES/*.mo',
                                         'locale/*/LC_MESSAGES/*.po']},
    install_requires=install_requires,
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Framework :: Django',
        'Topic :: Security',
        'Topic :: Internet :: WWW/HTTP :: Session',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
)
| #!/usr/bin/env python
from setuptools import setup
install_requires = ['Django>=1.5']
try:
from collections import OrderedDict
except ImportError:
install_requires.append('ordereddict>=1.1')
setup(
name='django-auth-policy',
version='0.7',
description='Enforces a couple of common authentication policies for the '
'Django web framework.',
author='Fox-IT B.V.',
author_email='fox@fox-it.com',
maintainer='Rudolph Froger',
maintainer_email='rudolphfroger@estrate.nl',
url='https://github.com/rudolphfroger/django-auth-policy',
license='BSD',
packages=['django_auth_policy'],
package_data={'django_auth_policy': ['locale/*/LC_MESSAGES/*.mo',
'locale/*/LC_MESSAGES/*.po']},
install_requires=install_requires,
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Framework :: Django',
'Topic :: Security',
'Topic :: Internet :: WWW/HTTP :: Session',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
| bsd-3-clause | Python |
b37b2976398f52032379c916714e4b6360614e78 | create packaging file | mkolodny/3taps | setup.py | setup.py | #!/usr/bin/env python
import os
import sys

try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

# FIX: `threetaps` was referenced below (version=threetaps.version) without
# ever being imported, which made this script fail with a NameError.
# NOTE(review): assumes the threetaps package exposes a `version` attribute
# and is importable from the repository root -- confirm.
import threetaps

# `python setup.py publish` shortcut: build an sdist and upload it.
if sys.argv[-1] == 'publish':
    os.system('python setup.py sdist upload')
    sys.exit()

with open('README.rst') as f:
    readme = f.read()

with open('LICENSE') as f:
    license = f.read()

setup(
    name='threetaps',
    version=threetaps.version,
    description='3taps Python API Client.',
    long_description=readme,
    author='Michael Kolodny',
    packages=['threetaps'],
    license=license,
)
| mit | Python | |
18688b67e2dfc36ff6b1c28a618a289f46cc494d | Add Slide class | cferwin/Command-Line-Repetition | slide.py | slide.py | class Slide:
""" Stores data to be studied. """
def __init__(self, prompt, answer):
self.prompt = prompt
self.answer = answer
| mit | Python | |
b418ff779c79afd0eca85ed1479ba633f25ce73c | Fix variable referenced before assginment in vmwareapi code. | n0ano/ganttclient | nova/tests/test_vmwareapi_vm_util.py | nova/tests/test_vmwareapi_vm_util.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright 2013 Canonical Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import exception
from nova import test
from nova.virt.vmwareapi import fake
from nova.virt.vmwareapi import vm_util
class fake_session(object):
    """Test stub for a vmwareapi session.

    Every _call_method invocation ignores its arguments and hands back the
    canned value supplied at construction time (None by default).
    """

    def __init__(self, ret=None):
        self.ret = ret

    def _call_method(self, *args):
        # Arguments are deliberately ignored by this stub.
        return self.ret
class VMwareVMUtilTestCase(test.TestCase):
    """Tests for vm_util.get_datastore_ref_and_name()."""

    def setUp(self):
        super(VMwareVMUtilTestCase, self).setUp()

    def tearDown(self):
        super(VMwareVMUtilTestCase, self).tearDown()

    def test_get_datastore_ref_and_name(self):
        # A session yielding one fake Datastore resolves to its name,
        # capacity and free space (values match fake.Datastore's defaults).
        result = vm_util.get_datastore_ref_and_name(
            fake_session([fake.Datastore()]))

        self.assertEquals(result[1], "fake-ds")
        self.assertEquals(result[2], 1024 * 1024 * 1024)
        self.assertEquals(result[3], 1024 * 1024 * 500)

    def test_get_datastore_ref_and_name_without_datastore(self):
        # With no datastore in the session result, lookups by host or by
        # cluster must raise DatastoreNotFound (rather than, per the commit
        # message, hitting an unbound local variable -- TODO confirm).
        self.assertRaises(exception.DatastoreNotFound,
                          vm_util.get_datastore_ref_and_name,
                          fake_session(), host="fake-host")

        self.assertRaises(exception.DatastoreNotFound,
                          vm_util.get_datastore_ref_and_name,
                          fake_session(), cluster="fake-cluster")
| apache-2.0 | Python | |
c3f1e723e13598deb53a8454787204f00841c34a | Add extension method type | seibert/numba,seibert/numba,stefanseefeld/numba,pitrou/numba,numba/numba,numba/numba,gmarkall/numba,pombredanne/numba,stonebig/numba,gdementen/numba,cpcloud/numba,gdementen/numba,seibert/numba,sklam/numba,gdementen/numba,stonebig/numba,stuartarchibald/numba,ssarangi/numba,pitrou/numba,shiquanwang/numba,stonebig/numba,pombredanne/numba,ssarangi/numba,jriehl/numba,cpcloud/numba,stonebig/numba,stefanseefeld/numba,sklam/numba,pombredanne/numba,gdementen/numba,cpcloud/numba,pitrou/numba,stefanseefeld/numba,stuartarchibald/numba,pitrou/numba,stefanseefeld/numba,stuartarchibald/numba,GaZ3ll3/numba,sklam/numba,GaZ3ll3/numba,numba/numba,stuartarchibald/numba,IntelLabs/numba,jriehl/numba,jriehl/numba,shiquanwang/numba,jriehl/numba,IntelLabs/numba,ssarangi/numba,IntelLabs/numba,gmarkall/numba,gdementen/numba,numba/numba,pombredanne/numba,GaZ3ll3/numba,stuartarchibald/numba,sklam/numba,gmarkall/numba,cpcloud/numba,stefanseefeld/numba,gmarkall/numba,GaZ3ll3/numba,seibert/numba,gmarkall/numba,seibert/numba,IntelLabs/numba,pitrou/numba,ssarangi/numba,IntelLabs/numba,stonebig/numba,cpcloud/numba,GaZ3ll3/numba,shiquanwang/numba,sklam/numba,pombredanne/numba,jriehl/numba,numba/numba,ssarangi/numba | numba/typesystem/exttypes/methods.py | numba/typesystem/exttypes/methods.py |
from numba.typesystem import *
#------------------------------------------------------------------------
# Extension Method Types
#------------------------------------------------------------------------
class ExtMethodType(NumbaType, minitypes.FunctionType):
    """
    Extension method type, a FunctionType plus the following fields:

        is_class: is classmethod?
        is_static: is staticmethod?
    """

    def __init__(self, return_type, args, name=None,
                 is_class=False, is_static=False, **kwds):
        super(ExtMethodType, self).__init__(return_type, args, name, **kwds)
        # Record how the method is bound (plain / classmethod / staticmethod).
        self.is_class = is_class
self.is_static = is_static | bsd-2-clause | Python | |
b41206dd44948a4a99f0f3fabb3ffc04f0d781a1 | Create spatial_pooler_compute_test.py | subutai/nupic.research,subutai/nupic.research,numenta/nupic.research,numenta/nupic.research | packages/spatial_pooler/tests/spatial_pooler_compute_test.py | packages/spatial_pooler/tests/spatial_pooler_compute_test.py | # ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2022, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import numpy as np
import time
import unittest
from nupic.research.frameworks.spatial_pooler import SpatialPooler
real_type = np.float32
uint_type = np.uint32
class SpatialPoolerComputeTest(unittest.TestCase):
    '''
    end-to-end test of the compute function
    '''

    def basic_compute_loop(self, sp, input_size, minicolumn_dims):
        '''
        feed in some vectors and retrieve outputs. ensure the right number of minicolumns win and that we always get binary outputs.
        '''
        num_records = 100
        generator = np.random.default_rng()
        # Random binary input vectors, ~20% of bits active per vector.
        input_matrix = (generator.random((num_records, input_size)) > 0.8).astype(uint_type)

        y = np.zeros(minicolumn_dims, dtype=uint_type)

        # with learning
        for v in input_matrix:
            y.fill(0)
            sp.compute(v, True, y)
            # Exactly num_active_minicolumns_per_inh_area minicolumns must
            # win, and the output must be strictly binary (0s and 1s).
            self.assertEqual(sp.num_active_minicolumns_per_inh_area, y.sum())
            self.assertEqual(0, y.min())
            self.assertEqual(1, y.max())

        # without learning
        for v in input_matrix:
            y.fill(0)
            sp.compute(v, False, y)
            self.assertEqual(sp.num_active_minicolumns_per_inh_area, y.sum())
            self.assertEqual(0, y.min())
            self.assertEqual(1, y.max())

    def test_basic_compute1(self):
        '''
        run basic_compute_loop with mostly default parameters.
        '''
        input_size = 30
        minicolumn_dims = 50
        # Seed derives from the wall clock, so each run exercises a
        # different random initialization (seed is printed for replay).
        sp = SpatialPooler(
            input_dims=[input_size],
            minicolumn_dims=[minicolumn_dims],
            num_active_minicolumns_per_inh_area=10,
            local_density=-1,
            potential_radius=input_size,
            potential_percent=0.5,
            global_inhibition=True,
            stimulus_threshold=0.0,
            synapse_perm_inc=0.05,
            synapse_perm_dec=0.008,
            synapse_perm_connected=0.1,
            min_percent_overlap_duty_cycles=0.001,
            duty_cycle_period=1000,
            boost_strength=0.0,
            seed=int((time.time() % 10000)*10)
        )

        print('test_basic_compute1, SP seed set to:', sp.seed)
        self.basic_compute_loop(sp, input_size, minicolumn_dims)

    def test_basic_compute2(self):
        '''
        run basic_compute_loop with learning turned off.
        '''
        input_size = 100
        minicolumn_dims = 100
        # Zero permanence increments/decrements effectively disable learning.
        sp = SpatialPooler(
            input_dims=[input_size],
            minicolumn_dims=[minicolumn_dims],
            num_active_minicolumns_per_inh_area=10,
            local_density=-1,
            potential_radius=input_size,
            potential_percent=0.5,
            global_inhibition=True,
            stimulus_threshold=0.0,
            synapse_perm_inc=0.0,
            synapse_perm_dec=0.0,
            synapse_perm_connected=0.1,
            min_percent_overlap_duty_cycles=0.001,
            duty_cycle_period=1000,
            boost_strength=0.0,
            seed=int((time.time() % 10000)*10)
        )

        print('test_basic_compute2, SP seed set to:', sp.seed)
        self.basic_compute_loop(sp, input_size, minicolumn_dims)
if __name__ == '__main__':
unittest.main() | agpl-3.0 | Python | |
7162926576b6136c17a4f1d889d7ecd2541a763c | Add examples_plot_features.py | mgeplf/NeuroM,liesbethvanherpe/NeuroM,lidakanari/NeuroM,juanchopanza/NeuroM,wizmer/NeuroM,BlueBrain/NeuroM,eleftherioszisis/NeuroM | examples/plot_features.py | examples/plot_features.py | #!/usr/bin/env python
# Copyright (c) 2015, Ecole Polytechnique Federale de Lausanne, Blue Brain Project
# All rights reserved.
#
# This file is part of NeuroM <https://github.com/BlueBrain/NeuroM>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
'''Plot a selection of features from a morphology population'''

from neurom import ezy
from neurom.analysis import morphtree as mt
from collections import defaultdict
import json
import matplotlib.pyplot as plt

# NOTE(review): Python 2 syntax (print statements, dict.iteritems) and
# hard-coded local paths -- this is an exploratory example script.
nrns = ezy.load_neurons('../morphsyn/Synthesizer/build/L23MC/')
sim_params = json.load(open('../morphsyn/Synthesizer/data/L23MC.json'))

NEURITES_ = (ezy.TreeType.axon,
             ezy.TreeType.apical_dendrite,
             ezy.TreeType.basal_dendrite)

# Feature name -> extractor(neuron, tree_type) returning one value per
# neurite of that type.
GET_FEATURE = {
    'trunk_azimuth': lambda nrn, typ: [mt.trunk_azimuth(n, nrn.soma)
                                       for n in nrn.neurites if n.type == typ],
    'trunk_elevation': lambda nrn, typ: [mt.trunk_elevation(n, nrn.soma)
                                         for n in nrn.neurites if n.type == typ]
}

FEATURES = GET_FEATURE.keys()

# Nested mapping: feature -> tree-type name -> list of extracted values.
stuff = defaultdict(lambda: defaultdict(list))

# unpack data into arrays
for nrn in nrns:
    for t in NEURITES_:
        for feat in FEATURES:
            stuff[feat][str(t).split('.')[1]].extend(
                GET_FEATURE[feat](nrn, t)
            )

# Then access the arrays of azimuths with tr_azimuth[key]
# where the keys are string representations of the tree types.
for feat, d in stuff.iteritems():
    for typ, data in d.iteritems():
        print typ, feat
        print 'Params:', sim_params['components'][typ][feat]
        num_bins = 100
        # One normalized histogram per (tree type, feature) combination;
        # plt.show() blocks until each window is closed.
        n, bins, patches = plt.hist(data, num_bins, normed=1, facecolor='green', alpha=0.5)
        plt.show()
| bsd-3-clause | Python | |
ebceea82af38e4c7f11678841bfbce3635a66f7d | Add twitter_parser.py | chengdujin/newsman,chengdujin/newsman,chengdujin/newsman | newsman/scraper/twitter_parser.py | newsman/scraper/twitter_parser.py | #!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
Twitter parser parses specific twitter account in real time
"""
# @author chengdujin
# @contact chengdujin@gmail.com
# @created Nov. 19, 2013
import sys
reload(sys)
sys.setdefaultencoding('UTF-8')
import twitter
import urllib2

# NOTE(review): live OAuth credentials are hard-coded below; they should be
# revoked and loaded from configuration or environment variables instead of
# being committed to source control.
access_token_key = "24129666-M47Q6pDLZXLQy1UITxkijkTdKfkvTcBpleidNPjac"
access_token_secret = "0zHhqV5gmrmsnjiOEOBCvqxORwsjVC5ax4mM3dCDZ7RLk"
consumer_key = "hySdhZgpj5gF12kRWMoVpQ"
consumer_secret = "2AkrRg89SdJL0qHkHwuP933fiBaNTioChMpxRdoicUQ"

# Module-level Twitter API client authenticated with the credentials above.
# (urllib2 is imported but unused in this chunk -- may be needed elsewhere.)
api = twitter.Api(consumer_key, consumer_secret, access_token_key, access_token_secret)
| agpl-3.0 | Python | |
3912e9ab49e10f2490da36b17e8525d2c97c1844 | add fabric | Answeror/aip,Answeror/aip | fabfile.py | fabfile.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from fabric.api import run, env, cd
# the user to use for the remote commands
env.user = 'answeror'
# the servers where the commands are executed
env.hosts = ['aip.io']
def deploy():
    """Update the aip checkout on the remote host and reload the WSGI app."""
    # NOTE(review): each fabric run() starts a fresh shell, so the virtualenv
    # activated by `workon aip` may not persist to later commands -- confirm.
    run('pyenv virtualenvwrapper')
    run('workon aip')
    with cd('/www/aip/repo'):
        run('git pull')
        run('python setup.py develop')
    # and finally touch the .wsgi file so that mod_wsgi triggers
    # a reload of the application
    run('touch /www/aip/repo/application.wsgi')
| mit | Python | |
643b4867627feb2810257f99b0b7865b43bb6454 | Add fabfile to deploy documentation | GauravButola/angular-tryton,fulfilio/angular-tryton,tarunbhardwaj/angular-tryton,fulfilio/angular-tryton,fulfilio/angular-tryton,GauravButola/angular-tryton,tarunbhardwaj/angular-tryton,openlabs/angular-tryton,tarunbhardwaj/angular-tryton,GauravButola/angular-tryton,openlabs/angular-tryton,openlabs/angular-tryton | fabfile.py | fabfile.py | # -*- coding: utf-8 -*-
"""
fabfile
Fab file to build and push documentation to github
:copyright: © 2013-2014 by Openlabs Technologies & Consulting (P) Limited
:license: BSD, see LICENSE for more details.
"""
import time
import getpass
import os
from fabric.api import (
local, lcd, cd, sudo, execute, settings, env, run, prompt
)
from fabric.decorators import hosts
def upload_documentation():
    """
    Build and upload the documentation HTML to github
    """
    # NOTE(review): runs git checkout/commit/push locally -- assumes a clean
    # working tree; a dirty tree would be carried across branches. Confirm.
    # Build the documentation
    local('grunt ngdocs')
    # Checkout to gh-pages branch
    local('git checkout gh-pages')
    # Copy back the files from docs folder
    local('cp -a docs/* .')
    # Add the relevant files
    local('git add .')
    local('git commit -m "Build documentation"')
    local('git push')
    # Return to the development branch when done.
    local('git checkout develop')
| bsd-3-clause | Python | |
b63f6a83f86808df0a2fea66b47478e3c5ec0994 | Create neymanPearson2.py | probml/pyprobml,probml/pyprobml,probml/pyprobml,probml/pyprobml | scripts/neymanPearson2.py | scripts/neymanPearson2.py | # Convert Neyman-Pearson testing paradigm(Fig 5.15 (a)) to python/JAX
# Author: Garvit9000c
import jax.scipy.stats.multivariate_normal as gaussprob
import jax.numpy as jnp
import matplotlib.pyplot as plt

# constants
pi = jnp.pi  # NOTE(review): unused below
sigma = 1.5
xmin = -4
xmax = 8
ymin = 0
ymax = 0.3
res = 0.01

# Domain
x = jnp.arange(xmin, xmax, res)

# functions: sampling densities under H0 (mean 0) and H1 (mean 4)
y1 = gaussprob.pdf(x, 0, sigma**2)
y2 = gaussprob.pdf(x, 4, sigma**2)

# Axes Limits
plt.ylim(ymin, ymax)
plt.xlim(xmin, xmax)

# Ploting Curve
plt.plot(x, y1, 'b')  # Curve_B
plt.plot(x, y2, 'r')  # Curve_A
# Vertical decision threshold X* at x = 2.3.
plt.vlines(x=2.3, ymin=0, ymax=0.5, linewidth=1.5, color='k')
plt.xticks([2.3], ['$X^*$'], size=18)
plt.yticks([])

# Shading α Region: H0 density beyond the threshold (type-I error mass)
x1 = jnp.arange(2.3, xmax, res)
y_1 = gaussprob.pdf(x1, 0, sigma**2)
plt.fill_between(x1, y_1, 0, alpha=0.50)

# Shading β Region: H1 density below the threshold (type-II error mass)
x2 = jnp.arange(xmin, 2.3, res)
y_2 = gaussprob.pdf(x2, 4, sigma**2)
plt.fill_between(x2, y_2, 0, alpha=0.50)

# Axis Arrows pointing from the labels to the shaded regions
plt.arrow(0, 0.07, 1.2, -0.05, color='black', head_width=0.02, head_length=0.2)  # β
plt.arrow(4, 0.07, -1.2, -0.05, color='black', head_width=0.02, head_length=0.2)  # α

# label
plt.text(-0.4, 0.07, 'β', fontsize=15)  # β
plt.text(4, 0.07, 'α', fontsize=15)  # α
plt.text(-0.2, 0.28, '$H_0$', fontsize=15)  # H0
plt.text(3.8, 0.28, '$H_1$', fontsize=15)  # H1

plt.savefig('../figures/neymanPearson2.pdf', dpi=300)
plt.show()
| mit | Python | |
e7a41ed29f6ec097e19f4c9beec9821a2804585c | Add organizations.py | irqed/octokit.py | octokit/resources/organizations.py | octokit/resources/organizations.py | # encoding: utf-8
"""Methods for the Organizations API
http://developer.github.com/v3/orgs/
"""
| mit | Python | |
337c48648f3a891642fc58c7161fdb48e705160f | add timer | Larhard/tsp | timer.py | timer.py | # Copyright (c) 2015, Bartlomiej Puget <larhard@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the Bartlomiej Puget nor the names of its
# contributors may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL BARTLOMIEJ PUGET BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from datetime import datetime
class Timer:
    """Context manager that prints elapsed wall-clock time on exit."""

    def __init__(self, message=None):
        self.begin = None
        # Fall back to the default banner when no (truthy) message is given.
        self.message = message or "::: timer :::"

    def __enter__(self):
        # Record the start timestamp; the delta is computed on exit.
        self.begin = datetime.now()

    def __exit__(self, exc_type, exc_value, exc_traceback):
        elapsed = datetime.now() - self.begin
        print(self.message, str(elapsed))
| bsd-3-clause | Python | |
cbde3323e790717fa593d75ca282e2875bb67dca | Add stacker ecs hook | remind101/stacker,mhahn/stacker,mhahn/stacker,remind101/stacker,EnTeQuAk/stacker,federicobaldo/stacker | stacker/hooks/ecs.py | stacker/hooks/ecs.py | # A lot of this code exists to deal w/ the broken ECS connect_to_region
# function, and will be removed once this pull request is accepted:
# https://github.com/boto/boto/pull/3143
import logging
logger = logging.getLogger(__name__)
from boto.regioninfo import get_regions
from boto.ec2containerservice.layer1 import EC2ContainerServiceConnection
def regions():
    """Return boto RegionInfo objects for the EC2 Container Service.

    Part of the workaround for boto's broken ECS connect_to_region
    (see the module-level comment).
    """
    return get_regions('ec2containerservice',
                       connection_cls=EC2ContainerServiceConnection)
def connect_to_region(region_name, **kw_params):
    """Connect to the named ECS region, or return None if it is unknown."""
    match = next((r for r in regions() if r.name == region_name), None)
    if match is None:
        return None
    return match.connect(**kw_params)
def create_clusters(region, namespace, mappings, parameters, **kwargs):
    """ Creates ECS clusters.

    Expects a 'clusters' argument, which should contain a list of cluster
    names to create.
    """
    conn = connect_to_region(region)
    try:
        clusters = kwargs['clusters']
    except KeyError:
        logger.error("setup_clusters hook missing 'clusters' argument")
        # False signals hook failure to the caller.
        return False

    if isinstance(clusters, basestring):
        # Allow a single cluster name to be given as a plain string.
        clusters = [clusters]

    for cluster in clusters:
        logger.debug("Creating ECS cluster: %s", cluster)
        conn.create_cluster(cluster)
    return True
| bsd-2-clause | Python | |
421b99ffb0cf84a8ccea0c4c0fe2496f895603a0 | Add migration | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | bluebottle/initiatives/migrations/0011_auto_20190522_0931.py | bluebottle/initiatives/migrations/0011_auto_20190522_0931.py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2019-05-22 07:31
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: drops the facebook_at_work_url and
    # share_options fields from InitiativePlatformSettings.

    dependencies = [
        ('initiatives', '0010_auto_20190521_0954'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='initiativeplatformsettings',
            name='facebook_at_work_url',
        ),
        migrations.RemoveField(
            model_name='initiativeplatformsettings',
            name='share_options',
        ),
    ]
| bsd-3-clause | Python | |
51e516f260858e699ee828ac6fc91af39c67254c | Add script for automatically updating schemas | spacetelescope/asdf-standard | update-schemas.py | update-schemas.py | #!/usr/bin/env python
import os
import re
import sys
import subprocess as sp
def get_schemas(pattern):
    """Return the latest-version .yaml path for each schema whose content
    matches *pattern* (found via `git grep` under schemas/)."""
    cmd = ['git', 'grep', '--name-only']
    output = sp.check_output(cmd + [pattern, '--', 'schemas']).decode('utf8')
    names = output.split()
    print(names)
    dedupe = dict()
    for name in names:
        # First X.Y.Z occurrence in the path is taken as the schema version.
        # NOTE(review): the middle '.' in the regex is unescaped (matches any
        # character) -- probably intended to be r'\d\.\d\.\d'. Confirm.
        version = re.findall(r'\d\.\d.\d', name)[0]
        basepath = name.split('-')[0]
        # NOTE(review): versions compare as strings, which is only correct
        # while every component is a single digit.
        if basepath in dedupe and dedupe[basepath] > version:
            continue
        dedupe[basepath] = version
    return ['{}-{}.yaml'.format(x, y) for x,y in dedupe.items()]
def update_version(string):
    """Return *string* with the minor component of its first X.Y.Z version bumped.

    Fixes two defects of the original: the single-digit regex ``(\\d)\\.(\\d)\\.(\\d)``
    mis-parsed multi-digit components (e.g. ``2.10.3``), and the substitution
    rewrote *every* version occurrence in the string instead of the one that
    was parsed (``count=1`` now limits it to the first).
    """
    major, minor, patch = re.search(r'(\d+)\.(\d+)\.(\d+)', string).groups()
    new_version = '{}.{}.{}'.format(major, int(minor) + 1, patch)
    return re.sub(r'\d+\.\d+\.\d+', new_version, string, count=1)
def create_updated_schema(schema, pattern, new_pattern):
    """Copy *schema* to a version-bumped filename, rewriting *pattern* and the
    old base name to their bumped counterparts on every line."""
    stem = os.path.splitext(os.path.basename(schema))[0]
    bumped_stem = update_version(stem)
    target = re.sub(stem, bumped_stem, schema)
    with open(target, 'w') as dst, open(schema, 'r') as src:
        for raw_line in src:
            rewritten = raw_line.replace(pattern, new_pattern)
            rewritten = rewritten.replace(stem, bumped_stem)
            dst.write(rewritten)
def main():
    """Entry point: bump every schema whose content matches the version
    pattern supplied as argv[1]."""
    if len(sys.argv) != 2:
        name = os.path.basename(sys.argv[0])
        sys.stderr.write('USAGE: {} <pattern>\n'.format(name))
        # NOTE(review): uses the `exit` site builtin; sys.exit is the
        # conventional choice for scripts.
        exit(1)

    pattern = sys.argv[1]
    # The replacement pattern is the input with its minor version bumped.
    new_pattern = update_version(pattern)
    schemas = get_schemas(pattern)
    for s in schemas:
        create_updated_schema(s, pattern, new_pattern)


if __name__ == '__main__':
    main()
| bsd-3-clause | Python | |
7e8ffcef7111fc3cd2f1d58831afb09741d9d8fc | Create my-calendar-i.py | kamyu104/LeetCode,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode,tudennis/LeetCode---kamyu104-11-24-2015,tudennis/LeetCode---kamyu104-11-24-2015,kamyu104/LeetCode | Python/my-calendar-i.py | Python/my-calendar-i.py | # Time: O(n^2)
# Space: O(n)
# Implement a MyCalendar class to store your events.
# A new event can be added if adding the event will not cause a double booking.
#
# Your class will have the method, book(int start, int end).
# Formally, this represents a booking on the half open interval [start, end),
# the range of real numbers x such that start <= x < end.
#
# A double booking happens when two events have some non-empty intersection
# (ie., there is some time that is common to both events.)
#
# For each call to the method MyCalendar.book,
# return true if the event can be added to the calendar successfully without causing a double booking.
# Otherwise, return false and do not add the event to the calendar.
#
# Your class will be called like this: MyCalendar cal = new MyCalendar(); MyCalendar.book(start, end)
# Example 1:
# MyCalendar();
# MyCalendar.book(10, 20); // returns true
# MyCalendar.book(15, 25); // returns false
# MyCalendar.book(20, 30); // returns true
# Explanation:
# The first event can be booked. The second can't because time 15 is already booked by another event.
# The third event can be booked, as the first event takes every time less than 20, but not including 20.
#
# Note:
# - The number of calls to MyCalendar.book per test case will be at most 1000.
# - In calls to MyCalendar.book(start, end), start and end are integers in the range [0, 10^9].
class MyCalendar(object):
    """Calendar accepting a half-open interval [start, end) only when it does
    not overlap any previously accepted booking."""

    def __init__(self):
        # Accepted bookings as (start, end) tuples, in insertion order.
        self.__calendar = []

    def book(self, start, end):
        """
        :type start: int
        :type end: int
        :rtype: bool
        """
        # Two half-open intervals overlap iff each one starts before the
        # other one ends.
        if any(start < booked_end and booked_start < end
               for booked_start, booked_end in self.__calendar):
            return False
        self.__calendar.append((start, end))
        return True
# Your MyCalendar object will be instantiated and called as such:
# obj = MyCalendar()
# param_1 = obj.book(start,end)
| mit | Python | |
e1993d4d3c3199dce2be2b475a9236e95732a0f0 | Verify computer executing the script is of OS X 10.6.1 or higher type | yoda-yoda/osx-dock-dodger,denisKaranja/osx-dock-dodger | dodge.py | dodge.py | import platform
class OSXDodger(object):
    """Hide selected applications from the OS X Dock.

    Only proceeds on Macs running at least `allowed_version` of OS X;
    the remaining workflow methods are still stubs.
    """

    # Minimum OS X release this tool supports.
    allowed_version = "10.12.1"

    def __init__(self, applications_dir):
        # Directory scanned for application bundles (normally "/Applications/").
        self.app_dir = applications_dir

    def load_applications(self):
        """
        Read all applications in the `/Applications/` dir
        """
        pass

    def select_applications(self):
        """
        Allow user to select an application they want
        not to appear on the Dock
        """
        pass

    def load_dodger_filer(self):
        """
        Load the file to modify for the application
        chosen by the user in `select_applications`

        The file to be loaded for is `info.plist`
        """
        pass

    def dodge_application(self):
        """
        Remive the application from the Dock
        """
        pass

    @staticmethod
    def _version_tuple(version):
        """Parse a dotted version string into a tuple of ints for comparison."""
        return tuple(int(part) for part in version.split("."))

    @classmethod
    def pc_is_macintosh(cls):
        """
        Check if it is an `Apple Computer` i.e a Mac

        @return bool
        """
        system = platform.system().lower()
        mac_version = platform.mac_ver()[0]
        # FIX: the previous digit-stripping comparison ("10.12.1" -> 10121)
        # mis-ordered versions whose components have different widths, and
        # int("") crashed outright on non-Macs where mac_ver() returns "".
        # Compare version components numerically instead.
        if system == "darwin" and mac_version:
            if cls._version_tuple(mac_version) >= cls._version_tuple(cls.allowed_version):
                return True
        print("\nSorry :(")
        print("FAILED. OsX-dock-dodger is only applicable to computers " +
              "running OS X {} or higher".format(cls.allowed_version))
        return False


dodge = OSXDodger("/Applications/")
dodge.pc_is_macintosh()
| mit | Python | |
e965147ef7bc89e6c8885d1521d92305604de6f8 | add problem | caoxudong/code_practice,caoxudong/code_practice,caoxudong/code_practice,caoxudong/code_practice | others/find_array_index_of_sum.py | others/find_array_index_of_sum.py | """
题目:给定一个整数数组和一个整数,返回两个数组的索引,这两个索引指向的数字的加和等于指定的整数。需要最优的算法,分析算法的空间和时间复杂度
"""
def find_pair_indices(numbers, target):
    """Return (i, j) index pairs, i < j, with numbers[i] + numbers[j] == target.

    Single pass with a value -> first-index map. Fixes two defects of the
    original inline version: it paired an element with itself (e.g. (4, 4)
    for the lone 5 when target is 10) and emitted every pair twice, once in
    each order.
    """
    first_seen = {}  # value -> index of its first occurrence
    pairs = []
    for index, value in enumerate(numbers):
        complement = target - value
        if complement in first_seen:
            pairs.append((first_seen[complement], index))
        if value not in first_seen:
            first_seen[value] = index
    return pairs


if __name__ == "__main__":
    numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    n = 10
    print(find_pair_indices(numbers, n))
| mit | Python | |
ef8b909beb4de8435c20ed0b45bca9478d476ed8 | Add python script to get coordinates from the csv | awensaunders/BuSHAX0rZ,awensaunders/BuSHAX0rZ,awensaunders/BuSHAX0rZ | geocode.py | geocode.py | #! /bin/python3
import csv
import time
from geopy.geocoders.googlev3 import GoogleV3

# NOTE(review): a live API key is hard-coded here; it should be revoked and
# loaded from the environment instead of being committed to the repository.
geocoder = GoogleV3(api_key="AIzaSyAy6XiyZG-6u99q-qacOz-dtT9ILbYzb-4")

with open("../ReadingBusesOrig.csv") as cf:
    with open("../out.csv", "a") as cw:
        reader = csv.DictReader(cf)
        writer = csv.DictWriter(cw, ["latitude", "longitude", "date"])
        # Rows before this index are skipped (lets a partial run be resumed).
        startrow = 0
        # FIX: csv.DictReader is not subscriptable, so the original
        # `for i in range(0, startrow): row = reader[i]` raised TypeError
        # (and with startrow == 0 never processed any row at all).
        # Iterate the reader and skip already-processed rows instead.
        for row_number, row in enumerate(reader):
            if row_number < startrow:
                continue
            location = geocoder.geocode(row['Place of Event'], components={
                "locality": "Reading",
                "country": "GB"
            })
            print("Resolved Address: " + str(location.address))
            print("Latitude: " + str(location.latitude))
            print("Longitude: " + str(location.longitude))
            print('\n')
            writer.writerow({
                "latitude": location.latitude,
                "longitude": location.longitude,
                "date": row['Accident Date']
            })
            # Stay under the geocoding API rate limit.
            time.sleep(0.2)
| mit | Python | |
bb928a0c0a4ddc11b05771e9eaa33f1058cc022a | Add pageset for ugamsolutions.com | vanish87/skia,nvoron23/skia,YUPlayGodDev/platform_external_skia,aosp-mirror/platform_external_skia,tmpvar/skia.cc,VRToxin-AOSP/android_external_skia,rubenvb/skia,BrokenROM/external_skia,MarshedOut/android_external_skia,boulzordev/android_external_skia,VRToxin-AOSP/android_external_skia,samuelig/skia,Igalia/skia,BrokenROM/external_skia,invisiblek/android_external_skia,qrealka/skia-hc,UBERMALLOW/external_skia,AOSPB/external_skia,MinimalOS-AOSP/platform_external_skia,Hikari-no-Tenshi/android_external_skia,boulzordev/android_external_skia,geekboxzone/mmallow_external_skia,ominux/skia,Jichao/skia,aosp-mirror/platform_external_skia,aosp-mirror/platform_external_skia,qrealka/skia-hc,rubenvb/skia,shahrzadmn/skia,ominux/skia,noselhq/skia,spezi77/android_external_skia,tmpvar/skia.cc,TeamTwisted/external_skia,geekboxzone/mmallow_external_skia,shahrzadmn/skia,UBERMALLOW/external_skia,OneRom/external_skia,shahrzadmn/skia,noselhq/skia,HalCanary/skia-hc,nvoron23/skia,pcwalton/skia,rubenvb/skia,noselhq/skia,tmpvar/skia.cc,rubenvb/skia,amyvmiwei/skia,aosp-mirror/platform_external_skia,ominux/skia,qrealka/skia-hc,pcwalton/skia,w3nd1go/android_external_skia,invisiblek/android_external_skia,PAC-ROM/android_external_skia,MarshedOut/android_external_skia,qrealka/skia-hc,boulzordev/android_external_skia,OneRom/external_skia,geekboxzone/mmallow_external_skia,Jichao/skia,geekboxzone/mmallow_external_skia,MinimalOS-AOSP/platform_external_skia,nvoron23/skia,shahrzadmn/skia,google/skia,Infinitive-OS/platform_external_skia,MinimalOS-AOSP/platform_external_skia,timduru/platform-external-skia,AOSP-YU/platform_external_skia,Hikari-no-Tenshi/android_external_skia,Jichao/skia,invisiblek/android_external_skia,HalCanary/skia-hc,MinimalOS-AOSP/platform_external_skia,UBERMALLOW/external_skia,AOSPB/external_skia,rubenvb/skia,YUPlayGodDev/platform_external_skia,spezi77/android_external_skia,rubenvb/skia,MonkeyZZZZ/platform_exte
rnal_skia,timduru/platform-external-skia,Infinitive-OS/platform_external_skia,pcwalton/skia,nfxosp/platform_external_skia,PAC-ROM/android_external_skia,timduru/platform-external-skia,amyvmiwei/skia,pcwalton/skia,rubenvb/skia,shahrzadmn/skia,nvoron23/skia,spezi77/android_external_skia,VRToxin-AOSP/android_external_skia,todotodoo/skia,TeamTwisted/external_skia,VRToxin-AOSP/android_external_skia,qrealka/skia-hc,HalCanary/skia-hc,TeamExodus/external_skia,ominux/skia,pcwalton/skia,AOSP-YU/platform_external_skia,scroggo/skia,TeamTwisted/external_skia,nvoron23/skia,google/skia,geekboxzone/mmallow_external_skia,OneRom/external_skia,Igalia/skia,PAC-ROM/android_external_skia,google/skia,w3nd1go/android_external_skia,ominux/skia,AOSP-YU/platform_external_skia,AOSP-YU/platform_external_skia,HalCanary/skia-hc,boulzordev/android_external_skia,TeamExodus/external_skia,boulzordev/android_external_skia,rubenvb/skia,TeamExodus/external_skia,YUPlayGodDev/platform_external_skia,spezi77/android_external_skia,w3nd1go/android_external_skia,HalCanary/skia-hc,Hikari-no-Tenshi/android_external_skia,AOSPB/external_skia,samuelig/skia,nfxosp/platform_external_skia,aosp-mirror/platform_external_skia,noselhq/skia,rubenvb/skia,geekboxzone/mmallow_external_skia,boulzordev/android_external_skia,AOSPB/external_skia,MonkeyZZZZ/platform_external_skia,MinimalOS-AOSP/platform_external_skia,invisiblek/android_external_skia,invisiblek/android_external_skia,TeamTwisted/external_skia,tmpvar/skia.cc,pcwalton/skia,nfxosp/platform_external_skia,google/skia,AOSP-YU/platform_external_skia,MinimalOS-AOSP/platform_external_skia,samuelig/skia,vanish87/skia,geekboxzone/mmallow_external_skia,aosp-mirror/platform_external_skia,qrealka/skia-hc,MonkeyZZZZ/platform_external_skia,todotodoo/skia,MarshedOut/android_external_skia,w3nd1go/android_external_skia,invisiblek/android_external_skia,YUPlayGodDev/platform_external_skia,w3nd1go/android_external_skia,noselhq/skia,amyvmiwei/skia,YUPlayGodDev/platform_external_skia,AOSP-Y
U/platform_external_skia,BrokenROM/external_skia,MonkeyZZZZ/platform_external_skia,UBERMALLOW/external_skia,TeamExodus/external_skia,aosp-mirror/platform_external_skia,Infinitive-OS/platform_external_skia,vanish87/skia,VRToxin-AOSP/android_external_skia,vanish87/skia,UBERMALLOW/external_skia,PAC-ROM/android_external_skia,samuelig/skia,MarshedOut/android_external_skia,google/skia,samuelig/skia,google/skia,Igalia/skia,PAC-ROM/android_external_skia,amyvmiwei/skia,boulzordev/android_external_skia,google/skia,AOSP-YU/platform_external_skia,TeamTwisted/external_skia,spezi77/android_external_skia,OneRom/external_skia,w3nd1go/android_external_skia,timduru/platform-external-skia,vanish87/skia,OneRom/external_skia,todotodoo/skia,Hikari-no-Tenshi/android_external_skia,nvoron23/skia,samuelig/skia,tmpvar/skia.cc,qrealka/skia-hc,shahrzadmn/skia,noselhq/skia,ominux/skia,rubenvb/skia,tmpvar/skia.cc,pcwalton/skia,spezi77/android_external_skia,scroggo/skia,invisiblek/android_external_skia,MarshedOut/android_external_skia,Hikari-no-Tenshi/android_external_skia,HalCanary/skia-hc,TeamExodus/external_skia,boulzordev/android_external_skia,amyvmiwei/skia,geekboxzone/mmallow_external_skia,AOSPB/external_skia,MarshedOut/android_external_skia,HalCanary/skia-hc,YUPlayGodDev/platform_external_skia,VRToxin-AOSP/android_external_skia,noselhq/skia,vanish87/skia,todotodoo/skia,samuelig/skia,google/skia,OneRom/external_skia,Igalia/skia,OneRom/external_skia,shahrzadmn/skia,AOSPB/external_skia,TeamTwisted/external_skia,scroggo/skia,Igalia/skia,amyvmiwei/skia,noselhq/skia,TeamTwisted/external_skia,Infinitive-OS/platform_external_skia,HalCanary/skia-hc,nfxosp/platform_external_skia,AOSPB/external_skia,Infinitive-OS/platform_external_skia,MonkeyZZZZ/platform_external_skia,Infinitive-OS/platform_external_skia,scroggo/skia,qrealka/skia-hc,HalCanary/skia-hc,MonkeyZZZZ/platform_external_skia,VRToxin-AOSP/android_external_skia,MarshedOut/android_external_skia,google/skia,Jichao/skia,shahrzadmn/skia,timduru/pl
atform-external-skia,UBERMALLOW/external_skia,ominux/skia,UBERMALLOW/external_skia,nfxosp/platform_external_skia,MarshedOut/android_external_skia,nvoron23/skia,geekboxzone/mmallow_external_skia,Igalia/skia,TeamExodus/external_skia,shahrzadmn/skia,pcwalton/skia,MinimalOS-AOSP/platform_external_skia,boulzordev/android_external_skia,YUPlayGodDev/platform_external_skia,MarshedOut/android_external_skia,todotodoo/skia,aosp-mirror/platform_external_skia,HalCanary/skia-hc,todotodoo/skia,Infinitive-OS/platform_external_skia,Infinitive-OS/platform_external_skia,BrokenROM/external_skia,w3nd1go/android_external_skia,ominux/skia,UBERMALLOW/external_skia,BrokenROM/external_skia,tmpvar/skia.cc,nvoron23/skia,w3nd1go/android_external_skia,timduru/platform-external-skia,nfxosp/platform_external_skia,BrokenROM/external_skia,BrokenROM/external_skia,scroggo/skia,scroggo/skia,VRToxin-AOSP/android_external_skia,OneRom/external_skia,nfxosp/platform_external_skia,Igalia/skia,BrokenROM/external_skia,vanish87/skia,w3nd1go/android_external_skia,AOSPB/external_skia,vanish87/skia,nvoron23/skia,AOSPB/external_skia,Jichao/skia,google/skia,nfxosp/platform_external_skia,amyvmiwei/skia,amyvmiwei/skia,todotodoo/skia,YUPlayGodDev/platform_external_skia,TeamExodus/external_skia,todotodoo/skia,AOSP-YU/platform_external_skia,MonkeyZZZZ/platform_external_skia,TeamTwisted/external_skia,Infinitive-OS/platform_external_skia,Hikari-no-Tenshi/android_external_skia,vanish87/skia,PAC-ROM/android_external_skia,AOSP-YU/platform_external_skia,Hikari-no-Tenshi/android_external_skia,tmpvar/skia.cc,VRToxin-AOSP/android_external_skia,UBERMALLOW/external_skia,nfxosp/platform_external_skia,PAC-ROM/android_external_skia,Jichao/skia,timduru/platform-external-skia,samuelig/skia,aosp-mirror/platform_external_skia,TeamTwisted/external_skia,Hikari-no-Tenshi/android_external_skia,TeamExodus/external_skia,PAC-ROM/android_external_skia,Igalia/skia,YUPlayGodDev/platform_external_skia,OneRom/external_skia,MinimalOS-AOSP/platform_ext
ernal_skia,ominux/skia,todotodoo/skia,Jichao/skia,MonkeyZZZZ/platform_external_skia,Jichao/skia,PAC-ROM/android_external_skia,scroggo/skia,scroggo/skia,noselhq/skia,tmpvar/skia.cc,MonkeyZZZZ/platform_external_skia,TeamExodus/external_skia,Jichao/skia,MinimalOS-AOSP/platform_external_skia,pcwalton/skia,aosp-mirror/platform_external_skia,invisiblek/android_external_skia | tools/skp/page_sets/skia_ugamsolutions_desktop.py | tools/skp/page_sets/skia_ugamsolutions_desktop.py | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614

from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module


class SkiaBuildbotDesktopPage(page_module.Page):
    """One desktop page recorded into a WPR archive for Skia testing."""

    def __init__(self, url, page_set):
        super(SkiaBuildbotDesktopPage, self).__init__(
            url=url,
            page_set=page_set,
            credentials_path='data/credentials.json')
        self.user_agent_type = 'desktop'
        self.archive_data_file = 'data/skia_ugamsolutions_desktop.json'

    def RunNavigateSteps(self, action_runner):
        """Navigate and give the page a generous settle time before capture."""
        action_runner.NavigateToPage(self)
        action_runner.Wait(15)


class SkiaUgamsolutionsDesktopPageSet(page_set_module.PageSet):
    """ Pages designed to represent the median, not highly optimized web """

    def __init__(self):
        super(SkiaUgamsolutionsDesktopPageSet, self).__init__(
            user_agent_type='desktop',
            archive_data_file='data/skia_ugamsolutions_desktop.json')
        # Why: for crbug.com/447291
        for page_url in ['http://www.ugamsolutions.com']:
            self.AddUserStory(SkiaBuildbotDesktopPage(page_url, self))
| bsd-3-clause | Python | |
68e5bdc3c3a8a59f820ea15e706e85e14f2a654b | Add mgmt cmd to fix bad loc-type references | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/locations/management/commands/fix_loc_type_reference.py | corehq/apps/locations/management/commands/fix_loc_type_reference.py | from optparse import make_option
from django.core.management.base import BaseCommand
from corehq.apps.locations.models import SQLLocation, LocationType
class Command(BaseCommand):
help = "Make "
option_list = (
make_option('--dry_run',
action='store_true',
dest='dry_run',
default=False,
help='Just check what domains have problems'),
make_option('--noinput',
action='store_true',
dest='noinput',
default=False,
help='Skip important confirmation warnings.'),
)
def handle(self, *args, **options):
domains = (SQLLocation.objects
.order_by('domain')
.distinct('domain')
.values_list('domain', flat=True))
for domain in domains:
if has_bad_location_types(domain):
print "{} has bad location types".format(domain)
if not options['dry_run']:
if options['noinput'] or raw_input("fix? (y/N)").lower() == 'y':
fix_domain(domain)
def fix_domain(domain):
locs_w_bad_types = (SQLLocation.objects
.filter(domain=domain)
.exclude(location_type__domain=domain))
print "found {} locs with bad types".format(locs_w_bad_types.count())
bad_types = LocationType.objects.filter(sqllocation__in=locs_w_bad_types).distinct()
assert domain not in bad_types.values_list('domain', flat=True)
bad_to_good = {}
for bad_type in bad_types:
good_type = LocationType.objects.get(domain=domain, code=bad_type.code)
bad_to_good[bad_type.code] = good_type
print "successfully found corresponding loctypes on the domain for each misreferenced loctype"
for loc in locs_w_bad_types:
loc.location_type = bad_to_good[loc.location_type.code]
loc.save()
def has_bad_location_types(domain):
    """True when any location in `domain` points at a LocationType that is
    owned by a different domain."""
    mismatched = (SQLLocation.objects
                  .filter(domain=domain)
                  .exclude(location_type__domain=domain))
    return mismatched.exists()
| bsd-3-clause | Python | |
0ca7ec8da8fffbfe057038e832bb12a33384c07b | add date format to date column in ctable mapping | dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,gmimano/commcaretest,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,gmimano/commcaretest,puttarajubr/commcare-hq,puttarajubr/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq | custom/apps/gsid/ctable_mappings.py | custom/apps/gsid/ctable_mappings.py | from ctable.fixtures import CtableMappingFixture
from ctable.models import ColumnDef, KeyMatcher
class PatientSummaryMapping(CtableMappingFixture):
    """ctable mapping for the GSID patient summary couch view."""

    name = 'patient_summary'
    domains = ['gsid']
    couch_view = 'gsid/patient_summary'
    schedule_active = True

    @property
    def columns(self):
        def key_col(col_name, index, data_type='string', **extra):
            # Shorthand: every key-sourced column differs only in these
            # three fields.
            return ColumnDef(name=col_name, data_type=data_type,
                             value_source='key', value_index=index, **extra)

        columns = [
            key_col('domain', 0),
            key_col('disease_name', 1),
            key_col('test_version', 2),
            key_col('country', 3),
            key_col('province', 4),
            key_col('district', 5),
            key_col('clinic', 6),
            key_col('gender', 7),
            key_col('date', 8, data_type='date', date_format="%Y-%m-%d"),
            key_col('diagnosis', 9),
            ColumnDef(name='age', data_type='integer',
                      value_source='value', value_attribute='sum'),
            key_col('lot_number', 10, data_type='integer'),
            key_col('gps', 11),
            key_col('gps_country', 12),
            key_col('gps_province', 13),
            key_col('gps_district', 14),
        ]
        return columns
from ctable.models import ColumnDef, KeyMatcher
class PatientSummaryMapping(CtableMappingFixture):
# NOTE(review): superseded revision of this mapping; identical to the
# current one except that the "date" column below has no date_format, so
# date keys were ingested untyped.
name = 'patient_summary'
domains = ['gsid']
couch_view = 'gsid/patient_summary'
schedule_active = True
@property
def columns(self):
columns = [
ColumnDef(name="domain", data_type="string", value_source="key", value_index=0),
ColumnDef(name="disease_name", data_type="string", value_source="key", value_index=1),
ColumnDef(name="test_version", data_type="string", value_source="key", value_index=2),
ColumnDef(name="country", data_type="string", value_source="key", value_index=3),
ColumnDef(name="province", data_type="string", value_source="key", value_index=4),
ColumnDef(name="district", data_type="string", value_source="key", value_index=5),
ColumnDef(name="clinic", data_type="string", value_source="key", value_index=6),
ColumnDef(name="gender", data_type="string", value_source="key", value_index=7),
# Missing date_format here — the later revision adds "%Y-%m-%d".
ColumnDef(name="date", data_type="date", value_source="key", value_index=8),
ColumnDef(name="diagnosis", data_type="string", value_source="key", value_index=9),
ColumnDef(name="age", data_type="integer", value_source="value", value_attribute="sum"),
ColumnDef(name="lot_number", data_type="integer", value_source="key", value_index=10),
ColumnDef(name="gps", data_type="string", value_source="key", value_index=11),
ColumnDef(name="gps_country", data_type="string", value_source="key", value_index=12),
ColumnDef(name="gps_province", data_type="string", value_source="key", value_index=13),
ColumnDef(name="gps_district", data_type="string", value_source="key", value_index=14),
]
return columns | bsd-3-clause | Python |
5d6dabcad4f2467f07765f1e28752b5cbba61d53 | add xfailing test for #693 | dials/dials,dials/dials,dials/dials,dials/dials,dials/dials | test/util/test_options.py | test/util/test_options.py | from __future__ import absolute_import, division, print_function
import os
import pytest
from dials.util.options import OptionParser
from dials.util.options import flatten_datablocks
pytestmark = pytest.mark.skipif(
    not os.access('/dls/i04/data/2019/cm23004-1/20190109/Eiger', os.R_OK),
    reason='Test images not available')


@pytest.mark.xfail
def test_not_master_h5():
    # A plain data file (not a *_master.h5) must not yield any datablocks.
    data_h5 = '/dls/i04/data/2019/cm23004-1/20190109/Eiger/gw/Thaum/Thau_4/Thau_4_1_000001.h5'
    parser = OptionParser(read_datablocks=True, read_datablocks_from_images=True)
    params, _options = parser.parse_args([data_h5])
    assert len(flatten_datablocks(params.input.datablock)) == 0
| bsd-3-clause | Python | |
32471e9abff0d11b001ad0024a8f917e9ddadd60 | Test for commandline.py | karlch/vimiv,karlch/vimiv,karlch/vimiv | tests/commandline_test.py | tests/commandline_test.py | #!/usr/bin/env python
# encoding: utf-8
import os
import time
from unittest import TestCase, main
from gi import require_version
require_version('Gtk', '3.0')
from gi.repository import Gtk
import vimiv.main as v_main
from vimiv.parser import parse_config
def refresh_gui(delay=0):
"""Sleep `delay` seconds, then drain pending Gtk events so asynchronous
UI updates settle before assertions run."""
time.sleep(delay)
while Gtk.events_pending():
Gtk.main_iteration_do(False)
class CommandlineTest(TestCase):
"""End-to-end tests for vimiv's ':' / '/' command line entry."""
def setUp(self):
# Boot a full vimiv instance for each test.
self.settings = parse_config()
self.vimiv = v_main.Vimiv(self.settings, [], 0)
self.vimiv.main(True)
def test_toggling(self):
"""Focusing prefills ':'; clearing the text hides the command line."""
# Focusing
self.vimiv.commandline.focus()
self.assertEqual(self.vimiv.commandline.entry.get_text(), ":")
# Leaving by deleting the colon
self.vimiv.commandline.entry.set_text("")
self.assertFalse(self.vimiv.commandline.box.is_visible())
def test_run_command(self):
"""An internal ':set <option>!' command toggles that setting."""
before_command = self.vimiv.image.overzoom
self.vimiv.commandline.entry.set_text(":set overzoom!")
self.vimiv.commandline.handler(self.vimiv.commandline.entry)
after_command = self.vimiv.image.overzoom
self.assertNotEqual(before_command, after_command)
def test_run_external(self):
"""':!cmd' runs cmd in a shell, asynchronously."""
self.vimiv.commandline.entry.set_text(":!touch tmp_foo")
self.vimiv.commandline.handler(self.vimiv.commandline.entry)
time.sleep(0.1) # Necessary so the entry is created
# (->multithreading...)
files = os.listdir()
self.assertTrue("tmp_foo" in files)
os.remove("tmp_foo")
def test_pipe(self):
"""A trailing '|' pipes external output back into vimiv: internal
commands are executed, directories entered, images opened."""
# Internal command
before_command = self.vimiv.image.overzoom
self.vimiv.commandline.entry.set_text(":!echo set overzoom! |")
self.vimiv.commandline.handler(self.vimiv.commandline.entry)
refresh_gui(0.05)
after_command = self.vimiv.image.overzoom
self.assertNotEqual(before_command, after_command)
# Directory
expected_dir = os.path.abspath("./testimages")
self.vimiv.commandline.entry.set_text(":!ls -d testimages |")
self.vimiv.commandline.handler(self.vimiv.commandline.entry)
refresh_gui(0.05)
dir_after = os.getcwd()
self.assertEqual(expected_dir, dir_after)
# Image
expected_image = os.path.abspath("arch-logo.png")
self.vimiv.commandline.entry.set_text(":!echo arch-logo.png |")
self.vimiv.commandline.handler(self.vimiv.commandline.entry)
refresh_gui(0.05)
self.assertEqual(self.vimiv.paths[0], expected_image)
os.chdir("..")
def test_path(self):
"""Entering a path after ':' opens the directory or image."""
# Pass a directory
expected_dir = os.path.abspath("./testimages")
self.vimiv.commandline.entry.set_text(":./testimages")
self.vimiv.commandline.handler(self.vimiv.commandline.entry)
dir_after = os.getcwd()
self.assertEqual(expected_dir, dir_after)
# Pass an image
expected_image = os.path.abspath("arch-logo.png")
self.vimiv.commandline.entry.set_text(":./arch-logo.png")
self.vimiv.commandline.handler(self.vimiv.commandline.entry)
self.assertEqual(self.vimiv.paths[0], expected_image)
os.chdir("..")
def test_search(self):
"""'/' searches the library, case (in)sensitively, and moves focus."""
self.vimiv.commandline.cmd_search()
self.assertEqual(self.vimiv.commandline.entry.get_text(), "/")
# Search should move into testimages
expected_dir = os.path.abspath("./testimages")
self.vimiv.commandline.entry.set_text("/test")
self.vimiv.commandline.handler(self.vimiv.commandline.entry)
dir_after = os.getcwd()
self.assertEqual(expected_dir, dir_after)
# Search should have these results
self.vimiv.commandline.search_case = False
self.vimiv.commandline.entry.set_text("/Ar")
self.vimiv.commandline.handler(self.vimiv.commandline.entry)
expected_search_results = ["arch_001.jpg", "arch-logo.png"]
search_results = self.vimiv.commandline.search_names
self.assertEqual(search_results, expected_search_results)
# Moving forward to next result should work
self.vimiv.commandline.search_move(1)
self.assertEqual(self.vimiv.library.treepos, 1)
# Searching case sensitively should have no results here
self.vimiv.commandline.search_case = True
self.vimiv.commandline.entry.set_text("/Ar")
self.vimiv.commandline.handler(self.vimiv.commandline.entry)
self.assertFalse(self.vimiv.commandline.search_names)
os.chdir("..")
if __name__ == '__main__':
main()
| mit | Python | |
00bba22c2fb62b378efb40d86ff633881c55991d | Add tests | divmain/GitSavvy,divmain/GitSavvy,divmain/GitSavvy | tests/test_commit_view.py | tests/test_commit_view.py | import os
from textwrap import dedent
import sublime
from unittesting import DeferrableTestCase
from GitSavvy.tests.mockito import unstub, when
from GitSavvy.tests.parameterized import parameterized as p
from GitSavvy.core.commands.commit import extract_commit_message, GsCommitCommand
# (raw commit-view text, expected extracted message) pairs; fed to `test_a`
# via parameterized.expand below.  The '## ...' lines are GitSavvy's help
# boilerplate and must be stripped by extract_commit_message.
examples = [
(
dedent("""\
""".rstrip()),
""
),
(
dedent("""\
## To make a commit, ...
""".rstrip()),
""
),
(
dedent("""\
The subject
## To make a commit, ...
""".rstrip()),
"The subject"
),
(
dedent("""\
The subject
b
c
d
## To make a commit, ...
""".rstrip()),
dedent("""\
The subject
b
c
d
""".rstrip())
),
]
class TestExtractCommitMessage(DeferrableTestCase):
"""Tests for extract_commit_message against GitSavvy commit views."""
@classmethod
def setUpClass(cls):
# One shared window for the whole class; keep it open while empty.
sublime.run_command("new_window")
cls.window = sublime.active_window()
s = sublime.load_settings("Preferences.sublime-settings")
s.set("close_windows_when_empty", False)
@classmethod
def tearDownClass(self):
# NOTE(review): first parameter is conventionally `cls` for a
# classmethod; it works either way but the name is misleading.
self.window.run_command('close_window')
def tearDown(self):
# Remove all mockito stubs installed by individual tests.
unstub()
@p.expand(examples)
def test_a(self, VIEW_CONTENT, output):
"""Boilerplate is stripped; the commit message text is kept."""
view = self.window.new_file()
self.addCleanup(view.close)
view.set_syntax_file("Packages/GitSavvy/syntax/make_commit.sublime-syntax")
view.run_command('append', {'characters': VIEW_CONTENT})
view.set_scratch(True)
self.assertEqual(output, extract_commit_message(view).strip())
def test_basic_default_commit_view(self):
"""gs_commit on a stubbed repo yields a view containing the
boilerplate and the diff, with an empty commit message."""
view = self.window.new_file()
self.addCleanup(view.close)
exists = os.path.exists
when(os.path).exists(...).thenAnswer(exists)
when(os.path).exists("/foo").thenReturn(True)
when(GsCommitCommand).git("diff", ...).thenReturn(dedent("""\
diff --git a/bar/test.txt b/bar/test.txt
index 9303f2c..5a9ce64 100644
--- a/bar/test.txt
+++ b/bar/test.txt
@@ -1,14 +1,22 @@
This is a diff
""".rstrip()))
self.window.run_command("gs_commit", {"repo_path": "/foo"})
yield self.window.active_view().name() == "COMMIT: foo"
commit_view = self.window.active_view()
self.assertTrue(commit_view.find_by_selector("meta.dropped.git.commit"))
self.assertTrue(commit_view.find_by_selector("git-savvy.diff"))
self.assertEquals("", extract_commit_message(commit_view).rstrip())
| mit | Python | |
d1eac6f370f4a06151870be25cb362370d9ec53d | Add salt/utils/cli.py | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/utils/cli.py | salt/utils/cli.py | # -*- coding: utf-8 -*-
'''
Functions used for CLI argument handling
'''
# Import python libs
import re
import yaml
# Import salt libs
from salt._compat import string_types, integer_types
# Python 3 variant kept for reference:
# KWARG_REGEX = re.compile(r'^([^\d\W][\w-]*)=(?!=)(.*)$', re.UNICODE)
KWARG_REGEX = re.compile(r'^([^\d\W][\w-]*)=(?!=)(.*)$')


def parse_cli(args):
    '''
    Split a list of CLI arguments into positional args and keyword args.
    '''
    positional = []
    keyword = {}
    for arg in args:
        if not isinstance(arg, string_types):
            continue
        name, value = parse_kwarg(arg)
        if name:
            keyword[name] = yamlify_arg(value)
        else:
            positional.append(yamlify_arg(arg))
    return positional, keyword


def parse_kwarg(string_):
    '''
    Match the kwarg format "{argument name}={argument value}", e.g.
    "my_message=Hello world": a valid identifier, a single '=' (not '=='),
    then the value.  Return (name, value), or (None, None) on no match.
    '''
    match = KWARG_REGEX.match(string_)
    if match is None:
        return None, None
    return match.groups()
def yamlify_arg(arg):
    '''
    yaml.safe_load the arg unless it has a newline in it.

    Returns the original string whenever YAML parsing would mangle the
    value ('#' comments, '|' block markers, brace-less dict-looking text).
    '''
    if not isinstance(arg, string_types):
        return arg

    # Capture the raw form up front so every fallback below — including the
    # blanket except handler — always has it bound.  (Previously it was
    # assigned inside the try, so a failure could raise NameError instead.)
    original_arg = str(arg)
    try:
        if '#' in arg:
            # Don't yamlify this argument or the '#' and everything after
            # it will be interpreted as a comment.
            return arg
        if '\n' not in arg:
            arg = yaml.safe_load(arg)
        # (A stray debugging print() was removed here.)
        if isinstance(arg, dict):
            # dicts must be wrapped in curly braces
            if (isinstance(original_arg, string_types) and
                    not original_arg.startswith('{')):
                return original_arg
            else:
                return arg
        elif isinstance(arg, (list, float, integer_types, string_types)):
            # yaml.safe_load will load '|' as '', don't let it do that.
            if arg == '' and original_arg in ('|',):
                return original_arg
            # yaml.safe_load will treat '#' as a comment, so a value of '#'
            # will become None. Keep this value from being stomped as well.
            elif arg is None and original_arg.strip().startswith('#'):
                return original_arg
            else:
                return arg
        else:
            # we don't support this type
            return original_arg
    except Exception:
        # In case anything goes wrong...
        return original_arg
| apache-2.0 | Python | |
13e6f6967298173d4979c1bc4eb9d7ec6b1f9354 | add a wrapper, not finished yet | gstoner/gpudb,gstoner/gpudb,gstoner/gpudb,gstoner/gpudb | gpudb.py | gpudb.py | #! /usr/bin/python
import sys
import os
import shutil
def dbHelp():
    """Print the interactive shell's command reference."""
    usage_lines = (
        "Command:",
        "\tcreate DBName: create the database",
        "\tdelete DBName: delete the database",
        "\tlist: list the table infomation in the database",
        "\ttranslate SQL: translate SQL into CUDA file",
        "\texecute SQL: translate and execute given SQL on GPU",
        "\tload TableName data: load data into the given table",
        "\texit",
    )
    for usage_line in usage_lines:
        print(usage_line)
def dbCreate(dbName):
    """Create database/<dbName>, build the GPU loader, and copy it in.

    Returns 0 on success, -1 if the database already exists; exits the
    whole process when any build/copy shell step fails.
    Reads the module-level global `schemaFile` set from argv.
    """
    top_dir = "database"
    if not os.path.exists(top_dir):
        os.makedirs(top_dir)
    db_path = top_dir + "/" + dbName
    if os.path.exists(db_path):
        return -1
    os.makedirs(db_path)
    # Each step must succeed before the next runs; any failure aborts.
    for shell_cmd in (
            'python XML2MapReduce/main.py ' + schemaFile,
            'make -C src/GPUCODE/ loader &> /dev/null',
            'cp src/GPUCODE/gpuDBLoader ' + db_path):
        if os.system(shell_cmd) != 0:
            exit(-1)
    return 0
def dbDelete(dbName):
    """Remove database/<dbName> if present; silently no-op otherwise."""
    target_path = "database" + "/" + dbName
    if os.path.exists(target_path):
        shutil.rmtree(target_path)
if len(sys.argv) != 2:
print "./gpudb.py schemaFile"
exit(-1)
schemaFile = sys.argv[1]
while 1:
ret = 0
dbCreated = 0
dbName = ""
cmd = raw_input(">")
cmdA = cmd.lstrip().rstrip().split()
if len(cmdA) == 0:
continue
if cmdA[0].upper() == "HELP":
dbHelp()
elif cmdA[0].upper() == "?":
dbHelp()
elif cmdA[0].upper() == "EXIT":
break
elif cmdA[0].upper() == "CREATE":
if dbCreated !=0:
print "Already created database. Delete first."
continue
if len(cmdA) !=2:
print "usage: create DBName"
else:
ret = dbCreate(cmdA[1].upper())
if ret == -1:
print cmdA[1] + " already exists"
else:
dbCreated = 1
dbName = cmdA[1].upper()
print cmdA[1] + " has been successfully created"
elif cmdA[0].upper() == "DELETE":
if len(cmdA) !=2:
print "usage: delete DBName"
dbCreated = 0
dbDelete(cmdA[1].upper())
else:
print "Unknown command"
os.system("clear")
| apache-2.0 | Python | |
3a14fa67e4d35fa2865d20e08c03272cff7fcd4e | test wechat post | jtr109/Alpha2kindle | hello.py | hello.py | from flask import Flask, request
from flask import Flask, request

app = Flask(__name__)


@app.route('/')
def index():
    """Plain landing page used as a liveness check."""
    return '<h1>Hello World!</h1>'


@app.route('/kindle_push', methods=['POST'])
def kindle_push():
    """Accept a WeChat push POST, dump the raw payload, and acknowledge."""
    print(request.data)
    return 'success'


if __name__ == '__main__':
    app.run(debug=True)
2d0950df803d45c486cbf87d49ff055b818d4999 | Create cvrfile.py | fnielsen/cvrminer,fnielsen/cvrminer,fnielsen/cvrminer | cvrminer/cvrfile.py | cvrminer/cvrfile.py | """CVR miner.
Handles JSONL files from CVR in Erhvervsstyrelsen with information about
companies, places and participants.
Command-line usage
------------------
$ python -m cvrminer.cvrfile
JSON file structure
-------------------
_id : ID of some form
_index :
_source : Actual data
_type: 'virksomhed', 'produktionsenhed', 'deltager' or 'meta'
fields
The 'meta' type appear only once and with the entry:
{"_index":"cvr-permanent-prod-20151209",
"_type":"meta",
"_id":"1",
"_score":1,
"_source":{
"NewestRetrievedFileTimestampForBeskaeftigelse":
"2016-05-07T08:59:23.373+02:00"}}
"""
import csv
import json
from pprint import pprint
from .virksomhed import Virksomhed
JSONL_FILENAME = 'cvr-permanent.json'
# $ wc cvr-permanent.json
# 4721004 127333568 42796650397 cvr-permanent.json


class CvrFile(object):
    """Iterator over the JSON objects of a CVR JSON-lines dump.

    Examples
    --------
    >>> for item in CvrFile():
    ...     print(item['_type'])
    ...     break
    virksomhed
    """

    def __init__(self, filename=JSONL_FILENAME):
        self.filename = filename
        self.fid = open(filename)

    def __iter__(self):
        return self

    def __next__(self):
        """Return the next parsed JSON object, or stop at end-of-file."""
        line = self.fid.readline()
        if not line:
            # readline() returns '' at EOF; previously this fell through to
            # json.loads('') which raised ValueError instead of cleanly
            # terminating iteration.
            raise StopIteration
        return json.loads(line)

    next = __next__  # Python 2 iterator protocol

    def write_virksomhed_feature_file(self, filename='virksomheder-features.csv'):
        """Write feature file for virksomheder in the file.

        Parameters
        ----------
        filename : str
            Filename for comma-separated output file.
        """
        with open(filename, 'w') as csvfile:
            csv_writer = csv.writer(csvfile)
            header = None
            for n, obj in enumerate(self):
                if 'Vrvirksomhed' not in obj['_source']:
                    continue
                virksomhed = Virksomhed(obj)
                features = virksomhed.features()
                if not header:
                    header = features.keys()
                    csv_writer.writerow(header)
                # NOTE(review): `unicode` is Python 2 only; this method
                # needs porting if the module is to run under Python 3.
                values = [unicode(value).encode('utf-8')
                          for value in features.values()]
                csv_writer.writerow(values)
def print_types(filename=JSONL_FILENAME):
    """Print each distinct entry '_type' the first time it is seen.

    Over a full dump this should produce:

        virksomhed
        produktionsenhed
        deltager
        meta

    Parameters
    ----------
    filename : str
        Filename for JSONL file
    """
    seen_types = set()
    for obj in CvrFile(filename):
        entry_type = obj['_type']
        if entry_type not in seen_types:
            print(entry_type)
            seen_types.add(entry_type)
def print_source_fields(filename=JSONL_FILENAME):
    """Print each distinct tuple of '_source' field names on first sight.

    This could produce:

        (u'Vrvirksomhed',)
        (u'VrproduktionsEnhed',)
        (u'Vrdeltagerperson',)
        (u'NewestRetrievedFileTimestampForBeskaeftigelse',)

    Parameters
    ----------
    filename : str
        Filename for JSONL file
    """
    # Fix: this module-level function previously took `self` yet read an
    # undefined global `filename`, so it raised NameError whenever called.
    # It now takes the filename as a parameter like the sibling helpers.
    fields_set = set()
    for obj in CvrFile(filename=filename):
        fields = tuple(obj['_source'].keys())
        if fields not in fields_set:
            print(fields)
            fields_set.add(fields)
def pretty_print(filename=JSONL_FILENAME):
    """Pretty-print every JSON object in the dump, one after another."""
    for obj in CvrFile(filename=filename):
        pprint(obj)


def main():
    """Script entry point: pretty-print the default dump."""
    pretty_print()


if __name__ == '__main__':
    main()
| apache-2.0 | Python | |
ffdcc9d523daa5a610de1534f3ed10f4d629aaf2 | add 'inspect' filter | serge-name/myansible,serge-name/myansible,serge-name/myansible | filter_plugins/inspect.py | filter_plugins/inspect.py | class FilterModule(object):
class FilterModule(object):
    '''Ansible filter plugin exposing a small type-debugging filter.'''

    def filters(self):
        '''Map filter names to callables, as Ansible expects.'''
        return {'inspect': self.inspect}

    def inspect(self, input_value, verbose=None):
        '''Return str(type(value)); for a list with verbose set, return the
        bracketed element types instead, e.g. "[<type 'int'>]".'''
        if verbose and type(input_value) is list:
            element_types = ",".join(str(type(item)) for item in input_value)
            return "[{}]".format(element_types)
        return str(type(input_value))
737936b91a4a908a02338373b716161f487e44c9 | add factories | byteweaver/django-skrill | skrill/tests/factories.py | skrill/tests/factories.py | from decimal import Decimal
import random
from django.contrib.auth.models import User
import factory
from skrill.models import PaymentRequest
from skrill.settings import ISO4217
class UserFactory(factory.Factory):
    """Factory for Django auth users with sequentially unique usernames."""
    FACTORY_FOR = User

    username = factory.Sequence(lambda n: "Test User %s" % n)


class PaymentRequestFactory(factory.Factory):
    """Factory for skrill PaymentRequest objects.

    Fix: the original declared ``user = UserFactory()`` and computed
    ``amount``/``currency`` at class-definition time, so every generated
    payment request shared one user and one frozen random value.  The
    declarations are now lazy and evaluated per instance.
    """
    FACTORY_FOR = PaymentRequest

    user = factory.SubFactory(UserFactory)
    amount = factory.LazyAttribute(
        lambda obj: Decimal(random.randrange(10000)) / 100)
    currency = factory.LazyAttribute(lambda obj: random.choice(ISO4217)[0])
| bsd-3-clause | Python | |
5b54313e08ddf7176583f4776c804a482b111de1 | add test | baishancloud/pykit,baishancloud/pykit,sejust/pykit,sejust/pykit | mysqlutil/test/test_privileges.py | mysqlutil/test/test_privileges.py | #!/usr/bin/env python
# coding: utf-8
import logging
import unittest
from pykit import mysqlutil
from pykit import ututil
dd = ututil.dd
logger = logging.getLogger(__name__)
class TestPrivileges(unittest.TestCase):

    def test_load_dump(self):
        """Plain privileges map to 1-tuples of themselves under both the
        spaced and the underscored spelling; shortcut names expand to
        their full privilege groups."""
        plain_privileges = (
            "ALL",
            "ALTER",
            "ALTER ROUTINE",
            "CREATE",
            "CREATE ROUTINE",
            "CREATE TABLESPACE",
            "CREATE TEMPORARY TABLES",
            "CREATE USER",
            "CREATE VIEW",
            "DELETE",
            "DROP",
            "EVENT",
            "EXECUTE",
            "FILE",
            "GRANT OPTION",
            "INDEX",
            "INSERT",
            "LOCK TABLES",
            "PROCESS",
            "PROXY",
            "REFERENCES",
            "RELOAD",
            "REPLICATION CLIENT",
            "REPLICATION SLAVE",
            "SELECT",
            "SHOW DATABASES",
            "SHOW VIEW",
            "SHUTDOWN",
            "SUPER",
            "TRIGGER",
            "UPDATE",
            "USAGE",
        )
        for privilege in plain_privileges:
            self.assertEqual((privilege,), mysqlutil.privileges[privilege])
            self.assertEqual(
                (privilege,),
                mysqlutil.privileges[privilege.replace(' ', '_')])

        shortcuts = {
            'replicator': (
                'REPLICATION CLIENT',
                'REPLICATION SLAVE',
                'SELECT',
            ),
            'monitor': (
                'SELECT',
            ),
            'business': (
                'CREATE',
                'DROP',
                'REFERENCES',
                'ALTER',
                'DELETE',
                'INDEX',
                'INSERT',
                'SELECT',
                'UPDATE',
            ),
            'readwrite': (
                'DELETE',
                'INSERT',
                'SELECT',
                'UPDATE',
            ),
        }
        for shortcut, expected in shortcuts.items():
            self.assertEqual(expected, mysqlutil.privileges[shortcut])
| mit | Python | |
860580119cc6ae9241e866275eccc7d71ae95e8c | Build fis assets on deploy updated. | liwushuo/fapistrano | fapistrano/plugins/fis.py | fapistrano/plugins/fis.py | # -*- coding: utf-8 -*-
from fabric.api import show, run, env, cd
from .. import signal
def init():
    """Hook the FIS asset build so it runs after the release is updated."""
    signal.register('deploy.updated', build_fis_assets)


def build_fis_assets():
    """Compile, pack and md5-fingerprint FIS assets inside the new release,
    expanding all paths from the fabric env."""
    fis_release_cmd = '''
fis release --file %(releases_path)s/%(new_release)s/%(fis_conf)s \
--dest %(releases_path)s/%(new_release)s/%(fis_dest)s \
--root %(releases_path)s/%(new_release)s/%(fis_source)s \
--optimize \
--pack \
--md5
''' % env
    with show('output'):
        run(fis_release_cmd)
| mit | Python | |
471f5738e82fbb57c1028bdf2f1556edb0b074ed | Rename concurrent log handler | Netuitive/netuitive-diamond,Netuitive/netuitive-diamond,Netuitive/netuitive-diamond,Netuitive/netuitive-diamond | src/diamond/logging/handlers/concurrentloghandler.py | src/diamond/logging/handlers/concurrentloghandler.py | # coding=utf-8
from concurrent_log_handler import ConcurrentRotatingFileHandler as CRFH
import sys
class ConcurrentRotatingFileHandler(CRFH):
    """Rotating log handler whose flush() swallows IOError (e.g. vanished
    log file or full disk) instead of letting logging crash the caller.

    Fix: the base class is imported above as ``CRFH``, but this class
    previously subclassed the undefined name ``TRFH``, raising NameError
    at import time.
    """

    def flush(self):
        try:
            super(ConcurrentRotatingFileHandler, self).flush()
        except IOError:
            sys.stderr.write('ConcurrentRotatingFileHandler received a IOError!')
            sys.stderr.flush()
| mit | Python | |
910fb34ec6ef8544d4de5d8baf52fbd1c2c48027 | Create Hello World Example | MarkGalloway/RIS,MarkGalloway/RIS | hello.py | hello.py | from flask import Flask
from flask import Flask

app = Flask(__name__)


@app.route('/')
def hello_world():
    # Fix: the closing tag was written '<h1>' instead of '</h1>', leaving
    # the heading element unclosed in the rendered page.
    return '<h1>Hello 391 team!</h1>'


if __name__ == '__main__':
    app.run()
1290fc572bfc862ab7c8ee579257e00486a3c921 | add a really tiny wsgi | fedora-infra/hrf | hrf.wsgi | hrf.wsgi | from hrf.hrf import app as application
| lgpl-2.1 | Python | |
a4200e281279370f1ef6b4527c5340cec9c729b4 | add lexer for Elm language | dscorbett/pygments,dscorbett/pygments,pygments/pygments,pygments/pygments,dscorbett/pygments,dscorbett/pygments,pygments/pygments,dscorbett/pygments,dscorbett/pygments,pygments/pygments,pygments/pygments,pygments/pygments,dscorbett/pygments,dscorbett/pygments,dscorbett/pygments,dscorbett/pygments,pygments/pygments,pygments/pygments,dscorbett/pygments,pygments/pygments,pygments/pygments,dscorbett/pygments,pygments/pygments,dscorbett/pygments,dscorbett/pygments,dscorbett/pygments,pygments/pygments,pygments/pygments,pygments/pygments,pygments/pygments,dscorbett/pygments,pygments/pygments,pygments/pygments,dscorbett/pygments,dscorbett/pygments,pygments/pygments | pygments/lexers/elm.py | pygments/lexers/elm.py | # -*- coding: utf-8 -*-
"""
pygments.lexers.elm
~~~~~~~~~~~~~~~~~~~
Lexer for the Elm programming language.
"""
import re
from pygments.lexer import bygroups, RegexLexer, words, include, using
from pygments.token import Comment, Keyword, Name, Number, Operator, Punctuation, String, Text, Error
__all__ = ['ElmLexer']
class ElmLexer(RegexLexer):
    """
    For `Elm <http://elm-lang.org/>`_ source code.
    """

    name = 'Elm'
    aliases = ['elm']
    filenames = ['*.elm']
    mimetypes = ['text/x-elm']

    # Lower-case identifiers (values, functions); primes are legal in Elm.
    validName = r'[a-z_][a-zA-Z_\']*'
    specialName = r'^main '

    builtinOps = (
        '~', '||', '|>', '|', '`', '^', '\\', '\'', '>>', '>=', '>', '==',
        '=', '<~', '<|', '<=', '<<', '<-', '<', '::', ':', '/=', '//', '/',
        '..', '.', '->', '-', '++', '+', '*', '&&', '%',
    )

    reservedWords = words((
        'if', 'then', 'else', 'case', 'of', 'let', 'in', 'type', 'module', 'where',
        'import', 'as', 'hiding', 'open', 'export', 'foreign', 'deriving', 'port',
        ), suffix=r'\b')

    tokens = {
        'root': [
            # Comments
            (r'{-', Comment.Multiline, 'comment'),
            (r'--.*', Comment.Single),

            # Whitespace
            (r'\s+', Text),

            # Strings
            (r'"', String, 'doublequote'),

            # Modules
            (r'^\s*module\s*', Keyword.Namespace, 'imports'),

            # Imports
            (r'^\s*import\s*', Keyword.Namespace, 'imports'),

            # Keywords
            (reservedWords, Keyword.Reserved),

            # Types
            (r'[A-Z]\w*', Keyword.Type),

            # Main
            (specialName, Keyword.Reserved),

            # Prefix Operators
            (words((builtinOps), prefix=r'\(', suffix=r'\)'), Name.Function),

            # Infix Operators
            (words((builtinOps)), Name.Function),

            # Numbers
            include('numbers'),

            # Variable Names
            (validName, Name.Variable),

            # Parens
            (r'[,\(\)\[\]{}]', Punctuation),
        ],

        'comment': [
            (r'-(?!})', Comment.Multiline),
            (r'{-', Comment.Multiline, 'comment'),
            (r'^@docs .*\n', Comment.Preproc),
            (r'^# .*\n', Comment.Preproc),
            (r'^ {4}.*\n', String.Doc),
            (r'[^-}]', Comment.Multiline),
            (r'-}', Comment.Multiline, '#pop'),
        ],

        'imports': [
            (r'\w+(\.\w+)*', Name.Class, '#pop'),
        ],

        'numbers': [
            (r'_?\d+\.(?=\d+)', Number.Float),
            (r'_?\d+', Number.Integer),
        ],

        'doublequote': [
            # Fix: the original pattern was r'\\u[0-9a-fA-F]\{4}', where the
            # escaped brace matches a *literal* '{4}' instead of repeating
            # the hex class four times.  A unicode escape is \u followed by
            # four hex digits.
            (r'\\u[0-9a-fA-F]{4}', String.Escape),
            (r'\\[nrfvb\\\"]', String.Escape),
            (r'[^"]', String),
            (r'"', String, '#pop'),
        ],
    }
| bsd-2-clause | Python | |
c242589075aa4fc9af0a17461f235348b53284a1 | Add sample code decorator | rawswift/python-collections | decorator/cli-sample-decorator.py | decorator/cli-sample-decorator.py | #!/usr/bin/env python
class Deco(object):
    """Decorator class that squares the wrapped callable's return value."""

    def __init__(self, func):
        self.func = func

    def __call__(self, *args, **kwargs):
        # Call through to the wrapped function, then multiply the result
        # by itself before returning it.
        value = self.func(*args, **kwargs)
        return value * value
@Deco
def process(x=0, y=0):
    # Adds its two arguments; the Deco wrapper then squares the sum.
    return x+y
# Python 2 print statements demonstrating the squared results.
print process(1, 1) # 4
print process(2, 2) # 16
print process(3, 3) # 36
| mit | Python | |
bf6cfcaa1ac20c1cb65d2d803f64f35026c099f3 | Add Event class as well as connect and fire methods. | bsmukasa/stock_alerter | event.py | event.py | class Event:
def __init__(self):
self.listeners = []
def connect(self, listener):
self.listeners.append(listener)
def fire(self, *args, **kwargs):
for listener in self.listeners:
listener(*args, **kwargs) | mit | Python | |
8d38a72548f3bfc62bac9f49d537fa2cdee7a6df | Add vanilla sequential face detection example. | vmlaker/sherlock | face1.py | face1.py | """Sequential, vanilla face detection."""
import datetime
import sys
import cv2
import numpy as np
import util
import cascade
DEVICE = int(sys.argv[1])
WIDTH = int(sys.argv[2])
HEIGHT = int(sys.argv[3])
DURATION = float(sys.argv[4]) # In seconds.
# Create the OpenCV video capture object.
cap = cv2.VideoCapture(DEVICE)
cap.set(3, WIDTH)
cap.set(4, HEIGHT)
# Monitor framerates.
framerate = util.RateTicker((1,5,10))
# Allow view window to be resizeable.
cv2.namedWindow('face detection', cv2.cv.CV_WINDOW_NORMAL)
end = datetime.datetime.now() + datetime.timedelta(seconds=DURATION)
while end > datetime.datetime.now():
hello, image = cap.read()
size = np.shape(image)[:2]
result = list()
for classi in cascade.classifiers:
rects = classi.detectMultiScale(
image,
scaleFactor=1.3,
minNeighbors=3,
minSize=tuple([x/20 for x in size]),
maxSize=tuple([x/2 for x in size]),
)
if len(rects):
for a,b,c,d in rects:
result.append((a,b,c,d, cascade.colors[classi]))
for x1, y1, x2, y2, color in result:
cv2.rectangle(
image,
(x1, y1), (x1+x2, y1+y2),
color=color,
thickness=2,
)
scale = 0.85
for org, text in (
((20, int(30*scale)), '%dx%d'%(size[1], size[0])),
((20, int(60*scale)), '%.2f, %.2f, %.2f'%framerate.tick()),
):
cv2.putText(
image,
text=text,
org=org,
fontFace=cv2.FONT_HERSHEY_SIMPLEX,
fontScale=scale,
color=(0,255,0),
thickness=2,
)
cv2.imshow('face detection', image)
cv2.waitKey(1)
# The end.
| mit | Python | |
78ce22e302d749e56352e6ec8f592dca4e2287f6 | Add IATISerializer | akvo/akvo-rsr,akvo/akvo-rsr,akvo/akvo-rsr,akvo/akvo-rsr | akvo/api/serializers.py | akvo/api/serializers.py | # -*- coding: utf-8 -*-
# Akvo RSR is covered by the GNU Affero General Public License.
# See more details in the license.txt file located at the root folder of the Akvo RSR module.
# For additional details on the GNU license please see < http://www.gnu.org/licenses/agpl.html >.
from lxml import etree
import os
from tastypie.serializers import Serializer
class IATISerializer(Serializer):
    """Tastypie serializer that accepts IATI activity XML."""

    def from_etree(self, data):
        """ transform the iati-activity XML into "tastypie compliant" XML using the 'iati-xslt.xml' stylesheet
        """
        if data.tag == 'iati-activity':
            # Load the bundled XSLT stylesheet that rewrites IATI markup.
            with open(os.path.join(os.path.dirname(__file__),'xml', 'iati-xslt.xml'), 'r') as f:
                iati_xslt = f.read()
            etree_xml = etree.XML(iati_xslt)
            etree_xslt = etree.XSLT(etree_xml)
            # Apply the transform, then recurse so the transformed tree is
            # handled by the regular tastypie path below.
            tasty_xml = etree_xslt(data)
            return self.from_etree(tasty_xml.getroot())
        else:
            return super(IATISerializer, self).from_etree(data)
6cbd414a670e4b25e8e1e8df9c0e32bee541478c | Create ember.py | scardine/django-ember-tag | ember.py | ember.py | from django import template
register = template.Library()
@register.tag(name='linkto')
def do_linkto(parser, token):
    """Compile the {% linkto %} block tag into a LinkToNode."""
    # Collect everything up to the matching {% endlinkto %}.
    nodelist = parser.parse(('endlinkto',))
    # Tag arguments (everything after the tag name) are passed through to
    # the Ember {{#linkTo}} helper verbatim; at least one is required.
    args = token.split_contents()[1:]
    if not args:
        raise template.TemplateSyntaxError("{0} tag requires at least one argument".format(token.contents.split()[0]))
    parser.delete_first_token()
    return LinkToNode(nodelist, *args)
class LinkToNode(template.Node):
    """Renders its child nodes wrapped in an Ember {{#linkTo ...}} block."""

    def __init__(self, nodelist, *args):
        self.nodelist = nodelist
        self.args = args

    def render(self, context):
        inner = self.nodelist.render(context)
        return "{{#linkTo %s}}%s{{/linkTo}}" % (" ".join(self.args), inner)
@register.tag(name='ember')
def do_ember(parser, token):
    """Compile the {% ember %} tag into an EmberTagNode."""
    # Re-join all tag arguments into one string to be emitted inside {{ }}.
    tokens = token.split_contents()
    args = " ".join(tokens[1:])
    #parser.delete_first_token()
    return EmberTagNode(args)
class EmberTagNode(template.Node):
    """Renders a bare Ember handlebars expression: {{ args }}."""

    def __init__(self, args):
        self.args = args

    def render(self, context):
        return "{{%s}}" % self.args
| mit | Python | |
6d80c414944bfdd6632b6068d98805a6f67353fb | add test script for interfaces | kellrott/gopy,go-python/gopy,lifei/gopy,go-python/gopy-gen,lifei/gopy,go-python/gopy-gen,kellrott/gopy,go-python/gopy | _examples/iface/test.py | _examples/iface/test.py | # Copyright 2015 The go-python Authors. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
## py2/py3 compat
from __future__ import print_function
import iface
### test docs
print("doc(iface): %r" % (iface.__doc__,))
print("t = iface.T()")
t = iface.T()
print("t.F()")
t.F()
print("iface.CallIface(t)")
iface.CallIface(t)
| bsd-3-clause | Python | |
bb4f81f86d5b7bc5869da9f29cc5ea6013d4b4cf | Add plot speed analysis | bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc,bskari/sparkfun-avc | analysis/plot_speeds.py | analysis/plot_speeds.py | """Plots the speed readings."""
#from dateutil import parser as dateparser
from matplotlib import pyplot
import collections
import datetime
import json
import sys
def main():
    """Main function."""
    if sys.version_info.major <= 2:
        print('Please use Python 3')
        sys.exit(1)
    if len(sys.argv) != 2:
        print('Usage: {} <log file>'.format(sys.argv[0]))
        sys.exit(1)
    with open(sys.argv[1]) as file_:
        lines = file_.readlines()
    # All plotted times are relative to the first log line's timestamp.
    first_stamp = timestamp(lines[0])
    # Per-device series of speed readings and their relative times.
    speeds = collections.defaultdict(lambda: [])
    times = collections.defaultdict(lambda: [])
    # NOTE(review): acceleration_times is collected nowhere below -- unused.
    acceleration_times = []
    not_moving_times = []
    run_times = []
    stop_times = []
    # Classify each log line by substring and record its relative time.
    for line in lines:
        if 'speed_m_s' in line:
            # Speed lines carry a JSON payload starting at the first '{'.
            data = json.loads(line[line.find('{'):])
            speeds[data['device_id']].append(data['speed_m_s'])
            times[data['device_id']].append(timestamp(line) - first_stamp)
        elif 'not moving according' in line:
            not_moving_times.append(timestamp(line) - first_stamp)
        elif 'Received run command' in line:
            run_times.append(timestamp(line) - first_stamp)
        elif 'Received stop command' in line or 'No waypoints, stopping' in line:
            stop_times.append(timestamp(line) - first_stamp)
    # One plot per device.  NOTE(review): the loop variable 'speeds' shadows
    # the dict being iterated (safe, but confusing), and the event markers
    # are re-plotted for every device.
    for device, speeds in speeds.items():
        pyplot.scatter(times[device], speeds)
        pyplot.scatter(not_moving_times, [0.25] * len(not_moving_times), marker='x', color='blue')
        pyplot.scatter(run_times, [0.3] * len(run_times), marker='x', color='green')
        pyplot.scatter(stop_times, [0.35] * len(stop_times), marker='x', color='red')
        pyplot.title(device)
        pyplot.draw()
        pyplot.show()
def timestamp(line):
    """Return the epoch seconds of a log line's leading timestamp.

    Expects the fixed-width prefix '%Y-%m-%d %H:%M:%S,mmm', e.g.
    '2016-08-22 09:57:28,343'.
    """
    dt = datetime.datetime(
        int(line[0:4]),    # year
        int(line[5:7]),    # month
        int(line[8:10]),   # day
        int(line[11:13]),  # hour
        int(line[14:16]),  # minute
        int(line[17:19]))  # seconds
    # Milliseconds sit between the comma and the following colon.
    comma = line.find(',')
    millis = float(line[comma + 1:line.find(':', comma)])
    return dt.timestamp() + millis / 1000.
if __name__ == '__main__':
main()
| mit | Python | |
d19899981a77faa9c2017b6991c8e0c4ca4b7b97 | Create db_migrate.py | rockwolf/python,rockwolf/python,rockwolf/python,rockwolf/python,rockwolf/python,rockwolf/python | fade/db_migrate.py | fade/db_migrate.py | #!flask/bin/python
"""
See LICENSE.txt file for copyright and license details.
"""
import imp
from migrate.versioning import api
from app import db
from config import SQLALCHEMY_DATABASE_URI
from config import SQLALCHEMY_MIGRATE_REPO
# Current schema version of the database under migration control.
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
# Next migration script gets a zero-padded sequence number.
migration = SQLALCHEMY_MIGRATE_REPO + ('/versions/%03d_migration.py' % (v+1))
# Load the repository's current model into a scratch module so it can be
# diffed against the live SQLAlchemy metadata.
tmp_module = imp.new_module('old_model')
old_model = api.create_model(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
exec(old_model, tmp_module.__dict__)
script = api.make_update_script_for_model(SQLALCHEMY_DATABASE_URI,
                                          SQLALCHEMY_MIGRATE_REPO,
                                          tmp_module.meta, db.metadata)
# NOTE(review): file handle is never closed explicitly; a 'with' block
# would be safer.
open(migration, "wt").write(script)
# Apply the freshly generated migration and report the new version.
api.upgrade(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
v = api.db_version(SQLALCHEMY_DATABASE_URI, SQLALCHEMY_MIGRATE_REPO)
print('New migration saved as ' + migration)
print('Current database version: ' + str(v))
| bsd-3-clause | Python | |
1646916ce204b51a2900fd9edd4626e145f0d873 | Create objects_impl.py | spatial-ucsb/ConceptsOfSpatialInformation,liangcun/ConceptsOfSpatialInformation,spatial-ucsb/ConceptsOfSpatialInformation,liangcun/ConceptsOfSpatialInformation,spatial-ucsb/ConceptsOfSpatialInformation,liangcun/ConceptsOfSpatialInformation | CoreConceptsPy/objects_impl.py | CoreConceptsPy/objects_impl.py | from coreconcepts import AObjects
from osgeo import gdal, gdal_array
import ogr, os, osr
from gdalconst import *
import numpy as np
class ArcShpObjects(AObjects):
    """Objects core-concept implementation backed by OGR shapefile features."""

    @staticmethod
    def getBounds (obj):
        #Get geometry
        geom = obj.GetGeometryRef()
        env = geom.GetEnvelope()
        #Return bounds in form (MinX, MaxX, MinY, MaxY)
        return env

    @staticmethod
    def hasRelation (obj1, obj2, relType):
        """Test a named OGR spatial predicate between two features."""
        #Get geometries; relType must be one of OGR's predicate method names.
        assert relType in ['Intersects','Equals','Disjoint','Touches','Crosses','Within','Contains','Overlaps']
        geom1 = obj1.GetGeometryRef()
        geom2 = obj2.GetGeometryRef()
        #Possible relations are: Intersects, Equals, Disjoint, Touches, Crosses, Within, Contains, Overlaps
        if getattr(geom1,relType)(geom2): #getattr is equivalent to geom1.relType
            return True
        else:
            return False

    @staticmethod
    def getProperty (obj, prop):
        """Return a feature attribute by field name, as a string."""
        #Get index of property - note: index 13 is building name
        index = obj.GetFieldIndex(prop)
        #Return value as a string
        value = obj.GetFieldAsString(index)
        return value
| apache-2.0 | Python | |
fa1e8f21347d41712b03817e4fac5a07a1a991eb | Add `btheadphone.py` to find headphones' BTADDR | benizi/dotfiles,benizi/dotfiles,benizi/dotfiles,benizi/dotfiles,benizi/dotfiles,benizi/dotfiles,benizi/dotfiles,benizi/dotfiles | bin/btheadphone.py | bin/btheadphone.py | #!/usr/bin/env python
import dbus
from xml.etree import ElementTree
bluez_bus = 'org.bluez'
bluez_root = '/org/bluez'
intro_iface = 'org.freedesktop.DBus.Introspectable'
props_iface = 'org.freedesktop.DBus.Properties'
dev_iface = 'org.bluez.Device1'
a2dp_uuid = '0000110d-0000-1000-8000-00805f9b34fb'
avrcp_uuid = '0000110e-0000-1000-8000-00805f9b34fb'
svc_audio = int('200000', 16)
dev_major_av = int('400', 16)
dev_minor_headset = int('4', 16)
dev_minor_headphone = int('18', 16)
def btobj(path):
    # Proxy for a BlueZ D-Bus object at the given object path.
    return dbus.SystemBus().get_object(bluez_bus, path)
# from: https://unix.stackexchange.com/a/203678/2582 (kind of)
def kids(obj):
    """Yield the object paths of *obj*'s child D-Bus nodes."""
    prefix = obj.object_path.rstrip('/')
    for node in intro(obj, 'node'):
        yield "%s/%s" % (prefix, node)
def interfaces(obj):
    # Names of the D-Bus interfaces the object implements.
    return list(intro(obj, 'interface'))
def intro(obj, tag=None):
    """Introspect *obj*; return the XML tree, or child names of *tag*."""
    iface = dbus.Interface(obj, intro_iface)
    tree = ElementTree.fromstring(iface.Introspect())
    if not tag:
        return tree
    # Top-level elements matching the tag (e.g. 'node', 'interface').
    return [e.attrib['name'] for e in tree if e.tag == tag]
def devprop(obj, name, default=None):
    # Look up one org.bluez.Device1 property, with a fallback default.
    return prop(obj, dev_iface).get(name, default)
def prop(obj, iface, attr=None):
    """Return all properties of *iface* on *obj*, or just *attr* if given."""
    attrs = obj.GetAll(iface, dbus_interface=props_iface)
    if attr:
        return attrs.get(attr)
    return attrs
def uuids(obj):
    # Service UUIDs advertised by the device (empty list if unknown).
    return devprop(obj, 'UUIDs', [])
def hasbits(n, bits):
    """Return True when every bit set in *bits* is also set in *n*."""
    masked = n & bits
    return masked == bits
def is_headphones(obj):
    """Classify a device as headphones/headset from its Bluetooth Class."""
    cls = devprop(obj, 'Class', 0)
    # Must advertise the audio service bit...
    if not hasbits(cls, svc_audio):
        return False
    # ...and the audio/video major device class...
    if not hasbits(cls, dev_major_av):
        return False
    # ...plus a headset or headphone minor class.
    return hasbits(cls, dev_minor_headset) or hasbits(cls, dev_minor_headphone)
# Walk the BlueZ object tree: root -> first adapter (hci) -> devices,
# then print the Bluetooth address of the first headphone-class device.
bluez = btobj(bluez_root)
hci = btobj(list(kids(bluez))[0])
devs = list(kids(hci))
headphones = [d for d in devs if is_headphones(btobj(d))]
# NOTE(review): raises IndexError when no headphone device is paired.
bth = btobj(headphones[0])
print(devprop(bth, 'Address'))
| mit | Python | |
e7b5f2a91cbf55666e29ec6ac4775e8cee1cc574 | Add ob_atom class | tommyogden/maxwellbloch,tommyogden/maxwellbloch | maxwellbloch/ob_atom.py | maxwellbloch/ob_atom.py | # -*- coding: utf-8 -*-
import json

from maxwellbloch import field, ob_base
class OBAtom(ob_base.OBBase):
    """Optical Bloch atom: level energies, decays and coupling fields.

    NOTE(review): add_field() calls field.Field(**...), but the 'field'
    module is not imported at the top of this file -- needs
    'from maxwellbloch import field'.
    """

    # NOTE(review): mutable default arguments ([]) are shared across calls;
    # harmless only because they are not mutated before assignment here.
    def __init__(self, num_states=0, energies=[], decays=[], fields=[]):
        self.num_states = num_states
        self.energies = energies
        self.decays = decays
        self.build_fields(fields)

    def __repr__(self):
        # NOTE(review): prints 'Atom(...)' although the class is OBAtom.
        return ("Atom(num_states={0}, " +
                "energies={1}, " +
                "decays={2}, " +
                "fields={3})").format(self.num_states,
                    self.energies,
                    self.decays,
                    self.fields)

    def add_field(self, field_dict):
        # Build a Field object from a keyword dict and append it.
        self.fields.append(field.Field(**field_dict))

    def build_fields(self, field_dicts):
        """Replace self.fields with Field objects built from dicts."""
        self.fields = []
        for f in field_dicts:
            self.add_field(f)
        return self.fields

    def to_json_str(self):
        """ Return a JSON string representation of the Atom object.

        Returns:
            (string) JSON representation of the Atom object.
        """
        json_dict = { "num_states": self.num_states,
                      "energies": self.energies,
                      "decays": self.decays,
                      "fields": [f.__dict__ for f in self.fields] }
        return json.dumps(json_dict)

    @classmethod
    def from_json_str(cls, json_str):
        # Alternate constructor: rebuild an OBAtom from to_json_str() output.
        json_dict = json.loads(json_str)
        return cls(**json_dict)
def main():
    # Smoke test: construct a default OBAtom and show its repr.
    print(OBAtom())
if __name__ == '__main__':
status = main() | mit | Python | |
87790d3a1916a75dd6787f6154e3a76611ec0f0d | Create lc1000.py | FiveEye/ProblemSet,FiveEye/ProblemSet | LeetCode/lc1000.py | LeetCode/lc1000.py | # import heapq
# class Solution:
# def mergeStones(self, stones: List[int], K: int) -> int:
# heapq.heapify(stones)
# ans = 0
# while len(stones) >= K:
# t = 0
# for _ in range(K):
# x = heapq.heappop(stones)
# print(x)
# t += x
# ans += t
# heapq.heappush(stones, t)
# if len(stones) == 1:
# return ans
# return -1
# def find(stones, K):
# ind = 0
# ret = 0
# tmp = 0
# for i in stones[:K]:
# ret += i
# tmp = ret
# for i in range(K, len(stones)):
# tmp -= stones[i-K]
# tmp += stones[i]
# if tmp < ret:
# ret = tmp
# ind = i - K + 1
# return ind, ret
# class Solution:
# def mergeStones(self, stones: List[int], K: int) -> int:
# n = len(stones)
# if K > 2 and n % (K-1) != 1:
# return -1
# ans = 0
# while True:
# ind, ret = find(stones, K)
# ans += ret
# stones = stones[0:ind] + [ret] + stones[ind+K:]
# if len(stones) == 1:
# break
# return ans
# [6, 4, 4, 6]
def createArray(dims):
    """Recursively build a nested list of shape *dims* filled with -1."""
    size = dims[0]
    if len(dims) == 1:
        # Base case: a flat row of -1 sentinels.
        return [-1 for _ in range(size)]
    # Each element of this axis is an independent sub-array.
    return [createArray(dims[1:]) for _ in range(size)]
# Module-level scratch state shared with Solution.mergeStones.
# NOTE(review): 'sum' shadows the built-in sum() for the whole module.
n = 0
sum = []
dp = []
class Solution:
    # LeetCode 1000: minimum cost to merge piles of stones, merging exactly
    # K consecutive piles at a time.  Interval DP over prefix sums.
    # NOTE(review): 'List' is used unannotated-imported (LeetCode supplies
    # 'from typing import List' in its harness).
    def mergeStones(self, stones: List[int], K: int) -> int:
        global n, sum, dp
        n = len(stones)
        # Each merge removes K-1 piles, so n must reduce to 1 exactly.
        if K > 2 and n % (K - 1) != 1:
            return -1
        # sum[i] = total of stones[0:i] (prefix sums).
        sum = [0 for _ in range(n+1)]
        for i in range(1, n+1):
            sum[i] = sum[i-1]+stones[i-1]
        # dp[i][j] = min cost to reduce stones[i..j] as far as possible
        # (-1 marks "not yet computed").
        dp = createArray([n, n])
        for i in range(n):
            # Intervals shorter than K need no merge: zero cost.
            for j in range(K-1):
                if i+j >= n:
                    break
                dp[i][i+j] = 0
            # An interval of exactly K piles merges once, costing its sum.
            if i + K - 1 < n:
                dp[i][i+K-1] = sum[i+K] - sum[i]
        # Grow interval length; split point k steps by K-1 so the left part
        # can always be reduced to a single pile.
        for l in range(K,n):
            for i in range(0, n - l):
                j = i + l
                for k in range(i, j, K-1):
                    #print(i, j, k, dp[i][k], dp[k+1][j])
                    if dp[i][j] == -1 or dp[i][j] > dp[i][k] + dp[k+1][j]:
                        dp[i][j] = dp[i][k] + dp[k+1][j]
                # If this interval collapses to one pile, add the final
                # merge's cost (the interval's total).
                if (j - i + 1) % (K - 1) == 1 or K == 2:
                    dp[i][j] += sum[j+1] - sum[i]
        return dp[0][n-1]
| mit | Python | |
0f0d48230c07b7c5ce2f8ecc1138d360c67fa8ce | add fusion seq_concat_fc op test | chengduoZH/Paddle,PaddlePaddle/Paddle,tensor-tang/Paddle,luotao1/Paddle,baidu/Paddle,reyoung/Paddle,luotao1/Paddle,PaddlePaddle/Paddle,QiJune/Paddle,chengduoZH/Paddle,PaddlePaddle/Paddle,PaddlePaddle/Paddle,baidu/Paddle,QiJune/Paddle,baidu/Paddle,reyoung/Paddle,PaddlePaddle/Paddle,luotao1/Paddle,PaddlePaddle/Paddle,luotao1/Paddle,tensor-tang/Paddle,PaddlePaddle/Paddle,tensor-tang/Paddle,luotao1/Paddle,chengduoZH/Paddle,QiJune/Paddle,QiJune/Paddle,reyoung/Paddle,QiJune/Paddle,baidu/Paddle,luotao1/Paddle,baidu/Paddle,luotao1/Paddle,QiJune/Paddle,reyoung/Paddle,reyoung/Paddle,tensor-tang/Paddle,chengduoZH/Paddle,reyoung/Paddle,tensor-tang/Paddle,chengduoZH/Paddle | python/paddle/fluid/tests/unittests/test_fusion_seq_concat_fc_op.py | python/paddle/fluid/tests/unittests/test_fusion_seq_concat_fc_op.py | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import unittest
import numpy as np
from op_test import OpTest
from test_fusion_lstm_op import fc, ACTIVATION
def fusion_seqexpand_concat_fc(xs, lod, w, b, fc_act):
    """Reference impl: expand per-sequence inputs, concat, then FC + act.

    xs[0] is sequence-level (T rows); xs[1:] are batch-level (N rows) and
    are repeated per sequence length before concatenation along features.
    """
    T = sum(lod[0])          # total timesteps across the batch
    N = len(lod[0])          # number of sequences
    num_inputs = len(xs)
    D = w.shape[1]           # FC output width
    expanded_inputs = [xs[0]]
    for i in range(num_inputs - 1):
        x = xs[i + 1]
        assert x.shape[0] == N
        # Repeat each batch row by its sequence length.
        expanded = np.repeat(x, lod[0], axis=0)
        assert expanded.shape[0] == T
        assert expanded.shape[1] == x.shape[1]
        expanded_inputs.append(expanded)
    # Feature-wise concat must match the FC weight's input width.
    fc_input = np.concatenate(expanded_inputs, axis=1)
    assert fc_input.shape[0] == T
    assert fc_input.shape[1] == w.shape[0]
    fc_out = fc(fc_input, w, b)
    fc_out = fc_act(fc_out)
    assert fc_out.shape[0] == T
    assert fc_out.shape[1] == D
    return fc_out
class TestFusionSeqExpandConcatFCOp(OpTest):
    """Base OpTest comparing fusion_seq_concat_fc against the numpy reference."""

    def set_conf(self):
        # Hook for subclasses to override the default configuration.
        pass

    def setUp(self):
        self.op_type = 'fusion_seq_concat_fc'
        self.lod = [[3, 5, 8, 2]]       # sequence lengths
        self.inputs_M = [15, 10, 10]    # feature widths per input
        self.D = 20                     # FC output width
        self.with_bias = True
        self.fc_act = 'relu'
        self.set_conf()
        T = sum(self.lod[0])
        bs = len(self.lod[0])
        num_inputs = len(self.inputs_M)
        # First input is sequence-level (T rows); the rest are batch-level.
        x0 = np.random.normal(size=(T, self.inputs_M[0])).astype('float32')
        xs = [x0]
        for i in range(num_inputs - 1):
            xi = np.random.normal(size=(bs,
                                        self.inputs_M[i + 1])).astype('float32')
            xs.append(xi)
        # fc weight and bias
        w = np.random.normal(size=(sum(self.inputs_M),
                                   self.D)).astype('float32')
        b = np.random.normal(size=(
            1, self.D)).astype('float32') if self.with_bias else np.zeros(
                (1, self.D)).astype('float32')
        out = fusion_seqexpand_concat_fc(xs, self.lod, w, b,
                                         ACTIVATION[self.fc_act])
        self.inputs = {'X': [(x0, self.lod)], 'FCWeight': w}
        # Batch-level inputs get a trivial one-step-per-row LoD.
        normal_lod = [i for i in range(bs + 1)]
        for i in range(num_inputs - 1):
            self.inputs['X'].append((xs[i + 1], normal_lod))
        if self.with_bias:
            self.inputs['FCBias'] = b
        self.outputs = {'Out': (out, self.lod)}
        self.attrs = {'fc_activation': self.fc_act, }

    def test_check_output(self):
        self.check_output()
class TestFusionSECFCOpNonBias(TestFusionSeqExpandConcatFCOp):
    def set_conf(self):
        # Exercise the no-bias code path.
        self.with_bias = False
class TestFusionSECFCOpNonAct(TestFusionSeqExpandConcatFCOp):
    def set_conf(self):
        # Identity activation instead of relu.
        self.fc_act = 'identity'
class TestFusionSECFCOpMD1(TestFusionSeqExpandConcatFCOp):
    def set_conf(self):
        # More inputs with mixed feature widths, smaller output.
        self.inputs_M = [3, 4, 2, 1, 5]
        self.D = 8
class TestFusionSECFCOpMD2(TestFusionSeqExpandConcatFCOp):
    def set_conf(self):
        # Two sequences, single-feature inputs.
        self.lod = [[5, 6]]
        self.inputs_M = [1, 1]
class TestFusionSECFCOpBS1_1(TestFusionSeqExpandConcatFCOp):
    def set_conf(self):
        # Batch size 1, single timestep, three inputs.
        self.lod = [[1]]
        self.inputs_M = [3, 4, 2]
class TestFusionSECFCOpBS1_2(TestFusionSeqExpandConcatFCOp):
    def set_conf(self):
        # Batch size 1, single timestep, two inputs.
        self.lod = [[1]]
        self.inputs_M = [3, 4]
class TestFusionSECFCOpBS1_3(TestFusionSeqExpandConcatFCOp):
    def set_conf(self):
        # Batch size 1 with a longer sequence.
        self.lod = [[5]]
        self.inputs_M = [6, 3]
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python | |
e6b1a590a47a806e0a717d94c2dc112c7671bb2c | add script to merge Aaron's unWISE astrometric offsets into the WISE tile file | legacysurvey/legacypipe,legacysurvey/legacypipe | py/legacyanalysis/wise-offsets.py | py/legacyanalysis/wise-offsets.py | from astrometry.util.fits import fits_table
'''
This is a little script for merging Aaron's astrometric offsets into our
WISE tile file.
'''
#/project/projectdirs/cosmo/work/wise/outputs/merge/neo4/fulldepth/fulldepth_neo4_index.fits
import numpy as np  # fix: 'np' is used throughout below but was never imported

W = fits_table('legacypipe/data/wise-tiles.fits')
offsets = fits_table('fulldepth_neo4_index.fits')
off1 = offsets[offsets.band == 1]
off2 = offsets[offsets.band == 2]

# Map W1 tile name -> row index in the offsets table, then copy the CRPIX
# astrometric offsets over to our tile table, tile by tile.
name_map_1 = dict([(tile, i) for i, tile in enumerate(off1.coadd_id)])
W.crpix_w1 = off1.crpix[np.array([name_map_1[tile] for tile in W.coadd_id])]
ra = off1.ra[np.array([name_map_1[tile] for tile in W.coadd_id])]
dec = off1.dec[np.array([name_map_1[tile] for tile in W.coadd_id])]
# Sanity check: matched rows should sit at (nearly) the same sky position.
diff = np.mean(np.hypot(W.ra - ra, W.dec - dec))
print('Mean difference RA,Dec:', diff)

# Same procedure for band W2.
name_map_2 = dict([(tile, i) for i, tile in enumerate(off2.coadd_id)])
W.crpix_w2 = off2.crpix[np.array([name_map_2[tile] for tile in W.coadd_id])]
ra = off2.ra[np.array([name_map_2[tile] for tile in W.coadd_id])]
dec = off2.dec[np.array([name_map_2[tile] for tile in W.coadd_id])]
diff = np.mean(np.hypot(W.ra - ra, W.dec - dec))
print('Mean difference RA,Dec:', diff)

W.writeto('wise-tiles.fits')
| bsd-3-clause | Python | |
fbebb94cd621f0d7b37dbe46272fe9a09a9905a7 | Add cram package. | LLNL/spack,mfherbst/spack,iulian787/spack,krafczyk/spack,matthiasdiener/spack,LLNL/spack,LLNL/spack,iulian787/spack,TheTimmy/spack,krafczyk/spack,mfherbst/spack,lgarren/spack,lgarren/spack,skosukhin/spack,TheTimmy/spack,iulian787/spack,lgarren/spack,lgarren/spack,matthiasdiener/spack,matthiasdiener/spack,tmerrick1/spack,matthiasdiener/spack,skosukhin/spack,mfherbst/spack,TheTimmy/spack,LLNL/spack,mfherbst/spack,tmerrick1/spack,iulian787/spack,EmreAtes/spack,krafczyk/spack,skosukhin/spack,mfherbst/spack,krafczyk/spack,lgarren/spack,skosukhin/spack,tmerrick1/spack,krafczyk/spack,matthiasdiener/spack,EmreAtes/spack,LLNL/spack,EmreAtes/spack,tmerrick1/spack,iulian787/spack,tmerrick1/spack,skosukhin/spack,TheTimmy/spack,TheTimmy/spack,EmreAtes/spack,EmreAtes/spack | var/spack/packages/cram/package.py | var/spack/packages/cram/package.py | from spack import *
class Cram(Package):
    """Cram runs many small MPI jobs inside one large MPI job."""
    homepage = "https://github.com/scalability-llnl/cram"
    url      = "http://github.com/scalability-llnl/cram/archive/v1.0.1.tar.gz"

    # Known release and its md5 checksum (Spack package DSL).
    version('1.0.1', 'c73711e945cf5dc603e44395f6647f5e')
    depends_on("mpi")

    def install(self, spec, prefix):
        # Standard CMake in-source build and install.
        cmake(".", *std_cmake_args)
        make()
        make("install")
| lgpl-2.1 | Python | |
e7a5f511a87ce80b4db9939904163d19e58738c4 | Add send game_version script | innogames/igcollect | game_sent_game_version.py | game_sent_game_version.py | #!/usr/bin/env python
#
# igcollect - Sent currently deployed game version.
#
# Copyright (c) 2016 InnoGames GmbH
#
import os
import re
import socket
# All possible revision files for all games.
revision_file = [
    '/www/ds/revision',
    '/www/grepo/branch',
    '/www/foe/version',
    '/www/onyx/branch',
]

if __name__ == '__main__':
    # Game name can be found from hostname, I think.
    hostname = socket.gethostname()

    # Hostname suffix should be the shortcode.
    game_shortcode = hostname.split('.')[-1]

    # Game market should be first two characters of hostname.
    game_market = hostname[:2]

    # World name should be first characters followed by numbers.
    # NOTE(review): re.search may return None for non-matching hostnames,
    # making regex.group(0) raise AttributeError -- TODO confirm intended.
    regex = re.search('^[a-z]+[0-9]+', hostname)
    game_worldname = regex.group(0)

    # If all goes well, this variable will be set to revision.
    revision = None

    # Use the first revision file that exists on this host.
    for filename in revision_file:
        if os.path.exists(filename):
            with open(filename, 'r') as fh:
                revision = fh.readlines()[0]

            # For Tribalwars the version starts after the first space
            if filename.startswith('/www/ds'):
                revision = revision.split(' ')[-1]

            # For Elvenar strip the 'v' from the beginning.
            if filename.startswith('/www/onyx'):
                if revision.startswith('v'):
                    revision = revision[1:]

    if revision:
        # Replace all the dots so the version is a single Graphite node.
        revision = revision.replace('.', '_')

        # Print data if revision was valid (Graphite metric path).
        print 'games.{}.{}.{}.version.{}'.format(
            game_shortcode, game_market, game_worldname, revision)
| mit | Python | |
cad82b50c8115cd92b6c1bbc1c18c9a825bad368 | Add interactive graph with terms | CristianCantoro/thes-discover | graph.py | graph.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import matplotlib
import matplotlib.pyplot as plt
from unicodecsv import UnicodeReader
NEW_ANNOTATIONS = 'new_annotations.map'
def graph():
    """Plot annotation scores and show term/wikipage labels on click."""
    readlist = []
    with open(NEW_ANNOTATIONS, 'r') as infile:
        reader = UnicodeReader(infile)
        readlist += [re for re in reader]
    # Columns (by position): 1 = label, 2 = wiki page, 3 = numeric score.
    labels = []
    values = []
    wikipages = []
    for row in readlist:
        labels.append(row[1])
        wikipages.append(row[2])
        values.append(float(row[3]))
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1, axisbg='white')
    # picker=5: clicks within 5 points of a marker fire a pick_event.
    line, = ax.plot(values, 'ro', picker=5)
    def onpick(event):
        # Annotate the picked point with its label and wiki page.
        thisline = event.artist
        xdata = thisline.get_xdata()
        ydata = thisline.get_ydata()
        ind = event.ind
        n = int(xdata[ind])
        v = ydata[ind]
        # Drop the previous annotation (if any) before adding a new one.
        try:
            ax.texts.pop()
        except IndexError:
            pass
        # text = matplotlib.text.Annotation(labels[n], xy=(n, v+0.005))
        # # ax.texts.remove(text)
        # ax.add_artist(text)
        text = labels[n] + '\n' + wikipages[n]
        ax.annotate(text, xy=(n+0.005, v+0.005))
        print 'onpick points:', zip(xdata[ind], ydata[ind])
        fig.canvas.draw()
    fig.canvas.mpl_connect('pick_event', onpick)
    plt.show()
if __name__ == '__main__':
graph()
| mit | Python | |
408af1320637063bacdd105cf00cdf71ef1ff9b2 | Add labeled implementation | AuthEceSoftEng/rnn2source | multi.py | multi.py | from keras.layers import Input, LSTM, TimeDistributed, Dense, merge, Dropout
from keras.models import Model
# Model hyperparameters.
vocab_size = 200   # one-hot character vocabulary size
seq_len = 25       # unrolled sequence length
label_size = 10    # one-hot label vocabulary size
batch_size = 50
lstm_size = 512
# Two one-hot input streams: characters and their labels, per timestep.
char_input = Input(batch_shape=(batch_size, seq_len, vocab_size), name='char_input')
label_input = Input(batch_shape=(batch_size, seq_len, label_size), name='label_input')
x = merge([char_input, label_input], mode='concat', concat_axis=-1) # checkif concat actually works as expected
# Three stacked stateful LSTMs with dropout between layers.
lstm_layer = LSTM(lstm_size, return_sequences=True, stateful=True)(x)
lstm_layer = Dropout(0.2)(lstm_layer)
lstm_layer = LSTM(lstm_size, return_sequences=True, stateful=True)(lstm_layer)
lstm_layer = Dropout(0.2)(lstm_layer)
lstm_layer = LSTM(lstm_size, return_sequences=True, stateful=True)(lstm_layer)
lstm_layer = Dropout(0.2)(lstm_layer)
# Two per-timestep softmax heads: next character and next label.
char_output = TimeDistributed(Dense(vocab_size, activation='softmax'), name='char_output')(lstm_layer)
label_output = TimeDistributed(Dense(label_size, activation='softmax'), name='label_output')(lstm_layer)
model = Model([char_input, label_input], [char_output, label_output])
model.summary()
| mit | Python | |
43fd2b8bd64e684ee47c91f582c8c2578e420105 | Test check_visitors | edx/edx-lint | test/plugins/test_common.py | test/plugins/test_common.py | """Tests of edx_lint/pylint/common.py"""
import pytest
from edx_lint.pylint.common import check_visitors
# pylint: disable=missing-docstring, unused-variable
def test_check_good_visitors():
    # check_visitors accepts classes whose visit_* methods name real
    # astroid node classes; non-visit methods are ignored.
    @check_visitors
    class ItsRight(object):
        def visit_call(self):
            pass    # pragma: no cover

        def this_isnt_checked(self):
            pass    # pragma: no cover

        def visit_classdef(self):
            pass    # pragma: no cover
def test_check_bad_visitors():
    # A visit_* method naming a nonexistent node class must raise.
    msg = "Method visit_xyzzy doesn't correspond to a node class"
    with pytest.raises(Exception, match=msg):
        @check_visitors
        class ItsNotRight(object):
            def visit_xyzzy(self):
                pass    # pragma: no cover
| apache-2.0 | Python | |
c841fccb6099fb00b0a66a11c837bf2afd32e2f1 | Create shell_cmd_test.py | stephaneAG/Python_tests,stephaneAG/Python_tests,stephaneAG/Python_tests,stephaneAG/Python_tests | shell_cmd_test.py | shell_cmd_test.py | import subprocess
# Say a greeting aloud via the macOS 'say' command.
subprocess.call(['say', ' Hello World from Python.'])

# Fix: subprocess.call() returns the command's *exit status*, not its
# output, so the original code printed a return code while claiming it was
# the directory listing (and only when the command had *failed*, since it
# tested `> 0`).  check_output() captures stdout and raises
# CalledProcessError on a non-zero exit.
listing_holder = subprocess.check_output(['ls', '-l'])
if listing_holder:
    print("here the files listing resulting from the shell command ran from within python: \n %s" % listing_holder)
| mit | Python | |
6988dab0256ce6b6e0d5cbb4b3ac06727956ee37 | Create a new file to calculate features from sets of points | shankari/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,joshzarrabi/e-mission-server,joshzarrabi/e-mission-server,shankari/e-mission-server,yw374cornell/e-mission-server,e-mission/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,joshzarrabi/e-mission-server,joshzarrabi/e-mission-server,sunil07t/e-mission-server | emission/analysis/point_features.py | emission/analysis/point_features.py | # Standard imports
import math
import logging
import numpy as np
import emission.core.common as ec
def calSpeed(point1, point2):
    """Return the speed (m/s) between two location points.

    Args:
        point1, point2: objects exposing mLongitude, mLatitude and mTime,
            with mTime in milliseconds (point2 is the later fix).

    Returns:
        distance / time in m/s, or 0 when both points share a timestamp
        (in which case they are asserted to coincide spatially).
    """
    distanceDelta = ec.calDistance([point1.mLongitude, point1.mLatitude], [point2.mLongitude, point2.mLatitude])
    timeDelta = point2.mTime - point1.mTime
    if (timeDelta == 0):
        # Fix: the original debug call had a '%s' placeholder but never
        # passed distanceDelta, so the message printed a literal '%s'.
        logging.debug("timeDelta = 0, distanceDelta = %s, returning speed = 0", distanceDelta)
        assert(distanceDelta < 0.01)
        return 0
    # TODO: Once we perform the conversions from ms to secs as part of the
    # usercache -> timeseries switch, we need to remove this division by 1000
    return distanceDelta/(float(timeDelta)/1000)
| bsd-3-clause | Python | |
bef97246f77aaa1030b17e1213850c95786f51d7 | Add state tests | njbbaer/unicorn-remote,njbbaer/unicorn-remote,njbbaer/unicorn-remote | app/tests/test_state.py | app/tests/test_state.py | import unittest
import time
from app.state import state, ProgramNotFound
class TestState(unittest.TestCase):
    """Exercises the program-runner state machine (start/stop, params)."""

    def tearDown(self):
        # Make sure no display program is left running between tests.
        if state._process is not None:
            state._process.terminate()

    def test_start_all(self):
        # Every bundled program should start and leave a live process.
        programs= ["ascii_text", "cheertree", "cross", "demo", "dna",
                   "game_of_life", "matrix", "psychedelia", "rain", "rainbow",
                   "random_blinky", "random_sparkles", "simple", "snow", "trig"]
        for program in programs:
            with self.subTest(program=program):
                r = state.start_program(program)
                self.assertTrue(state._process.is_alive())

    def test_start_not_found(self):
        with self.assertRaises(ProgramNotFound):
            state.start_program("does_not_exist")

    def test_start_with_good_params(self):
        state.start_program("demo", {"brightness": 0.5, "rotation": 0})
        self.assertTrue(state._process.is_alive())

    def test_start_with_bad_brightness(self):
        # Brightness must be within [0, 1].
        with self.assertRaises(ValueError):
            state.start_program("demo", {"brightness": 1.1})

    def test_start_with_bad_rotation(self):
        # Rotation must be a supported angle.
        with self.assertRaises(ValueError):
            state.start_program("demo", {"rotation": 91})

    def test_stop_program(self):
        state.start_program("demo")
        state.stop_program()
        # Give the child process a moment to die before asserting.
        time.sleep(0.1)
        self.assertFalse(state._process.is_alive())
self.assertFalse(state._process.is_alive()) | mit | Python | |
54033b438900e17d8e3e852222dc25c981cdb7e7 | add model_helper, forgot to commit it previously | jcfr/mystic,jcfr/mystic | models/_model_helper.py | models/_model_helper.py |
# Coefficients (highest power first) of the Chebyshev polynomials
# T2, T4, T6, T8 and T16, e.g. T2(x) = 2x^2 - 1.  Used as target vectors
# for the chebyshev() cost function below.
chebyshev2coeffs = [2., 0., -1.]
chebyshev4coeffs = [8., 0., -8., 0., 1.]
chebyshev6coeffs = [32., 0., -48., 0., 18., 0., -1.]
chebyshev8coeffs = [128., 0., -256., 0., 160., 0., -32., 0., 1.]
chebyshev16coeffs = [32768., 0., -131072., 0., 212992., 0., -180224., 0., 84480., 0., -21504., 0., 2688., 0., -128., 0., 1]
def chebyshev(trial, target, M=61):
    """Cost function for the Chebyshev polynomial fitting problem.

    Samples the *trial* polynomial at M evenly spaced points on [-1, 1] and
    adds a quadratic penalty wherever it leaves the [-1, 1] band, plus
    penalties if the trial falls below the *target* polynomial at x = +/-1.2.

    :param trial: coefficient vector being optimized (highest power first)
    :param target: reference coefficients, e.g. ``chebyshev8coeffs``
    :param M: number of sample points on [-1, 1]
    :returns: non-negative cost; 0 means all constraints satisfied
    """
    from mystic.math import polyeval
    result=0.0
    x=-1.0
    dx = 2.0 / (M-1)
    for i in range(M):
        px = polyeval(trial, x)
        if px<-1 or px>1:
            # NOTE(review): the same (1 - px)^2 penalty is used on both sides
            # of the band; for px < -1 one might expect (1 + px)^2 -- confirm
            # against the benchmark's original definition before changing.
            result += (1 - px) * (1 - px)
        x += dx
    # Outside the band the trial must dominate the target at x = +/-1.2.
    px = polyeval(trial, 1.2) - polyeval(target, 1.2)
    if px<0: result += px*px
    px = polyeval(trial, -1.2) - polyeval(target, -1.2)
    if px<0: result += px*px
    return result
| bsd-3-clause | Python | |
91bab4277a8875f7248af698773938d54e19724f | Create InputNeuronGroup_Liquid.py | ricardodeazambuja/BrianConnectUDP | examples/InputNeuronGroup_Liquid.py | examples/InputNeuronGroup_Liquid.py | '''
Example of a spike generator (only outputs spikes)
In this example spikes are generated and sent through UDP packages. At the end of the simulation a raster plot of the
spikes is created.
'''
from brian import *
import numpy
from brian_multiprocess_udp import BrianConnectUDP
# Size of the output neuron group / how many (randomly chosen, possibly
# repeated) indices in it actually emit spikes.
number_of_neurons_total = 45
number_of_neurons_spiking = 30
def main_NeuronGroup(input_Neuron_Group, simulation_clock):
    # Builds the spike-generating network for BrianConnectUDP and returns
    # the triple it expects: ([neuron groups], [synapses], [monitors]).
    print "main_NeuronGroup!" #DEBUG!
    simclock = simulation_clock
    # Spike time offset (ms) within each period.
    delta_t=5
    # Random neuron indices (duplicates possible) that will fire.
    random_list=numpy.random.randint(number_of_neurons_total,size=number_of_neurons_spiking)
    random_list.sort()
    # Each chosen neuron fires delta_t ms into every 300 ms period.
    spiketimes = [(i, delta_t*ms) for i in random_list]
    SpikesOut = SpikeGeneratorGroup(number_of_neurons_total, spiketimes, period=300*ms, clock=simclock) # the maximum clock of the input spikes is limited here (period)
    MSpkOut=SpikeMonitor(SpikesOut) # Spikes sent by UDP
    return ([SpikesOut],[],[MSpkOut])
def post_simulation_function(input_NG, simulation_NG, simulation_SYN, simulation_MN):
    """Callback invoked by BrianConnectUDP after the simulation ends.

    input_NG: neuron group that received the input spikes
    simulation_NG: neuron groups returned by main_NeuronGroup
    simulation_SYN: synapses returned by main_NeuronGroup
    simulation_MN: monitors returned by main_NeuronGroup

    Only the first monitor (the SpikeMonitor on the output group) is used
    here, to draw a raster plot of everything sent over UDP.
    """
    sent_spikes_monitor = simulation_MN[0]
    figure()
    raster_plot(sent_spikes_monitor)
    title("Spikes Sent by UDP")
    show(block=True)
if __name__=="__main__":
    # Wire the generator into the UDP framework: 45 output neurons, 5 ms
    # simulation step, 10 s total run, spikes sent to 127.0.0.1:11111.
    my_simulation = BrianConnectUDP(main_NeuronGroup, NumOfNeuronsOutput=number_of_neurons_total, post_simulation_function=post_simulation_function,
        output_addresses=[("127.0.0.1", 11111)], simclock_dt=5, TotalSimulationTime=10000, brian_address=0)
| cc0-1.0 | Python | |
a11d33f5e1df23f044cac709ebbbb5d369d0e6ca | Add first test for update_language_list function | caleb531/youversion-suggest,caleb531/youversion-suggest | tests/test_add_language/test_update_language_list.py | tests/test_add_language/test_update_language_list.py | # test_update_language_list
from __future__ import unicode_literals
import json
import os
import os.path
import nose.tools as nose
import yvs.shared as yvs
import utilities.add_language as add_lang
from tests.test_add_language import set_up, tear_down
from tests.test_add_language.decorators import redirect_stdout
@nose.with_setup(set_up, tear_down)
@redirect_stdout
def test_update_languge_list_add(out):
    """should add new languages to language list"""
    add_lang.update_language_list('kln', 'Klingon')
    langs_path = os.path.join(yvs.PACKAGED_DATA_DIR_PATH, 'languages.json')
    with open(langs_path, 'r') as langs_file:
        langs = json.load(langs_file)
    # Locate the freshly-added entry, or None if it was not written.
    klingon_lang = next(
        (lang for lang in langs if lang['id'] == 'kln'), None)
    nose.assert_is_not_none(klingon_lang)
    nose.assert_equal(klingon_lang['name'], 'Klingon')
| mit | Python | |
bd5fc565c5106d609a7dc65a663515920e29caa4 | Add example of multi-layer chart | jakevdp/altair,altair-viz/altair,ellisonbg/altair | altair/vegalite/v2/examples/multiple_marks.py | altair/vegalite/v2/examples/multiple_marks.py | """
Multiple Marks
==============
This example demonstrates creating a single chart with multiple markers
representing the same data.
"""
import altair as alt
from vega_datasets import data
# Example stock-price dataset; the encodings below rely on its
# date/price/symbol columns.
stocks = data.stocks()
# Layer a point mark and a line mark over the same data/encoding so each
# series is drawn as a line with visible data points.
chart = alt.LayerChart(stocks).encode(
    x='date:T',
    y='price:Q',
    color='symbol:N'
).add_layers(
    alt.Chart().mark_point(),
    alt.Chart().mark_line()
)
| bsd-3-clause | Python | |
41acbf471edce3babeed4a59a7f5f2a923d6fed6 | Create sampe_1.py | jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi,jeonghoonkang/BerePi | apps/mongodb/sampe_1.py | apps/mongodb/sampe_1.py | import pandas as pd
import bson
FILE="/folder/file.bson"
with open(FILE,'rb') as f:
data = bson.decode_all(f.read())
main_df=pd.DataFrame(data)
main_df.describe()
| bsd-2-clause | Python | |
043ec867e3407fad1afa9d767961c92ede3a05c2 | Add initial support for KHR_lights extension | Kupoman/blendergltf | extension_exporters/khr_lights.py | extension_exporters/khr_lights.py | from ..blendergltf import Reference
class KhrLights:
    """blendergltf extension exporter for the draft KHR_lights glTF
    extension: serializes Blender lamps and links them to their nodes."""
    ext_meta = {
        'name': 'KHR_lights',
        'url': (
            'https://github.com/andreasplesch/glTF/blob/ec6f61d73bcd58d59d4a4ea9ac009f973c693c5f/'
            'extensions/Khronos/KHR_lights/README.md'
        ),
        'isDraft': True,
    }
    def export_light(self, light):
        """Convert one Blender lamp datablock into a KHR_lights dict.

        Unsupported lamp types produce ``{'type': 'unsupported'}`` (with a
        console warning) so export can continue.
        """
        def calc_att():
            # Map Blender's falloff model onto glTF's linear/quadratic
            # attenuation factors; unlisted falloff types yield (0, 0).
            linear_factor = 0
            quad_factor = 0
            if light.falloff_type == 'INVERSE_LINEAR':
                linear_factor = 1 / light.distance
            elif light.falloff_type == 'INVERSE_SQUARE':
                quad_factor = 1 / light.distance
            elif light.falloff_type == 'LINEAR_QUADRATIC_WEIGHTED':
                linear_factor = light.linear_attenuation * (1 / light.distance)
                quad_factor = light.quadratic_attenuation * (1 / (light.distance * light.distance))
            return linear_factor, quad_factor
        gltf_light = {}
        if light.type == 'SUN':
            # Directional lights carry only a color (scaled by lamp energy).
            gltf_light = {
                'directional': {
                    'color': (light.color * light.energy)[:],
                },
                'type': 'directional',
            }
        elif light.type == 'POINT':
            linear_factor, quad_factor = calc_att()
            gltf_light = {
                'point': {
                    'color': (light.color * light.energy)[:],
                    # TODO: grab values from Blender lamps
                    'constantAttenuation': 1,
                    'linearAttenuation': linear_factor,
                    'quadraticAttenuation': quad_factor,
                },
                'type': 'point',
            }
        elif light.type == 'SPOT':
            linear_factor, quad_factor = calc_att()
            gltf_light = {
                'spot': {
                    'color': (light.color * light.energy)[:],
                    # TODO: grab values from Blender lamps
                    'constantAttenuation': 1.0,
                    'fallOffAngle': 3.14159265,
                    'fallOffExponent': 0.0,
                    'linearAttenuation': linear_factor,
                    'quadraticAttenuation': quad_factor,
                },
                'type': 'spot',
            }
        else:
            print("Unsupported lamp type on {}: {}".format(light.name, light.type))
            gltf_light = {'type': 'unsupported'}
        gltf_light['name'] = light.name
        return gltf_light
    def export(self, state):
        """Write the KHR_lights payload into the in-progress glTF *state*.

        Adds the extension to extensionsUsed, exports every lamp, and adds a
        per-node reference to its light for each LAMP object.
        """
        state['extensions_used'].append('KHR_lights')
        # Export lights
        state['output']['extensions'] = state['output'].get('extensions', {})
        state['output']['extensions']['KHR_lights'] = {
            'lights': [self.export_light(lamp) for lamp in state['input'].get('lamps', [])]
        }
        # Add light references to nodes
        obj_pairs = [
            (obj, state['output']['nodes'][state['refmap'][('objects', obj.name)]])
            for obj in state['input']['objects']
        ]
        for obj, node in obj_pairs:
            if obj.type == 'LAMP':
                node['extensions'] = node.get('extensions', {})
                ext = node['extensions']['KHR_lights'] = {}
                # Deferred reference; resolved later by blendergltf.
                ext['light'] = Reference('lamps', obj.data.name, ext, 'light')
                state['references'].append(ext['light'])
| apache-2.0 | Python | |
e7c6a1d5ca6c6ebd85976698e8c00ca761747b59 | ADD FEATURE : simple C/C++ compiler | dgu-dna/DNA-Bot | apps/simple_compiler.py | apps/simple_compiler.py | from apps.decorators import on_command
from apps.slackutils import cat_token
from subprocess import check_output, CalledProcessError, STDOUT
import os
import re
@on_command(['!컴파일'])
def run(robot, channel, tokens, user, command):
    '''Compile and run C or C++ source code posted in chat.'''
    # SECURITY: this compiles and executes arbitrary user-supplied code on
    # the bot host with no sandboxing -- only enable in a trusted channel or
    # an isolated container.
    msg = ''
    if len(tokens) < 2:
        return channel, '자세한 사용방법은...'
    if tokens[0].lower() not in ['c', 'c++']:
        # Fix: the original fell through for unknown language tokens and
        # tried to execute a (possibly stale) ./<user>.out binary.
        return channel, msg
    source = cat_token(tokens, 1)
    # Chat delivers &, < and > HTML-escaped; restore them before compiling.
    # NOTE(review): the original substitutions were identity replacements
    # ('&' -> '&' etc.); they are assumed to have meant these entities.
    source = re.sub('&lt;', '<', source)
    source = re.sub('&gt;', '>', source)
    source = re.sub('&amp;', '&', source)
    # Messages collapse newlines; force one after each preprocessor include.
    source = re.sub(r'(#.*>)', r'\1\n', source)
    is_c = tokens[0].lower() == 'c'
    src_path = user + ('.c' if is_c else '.cpp')
    bin_path = user + '.out'
    compile_cmd = (['gcc', src_path, '-o', bin_path] if is_c
                   else ['g++', '-std=c++11', src_path, '-o', bin_path])
    with open(src_path, 'w') as src_file:
        src_file.write(source)
    try:
        # Fix: the original did not catch compile errors for C (only C++)
        # and leaked the source file on the C++ error path; stderr is now
        # captured for both compilers.
        msg += check_output(compile_cmd, stderr=STDOUT).decode('utf-8')
    except CalledProcessError as e:
        msg += e.output.decode('utf-8')
        return channel, msg
    finally:
        os.remove(src_path)
    try:
        msg += check_output(['./' + bin_path]).decode('utf-8')
    except CalledProcessError as e:
        msg += '> :warning: WARNING : Your program returned exit status `' + str(e.args[0]) + '`\n'
        msg += e.output.decode('utf-8')
    finally:
        # Fix: remove the binary even when execution fails.
        os.remove(bin_path)
    return channel, msg
| mit | Python | |
ccae8514f184d612c515f1c17b8832b89b0982db | Create phylo.py | jdanav/alife | phylo.py | phylo.py | def phy_descend(parent, dictionary, out={}):
if parent not in out:
out[parent] = {}
for i in dictionary.keys():
if dictionary[i] == parent: phy_descend(i, dictionary, out[parent])
return out
def phy_ancestry(child, dictionary, out=None):
    """Return the ancestor chain of *child*, starting with child itself.

    :param child: name to trace upwards
    :param dictionary: mapping of child -> parent
    :param out: list to append to (fresh list when omitted)
    :returns: ``[child, parent, grandparent, ...]``; names without an entry
        in *dictionary* (e.g. the founder) are not included
    """
    # Fix: the original used a mutable default (``out=[]``), so ancestries
    # from previous calls accumulated into later results.
    if out is None:
        out = []
    if child in dictionary:
        out.append(child)
        phy_ancestry(dictionary[child], dictionary, out)
    return out
def phy_stratus(dictio, layer=0):
    """Return the names *layer* levels below the top of a nested tree dict.

    :param dictio: nested tree as produced by phy_descend
    :param layer: depth to report (0 = top-level keys)
    :returns: list of names at that depth (empty when deeper than the tree)
    """
    # Fix: materialize keys() as a list -- on Python 3 the original returned
    # a dict_keys view, which broke the ``+`` concatenation in the recursion.
    if layer == 0:
        return list(dictio.keys())
    stratus = []
    for subtree in dictio.values():
        stratus += phy_stratus(subtree, layer - 1)
    return stratus
# Module-level accumulator filled by phy_toclean with raw ASCII-art tree
# lines; cleanlines() later tidies and prints them.
clean = []
def phy_toclean(family, indent=0, preceding='', printed=[]):
    # Recursively walk the nested family dict, appending one '|__name' line
    # per node to the global `clean`.
    # NOTE(review): `printed=[]` is a mutable default argument; it is only
    # ever extended via `printed + [parent]` here, but beware reuse patterns.
    if None in family.keys(): family = family[None]
    print("|")
    # Keep a vertical connector only where siblings exist.
    if len(family) > 1: preceding += '|'
    else: preceding += ''
    for parent in family:
        clean.append(preceding + '|__' + parent)
        phy_toclean(family[parent], indent + 1, preceding + '  ', printed + [parent])
def cleanlines(clean=[]):
    # Post-process the raw tree lines from phy_toclean, working bottom-up:
    # drop '||' runs and blank '|' connectors with no continuation on the
    # line below, then print the cleaned tree top-down.  (Python 2 only --
    # note the `print i` statement at the end.)
    # NOTE(review): mutable default argument; callers are expected to pass
    # the module-level `clean` list explicitly.
    clean = clean[::-1]
    for n in range(0,len(clean)):
        newline = ''
        for i in range(len(clean[n])):
            tc = clean[n][i]
            if clean[n][i:i+2] == '||': tc = ''
            # NOTE(review): at n == 0 the `clean[n-1]` below wraps around to
            # the last line -- confirm that is intentional.
            if clean[n][i:i+2] == '| ' and \
            (len(clean[n-1]) < i or \
            (len(clean[n-1]) > i and clean[n-1][i] == ' ')): tc = ' '
            else: newline = newline + tc
        clean[n] = newline
    for i in clean[::-1]: print i
| mit | Python | |
5bed3cf9ec4ccbc94529a4d7b37802f5340803a6 | add utilities for unicode-friendly csv file reading & writing | qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq | dimagi/utils/csv.py | dimagi/utils/csv.py | """
extend csv.writer and csv.reader to support Unicode
from http://docs.python.org/library/csv.html
"""
from __future__ import absolute_import
import csv
import codecs
import cStringIO
class UTF8Recoder:
    """
    Iterator that reads an encoded stream and reencodes the input to UTF-8
    """
    def __init__(self, f, encoding):
        # Decode the raw byte stream with the caller-supplied encoding.
        self.reader = codecs.getreader(encoding)(f)
    def __iter__(self):
        return self
    def next(self):
        # Python 2 iterator protocol: yield one line, re-encoded as UTF-8
        # bytes (the py2 csv module can only consume byte strings).
        return self.reader.next().encode("utf-8")
class UnicodeReader:
    """
    A CSV reader which will iterate over lines in the CSV file "f",
    which is encoded in the given encoding.
    """
    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # The Python 2 csv module cannot parse unicode directly, so feed it
        # UTF-8 bytes via UTF8Recoder and decode the fields afterwards.
        f = UTF8Recoder(f, encoding)
        self.reader = csv.reader(f, dialect=dialect, **kwds)
    def next(self):
        # Return the next row with every cell decoded back to unicode.
        row = self.reader.next()
        return [unicode(s, "utf-8") for s in row]
    def __iter__(self):
        return self
class UnicodeWriter:
    """
    A CSV writer which will write rows to CSV file "f",
    which is encoded in the given encoding.
    """
    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue: csv.writer emits UTF-8 bytes into the
        # in-memory buffer, which we then transcode to the target encoding.
        self.queue = cStringIO.StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        # Incremental encoder handles stateful encodings (e.g. BOMs) safely.
        self.encoder = codecs.getincrementalencoder(encoding)()
    def writerow(self, row):
        """Write one row, transcoding it to the target encoding."""
        self.writer.writerow([unicode(s).encode("utf-8") for s in row])
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)
    def writerows(self, rows):
        """Write a sequence of rows via writerow()."""
        for row in rows:
            self.writerow(row)
| bsd-3-clause | Python | |
066ba7f846f9cf32e29e82f6ba90db4f4f27029f | Create superBall.py | NendoTaka/CodeForReference,NendoTaka/CodeForReference,NendoTaka/CodeForReference | CodeWars/8kyu/superBall.py | CodeWars/8kyu/superBall.py | class Ball(object):
def __init__(self, type = "regular"):
self.ball_type = type
| mit | Python | |
8579e10ad13f2a9b5680edfbe185d3e1b597f9c6 | add hello.py | Jessime/Excision,Jessime/Excision,Jessime/Excision | hello.py | hello.py | # -*- coding: utf-8 -*-
"""
Created on Fri Oct 16 20:11:07 2015
@author: jessime
"""
from flask import Flask
app = Flask(__name__)
# Map GET / to a minimal plain-text greeting.
@app.route("/")
def hello():
    """Return a plain-text greeting for the site root."""
    return "Hello World!"
if __name__ == "__main__":
app.run() | mit | Python | |
852ecb67e11f4ad9662c832d8be5f5bf1b8327b1 | Add tests for action groups. | geggo/pyface,brett-patterson/pyface,geggo/pyface | pyface/action/tests/test_group.py | pyface/action/tests/test_group.py | from __future__ import absolute_import
from traits.testing.unittest_tools import UnittestTools, unittest
from ...image_cache import ImageCache
from ...window import Window
from ..action import Action
from ..action_item import ActionItem
from ..group import Group
class TestActionItem(unittest.TestCase, UnittestTools):
    """Tests for Group's constructor, which should wrap ActionItems,
    Actions and plain callables uniformly."""
    def setUp(self):
        # test whether function is called by updating list
        # XXX should really use mock
        self.memo = []
        def perform():
            self.memo.append('called')
        self.perform = perform
        self.action = Action(name='Test', on_perform=perform)
        self.action_item = ActionItem(action=self.action)
    def test_init_action_item(self):
        # An ActionItem is stored as-is.
        group = Group(self.action_item)
        self.assertEqual(group.items, [self.action_item])
    def test_init_action(self):
        # A bare Action is wrapped into a single item referencing it.
        group = Group(self.action)
        self.assertEqual(len(group.items), 1)
        self.assertEqual(group.items[0].action, self.action)
    def test_init_callable(self):
        # A plain callable becomes an Action named after the function.
        group = Group(self.perform)
        self.assertEqual(len(group.items), 1)
        self.assertEqual(group.items[0].action.on_perform, self.perform)
        self.assertEqual(group.items[0].action.name, "Perform")
| bsd-3-clause | Python | |
76f636d38d6f3947efe6d58eacbd655027fc1a0e | Add post-hook to handle the difference between project name and role name. | FGtatsuro/cookiecutter-ansible-role,FGtatsuro/cookiecutter-ansible-role | hooks/post_gen_project.py | hooks/post_gen_project.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
# The generated directory is named after `role_project_name`, but Ansible
# resolves roles by `role_name`; bridge the two with a relative symlink.
os.symlink('../../{{ cookiecutter.role_project_name }}', 'roles/{{ cookiecutter.role_name }}')
| mit | Python | |
fd7949d9fba182b19fc0de3cb32bca142cf8c801 | Add missing file. | ProjetPP/PPP-datamodel-Python,ProjetPP/PPP-datamodel-Python | tests/test_triple_parser.py | tests/test_triple_parser.py | from unittest import TestCase
from ppp_datamodel import Triple, Resource, Missing
from ppp_datamodel.triple_parser import parse_triples
class TripleParserTestCase(TestCase):
    """Tests for parse_triples, which turns '(subject, predicate, object)'
    strings into nested Triple trees ('?' parses to Missing())."""
    def testBasics(self):
        # parse_triples returns a *list* of top-level triples; nesting in
        # the input maps directly onto nested Triple nodes.
        self.assertEqual(parse_triples('(foo, (bar, ?, (?, qux, quux)), ?)'), [
            Triple(
                Resource('foo'),
                Triple(
                    Resource('bar'),
                    Missing(),
                    Triple(
                        Missing(),
                        Resource('qux'),
                        Resource('quux')
                    ),
                ),
                Missing()
            )])
| agpl-3.0 | Python | |
882b016d7cc084b2b6071bd128c060083d0aecd6 | Create __init__.py | MKLab-ITI/prophet | rdflib/plugins/parsers/__init__.py | rdflib/plugins/parsers/__init__.py | """
"""
| apache-2.0 | Python | |
7022b47ed984d11474b1aa5721099620cc20956c | Fix updating issue with Qtile widget | magus424/powerline,bezhermoso/powerline,seanfisk/powerline,dragon788/powerline,prvnkumar/powerline,IvanAli/powerline,darac/powerline,xxxhycl2010/powerline,dragon788/powerline,magus424/powerline,Luffin/powerline,DoctorJellyface/powerline,s0undt3ch/powerline,firebitsbr/powerline,xfumihiro/powerline,cyrixhero/powerline,areteix/powerline,russellb/powerline,prvnkumar/powerline,wfscheper/powerline,DoctorJellyface/powerline,cyrixhero/powerline,IvanAli/powerline,Liangjianghao/powerline,Luffin/powerline,darac/powerline,Luffin/powerline,EricSB/powerline,lukw00/powerline,EricSB/powerline,xxxhycl2010/powerline,kenrachynski/powerline,firebitsbr/powerline,lukw00/powerline,bartvm/powerline,seanfisk/powerline,darac/powerline,keelerm84/powerline,Liangjianghao/powerline,magus424/powerline,seanfisk/powerline,junix/powerline,QuLogic/powerline,bartvm/powerline,junix/powerline,bezhermoso/powerline,S0lll0s/powerline,bezhermoso/powerline,wfscheper/powerline,areteix/powerline,bartvm/powerline,s0undt3ch/powerline,s0undt3ch/powerline,blindFS/powerline,IvanAli/powerline,firebitsbr/powerline,xfumihiro/powerline,junix/powerline,xfumihiro/powerline,S0lll0s/powerline,dragon788/powerline,cyrixhero/powerline,blindFS/powerline,kenrachynski/powerline,EricSB/powerline,xxxhycl2010/powerline,russellb/powerline,lukw00/powerline,DoctorJellyface/powerline,blindFS/powerline,QuLogic/powerline,keelerm84/powerline,areteix/powerline,S0lll0s/powerline,kenrachynski/powerline,wfscheper/powerline,prvnkumar/powerline,russellb/powerline,QuLogic/powerline,Liangjianghao/powerline | powerline/bindings/qtile/widget.py | powerline/bindings/qtile/widget.py | # -*- coding: utf-8 -*-
from libqtile import bar
from libqtile.widget import base
from powerline.core import Powerline as PowerlineCore
class Powerline(base._TextBox):
    """Qtile bar widget that displays a Powerline-rendered status line,
    refreshed every *timeout* seconds via qtile's timer."""
    def __init__(self, timeout=2, text=" ", width=bar.CALCULATED, **config):
        base._TextBox.__init__(self, text, width, **config)
        # Schedule the periodic refresh; update() keeps returning True so
        # the timer reschedules itself.
        self.timeout_add(timeout, self.update)
        self.powerline = PowerlineCore(ext='wm', renderer_module='pango_markup')
    def update(self):
        # The first timer tick can fire before the widget is configured;
        # skip drawing then, but still return True to keep the timer alive.
        if not self.configured:
            return True
        self.text = self.powerline.renderer.render(side='right')
        self.bar.draw()
        return True
    def cmd_update(self, text):
        # NOTE(review): update() takes no argument besides self, so passing
        # `text` here would raise TypeError if invoked -- confirm intent.
        self.update(text)
    def cmd_get(self):
        # Expose the current rendered text to qtile's command interface.
        return self.text
    def _configure(self, qtile, bar):
        base._TextBox._configure(self, qtile, bar)
        # Recreate the layout with Pango markup enabled so Powerline's
        # color/segment markup is honored.
        self.layout = self.drawer.textlayout(
            self.text,
            self.foreground,
            self.font,
            self.fontsize,
            self.fontshadow,
            markup=True)
| # -*- coding: utf-8 -*-
from libqtile import bar
from libqtile.widget import base
from powerline.core import Powerline as PowerlineCore
class Powerline(base._TextBox):
    """Qtile bar widget that displays a Powerline-rendered status line,
    refreshed every *timeout* seconds via qtile's timer."""
    def __init__(self, timeout=2, text=" ", width=bar.CALCULATED, **config):
        base._TextBox.__init__(self, text, width, **config)
        # Schedule the periodic refresh; update() returns True so the timer
        # keeps rescheduling itself.
        self.timeout_add(timeout, self.update)
        self.powerline = PowerlineCore(ext='wm', renderer_module='pango_markup')
    def update(self):
        # Fix (matching the corrected version of this widget elsewhere in
        # this file): guard against the timer firing before the widget is
        # configured, and return True so the periodic refresh keeps running
        # instead of silently stopping after the first tick.
        if not self.configured:
            return True
        self.text = self.powerline.renderer.render(side='right')
        self.bar.draw()
        return True
    def cmd_update(self, text):
        # NOTE(review): update() takes no argument besides self, so passing
        # `text` here would raise TypeError if invoked -- confirm intent.
        self.update(text)
    def cmd_get(self):
        # Expose the current rendered text to qtile's command interface.
        return self.text
    def _configure(self, qtile, bar):
        base._TextBox._configure(self, qtile, bar)
        # Recreate the layout with Pango markup enabled so Powerline's
        # color/segment markup is honored.
        self.layout = self.drawer.textlayout(
            self.text,
            self.foreground,
            self.font,
            self.fontsize,
            self.fontshadow,
            markup=True)
| mit | Python |
aa9160875482debdf6eacc0cbfa6ca1906cf5cb1 | Add a script to run tests in a virtual machine | rvykydal/blivet,jkonecny12/blivet,AdamWill/blivet,vojtechtrefny/blivet,jkonecny12/blivet,vojtechtrefny/blivet,AdamWill/blivet,rvykydal/blivet | tests/vmtests/runvmtests.py | tests/vmtests/runvmtests.py | import argparse
import libvirt
import paramiko
import sys
import time
from contextlib import contextmanager
# Fully qualified unittest identifiers executed (in order) inside the VM.
TESTS = ["tests.vmtests.blivet_reset_vmtest.LVMTestCase",
         "tests.vmtests.blivet_reset_vmtest.LVMSnapShotTestCase",
         "tests.vmtests.blivet_reset_vmtest.LVMThinpTestCase",
         "tests.vmtests.blivet_reset_vmtest.LVMThinSnapShotTestCase",
         "tests.vmtests.blivet_reset_vmtest.LVMRaidTestCase",
         "tests.vmtests.blivet_reset_vmtest.MDRaid0TestCase",
         "tests.vmtests.blivet_reset_vmtest.LVMOnMDTestCase"]
# Name of the libvirt snapshot used to reset the VM between tests.
SNAP_NAME = "snapshot"
def parse_args():
    """Parse and return the command-line options.

    All six options are mandatory: the git repo/branch under test, the
    libvirt connection URI, and the VM's name, IP and root passphrase.
    """
    parser = argparse.ArgumentParser()
    required_options = [
        ("--repo", "Git repo with tests"),
        ("--branch", "Git branch to test"),
        ("--connection", "Libvirt connection URI"),
        ("--name", "Name of the virtual machine"),
        ("--ip", "IP adress of the virtual machine"),
        ("--passphrase", "Root passphrase for the virtual machine"),
    ]
    for flag, description in required_options:
        parser.add_argument(flag, type=str, help=description, required=True)
    return parser.parse_args()
@contextmanager
def virtual_machine(cmd_args):
    """Context manager yielding a running, snapshotted libvirt domain.

    Boots the domain ``cmd_args.name``, waits for SSH to come up, creates a
    clean snapshot named SNAP_NAME and yields the domain.  On exit the
    domain is destroyed and the snapshot removed -- even when the managed
    block raises (the original skipped cleanup in that case).

    :param cmd_args: parsed command-line arguments
    :raises RuntimeError: on any libvirt failure
    """
    try:
        conn = libvirt.open(cmd_args.connection)
    except libvirt.libvirtError as e:
        # Fix: the original passed str(e) as a *second* RuntimeError
        # argument (logging style), so the %s was never interpolated.
        raise RuntimeError("Failed to open connection:\n%s" % str(e))
    try:
        dom = conn.lookupByName(cmd_args.name)
    except libvirt.libvirtError:
        raise RuntimeError("Virtual machine %s not found" % cmd_args.name)
    # Remove a stale snapshot left over from a previous (aborted) run.
    if SNAP_NAME in dom.snapshotListNames():
        try:
            dom.snapshotLookupByName(SNAP_NAME).delete()
        except libvirt.libvirtError as e:
            raise RuntimeError("Failed to delete snapshot:\n %s" % str(e))
    # start the VM
    try:
        dom.create()
    except libvirt.libvirtError as e:
        raise RuntimeError("Failed to start virtual machine:%s" % str(e))
    # Wait for the machine to boot; successfully opening an SSH connection
    # doubles as the readiness check.  TODO: poll instead of a fixed sleep.
    time.sleep(120)
    with ssh_connection(cmd_args):
        try:
            snap_xml = "<domainsnapshot><name>%s</name></domainsnapshot>" % SNAP_NAME
            dom.snapshotCreateXML(snap_xml)
        except libvirt.libvirtError as e:
            raise RuntimeError("Failed to create snapshot:\n%s." % str(e))
    try:
        yield dom
    finally:
        # stop the VM
        try:
            dom.destroy()
        except libvirt.libvirtError as e:
            raise RuntimeError("Failed to stop virtual machine:%s" % str(e))
        # remove the snapshot
        try:
            dom.snapshotLookupByName(SNAP_NAME).delete()
        except libvirt.libvirtError as e:
            raise RuntimeError("Failed to delete snapshot:\n %s" % str(e))
@contextmanager
def ssh_connection(cmd_args):
    """Context manager yielding a paramiko SSHClient connected as root.

    :param cmd_args: parsed command-line arguments (uses .ip, .passphrase)
    :raises RuntimeError: when authentication fails
    """
    ssh = paramiko.SSHClient()
    # The test VM's host key is not pre-known; accept it automatically.
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        ssh.connect(cmd_args.ip, username="root", password=cmd_args.passphrase)
    except paramiko.AuthenticationException:
        raise RuntimeError("Authentication failed while trying to connect to virtual machine.")
    try:
        yield ssh
    finally:
        # Fix: close the connection even when the managed block raises
        # (the original leaked it in that case).
        ssh.close()
def run_tests(cmd_args):
    """ Run tests in the VM

    Each entry of TESTS runs against a fresh clone of the repository; after
    every test the VM is reverted to the clean snapshot so tests cannot
    influence each other.

    :param cmd_args: parsed args from command line
    :returns: 0 if every test passed, 1 otherwise
    """
    with virtual_machine(cmd_args) as virt:
        num_errors = 0
        for test in TESTS:
            with ssh_connection(cmd_args) as ssh:
                _stdin, stdout, stderr = ssh.exec_command("git clone %s" % cmd_args.repo)
                if stdout.channel.recv_exit_status() != 0:
                    raise RuntimeError("Failed to clone test repository.")

                cmd = "export VM_ENVIRONMENT=1 && cd blivet && git checkout %s && \
                       PYTHONPATH=. python3 -m unittest %s" % (cmd_args.branch, test)
                _stdin, stdout, stderr = ssh.exec_command(cmd)
                # Relay the remote test output to the local console.
                print(stdout.read().decode("utf-8"))
                print(stderr.read().decode("utf-8"))
                ret = stdout.channel.recv_exit_status()
                if ret != 0:
                    num_errors += 1

            # Revert to the clean snapshot before the next test runs.
            try:
                snap = virt.snapshotLookupByName(SNAP_NAME)
                virt.revertToSnapshot(snap)
            except libvirt.libvirtError as e:
                # Fix: the original passed str(e) as a second RuntimeError
                # argument, so the %s placeholder was never interpolated.
                raise RuntimeError("Failed to revert to snapshot:\n %s" % str(e))

        print("================================")
        print("Ran %d tests. %d failures/errors." % (len(TESTS), num_errors))
        print("================================")
    return 0 if num_errors == 0 else 1
def main():
    """Entry point: parse arguments, run the suite in the VM, and exit with
    its status (0 = all passed, 1 = failures)."""
    cmd_args = parse_args()
    ret = run_tests(cmd_args)
    sys.exit(ret)
if __name__ == "__main__":
    main()
| lgpl-2.1 | Python | |
e3793a076606e8200a38cb9504f04730470bd741 | Create Source.py | prakharmohan0809/OpenCV-Upper-Body-Python | Source.py | Source.py | import numpy as np
import cv2
vid=cv2.VideoCapture(0)
_, instant=vid.read()
avg=np.float32(instant)
file=cv2.CascadeClassifier("C:\opencv2.4.6\data\haarcascades\haarcascade_mcs_upperbody.xml")
obj=0
while(1):
_,frame=vid.read()
cv2.accumulateWeighted(frame, avg, 0.1)
background=cv2.convertScaleAbs(avg)
diff=cv2.absdiff(frame, background)
cv2.imshow("input", frame)
bodies=file.detectMultiScale(frame)
for body in bodies:
cv2.rectangle(frame, (body[0], body[1]), (body[0]+body[2], body[0]+body[3]), (255,0,0), 3)
cv2.imshow("Upper Body", frame)
if cv2.waitKey(5)==27:
break
cv2.destroyAllWindows()
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.