commit
stringlengths
40
40
old_file
stringlengths
4
236
new_file
stringlengths
4
236
old_contents
stringlengths
1
3.26k
new_contents
stringlengths
16
4.43k
subject
stringlengths
16
624
message
stringlengths
17
3.29k
lang
stringclasses
5 values
license
stringclasses
13 values
repos
stringlengths
5
91.5k
27c3972a57e09faf35f86b82b35eb815dadc4688
mediachain/reader/dynamo.py
mediachain/reader/dynamo.py
import boto3 def get_table(name): dynamo = boto3.resource('dynamo') return dynamo.Table(name) def get_object(reference): table = get_table('mediachain') obj = table.get_item(Key={'multihash': reference}) byte_string = obj['Item']['data'] if byte_string is None: raise KeyError('Could not find key <%s> in Dynamo'.format(reference)) return cbor.loads(byte_string)
import boto3 import cbor def get_table(name): dynamo = boto3.resource('dynamodb', endpoint_url='http://localhost:8000', region_name='us-east-1', aws_access_key_id='', aws_secret_access_key='') return dynamo.Table(name) def get_object(reference): table = get_table('Mediachain') obj = table.get_item(Key={'multihash': reference}) if obj is None: raise KeyError('Could not find key <%s> in Dynamo'.format(reference)) byte_string = obj['Item']['data'].value return cbor.loads(byte_string)
Make get_object pull appropriate fields
Make get_object pull appropriate fields Temporarily set up dynamo to work internally
Python
mit
mediachain/mediachain-client,mediachain/mediachain-client
3f764874dbb805d661d38719bb4e78b6a52f9f79
parser/timestamp.py
parser/timestamp.py
from datetime import datetime DT_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" def get_timestamp(): """ Serialize actual datetime provided as simplified ISO 8601 (without timezone) string :type datetime: datetime :param datetime: datetime object to convert to string :return: serialized datetime :rtype: str """ return datetime.now().strftime(DT_FORMAT)
from datetime import datetime DT_FORMAT = "%Y-%m-%dT%H:%M:%S.%f" def get_timestamp(): """ Serialize actual datetime provided as simplified ISO 8601 (without timezone) string :return: serialized datetime :rtype: str """ return datetime.now().strftime(DT_FORMAT)
Remove docs for non-existing function parameters
Remove docs for non-existing function parameters
Python
mit
m4tx/techswarm-receiver
9c36417af3364b77853b62d9a924d5693e44dce0
fabfile.py
fabfile.py
# -*- coding: UTF-8 -*- from fabric.api import * #def dev() #def prod() #def setup_host() def hello(): print("Hello world!") def clean_db(): local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json") def test(): local("python manage.py test") def clean_test(): clean_db() test()
# -*- coding: UTF-8 -*- from fabric.api import * def clean_db(): local("rm -rf database/fst_demo.db;python manage.py syncdb --noinput;python manage.py loaddata fs_doc/fixtures/exempeldata.json") def test(): local("python manage.py test") def clean_test(): clean_db() test()
Remove noob fab method :-)
Remove noob fab method :-)
Python
bsd-3-clause
rinfo/fst,kamidev/autobuild_fst,kamidev/autobuild_fst,rinfo/fst,kamidev/autobuild_fst,rinfo/fst,kamidev/autobuild_fst,rinfo/fst
c94598b8ce59b98213367b54164b1051d56a28da
scene.py
scene.py
import bpy class Scene: """Scene object""" def __init__(self, filepath, render_engine='CYCLES'): self.filepath = filepath self.render_engine = 'CYCLES' def setup(self): self._cleanup() bpy.context.scene.render.filepath = self.filepath bpy.context.scene.render.engine = self.render_engine bpy.context.scene.frame_start = 1 bpy.context.scene.frame_end = 1 def render(self): bpy.ops.render.render(animation=True) def _cleanup(self): """Delete everything""" bpy.ops.object.delete(use_global=False)
import bpy class Scene: """Scene object""" def __init__(self, filepath, render_engine='CYCLES'): self.filepath = filepath self.render_engine = 'CYCLES' def setup(self): self._cleanup() bpy.context.scene.render.filepath = self.filepath bpy.context.scene.render.engine = self.render_engine bpy.context.scene.frame_start = 1 bpy.context.scene.frame_end = 1 def render(self, samples=50): bpy.context.scene.cycles.samples = samples bpy.ops.render.render(animation=True) def _cleanup(self): """Delete everything""" bpy.ops.object.delete(use_global=False)
Allow to set render samples
Allow to set render samples
Python
mit
josuemontano/blender_wrapper
de3f474502e781dacc0b182ee8d50d729468c576
setup.py
setup.py
from distutils.core import setup setup( name = 'pycolors2', py_modules = ['colors',], version = '0.0.3', author = 'Chris Gilmer', author_email = 'chris.gilmer@gmail.com', maintainer = 'Chris Gilmer', maintainer_email = 'chris.gilmer@gmail.com', url = 'http://github.com/chrisgilmerproj/pycolors2', license = 'MIT license', description = """ Tool to color code python output """, long_description = open('README.markdown').read(), requires = [], classifiers = ( 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System', 'Topic :: Terminals', 'Topic :: Utilities', ), )
from distutils.core import setup setup( name = 'pycolors2', py_modules = ['colors',], version = '0.0.3', author = 'Chris Gilmer', author_email = 'chris.gilmer@gmail.com', maintainer = 'Chris Gilmer', maintainer_email = 'chris.gilmer@gmail.com', url = 'http://github.com/chrisgilmerproj/pycolors2', license = 'MIT license', description = """ Tool to color code python output """, long_description = open('README.markdown').read(), requires = [], classifiers = ( 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: Developers', 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Operating System :: POSIX :: Linux', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.2', 'Programming Language :: Python :: 3.3', 'Topic :: Software Development :: Libraries :: Python Modules', 'Topic :: System', 'Topic :: Terminals', 'Topic :: Utilities', ), )
Add trove classifiers for language support
Add trove classifiers for language support
Python
mit
chrisgilmerproj/pycolors2
d73ff1c66925613646495a22018e8c8a6ce139a7
setup.py
setup.py
#!/usr/bin/env python from distutils.core import setup, Command from distutils.extension import Extension import os import numpy as np from Cython.Distutils import build_ext os.environ['TEST_DATA_ROOT'] = os.path.abspath("tests/data") class CramTest(Command): user_options = [ ] def initialize_options(self): self._dir = os.getcwd() def finalize_options(self): pass def run(self): import cram import sys test_root = os.path.abspath("tests/cram") tests = [os.path.join(test_root, test) for test in os.listdir("tests/cram")] sys.exit(cram.main(tests)) setup( name="15puzz", version="0.0.0", description="15 Puzzle Game", author="Eric Chlebek", author_email="echlebek@gmail.com", packages=["fifteen"], #scripts=["scripts/15puzz"], test_suite="tests.unit", cmdclass={"cram": CramTest, "build_ext": build_ext}, ext_modules=[ Extension("_c15", ["fifteen/_c15.pyx"], [np.get_include()]) ] )
#!/usr/bin/env python from distutils.core import setup, Command from distutils.extension import Extension import os import numpy as np from Cython.Distutils import build_ext from unittest import TextTestRunner, TestLoader os.environ['TEST_DATA_ROOT'] = os.path.abspath("tests/data") class UnitTest(Command): def run(self): import tests.unit.test_15p loader = TestLoader() t = TextTestRunner() t.run(loader.loadTestsFromModule(tests.unit.test_15p)) user_options = [] def initialize_options(self): self._dir = os.getcwd() def finalize_options(self): pass class CramTest(Command): user_options = [ ] def initialize_options(self): self._dir = os.getcwd() def finalize_options(self): pass def run(self): import cram import sys test_root = os.path.abspath("tests/cram") tests = [os.path.join(test_root, test) for test in os.listdir("tests/cram")] sys.exit(cram.main(tests)) setup( name="15puzz", version="0.0.0", description="15 Puzzle Game", author="Eric Chlebek", author_email="echlebek@gmail.com", packages=["fifteen"], #scripts=["scripts/15puzz"], cmdclass={"cram": CramTest, "build_ext": build_ext, "test": UnitTest}, ext_modules=[ Extension("_c15", ["fifteen/_c15.pyx"], [np.get_include()]) ] )
Fix broken unit test command.
Fix broken unit test command.
Python
mit
echlebek/15puzz,echlebek/15puzz
df0efa079afbda84b1c09bc4895c84c0ec70861d
setup.py
setup.py
import importlib from cx_Freeze import setup, Executable backend_path = importlib.import_module("bcrypt").__path__[0] backend_path = backend_path.replace("bcrypt", ".libs_cffi_backend") # Dependencies are automatically detected, but it might need # fine tuning. build_exe_options = { "include_files": [ ("client/dist", "client"), "LICENSE", "templates", "readme.md", (backend_path, "lib/.libs_cffi_backend") ], "includes": [ "cffi", "numpy", "numpy.core._methods", "numpy.lib", "numpy.lib.format", "raven.processors" ], "packages": [ "_cffi_backend", "appdirs", "asyncio", "bcrypt", "cffi", "idna", "motor", "packaging", "uvloop" ] } options = { "build_exe": build_exe_options } executables = [ Executable('run.py', base="Console") ] classifiers=[ "Programming Language :: Python :: 3.7" ] importlib.import_module("virtool") setup(name='virtool', executables=executables, options=options, classifiers=classifiers, python_requires=">=3.6")
import importlib from cx_Freeze import setup, Executable backend_path = importlib.import_module("bcrypt").__path__[0] backend_path = backend_path.replace("bcrypt", ".libs_cffi_backend") # Dependencies are automatically detected, but it might need # fine tuning. build_exe_options = { "include_files": [ ("client/dist", "client"), "LICENSE", "templates", "readme.md", (backend_path, "lib/.libs_cffi_backend") ], "includes": [ "cffi", "numpy", "numpy.core._methods", "numpy.lib", "numpy.lib.format", "raven.processors" ], "packages": [ "_cffi_backend", "appdirs", "asyncio", "bcrypt", "cffi", "idna", "motor", "packaging", "ssl", "uvloop" ] } options = { "build_exe": build_exe_options } executables = [ Executable('run.py', base="Console") ] classifiers=[ "Programming Language :: Python :: 3.7" ] importlib.import_module("virtool") setup(name='virtool', executables=executables, options=options, classifiers=classifiers, python_requires=">=3.6")
Add SSL to cx-freeze packages
Add SSL to cx-freeze packages
Python
mit
igboyes/virtool,virtool/virtool,igboyes/virtool,virtool/virtool
720bd5b2f3c422af6bd1c70850fb2f67a773def0
slack.py
slack.py
#!/usr/bin/env python import requests import calendar from datetime import datetime, timedelta from settings import _token, _domain, _user, _time, _pretty if __name__ == '__main__': while 1: files_list_url = 'https://slack.com/api/files.list' date = str(calendar.timegm((datetime.now() + timedelta(-_time)).utctimetuple())) data = {"token": _token, "ts_to": date, "user": _user} response = requests.post(files_list_url, data=data) if len(response.json()["files"]) == 0: break for f in response.json()["files"]: print("Deleting file " + f["name"] + "...") timestamp = str(calendar.timegm(datetime.now().utctimetuple())) delete_url = "https://" + _domain + ".slack.com/api/files.delete?t=" + timestamp requests.post(delete_url, data={ "token": _token, "file": f["id"], "set_active": "true", "_attempts": "1"}) print("DONE!")
#!/usr/bin/env python import requests import calendar from datetime import datetime, timedelta from settings import _token, _domain, _user, _time, _pretty def delete_my_files(): while 1: files_list_url = 'https://slack.com/api/files.list' date = str(calendar.timegm((datetime.now() + timedelta(-_time)).utctimetuple())) data = {"token": _token, "ts_to": date, "user": _user} response = requests.post(files_list_url, data=data) if len(response.json()["files"]) == 0: break for f in response.json()["files"]: print("Deleting file " + f["name"] + "...") timestamp = str(calendar.timegm(datetime.now().utctimetuple())) delete_url = "https://" + _domain + ".slack.com/api/files.delete?t=" + timestamp requests.post(delete_url, data={ "token": _token, "file": f["id"], "set_active": "true", "_attempts": "1"}) print("DONE!") if __name__ == '__main__': delete_my_files()
Move the main items into a function
Move the main items into a function
Python
mit
marshallhumble/slack_app,marshallhumble/slack_app
16516b1ec44e3e44d2dc96a6f3d021268ce4e71d
osgtest/tests/test_84_xrootd.py
osgtest/tests/test_84_xrootd.py
import os import osgtest.library.core as core import osgtest.library.files as files import osgtest.library.osgunittest as osgunittest import unittest class TestStopXrootd(osgunittest.OSGTestCase): def test_01_stop_xrootd(self): if (core.config['xrootd.gsi'] == "ON") and (core.state['xrootd.backups-exist'] == True): files.restore('/etc/xrootd/xrootd-clustered.cfg',"xrootd") files.restore('/etc/xrootd/auth_file',"xrootd") files.restore('/etc/grid-security/xrd/xrdmapfile',"xrootd") core.skip_ok_unless_installed('xrootd') self.skip_ok_if(['xrootd.started-server'] == False, 'did not start server') command = ('service', 'xrootd', 'stop') stdout, _, fail = core.check_system(command, 'Stop Xrootd server') self.assert_(stdout.find('FAILED') == -1, fail) self.assert_(not os.path.exists(core.config['xrootd.pid-file']), 'Xrootd server PID file still present')
import os import osgtest.library.core as core import osgtest.library.files as files import osgtest.library.osgunittest as osgunittest import unittest class TestStopXrootd(osgunittest.OSGTestCase): def test_01_stop_xrootd(self): if (core.config['xrootd.gsi'] == "ON") and (core.state['xrootd.backups-exist'] == True): files.restore('/etc/xrootd/xrootd-clustered.cfg',"xrootd") files.restore('/etc/xrootd/auth_file',"xrootd") files.restore('/etc/grid-security/xrd/xrdmapfile',"xrootd") core.skip_ok_unless_installed('xrootd') self.skip_ok_if(core.state['xrootd.started-server'] == False, 'did not start server') command = ('service', 'xrootd', 'stop') stdout, _, fail = core.check_system(command, 'Stop Xrootd server') self.assert_(stdout.find('FAILED') == -1, fail) self.assert_(not os.path.exists(core.config['xrootd.pid-file']), 'Xrootd server PID file still present')
Fix test if server started in xrootd cleanup code
Fix test if server started in xrootd cleanup code git-svn-id: 884a03e47e2adb735d896e55bb5ad6bc3421ba19@17920 4e558342-562e-0410-864c-e07659590f8c
Python
apache-2.0
efajardo/osg-test,efajardo/osg-test
75b02b3cafcc34dca143de9143f14c3c7c29c97c
transmutagen/tests/test_coefficients.py
transmutagen/tests/test_coefficients.py
import pytest slow = pytest.mark.skipif( not pytest.config.getoption("--runslow"), reason="need --runslow option to run" ) TOTAL_DEGREES = 30 from .crv_coeffs import coeffs as correct_coeffs from ..cram import get_CRAM_from_cache, CRAM_coeffs # @slow @pytest.mark.parametrize('degree', range(1, TOTAL_DEGREES+1)) def test_coefficients(degree): generated_coeffs = {} expr = get_CRAM_from_cache(degree, 200) generated_coeffs[degree] = CRAM_coeffs(expr, 20, decimal_rounding=True) # pytest won't show the full expr from the assert, so we print it too print(expr) assert generated_coeffs[degree] == correct_coeffs[degree], expr
import decimal import pytest from sympy import re slow = pytest.mark.skipif( not pytest.config.getoption("--runslow"), reason="need --runslow option to run" ) TOTAL_DEGREES = 30 from .crv_coeffs import coeffs as correct_coeffs from .partfrac_coeffs import part_frac_coeffs from ..cram import get_CRAM_from_cache, CRAM_coeffs from ..partialfrac import thetas_alphas # @slow @pytest.mark.parametrize('degree', range(1, TOTAL_DEGREES+1)) def test_coefficients(degree): generated_coeffs = {} expr = get_CRAM_from_cache(degree, 200) generated_coeffs[degree] = CRAM_coeffs(expr, 20, decimal_rounding=True) # pytest won't show the full expr from the assert, so we print it too print(expr) assert generated_coeffs[degree] == correct_coeffs[degree], expr @pytest.mark.xfail @pytest.mark.parametrize('degree', [14, 16]) def test_partial_fraction_coefficients(degree): generated_coeffs = {} expr = get_CRAM_from_cache(degree, 200) thetas, alphas, alpha0 = thetas_alphas(expr, 200) format_str = '{:.19e}' correct_coeffs = part_frac_coeffs[degree] # Thetas in the paper are negative what we have thetas = [-i for i in thetas] for theta, real_theta, imag_theta in zip(sorted(thetas, key=re), correct_coeffs['thetas']['real'], correct_coeffs['thetas']['imaginary']): real, imag = theta.as_real_imag() assert format_str.format(decimal.Decimal(repr(real))) == real_theta assert format_str.format(decimal.Decimal(repr(imag))) == imag_theta assert generated_coeffs[degree] == correct_coeffs[degree], expr
Add test against Pusa coefficients (skipped for now, as they don't pass)
Add test against Pusa coefficients (skipped for now, as they don't pass)
Python
bsd-3-clause
ergs/transmutagen,ergs/transmutagen
f468a26893c44411dc1f865b208788373f993918
asciibooth/camera.py
asciibooth/camera.py
import io # import time import picamera from . import config class Camera: def __init__(self): self.camera = picamera.PiCamera(resolution=config.CAPTURE_RESOLUTION) self.preview_alpha = 200 def capture(self): stream = io.BytesIO() self.camera.capture(stream, 'rgb', resize=config.CAPTURE_RESIZE) stream.seek(0) return stream def toggle_preview(self): if self.camera.preview is None: self.camera.start_preview() self.camera.preview.alpha = self.preview_alpha else: self.camera.stop_preview() def start_preview(self, alpha=255): self.camera.start_preview() self.camera.preview.alpha = alpha def stop_preview(self): self.camera.stop_preview() def stop(self): self.camera.close()
import io # import time import picamera from . import config class Camera: def __init__(self): self.camera = picamera.PiCamera(resolution=config.CAPTURE_RESOLUTION) self.camera.hflip = True self.preview_alpha = 200 def capture(self): stream = io.BytesIO() self.camera.capture(stream, 'rgb', resize=config.CAPTURE_RESIZE) stream.seek(0) return stream def toggle_preview(self): if self.camera.preview is None: self.camera.start_preview(hflip = True) self.camera.preview.alpha = self.preview_alpha else: self.camera.stop_preview() def start_preview(self, alpha=255): self.camera.start_preview() self.camera.preview.alpha = alpha def stop_preview(self): self.camera.stop_preview() def stop(self): self.camera.close()
Enable hflip for capture and preview
Enable hflip for capture and preview
Python
cc0-1.0
jnv/asciibooth,jnv/asciibooth
6f213f17fab236e1222f4e691015dfd867073ae2
dbaas/workflow/steps/build_database.py
dbaas/workflow/steps/build_database.py
# -*- coding: utf-8 -*- import logging from base import BaseStep from logical.models import Database LOG = logging.getLogger(__name__) class BuildDatabase(BaseStep): def __unicode__(self): return "Creating logical database..." def do(self, workflow_dict): try: if not workflow_dict['team'] or not workflow_dict['description'] or not workflow_dict['databaseinfra']: return False LOG.info("Creating Database...") database = Database.provision(name= workflow_dict['name'], databaseinfra= workflow_dict['databaseinfra']) database.team = workflow_dict['team'] database.project = workflow_dict['project'] database.description = workflow_dict['description'] database.save() workflow_dict['database'] = database return True except Exception, e: print e return False def undo(self, workflow_dict): try: LOG.info("Destroying the database....") workflow_dict['database'].delete() return True except Exception, e: print e return False
# -*- coding: utf-8 -*- import logging from base import BaseStep from logical.models import Database LOG = logging.getLogger(__name__) class BuildDatabase(BaseStep): def __unicode__(self): return "Creating logical database..." def do(self, workflow_dict): try: if not workflow_dict['team'] or not workflow_dict['description'] or not workflow_dict['databaseinfra']: return False LOG.info("Creating Database...") database = Database.provision(name= workflow_dict['name'], databaseinfra= workflow_dict['databaseinfra']) workflow_dict['database'] = database database.team = workflow_dict['team'] if 'project' in workflow_dict: database.project = workflow_dict['project'] database.description = workflow_dict['description'] database.save() return True except Exception, e: print e return False def undo(self, workflow_dict): try: LOG.info("Destroying the database....") workflow_dict['database'].delete() return True except Exception, e: print e return False
Check if there is an project key on workflow_dict
Check if there is an project key on workflow_dict
Python
bsd-3-clause
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
84f9c189f62c6ab81de952cb9a7e9942237465ec
tasks.py
tasks.py
from invoke import task, Collection from invocations.packaging import release # TODO: once this stuff is stable and I start switching my other projects to be # pytest-oriented, move this into invocations somehow. @task def test(c): """ Run verbose pytests. """ c.run("pytest --verbose --color=yes") @task def coverage(c, html=True): """ Run coverage with coverage.py. """ # NOTE: this MUST use coverage itself, and not pytest-cov, because the # latter is apparently unable to prevent pytest plugins from being loaded # before pytest-cov itself is able to start up coverage.py! The result is # that coverage _always_ skips over all module level code, i.e. constants, # 'def' lines, etc. Running coverage as the "outer" layer avoids this # problem, thus no need for pytest-cov. # NOTE: this does NOT hold true for NON-PYTEST code, so # pytest-relaxed-USING modules can happily use pytest-cov. c.run("coverage run --source=pytest_relaxed -m pytest") if html: c.run("coverage html") c.run("open htmlcov/index.html") ns = Collection( coverage, test, packaging=release, ) ns.configure({ })
from invoke import task, Collection from invocations.packaging import release from invocations import pytest as pytests @task def coverage(c, html=True): """ Run coverage with coverage.py. """ # NOTE: this MUST use coverage itself, and not pytest-cov, because the # latter is apparently unable to prevent pytest plugins from being loaded # before pytest-cov itself is able to start up coverage.py! The result is # that coverage _always_ skips over all module level code, i.e. constants, # 'def' lines, etc. Running coverage as the "outer" layer avoids this # problem, thus no need for pytest-cov. # NOTE: this does NOT hold true for NON-PYTEST code, so # pytest-relaxed-USING modules can happily use pytest-cov. c.run("coverage run --source=pytest_relaxed -m pytest") if html: c.run("coverage html") c.run("open htmlcov/index.html") ns = Collection( coverage, pytests.test, packaging=release, )
Use new invocations pytest helper
Use new invocations pytest helper
Python
bsd-2-clause
bitprophet/pytest-relaxed
87ea6fbc07c547a0c92f0b85811edc0645cb4303
pysyte/oss/linux.py
pysyte/oss/linux.py
"""Linux-specific code""" import os from pysyte.types import paths def xdg_home(): """path to $XDG_CONFIG_HOME >>> assert xdg_home() == os.path.expanduser('~/.config') """ return paths.environ_path('XDG_CONFIG_HOME', '~/.config') def xdg_home_config(filename): """path to that file in $XDG_CONFIG_HOME >>> assert xdg_home_config('fred') == os.path.expanduser('~/.config/fred') """ return xdg_home() / filename def xdg_dirs(): """paths in $XDG_CONFIG_DIRS""" return paths.environ_paths('XDG_CONFIG_DIRS') def xdg_homes(): return [xdg_home()] bash_paste = 'xclip -selection clipboard' bash_copy = 'xclip -selection clipboard -o'
"""Linux-specific code""" from pysyte.types import paths def xdg_home(): """path to $XDG_CONFIG_HOME >>> assert xdg_home() == paths.path('~/.config').expand() """ return paths.environ_path('XDG_CONFIG_HOME', '~/.config') def xdg_home_config(filename): """path to that file in $XDG_CONFIG_HOME >>> assert xdg_home_config('fred') == paths.path('~/.config/fred').expand() """ return xdg_home() / filename def xdg_dirs(): """paths in $XDG_CONFIG_DIRS""" return paths.environ_paths('XDG_CONFIG_DIRS') def xdg_homes(): return [xdg_home()] bash_paste = 'xclip -selection clipboard' bash_copy = 'xclip -selection clipboard -o'
Remove unused import of "os"
Remove unused import of "os"
Python
mit
jalanb/dotsite
b12418acf3883024be965f42ec8d3a16e76d384f
special/special_relativity.py
special/special_relativity.py
# -*- coding: utf-8 -*- from __future__ import division import math class LorentzFactor(object): SPEED_OF_LIGHT = 299792458 @staticmethod def get_beta(velocity, is_percent): if is_percent: return velocity return velocity / SPEED_OF_LIGHT @staticmethod def lorentz_factor(time, velocity, is_percent): beta = LorentzFactor.get_beta(velocity, is_percent) return time / (math.sqrt(1 - beta ** 2)) class TimeDilation(LorentzFactor): @staticmethod def get_proper_time(time, velocity, is_percent=True): return time * TimeDilation.lorentz_factor(time, velocity, is_percent) @staticmethod def get_time_relative_ex_observer(time, velocity, is_percent=True): """ Dilation relative to an external observer """ return time ** 2 / TimeDilation.lorentz_factor(time, velocity, is_percent)
# -*- coding: utf-8 -*- from __future__ import division import math class LorentzFactor(object): SPEED_OF_LIGHT = 299792458 @staticmethod def get_beta(velocity, is_percent): if is_percent: return velocity return velocity / SPEED_OF_LIGHT @staticmethod def lorentz_factor(velocity, is_percent): beta = LorentzFactor.get_beta(velocity, is_percent) return 1 / (math.sqrt(1 - beta ** 2)) class TimeDilation(LorentzFactor): @staticmethod def get_proper_time(time, velocity, is_percent=True): return time * TimeDilation.lorentz_factor(velocity, is_percent) @staticmethod def get_time_relative_ex_observer(time, velocity, is_percent=True): """ Dilation relative to an external observer """ return time / TimeDilation.lorentz_factor(velocity, is_percent)
Fix error in calculation on lorentz factor
Fix error in calculation on lorentz factor
Python
mit
tdsymonds/relativity
eb31775a7dbbb2064cf64d85c2bb0912a92f4028
train.py
train.py
import data import argparse from model import EDSR parser = argparse.ArgumentParser() parser.add_argument("--dataset",default="data/General-100") parser.add_argument("--imgsize",default=100,type=int) parser.add_argument("--scale",default=2,type=int) parser.add_argument("--layers",default=32,type=int) parser.add_argument("--featuresize",default=256,type=int) parser.add_argument("--batchsize",default=10,type=int) parser.add_argument("--savedir",default='saved_models') parser.add_argument("--iterations",default=1000,type=int) args = parser.parse_args() data.load_dataset(args.dataset,args.imgsize) down_size = args.imgsize//args.scale network = EDSR(down_size,args.layers,args.featuresize,args.scale) network.set_data_fn(data.get_batch,(args.batchsize,args.imgsize,down_size),data.get_test_set,(args.imgsize,down_size)) network.train(args.iterations,args.savedir)
import data import argparse from model import EDSR parser = argparse.ArgumentParser() parser.add_argument("--dataset",default="data/General-100") parser.add_argument("--imgsize",default=100,type=int) parser.add_argument("--scale",default=2,type=int) parser.add_argument("--layers",default=32,type=int) parser.add_argument("--featuresize",default=256,type=int) parser.add_argument("--batchsize",default=10,type=int) parser.add_argument("--savedir",default='saved_models') parser.add_argument("--iterations",default=1000,type=int) args = parser.parse_args() data.load_dataset(args.dataset,args.imgsize) if args.imgsize % args.scale != 0: print(f"Image size {args.imgsize} is not evenly divisible by scale {arg.scale}") return down_size = args.imgsize//args.scale network = EDSR(down_size,args.layers,args.featuresize,args.scale) network.set_data_fn(data.get_batch,(args.batchsize,args.imgsize,down_size),data.get_test_set,(args.imgsize,down_size)) network.train(args.iterations,args.savedir)
Add warning for mismatched image size and scale
Add warning for mismatched image size and scale
Python
mit
jmiller656/EDSR-Tensorflow
d604d17e8286b1c95a0faafd6d4fd79af11441ab
nn/util.py
nn/util.py
import functools import numpy import tensorflow as tf def static_shape(tensor): return tf.convert_to_tensor(tensor).get_shape().as_list() def static_rank(tensor): return len(static_shape(tf.convert_to_tensor(tensor))) def funcname_scope(func): @functools.wraps(func) def wrapper(*args, **kwargs): with tf.variable_scope(func.__name__): return func(*args, **kwargs) return wrapper def on_device(device_name): def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): with tf.device(device_name): return func(*args, **kwargs) return wrapper return decorator def dimension_indices(tensor, start=0): return list(range(static_rank(tensor)))[start:] @funcname_scope def dtype_min(dtype): return tf.constant(_numpy_min(dtype.as_numpy_dtype)) def _numpy_min(dtype): return numpy.finfo(dtype).min @funcname_scope def dtype_epsilon(dtype): return tf.constant(_numpy_epsilon(dtype.as_numpy_dtype)) def _numpy_epsilon(dtype): return numpy.finfo(dtype).eps def flatten(x): return tf.reshape(x, [-1])
import functools import numpy import tensorflow as tf def static_shape(tensor): return tf.convert_to_tensor(tensor).get_shape().as_list() def static_rank(tensor): return len(static_shape(tf.convert_to_tensor(tensor))) def funcname_scope(func_or_name): if isinstance(func_or_name, str): def wrapper(func): func.__name__ = func_or_name return funcname_scope(func) return wrapper func = func_or_name @functools.wraps(func) def wrapper(*args, **kwargs): with tf.variable_scope(func.__name__): return func(*args, **kwargs) return wrapper def on_device(device_name): def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): with tf.device(device_name): return func(*args, **kwargs) return wrapper return decorator def dimension_indices(tensor, start=0): return list(range(static_rank(tensor)))[start:] @funcname_scope def dtype_min(dtype): return tf.constant(_numpy_min(dtype.as_numpy_dtype)) def _numpy_min(dtype): return numpy.finfo(dtype).min @funcname_scope def dtype_epsilon(dtype): return tf.constant(_numpy_epsilon(dtype.as_numpy_dtype)) def _numpy_epsilon(dtype): return numpy.finfo(dtype).eps def flatten(x): return tf.reshape(x, [-1])
Extend funcname_scope so that it accepts funcnames
Extend funcname_scope so that it accepts funcnames
Python
unlicense
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
f3e3b43abebfad0fcaa20df8eac20e3cb8c099d6
imgproc.py
imgproc.py
from SimpleCV import * import numpy import cv2 def process_image(obj, img, config): """ :param obj: Object we're tracking :param img: Input image :param config: Controls :return: Mask with candidates surrounded in a green rectangle """ hsv_image = img.toHSV() segmented = Image(cv2.inRange(hsv_image.getNumpy(), numpy.array([config.min_hue, config.min_sat, config.min_val]), numpy.array([config.max_hue, config.max_sat, config.max_val]))) segmented = segmented.dilate(2) blobs = segmented.findBlobs() if blobs: for b in blobs: if b.radius() > 10: rect_width = b.minRectWidth() rect_height = b.minRectHeight() aspect_ratio = rect_width / rect_height square_error = abs(obj.aspect_ratio - aspect_ratio) / abs(aspect_ratio) if square_error < 0.1: # minRectX and minRectY actually give the center point, not the minX and minY, so we shift by 1/2 segmented.drawRectangle(b.minRectX()-rect_width/2, b.minRectY()-rect_height/2, rect_width, rect_height, color=Color.GREEN, width=6) # Give the result mask return segmented.applyLayers()
from SimpleCV import * import numpy import cv2 def process_image(obj, img, config, each_blob=None): """ :param obj: Object we're tracking :param img: Input image :param config: Controls :param each_blob: function, taking a SimpleCV.Blob as an argument, that is called for every candidate blob :return: Mask with candidates """ hsv_image = img.toHSV() segmented = Image(cv2.inRange(hsv_image.getNumpy(), numpy.array([config.min_hue, config.min_sat, config.min_val]), numpy.array([config.max_hue, config.max_sat, config.max_val]))) segmented = segmented.dilate(2) blobs = segmented.findBlobs() if blobs: for b in blobs: if b.radius() > 10: rect_width = b.minRectWidth() rect_height = b.minRectHeight() aspect_ratio = rect_width / rect_height square_error = abs(obj.aspect_ratio - aspect_ratio) / abs(aspect_ratio) if square_error < 0.1: if not each_blob: # default to just outlining # minRectX and minRectY actually give the center point, not the minX and minY, so we shift by 1/2 segmented.drawRectangle(b.minRectX()-rect_width/2, b.minRectY()-rect_height/2, rect_width, rect_height, color=Color.GREEN, width=6) else: each_blob(b) # Give the result mask return segmented.applyLayers()
Allow a function to be called whenever a candidate blob is found during image processing
Allow a function to be called whenever a candidate blob is found during image processing
Python
mit
mstojcevich/Flash-Vision
0eb7e6b9a8e4e38793b1e045ab5f0f0a4d4e6777
synapse/metrics/resource.py
synapse/metrics/resource.py
# -*- coding: utf-8 -*- # Copyright 2015 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from twisted.web.resource import Resource from twisted.web.server import NOT_DONE_YET import synapse.metrics METRICS_PREFIX = "/_synapse/metrics" class MetricsResource(Resource): isLeaf = True def __init__(self, hs): Resource.__init__(self) # Resource is old-style, so no super() self.hs = hs def render_GET(self, request): response = synapse.metrics.render_all() request.setHeader("Content-Type", "text/plain") request.setHeader("Content-Length", str(len(response))) # Encode as UTF-8 (default) return response.encode()
# -*- coding: utf-8 -*- # Copyright 2015 OpenMarket Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from twisted.web.resource import Resource import synapse.metrics METRICS_PREFIX = "/_synapse/metrics" class MetricsResource(Resource): isLeaf = True def __init__(self, hs): Resource.__init__(self) # Resource is old-style, so no super() self.hs = hs def render_GET(self, request): response = synapse.metrics.render_all() request.setHeader("Content-Type", "text/plain") request.setHeader("Content-Length", str(len(response))) # Encode as UTF-8 (default) return response.encode()
Delete unused import of NOT_READY_YET
Delete unused import of NOT_READY_YET
Python
apache-2.0
matrix-org/synapse,illicitonion/synapse,iot-factory/synapse,TribeMedia/synapse,iot-factory/synapse,howethomas/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,rzr/synapse,howethomas/synapse,illicitonion/synapse,rzr/synapse,howethomas/synapse,illicitonion/synapse,matrix-org/synapse,howethomas/synapse,TribeMedia/synapse,TribeMedia/synapse,rzr/synapse,iot-factory/synapse,iot-factory/synapse,iot-factory/synapse,illicitonion/synapse,rzr/synapse,TribeMedia/synapse,rzr/synapse,TribeMedia/synapse,matrix-org/synapse,howethomas/synapse,illicitonion/synapse
8ef5b15c62960fb9abc43c9b30550faa0c0d7227
cactusbot/handler.py
cactusbot/handler.py
"""Handle handlers.""" import logging class Handlers(object): """Handlers.""" def __init__(self, *handlers): self.logger = logging.getLogger(__name__) self.handlers = handlers def handle(self, event, packet): """Handle incoming data.""" for handler in self.handlers: if hasattr(handler, "on_" + event): try: response = getattr(handler, "on_" + event)(packet) except Exception: self.logger.warning( "Exception in handler %s:", type(handler).__name__, exc_info=1) else: # TODO: support for multiple responses in an iterable if response is not None: yield response elif response is StopIteration: break class Handler(object): """Handler.""" def __init__(self): self.logger = logging.getLogger(__name__)
"""Handle handlers.""" import logging from .packet import Packet class Handlers(object): """Handlers.""" def __init__(self, *handlers): self.logger = logging.getLogger(__name__) self.handlers = handlers def handle(self, event, packet): """Handle incoming data.""" for handler in self.handlers: if hasattr(handler, "on_" + event): try: response = getattr(handler, "on_" + event)(packet) except Exception: self.logger.warning( "Exception in handler %s:", type(handler).__name__, exc_info=1) else: if isinstance(response, Packet): yield response elif isinstance(response, (tuple, list)): yield from response elif response is StopIteration: return class Handler(object): """Handler.""" def __init__(self): self.logger = logging.getLogger(__name__)
Add support for multiple return `Packet`s from `Handler`s
Add support for multiple return `Packet`s from `Handler`s
Python
mit
CactusDev/CactusBot
51cdd71cbcbcfd80105cc5ccb5b95f4d79dc593e
src/service_deployment_tools/paasta_cli/utils/cmd_utils.py
src/service_deployment_tools/paasta_cli/utils/cmd_utils.py
#!/usr/bin/env python """ Contains helper functions common to all paasta commands or the client """ import glob import os # List of commands the paasta client can execute CMDS = None def paasta_commands(): """ Read the files names in the cmds directory to determine the various commands the paasta client is able to execute :return: a list of string such as ['list','check'] that correspond to a file in cmds """ global CMDS if CMDS is None: CMDS = [] path = "%s/cmds/*.py" % os.path.abspath('.') for file_name in glob.glob(path): start = file_name.rfind('/') + 1 end = file_name.rfind('.') CMDS.append(file_name[start:end]) # Remove __init__.py CMDS.sort() CMDS.pop(0) return CMDS
#!/usr/bin/env python """ Contains helper functions common to all paasta commands or the client """ import glob import os def paasta_commands(): """ Read the files names in the cmds directory to determine the various commands the paasta client is able to execute :return: a list of string such as ['list','check'] that correspond to a file in cmds """ path = '%s/cmds/*.py' % os.path.abspath('.') for file_name in glob.glob(path): basename = os.path.basename(file_name) root, _ = os.path.splitext(basename) if root == '__init__': continue yield root
Clean up directory parsing code
Clean up directory parsing code
Python
apache-2.0
gstarnberger/paasta,Yelp/paasta,somic/paasta,Yelp/paasta,gstarnberger/paasta,somic/paasta
b81ace397887cb6d0fc7db21d623667223adbfbf
python/frequency_queries.py
python/frequency_queries.py
#!/bin/python3 import math import os import random import re import sys from collections import Counter # Complete the freqQuery function below. def freqQuery(queries): output = [] occurences = Counter() frequencies = Counter() for operation, value in queries: if (operation == 1): frequencies[occurences[value]] -= 1 occurences[value] += 1 frequencies[occurences[value]] += 1 elif (operation == 2): frequencies[occurences[value]] -= 1 occurences[value] -= 1 frequencies[occurences[value]] += 1 elif (operation == 3): if frequencies[value] > 0: output.append(1) else: output.append(0) print(occurences) print(frequencies) return output if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') q = int(input().strip()) queries = [] for _ in range(q): queries.append(list(map(int, input().rstrip().split()))) ans = freqQuery(queries) fptr.write('\n'.join(map(str, ans))) fptr.write('\n') fptr.close()
#!/bin/python3 import math import os import random import re import sys from collections import Counter # Complete the freqQuery function below. def freqQuery(queries): output = [] array = [] occurences = Counter() frequencies = Counter() for operation, value in queries: if (operation == 1): frequencies[occurences[value]] -= 1 occurences[value] += 1 frequencies[occurences[value]] += 1 elif (operation == 2): if (occurences[value] > 0): frequencies[occurences[value]] -= 1 occurences[value] -= 1 frequencies[occurences[value]] += 1 elif (operation == 3): if frequencies[value] > 0: output.append(1) else: output.append(0) # print(occurences) # print(frequencies) return output if __name__ == '__main__': fptr = open(os.environ['OUTPUT_PATH'], 'w') q = int(input().strip()) queries = [] for _ in range(q): queries.append(list(map(int, input().rstrip().split()))) ans = freqQuery(queries) fptr.write('\n'.join(map(str, ans))) fptr.write('\n') fptr.close()
Fix bug with negative counts
Fix bug with negative counts
Python
mit
rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank
fdc0bb75271b90a31072f79b95283e1156d50181
waffle/decorators.py
waffle/decorators.py
from functools import wraps from django.http import Http404 from django.utils.decorators import available_attrs from waffle import is_active def waffle(flag_name): def decorator(view): if flag_name.startswith('!'): active = is_active(request, flag_name[1:]) else: active = is_active(request, flag_name) @wraps(view, assigned=available_attrs(view)) def _wrapped_view(request, *args, **kwargs): if not active: raise Http404 return view(request, *args, **kwargs) return _wrapped_view return decorator
from functools import wraps from django.http import Http404 from django.utils.decorators import available_attrs from waffle import is_active def waffle(flag_name): def decorator(view): @wraps(view, assigned=available_attrs(view)) def _wrapped_view(request, *args, **kwargs): if flag_name.startswith('!'): active = is_active(request, flag_name[1:]) else: active = is_active(request, flag_name) if not active: raise Http404 return view(request, *args, **kwargs) return _wrapped_view return decorator
Make the decorator actually work again.
Make the decorator actually work again.
Python
bsd-3-clause
isotoma/django-waffle,TwigWorld/django-waffle,rlr/django-waffle,webus/django-waffle,groovecoder/django-waffle,JeLoueMonCampingCar/django-waffle,crccheck/django-waffle,safarijv/django-waffle,paulcwatts/django-waffle,JeLoueMonCampingCar/django-waffle,11craft/django-waffle,festicket/django-waffle,styleseat/django-waffle,mark-adams/django-waffle,rodgomes/django-waffle,crccheck/django-waffle,mark-adams/django-waffle,webus/django-waffle,groovecoder/django-waffle,mwaaas/django-waffle-session,hwkns/django-waffle,VladimirFilonov/django-waffle,ekohl/django-waffle,festicket/django-waffle,paulcwatts/django-waffle,TwigWorld/django-waffle,mwaaas/django-waffle-session,VladimirFilonov/django-waffle,rlr/django-waffle,willkg/django-waffle,engagespark/django-waffle,hwkns/django-waffle,crccheck/django-waffle,JeLoueMonCampingCar/django-waffle,TwigWorld/django-waffle,hwkns/django-waffle,safarijv/django-waffle,webus/django-waffle,rodgomes/django-waffle,engagespark/django-waffle,safarijv/django-waffle,festicket/django-waffle,groovecoder/django-waffle,styleseat/django-waffle,mwaaas/django-waffle-session,mark-adams/django-waffle,paulcwatts/django-waffle,VladimirFilonov/django-waffle,webus/django-waffle,rlr/django-waffle,willkg/django-waffle,ilanbm/django-waffle,festicket/django-waffle,crccheck/django-waffle,rodgomes/django-waffle,ilanbm/django-waffle,ekohl/django-waffle,groovecoder/django-waffle,hwkns/django-waffle,isotoma/django-waffle,11craft/django-waffle,rlr/django-waffle,ilanbm/django-waffle,JeLoueMonCampingCar/django-waffle,isotoma/django-waffle,paulcwatts/django-waffle,VladimirFilonov/django-waffle,engagespark/django-waffle,engagespark/django-waffle,rsalmaso/django-waffle,styleseat/django-waffle,mark-adams/django-waffle,rsalmaso/django-waffle,isotoma/django-waffle,rsalmaso/django-waffle,rsalmaso/django-waffle,rodgomes/django-waffle,mwaaas/django-waffle-session,styleseat/django-waffle,ilanbm/django-waffle,safarijv/django-waffle
6c578b67753e7a3fd646e5d91259b50c0b39bec6
tests/test_add_target.py
tests/test_add_target.py
""" Tests for helper function for adding a target to a Vuforia database. """ import io from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image)
""" Tests for helper function for adding a target to a Vuforia database. """ import io from mock_vws import MockVWS from vws import VWS class TestSuccess: """ Tests for successfully adding a target. """ def test_add_target( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding one target. """ client.add_target(name='x', width=1, image=high_quality_image) def test_add_two_targets( self, client: VWS, high_quality_image: io.BytesIO, ) -> None: """ No exception is raised when adding two targets with different names. """ client.add_target(name='x', width=1, image=high_quality_image) client.add_target(name='a', width=1, image=high_quality_image) class TestCustomBaseURL: """ Tests for adding images to databases under custom VWS URLs. """ def test_custom_base_url(self, high_quality_image: io.BytesIO) -> None: """ It is possible to use add a target to a database under a custom VWS URL. """ base_vws_url = 'http://example.com' with MockVWS(base_vws_url=base_vws_url) as mock: client = VWS( server_access_key=mock.server_access_key, server_secret_key=mock.server_secret_key, base_vws_url=base_vws_url, ) client.add_target( name='x', width=1, image=high_quality_image, )
Add test for custom base URL
Add test for custom base URL
Python
mit
adamtheturtle/vws-python,adamtheturtle/vws-python
b6711f27146279ee419143b560cf32d3b3dfc80c
tools/conan/conanfile.py
tools/conan/conanfile.py
from conans import ConanFile, CMake, tools class VarconfConan(ConanFile): name = "varconf" version = "1.0.3" license = "GPL-2.0+" author = "Erik Ogenvik <erik@ogenvik.org>" homepage = "https://www.worldforge.org" url = "https://github.com/worldforge/varconf" description = "Configuration library for the Worldforge system." topics = ("mmorpg", "worldforge") settings = "os", "compiler", "build_type", "arch" options = {"shared": [False, True], "fPIC": [True, False]} default_options = {"shared": False, "fPIC": True} generators = "cmake" requires = ["sigc++/2.10.0@worldforge/stable"] scm = { "type": "git", "url": "https://github.com/worldforge/varconf.git", "revision": "auto" } def build(self): cmake = CMake(self) cmake.configure(source_folder=".") cmake.build() cmake.install() def package_info(self): self.cpp_info.libs = tools.collect_libs(self) self.cpp_info.includedirs = ["include/varconf-1.0"] def package(self): pass
from conans import ConanFile, CMake, tools class VarconfConan(ConanFile): name = "varconf" version = "1.0.3" license = "GPL-2.0+" author = "Erik Ogenvik <erik@ogenvik.org>" homepage = "https://www.worldforge.org" url = "https://github.com/worldforge/varconf" description = "Configuration library for the Worldforge system." topics = ("mmorpg", "worldforge") settings = "os", "compiler", "build_type", "arch" options = {"shared": [False, True], "fPIC": [True, False]} default_options = {"shared": False, "fPIC": True} generators = "cmake" requires = ["sigc++/2.10.0@worldforge/stable"] scm = { "type": "git", "url": "https://github.com/worldforge/varconf.git", "revision": "auto" } def imports(self): self.copy("*.dll", "bin", "bin") def build(self): cmake = CMake(self) cmake.configure(source_folder=".") cmake.build() cmake.install() def package_info(self): self.cpp_info.libs = tools.collect_libs(self) self.cpp_info.includedirs = ["include/varconf-1.0"] def package(self): pass
Include binaries when importing (for Windows).
Include binaries when importing (for Windows).
Python
lgpl-2.1
worldforge/varconf,worldforge/varconf,worldforge/varconf,worldforge/varconf
4307fa24a27a2c623836a7518e3aceb4546abcf6
scholrroles/behaviour.py
scholrroles/behaviour.py
from collections import defaultdict from .utils import get_value_from_accessor class RoleBehaviour(object): ids = [] object_accessors = {} def __init__(self, user, request): self.user = user self.request = request def has_role(self): return False def has_role_for(self, obj): if self.has_role() and self.ids and obj: if self.role in obj.role_accessors: return get_value_from_accessor(obj, obj.role_accessors[self.role]) in self.ids return True def can_apply_permission(self, obj, perm): method = 'has_{}_{}_permission'.format(self.role, perm.name) if hasattr(obj, method): function = getattr(obj, method) if callable(function): return function(self.user) return True class UserBehaviour(RoleBehaviour): role= 'user' def has_role(self): return True def role_behaviour_factory(): return RoleBehaviour class RoleBehaviourRegistry(object): _registry = defaultdict(role_behaviour_factory) def register(self, cls): self._registry[cls.role] = cls def get_role(self, role): return self._registry[role] registry = RoleBehaviourRegistry()
from collections import defaultdict from .utils import get_value_from_accessor class RoleBehaviour(object): ids = [] object_accessors = {} def __init__(self, user, request): self.user = user self.request = request def has_role(self): return False def has_role_for(self, obj): if self.has_role() and self.ids and obj: if self.role in obj.role_accessors: return get_value_from_accessor(obj, obj.role_accessors[self.role]) in self.ids return True def can_apply_permission(self, obj, perm): method = 'has_{}_{}_permission'.format(self.role, perm.name) print method, hasattr(obj, method), getattr(obj, method), callable(getattr(obj, method)) if hasattr(obj, method): function = getattr(obj, method) if callable(function): return function(self.user) return True class UserBehaviour(RoleBehaviour): role= 'user' def has_role(self): return True def role_behaviour_factory(): return RoleBehaviour class RoleBehaviourRegistry(object): _registry = defaultdict(role_behaviour_factory) def register(self, cls): self._registry[cls.role] = cls def get_role(self, role): return self._registry[role] registry = RoleBehaviourRegistry()
Validate Model function to allow permission
Validate Model function to allow permission
Python
bsd-3-clause
Scholr/scholr-roles
bdd842f55f3a234fefee4cd2a701fa23e07c3789
scikits/umfpack/setup.py
scikits/umfpack/setup.py
#!/usr/bin/env python # 05.12.2005, c from __future__ import division, print_function, absolute_import def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration from numpy.distutils.system_info import get_info, dict_append config = Configuration('umfpack', parent_package, top_path) config.add_data_dir('tests') umf_info = get_info('umfpack', notfound_action=1) ## The following addition is needed when linking against a umfpack built ## from the latest SparseSuite. Not (strictly) needed when linking against ## the version in the ubuntu repositories. umf_info['libraries'].insert(0, 'rt') umfpack_i_file = config.paths('umfpack.i')[0] def umfpack_i(ext, build_dir): if umf_info: return umfpack_i_file blas_info = get_info('blas_opt') build_info = {} dict_append(build_info, **umf_info) dict_append(build_info, **blas_info) config.add_extension('__umfpack', sources=[umfpack_i], depends=['umfpack.i'], **build_info) return config if __name__ == "__main__": from numpy.distutils.core import setup setup(**configuration(top_path='').todict())
#!/usr/bin/env python # 05.12.2005, c from __future__ import division, print_function, absolute_import import sys def configuration(parent_package='',top_path=None): from numpy.distutils.misc_util import Configuration from numpy.distutils.system_info import get_info, dict_append config = Configuration('umfpack', parent_package, top_path) config.add_data_dir('tests') umf_info = get_info('umfpack', notfound_action=1) ## The following addition is needed when linking against a umfpack built ## from the latest SparseSuite. Not (strictly) needed when linking against ## the version in the ubuntu repositories. if not sys.platform == 'darwin': umf_info['libraries'].insert(0, 'rt') umfpack_i_file = config.paths('umfpack.i')[0] def umfpack_i(ext, build_dir): if umf_info: return umfpack_i_file blas_info = get_info('blas_opt') build_info = {} dict_append(build_info, **umf_info) dict_append(build_info, **blas_info) config.add_extension('__umfpack', sources=[umfpack_i], depends=['umfpack.i'], **build_info) return config if __name__ == "__main__": from numpy.distutils.core import setup setup(**configuration(top_path='').todict())
Add handling for building scikit-umfpack on the Mac, which doesn't have the librt file added to the umfpack dependencies.
Add handling for building scikit-umfpack on the Mac, which doesn't have the librt file added to the umfpack dependencies.
Python
bsd-3-clause
scikit-umfpack/scikit-umfpack,scikit-umfpack/scikit-umfpack,rc/scikit-umfpack-rc,rc/scikit-umfpack,rc/scikit-umfpack,rc/scikit-umfpack-rc
6c4a2d6f80d7ee5f9c06c3d678bb86661c94a793
tools/np_suppressions.py
tools/np_suppressions.py
suppressions = [ [ ".*/multiarray/mapping\.", "PyArray_MapIterReset" ], # PyArray_Std trivially forwards to and appears to be superceded by # __New_PyArray_Std, which is exercised by the test framework. [ ".*/multiarray/calculation\.", "PyArray_Std" ], # PyCapsule_Check is declared in a header, and used in # multiarray/ctors.c. So it isn't really untested. [ ".*/multiarray/common\.", "PyCapsule_Check" ], ]
suppressions = [ # This one cannot be covered by any Python language test because there is # no code pathway to it. But it is part of the C API, so must not be # excised from the code. [ r".*/multiarray/mapping\.", "PyArray_MapIterReset" ], # PyArray_Std trivially forwards to and appears to be superceded by # __New_PyArray_Std, which is exercised by the test framework. [ r".*/multiarray/calculation\.", "PyArray_Std" ], # PyCapsule_Check is declared in a header, and used in # multiarray/ctors.c. So it isn't really untested. [ r".*/multiarray/common\.", "PyCapsule_Check" ], ]
Add documentation on one assertion, convert RE's to raw strings.
Add documentation on one assertion, convert RE's to raw strings.
Python
bsd-3-clause
numpy/numpy-refactor,numpy/numpy-refactor,numpy/numpy-refactor,numpy/numpy-refactor,numpy/numpy-refactor
dcba8b90b84506a7325f8e576d10ccb8d2e9a415
setuptools/py24compat.py
setuptools/py24compat.py
""" Forward-compatibility support for Python 2.4 and earlier """ # from jaraco.compat 1.2 try: from functools import wraps except ImportError: def wraps(func): "Just return the function unwrapped" return lambda x: x
""" Forward-compatibility support for Python 2.4 and earlier """ # from jaraco.compat 1.2 try: from functools import wraps except ImportError: def wraps(func): "Just return the function unwrapped" return lambda x: x try: import hashlib except ImportError: from setuptools._backport import hashlib
Add a shim for python 2.4 compatability with hashlib
Add a shim for python 2.4 compatability with hashlib --HG-- extra : rebase_source : 5f573e600aadbe9c95561ee28c05cee02c7db559
Python
mit
pypa/setuptools,pypa/setuptools,pypa/setuptools
03977d24d5862373a881b7098bc78adc30fe8256
make_src_bem.py
make_src_bem.py
from __future__ import print_function import mne from my_settings import * subject = sys.argv[1] # make source space src = mne.setup_source_space(subject, spacing='oct6', subjects_dir=subjects_dir, add_dist=False, overwrite=True) # save source space mne.write_source_spaces(mne_folder + "%s-oct6-src.fif" % subject, src) conductivity = (0.3, 0.006, 0.3) # for three layers model = mne.make_bem_model(subject=subject, ico=None, conductivity=conductivity, subjects_dir=subjects_dir) bem = mne.make_bem_solution(model) mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject)
from __future__ import print_function import mne import subprocess from my_settings import * subject = sys.argv[1] cmd = "/usr/local/common/meeg-cfin/configurations/bin/submit_to_isis" # make source space src = mne.setup_source_space(subject, spacing='oct6', subjects_dir=subjects_dir, add_dist=False, overwrite=True) # save source space mne.write_source_spaces(mne_folder + "%s-oct-6-src.fif" % subject, src) setup_forward = "mne_setup_forward_model --subject %s --surf --ico -6" % ( subject) subprocess.call([cmd, "1", setup_forward]) # conductivity = (0.3, 0.006, 0.3) # for three layers # model = mne.make_bem_model(subject=subject, ico=None, # conductivity=conductivity, # subjects_dir=subjects_dir) # bem = mne.make_bem_solution(model) # mne.write_bem_solution(mne_folder + "%s-8194-bem-sol.fif" % subject)
Change to make BEM solution from mne-C
Change to make BEM solution from mne-C
Python
bsd-3-clause
MadsJensen/RP_scripts,MadsJensen/RP_scripts,MadsJensen/RP_scripts
27c614b30eda339ca0c61f35e498be6456f2280f
scoring/__init__.py
scoring/__init__.py
import numpy as np from sklearn.cross_validation import cross_val_score from sklearn.externals import joblib as pickle class scorer(object): def __init__(self, model, descriptor_generator, model_opts = {}, desc_opts = {}): self.model = model() self.descriptor_generator = descriptor_generator(**desc_opts) def fit(self, ligands, target): self.train_descs = self.descriptor_generator.build(ligands) self.train_target = target return self.model.fit(descs,target) def predict(self, ligands): descs = self.descriptor_generator.build(ligands) return self.model.predict(descs) def score(self, ligands, target): descs = self.descriptor_generator.build(ligands) return self.model.score(descs,target) def cross_validate(n = 10, test_set = None, test_target = None): if test_set and test_target: cv_set = np.vstack((self.train_descs, test_set)) cv_target = np.vstack((self.train_target, test_target)) else: cv_set = self.train_descs cv_target = self.train_target return cross_val_score(self.model, cv_set, cv_target, cv = n) def save(filename): f = open(filename,'wb') pickle.dump(self, filename) f.close() @classmethod def load(filename): return pickle.load(open(filename,'rb'))
import numpy as np from sklearn.cross_validation import cross_val_score from sklearn.externals import joblib as pickle class scorer(object): def __init__(self, model_instance, descriptor_generator_instance): self.model = model_instance self.descriptor_generator = descriptor_generator_instance def fit(self, ligands, target): self.train_descs = self.descriptor_generator.build(ligands) self.train_target = target return self.model.fit(descs,target) def predict(self, ligands): descs = self.descriptor_generator.build(ligands) return self.model.predict(descs) def score(self, ligands, target): descs = self.descriptor_generator.build(ligands) return self.model.score(descs,target) def cross_validate(n = 10, test_set = None, test_target = None): if test_set and test_target: cv_set = np.vstack((self.train_descs, test_set)) cv_target = np.vstack((self.train_target, test_target)) else: cv_set = self.train_descs cv_target = self.train_target return cross_val_score(self.model, cv_set, cv_target, cv = n) def save(filename): f = open(filename,'wb') pickle.dump(self, filename) f.close() @classmethod def load(filename): return pickle.load(open(filename,'rb'))
Make scorer accept instances of model and desc. gen.
Make scorer accept instances of model and desc. gen.
Python
bsd-3-clause
mwojcikowski/opendrugdiscovery
b5477239d7b1ee9e73265b023355e8e83826ec49
scrapy_rss/items.py
scrapy_rss/items.py
# -*- coding: utf-8 -*- import scrapy from scrapy.item import BaseItem from scrapy_rss.elements import * from scrapy_rss import meta import six @six.add_metaclass(meta.ItemMeta) class RssItem: title = TitleElement() link = LinkElement() description = DescriptionElement() author = AuthorElement() category = meta.MultipleElements(CategoryElement) comments = CommentsElement() enclosure = EnclosureElement() guid = GuidElement() pubDate = PubDateElement() source = SourceElement() class ExtendableItem(scrapy.Item): def __setattr__(self, name, value): if name in self.fields: raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value)) super(BaseItem, self).__setattr__(name, value) class RssedItem(ExtendableItem): def __init__(self, **kwargs): super(RssedItem, self).__init__(**kwargs) self.rss = RssItem()
# -*- coding: utf-8 -*- import scrapy from scrapy.item import BaseItem from scrapy_rss.elements import * from scrapy_rss import meta import six @six.add_metaclass(meta.ItemMeta) class RssItem(BaseItem): title = TitleElement() link = LinkElement() description = DescriptionElement() author = AuthorElement() category = meta.MultipleElements(CategoryElement) comments = CommentsElement() enclosure = EnclosureElement() guid = GuidElement() pubDate = PubDateElement() source = SourceElement() class ExtendableItem(scrapy.Item): def __setattr__(self, name, value): if name in self.fields: raise AttributeError("Use item[{!r}] = {!r} to set field value".format(name, value)) super(BaseItem, self).__setattr__(name, value) class RssedItem(ExtendableItem): def __init__(self, **kwargs): super(RssedItem, self).__init__(**kwargs) self.rss = RssItem()
Fix RssItem when each scraped item is instance of RssItem
Fix RssItem when each scraped item is instance of RssItem
Python
bsd-3-clause
woxcab/scrapy_rss
ee69971832120f4492e8f41abfbcb9c87e398d6a
DeepFried2/utils.py
DeepFried2/utils.py
import theano as _th import numpy as _np def create_param(shape, init, fan=None, name=None, type=_th.config.floatX): return _th.shared(init(shape, fan).astype(type), name=name) def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX): val = init(shape, fan).astype(type) param = _th.shared(val, name=name) grad_name = 'grad_' + name if name is not None else None grad_param = _th.shared(_np.zeros_like(val), name=grad_name) return param, grad_param def create_param_state_as(other, initial_value=0, prefix='state_for_'): return _th.shared(other.get_value()*0 + initial_value, broadcastable=other.broadcastable, name=prefix + str(other.name) )
import theano as _th import numpy as _np def create_param(shape, init, fan=None, name=None, type=_th.config.floatX): return _th.shared(init(shape, fan).astype(type), name=name) def create_param_and_grad(shape, init, fan=None, name=None, type=_th.config.floatX): val = init(shape, fan).astype(type) param = _th.shared(val, name=name) grad_name = 'grad_' + name if name is not None else None grad_param = _th.shared(_np.zeros_like(val), name=grad_name) return param, grad_param def create_param_state_as(other, initial_value=0, prefix='state_for_'): return _th.shared(other.get_value()*0 + initial_value, broadcastable=other.broadcastable, name=prefix + str(other.name) ) def count_params(module): params, _ = module.parameters() return sum(p.get_value().size for p in params) def save_params(module, where): params, _ = module.parameters() _np.savez_compressed(where, params=[p.get_value() for p in params]) def load_params(module, fromwhere): params, _ = module.parameters() with _np.load(fromwhere) as f: for p, v in zip(params, f['params']): p.set_value(v)
Add utility to save/load parameters, i.e. models.
Add utility to save/load parameters, i.e. models. Also adds a utility to compute the number of parameters, because that's always interesting and often reported in papers.
Python
mit
yobibyte/DeepFried2,lucasb-eyer/DeepFried2,elPistolero/DeepFried2,Pandoro/DeepFried2
9e7e61256eb2ca2b4f4f19ce5b926709a593a28b
vispy/app/tests/test_interactive.py
vispy/app/tests/test_interactive.py
from nose.tools import assert_equal, assert_true, assert_false, assert_raises from vispy.testing import assert_in, run_tests_if_main from vispy.app import set_interactive from vispy.ext.ipy_inputhook import inputhook_manager # Expect the inputhook_manager to set boolean `_in_event_loop` # on instances of this class when enabled. class MockApp(object): pass def test_interactive(): f = MockApp() set_interactive(enabled=True, app=f) assert_equal('vispy', inputhook_manager._current_gui) assert_true(f._in_event_loop) assert_in('vispy', inputhook_manager.apps) assert_equal(f, inputhook_manager.apps['vispy']) set_interactive(enabled=False) assert_equal(None, inputhook_manager._current_gui) assert_false(f._in_event_loop) run_tests_if_main()
from nose.tools import assert_equal, assert_true, assert_false from vispy.testing import assert_in, run_tests_if_main from vispy.app import set_interactive from vispy.ext.ipy_inputhook import inputhook_manager # Expect the inputhook_manager to set boolean `_in_event_loop` # on instances of this class when enabled. class MockApp(object): pass def test_interactive(): f = MockApp() set_interactive(enabled=True, app=f) assert_equal('vispy', inputhook_manager._current_gui) assert_true(f._in_event_loop) assert_in('vispy', inputhook_manager.apps) assert_equal(f, inputhook_manager.apps['vispy']) set_interactive(enabled=False) assert_equal(None, inputhook_manager._current_gui) assert_false(f._in_event_loop) run_tests_if_main()
Fix for flake8 checks on new test file.
Fix for flake8 checks on new test file.
Python
bsd-3-clause
jay3sh/vispy,ghisvail/vispy,hronoses/vispy,kkuunnddaannkk/vispy,michaelaye/vispy,dchilds7/Deysha-Star-Formation,sh4wn/vispy,QuLogic/vispy,srinathv/vispy,sh4wn/vispy,QuLogic/vispy,julienr/vispy,kkuunnddaannkk/vispy,drufat/vispy,Eric89GXL/vispy,jay3sh/vispy,jdreaver/vispy,RebeccaWPerry/vispy,RebeccaWPerry/vispy,jay3sh/vispy,sbtlaarzc/vispy,Eric89GXL/vispy,srinathv/vispy,hronoses/vispy,drufat/vispy,julienr/vispy,Eric89GXL/vispy,kkuunnddaannkk/vispy,inclement/vispy,sh4wn/vispy,jdreaver/vispy,inclement/vispy,bollu/vispy,bollu/vispy,RebeccaWPerry/vispy,dchilds7/Deysha-Star-Formation,sbtlaarzc/vispy,inclement/vispy,julienr/vispy,michaelaye/vispy,sbtlaarzc/vispy,jdreaver/vispy,ghisvail/vispy,michaelaye/vispy,QuLogic/vispy,ghisvail/vispy,srinathv/vispy,hronoses/vispy,dchilds7/Deysha-Star-Formation,drufat/vispy,bollu/vispy
efc0f438e894fa21ce32665ec26c19751ec2ce10
ureport_project/wsgi_app.py
ureport_project/wsgi_app.py
# wsgi_app.py import sys, os filedir = os.path.dirname(__file__) sys.path.append(os.path.join(filedir)) #print sys.path os.environ["CELERY_LOADER"] = "django" os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import sys print sys.path from django.core.handlers.wsgi import WSGIHandler application = WSGIHandler()
# wsgi_app.py import sys, os filedir = os.path.dirname(__file__) sys.path.append(os.path.join(filedir)) #print sys.path os.environ["CELERY_LOADER"] = "django" os.environ['DJANGO_SETTINGS_MODULE'] = 'settings' import sys print sys.path from django.core.handlers.wsgi import WSGIHandler from linesman.middleware import make_linesman_middleware application = WSGIHandler() application = make_linesman_middleware(application)
Apply linesman profiler to the wsgi app
Apply linesman profiler to the wsgi app I fully expect this commit to be backed out when we get the profiling stuff sorted out, but thankfully, this profiler can be disabled for a live site. check out http://<site>/__profiler__ after installing linesman, and running the uwsgi server
Python
bsd-3-clause
unicefuganda/ureport,unicefuganda/ureport,unicefuganda/ureport,mbanje/ureport_uganda,mbanje/ureport_uganda
5ba9888d267d663fb0ab0dfbfd9346dc20f4c0c1
test/test_turtle_serialize.py
test/test_turtle_serialize.py
import rdflib from rdflib.py3compat import b def testTurtleFinalDot(): """ https://github.com/RDFLib/rdflib/issues/282 """ g = rdflib.Graph() u = rdflib.URIRef("http://ex.org/bob.") g.bind("ns", "http://ex.org/") g.add( (u, u, u) ) s=g.serialize(format='turtle') assert b("ns:bob.") not in s if __name__ == "__main__": import nose, sys nose.main(defaultTest=sys.argv[0])
from rdflib import Graph, URIRef, BNode, RDF, Literal from rdflib.collection import Collection from rdflib.py3compat import b def testTurtleFinalDot(): """ https://github.com/RDFLib/rdflib/issues/282 """ g = Graph() u = URIRef("http://ex.org/bob.") g.bind("ns", "http://ex.org/") g.add( (u, u, u) ) s=g.serialize(format='turtle') assert b("ns:bob.") not in s def testTurtleBoolList(): subject = URIRef("http://localhost/user") predicate = URIRef("http://localhost/vocab#hasList") g1 = Graph() list_item1 = BNode() list_item2 = BNode() list_item3 = BNode() g1.add((subject, predicate, list_item1)) g1.add((list_item1, RDF.first, Literal(True))) g1.add((list_item1, RDF.rest, list_item2)) g1.add((list_item2, RDF.first, Literal(False))) g1.add((list_item2, RDF.rest, list_item3)) g1.add((list_item3, RDF.first, Literal(True))) g1.add((list_item3, RDF.rest, RDF.nil)) ttl_dump = g1.serialize(format="turtle") g2 = Graph() g2.parse(data=ttl_dump, format="turtle") list_id = g2.value(subject, predicate) bool_list = [i.toPython() for i in Collection(g2, list_id)] assert bool_list == [True, False, True] if __name__ == "__main__": import nose, sys nose.main(defaultTest=sys.argv[0])
Test boolean list serialization in Turtle
Test boolean list serialization in Turtle
Python
bsd-3-clause
RDFLib/rdflib,ssssam/rdflib,armandobs14/rdflib,yingerj/rdflib,RDFLib/rdflib,ssssam/rdflib,ssssam/rdflib,avorio/rdflib,marma/rdflib,marma/rdflib,RDFLib/rdflib,ssssam/rdflib,dbs/rdflib,armandobs14/rdflib,dbs/rdflib,dbs/rdflib,marma/rdflib,avorio/rdflib,marma/rdflib,yingerj/rdflib,RDFLib/rdflib,yingerj/rdflib,dbs/rdflib,armandobs14/rdflib,avorio/rdflib,armandobs14/rdflib,yingerj/rdflib,avorio/rdflib
8126ca21bcf8da551906eff348c92cb71fe79e6e
readthedocs/doc_builder/base.py
readthedocs/doc_builder/base.py
import os def restoring_chdir(fn): def decorator(*args, **kw): try: path = os.getcwd() return fn(*args, **kw) finally: os.chdir(path) return decorator class BaseBuilder(object): """ The Base for all Builders. Defines the API for subclasses. """ @restoring_chdir def force(self, version): """ An optional step to force a build even when nothing has changed. """ print "Forcing a build by touching files" os.chdir(version.project.conf_dir(version.slug)) os.system('touch * && touch */*') def clean(self, version): """ Clean up the version so it's ready for usage. This is used to add RTD specific stuff to Sphinx, and to implement whitelists on projects as well. It is guaranteed to be called before your project is built. """ raise NotImplementedError def build(self, version): """ Do the actual building of the documentation. """ raise NotImplementedError def move(self, version): """ Move the documentation from it's generated place to its final home. This needs to understand both a single server dev environment, as well as a multi-server environment. """ raise NotImplementedError
import os from functools import wraps def restoring_chdir(fn): @wraps(fn) def decorator(*args, **kw): try: path = os.getcwd() return fn(*args, **kw) finally: os.chdir(path) return decorator class BaseBuilder(object): """ The Base for all Builders. Defines the API for subclasses. """ @restoring_chdir def force(self, version): """ An optional step to force a build even when nothing has changed. """ print "Forcing a build by touching files" os.chdir(version.project.conf_dir(version.slug)) os.system('touch * && touch */*') def clean(self, version): """ Clean up the version so it's ready for usage. This is used to add RTD specific stuff to Sphinx, and to implement whitelists on projects as well. It is guaranteed to be called before your project is built. """ raise NotImplementedError def build(self, version): """ Do the actual building of the documentation. """ raise NotImplementedError def move(self, version): """ Move the documentation from it's generated place to its final home. This needs to understand both a single server dev environment, as well as a multi-server environment. """ raise NotImplementedError
Call wraps on the restoring_chdir decorator.
Call wraps on the restoring_chdir decorator.
Python
mit
alex/readthedocs.org,safwanrahman/readthedocs.org,royalwang/readthedocs.org,VishvajitP/readthedocs.org,safwanrahman/readthedocs.org,alex/readthedocs.org,tddv/readthedocs.org,dirn/readthedocs.org,takluyver/readthedocs.org,nikolas/readthedocs.org,LukasBoersma/readthedocs.org,mhils/readthedocs.org,royalwang/readthedocs.org,agjohnson/readthedocs.org,jerel/readthedocs.org,hach-que/readthedocs.org,wanghaven/readthedocs.org,atsuyim/readthedocs.org,kdkeyser/readthedocs.org,asampat3090/readthedocs.org,tddv/readthedocs.org,espdev/readthedocs.org,sunnyzwh/readthedocs.org,clarkperkins/readthedocs.org,gjtorikian/readthedocs.org,sunnyzwh/readthedocs.org,michaelmcandrew/readthedocs.org,gjtorikian/readthedocs.org,KamranMackey/readthedocs.org,atsuyim/readthedocs.org,gjtorikian/readthedocs.org,takluyver/readthedocs.org,Carreau/readthedocs.org,johncosta/private-readthedocs.org,Carreau/readthedocs.org,emawind84/readthedocs.org,soulshake/readthedocs.org,KamranMackey/readthedocs.org,wijerasa/readthedocs.org,Tazer/readthedocs.org,agjohnson/readthedocs.org,dirn/readthedocs.org,soulshake/readthedocs.org,sils1297/readthedocs.org,emawind84/readthedocs.org,hach-que/readthedocs.org,sid-kap/readthedocs.org,mhils/readthedocs.org,wanghaven/readthedocs.org,asampat3090/readthedocs.org,titiushko/readthedocs.org,SteveViss/readthedocs.org,michaelmcandrew/readthedocs.org,GovReady/readthedocs.org,KamranMackey/readthedocs.org,nikolas/readthedocs.org,SteveViss/readthedocs.org,davidfischer/readthedocs.org,agjohnson/readthedocs.org,d0ugal/readthedocs.org,kenwang76/readthedocs.org,wanghaven/readthedocs.org,espdev/readthedocs.org,singingwolfboy/readthedocs.org,LukasBoersma/readthedocs.org,sils1297/readthedocs.org,techtonik/readthedocs.org,emawind84/readthedocs.org,stevepiercy/readthedocs.org,Tazer/readthedocs.org,sid-kap/readthedocs.org,LukasBoersma/readthedocs.org,istresearch/readthedocs.org,singingwolfboy/readthedocs.org,CedarLogic/readthedocs.org,soulshake/readthedocs.org,hach-que/readthedocs.org,d0ugal/rea
dthedocs.org,davidfischer/readthedocs.org,dirn/readthedocs.org,attakei/readthedocs-oauth,fujita-shintaro/readthedocs.org,pombredanne/readthedocs.org,kenwang76/readthedocs.org,kenshinthebattosai/readthedocs.org,rtfd/readthedocs.org,kenshinthebattosai/readthedocs.org,raven47git/readthedocs.org,johncosta/private-readthedocs.org,laplaceliu/readthedocs.org,agjohnson/readthedocs.org,nyergler/pythonslides,michaelmcandrew/readthedocs.org,laplaceliu/readthedocs.org,Tazer/readthedocs.org,safwanrahman/readthedocs.org,mrshoki/readthedocs.org,mhils/readthedocs.org,singingwolfboy/readthedocs.org,kdkeyser/readthedocs.org,LukasBoersma/readthedocs.org,emawind84/readthedocs.org,raven47git/readthedocs.org,tddv/readthedocs.org,Tazer/readthedocs.org,cgourlay/readthedocs.org,wanghaven/readthedocs.org,gjtorikian/readthedocs.org,pombredanne/readthedocs.org,takluyver/readthedocs.org,kenshinthebattosai/readthedocs.org,raven47git/readthedocs.org,wijerasa/readthedocs.org,sils1297/readthedocs.org,kenshinthebattosai/readthedocs.org,d0ugal/readthedocs.org,SteveViss/readthedocs.org,espdev/readthedocs.org,mrshoki/readthedocs.org,cgourlay/readthedocs.org,rtfd/readthedocs.org,techtonik/readthedocs.org,ojii/readthedocs.org,fujita-shintaro/readthedocs.org,KamranMackey/readthedocs.org,titiushko/readthedocs.org,clarkperkins/readthedocs.org,VishvajitP/readthedocs.org,atsuyim/readthedocs.org,CedarLogic/readthedocs.org,CedarLogic/readthedocs.org,asampat3090/readthedocs.org,hach-que/readthedocs.org,michaelmcandrew/readthedocs.org,jerel/readthedocs.org,cgourlay/readthedocs.org,atsuyim/readthedocs.org,royalwang/readthedocs.org,johncosta/private-readthedocs.org,cgourlay/readthedocs.org,GovReady/readthedocs.org,wijerasa/readthedocs.org,Carreau/readthedocs.org,nikolas/readthedocs.org,takluyver/readthedocs.org,sid-kap/readthedocs.org,mrshoki/readthedocs.org,nikolas/readthedocs.org,CedarLogic/readthedocs.org,wijerasa/readthedocs.org,istresearch/readthedocs.org,Carreau/readthedocs.org,espdev/readthedocs.org,singingw
olfboy/readthedocs.org,fujita-shintaro/readthedocs.org,istresearch/readthedocs.org,stevepiercy/readthedocs.org,ojii/readthedocs.org,jerel/readthedocs.org,VishvajitP/readthedocs.org,laplaceliu/readthedocs.org,istresearch/readthedocs.org,pombredanne/readthedocs.org,mrshoki/readthedocs.org,mhils/readthedocs.org,kdkeyser/readthedocs.org,rtfd/readthedocs.org,safwanrahman/readthedocs.org,alex/readthedocs.org,VishvajitP/readthedocs.org,dirn/readthedocs.org,GovReady/readthedocs.org,sunnyzwh/readthedocs.org,jerel/readthedocs.org,rtfd/readthedocs.org,nyergler/pythonslides,sils1297/readthedocs.org,kdkeyser/readthedocs.org,attakei/readthedocs-oauth,ojii/readthedocs.org,alex/readthedocs.org,clarkperkins/readthedocs.org,fujita-shintaro/readthedocs.org,clarkperkins/readthedocs.org,nyergler/pythonslides,raven47git/readthedocs.org,sunnyzwh/readthedocs.org,davidfischer/readthedocs.org,GovReady/readthedocs.org,titiushko/readthedocs.org,asampat3090/readthedocs.org,techtonik/readthedocs.org,attakei/readthedocs-oauth,royalwang/readthedocs.org,stevepiercy/readthedocs.org,laplaceliu/readthedocs.org,titiushko/readthedocs.org,sid-kap/readthedocs.org,ojii/readthedocs.org,nyergler/pythonslides,kenwang76/readthedocs.org,davidfischer/readthedocs.org,attakei/readthedocs-oauth,espdev/readthedocs.org,stevepiercy/readthedocs.org,d0ugal/readthedocs.org,kenwang76/readthedocs.org,SteveViss/readthedocs.org,soulshake/readthedocs.org,techtonik/readthedocs.org
76bf8966a25932822fca1c94586fccfa096ee02b
tests/misc/test_base_model.py
tests/misc/test_base_model.py
# -*- coding: UTF-8 -*- from tests.base import ApiDBTestCase class BaseModelTestCase(ApiDBTestCase): def test_repr(self): self.generate_fixture_project_status() self.generate_fixture_project() self.assertEqual(str(self.project), "<Project Cosmos Landromat>") self.project.name = u"Big Buck Bunny" self.assertEqual(str(self.project), "<Project Big Buck Bunny>") def test_query(self): pass def test_get(self): pass def test_get_by(self): pass def test_get_all_by(self): pass def test_create(self): pass def test_get_id_map(self): pass def save(self): pass def delete(self): pass def update(self): pass
# -*- coding: UTF-8 -*- from tests.base import ApiDBTestCase class BaseModelTestCase(ApiDBTestCase): def test_repr(self): self.generate_fixture_project_status() self.generate_fixture_project() self.assertEqual(str(self.project), "<Project %s>" % self.project.id) def test_query(self): pass def test_get(self): pass def test_get_by(self): pass def test_get_all_by(self): pass def test_create(self): pass def test_get_id_map(self): pass def save(self): pass def delete(self): pass def update(self): pass
Change base model string representation
Change base model string representation
Python
agpl-3.0
cgwire/zou
c908edadadb866292a612103d2854bef4673efab
shinken/__init__.py
shinken/__init__.py
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2009-2012: # Gabes Jean, naparuba@gmail.com # Gerhard Lausser, Gerhard.Lausser@consol.de # Gregory Starck, g.starck@gmail.com # Hartmut Goebel, h.goebel@goebel-consult.de # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>. # shinken.objects must be imported first: import objects
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2009-2012: # Gabes Jean, naparuba@gmail.com # Gerhard Lausser, Gerhard.Lausser@consol.de # Gregory Starck, g.starck@gmail.com # Hartmut Goebel, h.goebel@goebel-consult.de # # This file is part of Shinken. # # Shinken is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Shinken is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with Shinken. If not, see <http://www.gnu.org/licenses/>.
Remove superfluous import of shinken.objects in shinken/_init__.py.
Remove superfluous import of shinken.objects in shinken/_init__.py. Every script or test-case importing shinken has all the objects loaded, even if they are not required by the script or test-case at all. Also see <http://sourceforge.net/mailarchive/message.php?msg_id=29553474>.
Python
agpl-3.0
naparuba/shinken,claneys/shinken,naparuba/shinken,tal-nino/shinken,mohierf/shinken,titilambert/alignak,lets-software/shinken,KerkhoffTechnologies/shinken,Simage/shinken,mohierf/shinken,gst/alignak,lets-software/shinken,Aimage/shinken,Aimage/shinken,geektophe/shinken,Aimage/shinken,staute/shinken_package,savoirfairelinux/shinken,staute/shinken_deb,rednach/krill,peeyush-tm/shinken,geektophe/shinken,kaji-project/shinken,tal-nino/shinken,claneys/shinken,ddurieux/alignak,ddurieux/alignak,h4wkmoon/shinken,rednach/krill,rledisez/shinken,xorpaul/shinken,savoirfairelinux/shinken,mohierf/shinken,peeyush-tm/shinken,Simage/shinken,savoirfairelinux/shinken,savoirfairelinux/shinken,peeyush-tm/shinken,titilambert/alignak,kaji-project/shinken,tal-nino/shinken,ddurieux/alignak,xorpaul/shinken,fpeyre/shinken,tal-nino/shinken,lets-software/shinken,kaji-project/shinken,rledisez/shinken,h4wkmoon/shinken,naparuba/shinken,peeyush-tm/shinken,h4wkmoon/shinken,dfranco/shinken,kaji-project/shinken,xorpaul/shinken,peeyush-tm/shinken,lets-software/shinken,ddurieux/alignak,KerkhoffTechnologies/shinken,rledisez/shinken,h4wkmoon/shinken,tal-nino/shinken,fpeyre/shinken,Simage/shinken,h4wkmoon/shinken,staute/shinken_deb,gst/alignak,staute/shinken_deb,lets-software/shinken,Aimage/shinken,Simage/shinken,dfranco/shinken,geektophe/shinken,kaji-project/shinken,xorpaul/shinken,staute/shinken_package,rledisez/shinken,mohierf/shinken,claneys/shinken,gst/alignak,h4wkmoon/shinken,staute/shinken_package,rledisez/shinken,geektophe/shinken,geektophe/shinken,dfranco/shinken,fpeyre/shinken,claneys/shinken,mohierf/shinken,dfranco/shinken,ddurieux/alignak,ddurieux/alignak,KerkhoffTechnologies/shinken,naparuba/shinken,titilambert/alignak,titilambert/alignak,KerkhoffTechnologies/shinken,staute/shinken_deb,rledisez/shinken,savoirfairelinux/shinken,h4wkmoon/shinken,staute/shinken_deb,xorpaul/shinken,geektophe/shinken,xorpaul/shinken,claneys/shinken,Simage/shinken,claneys/shinken,staute/shinken_deb,peeyush-tm/shinken,kaj
i-project/shinken,KerkhoffTechnologies/shinken,Alignak-monitoring/alignak,rednach/krill,Simage/shinken,Aimage/shinken,rednach/krill,fpeyre/shinken,savoirfairelinux/shinken,staute/shinken_package,lets-software/shinken,staute/shinken_package,rednach/krill,rednach/krill,h4wkmoon/shinken,xorpaul/shinken,Aimage/shinken,Alignak-monitoring/alignak,dfranco/shinken,naparuba/shinken,fpeyre/shinken,gst/alignak,naparuba/shinken,dfranco/shinken,xorpaul/shinken,mohierf/shinken,kaji-project/shinken,fpeyre/shinken,KerkhoffTechnologies/shinken,tal-nino/shinken,staute/shinken_package
85f759a9446cf988cc859d3b74d11e6b224bbd16
request/managers.py
request/managers.py
from datetime import timedelta, datetime from django.db import models from django.contrib.auth.models import User class RequestManager(models.Manager): def active_users(self, **options): """ Returns a list of active users. Any arguments passed to this method will be given to timedelta for time filtering. Example: >>> Request.object.active_users(minutes=15) [<User: kylef>, <User: krisje8>] """ qs = self.filter(user__isnull=False) if options: time = datetime.now() - timedelta(**options) qs = qs.filter(time__gte=time) requests = qs.select_related('user').only('user') users = [] done = [] for request in requests: if not (request.user.pk in done): done.append(request.user.pk) users.append(request.user) return users
from datetime import timedelta, datetime from django.db import models from django.contrib.auth.models import User class RequestManager(models.Manager): def active_users(self, **options): """ Returns a list of active users. Any arguments passed to this method will be given to timedelta for time filtering. Example: >>> Request.object.active_users(minutes=15) [<User: kylef>, <User: krisje8>] """ qs = self.filter(user__isnull=False) if options: time = datetime.now() - timedelta(**options) qs = qs.filter(time__gte=time) requests = qs.select_related('user').only('user') return set([request.user for request in requests])
Use a list comprehension and set() to make the active_users query simpler and faster.
Use a list comprehension and set() to make the active_users query simpler and faster.
Python
bsd-2-clause
kylef/django-request,gnublade/django-request,Derecho/django-request,kylef/django-request,gnublade/django-request,gnublade/django-request,kylef/django-request
11583cfca501164c5c08af70f66d430cd180dbc5
examples/basic_nest/make_nest.py
examples/basic_nest/make_nest.py
#!/usr/bin/env python import collections import os import os.path import sys from nestly import nestly wd = os.getcwd() input_dir = os.path.join(wd, 'inputs') ctl = collections.OrderedDict() ctl['strategy'] = nestly.repeat_iterable(('exhaustive', 'approximate')) ctl['run_count'] = nestly.repeat_iterable([10**(i + 1) for i in xrange(3)]) ctl['input_file'] = lambda x: map(nestly.file_nv, nestly.collect_globs(input_dir, ['file*'])) nestly.build(ctl, 'runs')
#!/usr/bin/env python import glob import os import os.path from nestly import Nest wd = os.getcwd() input_dir = os.path.join(wd, 'inputs') nest = Nest() nest.add_level('strategy', ('exhaustive', 'approximate')) nest.add_level('run_count', [10**i for i in xrange(3)]) nest.add_level('input_file', glob.glob(os.path.join(input_dir, 'file*')), label_func=os.path.basename) nest.build('runs')
Update basic_nest for new API
Update basic_nest for new API
Python
mit
fhcrc/nestly
978b41a29eda295974ed5cf1a7cd5b79b148f479
coverage/execfile.py
coverage/execfile.py
"""Execute files of Python code.""" import imp, os, sys def run_python_file(filename, args): """Run a python source file as if it were the main program on the python command line. `filename` is the path to the file to execute, must be a .py file. `args` is the argument array to present as sys.argv. """ # Most code that does this does it in a way that leaves __main__ or __file__ # with the wrong values. Importing the code as __main__ gets all of this # right automatically. # # One difference from python.exe: if I run foo.py from the command line, it # always uses foo.py. With this code, it might find foo.pyc instead. sys.argv = args sys.path[0] = os.path.dirname(filename) try: src = open(filename) imp.load_module('__main__', src, filename, (".py", "r", imp.PY_SOURCE)) finally: src.close()
"""Execute files of Python code.""" import imp, os, sys def run_python_file(filename, args): """Run a python source file as if it were the main program on the python command line. `filename` is the path to the file to execute, must be a .py file. `args` is the argument array to present as sys.argv. """ # Most code that does this does it in a way that leaves __main__ or __file__ # with the wrong values. Importing the code as __main__ gets all of this # right automatically. # # One difference from python.exe: if I run foo.py from the command line, it # always uses foo.py. With this code, it might find foo.pyc instead. sys.argv = args sys.path[0] = os.path.dirname(filename) src = open(filename) try: imp.load_module('__main__', src, filename, (".py", "r", imp.PY_SOURCE)) finally: src.close()
Move the open outside the try, since the finally is only needed once the file is successfully opened.
Move the open outside the try, since the finally is only needed once the file is successfully opened.
Python
apache-2.0
7WebPages/coveragepy,blueyed/coveragepy,blueyed/coveragepy,jayhetee/coveragepy,blueyed/coveragepy,hugovk/coveragepy,larsbutler/coveragepy,jayhetee/coveragepy,jayhetee/coveragepy,larsbutler/coveragepy,nedbat/coveragepy,hugovk/coveragepy,larsbutler/coveragepy,nedbat/coveragepy,7WebPages/coveragepy,7WebPages/coveragepy,nedbat/coveragepy,blueyed/coveragepy,hugovk/coveragepy,jayhetee/coveragepy,larsbutler/coveragepy,nedbat/coveragepy,blueyed/coveragepy,hugovk/coveragepy,jayhetee/coveragepy,nedbat/coveragepy,7WebPages/coveragepy,larsbutler/coveragepy,hugovk/coveragepy
84d3738d2eb8a24dcb66cb329994f88bd55128c0
tests/test_utils.py
tests/test_utils.py
import pytest def test_scrub_doi(): from vdm.utils import scrub_doi d = 'http://dx.doi.org/10.1234' scrubbed = scrub_doi(d) assert(scrubbed == '10.1234') d = '10.123 4' assert( scrub_doi(d) == '10.1234' ) d = '<p>10.1234</p>' assert( scrub_doi(d) == '10.1234' )
import pytest def test_scrub_doi(): from vdm.utils import scrub_doi d = 'http://dx.doi.org/10.1234' scrubbed = scrub_doi(d) assert(scrubbed == '10.1234') d = '10.123 4' assert( scrub_doi(d) == '10.1234' ) d = '<p>10.1234</p>' assert( scrub_doi(d) == '10.1234' ) def test_pull(): from vdm.utils import pull d = {} d['mykey'] = 'Value' assert( pull(d, 'mykey') == 'Value' )
Add utils tests. Rework pull.
Add utils tests. Rework pull.
Python
mit
Brown-University-Library/vivo-data-management,Brown-University-Library/vivo-data-management
c34840a7ac20d22e650be09a515cee9dbfcf6043
tests/test_views.py
tests/test_views.py
from django.http import HttpResponse from djproxy.views import HttpProxy DOWNSTREAM_INJECTION = lambda x: x class LocalProxy(HttpProxy): base_url = "http://sitebuilder.qa.yola.net/en/ide/Yola/Yola.session.jsp" class SBProxy(HttpProxy): base_url = "http://sitebuilder.qa.yola.net/en/APIController" def index(request): DOWNSTREAM_INJECTION(request) return HttpResponse('Some content!', status=200) class BadTestProxy(HttpProxy): pass class GoodTestProxy(HttpProxy): base_url = "https://google.com/"
from django.http import HttpResponse from djproxy.views import HttpProxy class LocalProxy(HttpProxy): base_url = "http://localhost:8000/some/content/" def index(request): return HttpResponse('Some content!', status=200) class BadTestProxy(HttpProxy): pass class GoodTestProxy(HttpProxy): base_url = "https://google.com/"
Remove accidentally committed test code
Remove accidentally committed test code
Python
mit
thomasw/djproxy
ab418734f432691ec4a927be32364ee85baab35c
__init__.py
__init__.py
import inspect import python2.httplib2 as httplib2 globals().update(inspect.getmembers(httplib2))
import inspect import sys if sys.version_info[0] == 2: from .python2 import httplib2 else: from .python3 import httplib2 globals().update(inspect.getmembers(httplib2))
Use python version dependent import
Use python version dependent import Change-Id: Iae6bc0cc8d526162b91d0c18cf1fba1461aa9f98
Python
mit
wikimedia/pywikibot-externals-httplib2,wikimedia/pywikibot-externals-httplib2,jayvdb/httplib2,jayvdb/httplib2
9ab7efa44a8e7267b2902b6e23ff61381d31692c
profile_collection/startup/85-robot.py
profile_collection/startup/85-robot.py
from ophyd import Device, EpicsSignal, EpicsSignalRO from ophyd import Component as C class Robot(Device): robot_sample_number = C(EpicsSignal, 'ID:Tgt-SP') robot_load_cmd = C(EpicsSignal, 'Cmd:Load-Cmd.PROC') robot_unload_cmd = C(EpicsSignal, 'Cmd:Unload-Cmd.PROC') robot_execute_cmd = C(EpicsSignal, 'Cmd:Exec-Cmd') robot_status = C(EpicsSignal, 'Sts-Sts') robot = Robot('XF:28IDC-ES:1{SM}') # old RobotPositioner code is .ipython/profile_2015_collection/startup/robot.py
from ophyd import Device, EpicsSignal, EpicsSignalRO from ophyd import Component as C from ophyd.utils import set_and_wait class Robot(Device): sample_number = C(EpicsSignal, 'ID:Tgt-SP') load_cmd = C(EpicsSignal, 'Cmd:Load-Cmd.PROC') unload_cmd = C(EpicsSignal, 'Cmd:Unload-Cmd.PROC') execute_cmd = C(EpicsSignal, 'Cmd:Exec-Cmd') status = C(EpicsSignal, 'Sts-Sts') TH_POS = {'capilary':{'load':0, 'measure': 0}, 'flat': {'load': 0, 'measure': 0}, '':{}} DIFF_POS = {'capilary': (1,2),} def __init__(self, theta, diff): self.theta = theta self.diff = diff def load_sample(sample_number, sample_type): # self.theta.move(self.TH_POS[sample_type]['load'], wait=True) set_and_wait(self.sample_number, sample_number) set_and_wait(self.load_cmd, 1) self.execute_cmd.put(1) while self.status.get() != 'Idle': time.sleep(.1) # self.theta.move(self.TH_POS[sample_type]['measure'], wait=True) robot = Robot('XF:28IDC-ES:1{SM}') # old RobotPositioner code is .ipython/profile_2015_collection/startup/robot.py
Add sample loading logic to Robot.
WIP: Add sample loading logic to Robot.
Python
bsd-2-clause
NSLS-II-XPD/ipython_ophyd,NSLS-II-XPD/ipython_ophyd
2c6f5cfb2e90e815d74dca11c395e25875d475be
corehq/ex-submodules/phonelog/tasks.py
corehq/ex-submodules/phonelog/tasks.py
from datetime import datetime, timedelta from celery.schedules import crontab from celery.task import periodic_task from django.conf import settings from phonelog.models import DeviceReportEntry, UserErrorEntry, ForceCloseEntry, UserEntry @periodic_task(run_every=crontab(minute=0, hour=0), queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery')) def purge_old_device_report_entries(): max_age = datetime.utcnow() - timedelta(days=settings.DAYS_TO_KEEP_DEVICE_LOGS) DeviceReportEntry.objects.filter(server_date__lt=max_age).delete() UserErrorEntry.objects.filter(server_date__lt=max_age).delete() ForceCloseEntry.objects.filter(server_date__lt=max_age).delete() UserEntry.objects.filter(server_date__lt=max_age).delete()
from datetime import datetime, timedelta from celery.schedules import crontab from celery.task import periodic_task from django.conf import settings from django.db import connection from phonelog.models import UserErrorEntry, ForceCloseEntry, UserEntry @periodic_task(run_every=crontab(minute=0, hour=0), queue=getattr(settings, 'CELERY_PERIODIC_QUEUE', 'celery')) def purge_old_device_report_entries(): max_age = datetime.utcnow() - timedelta(days=settings.DAYS_TO_KEEP_DEVICE_LOGS) with connection.cursor() as cursor: partitoned_db_format = 'phonelog_daily_partitioned_devicereportentry_y%Yd%j' table_to_drop = (max_age - timedelta(days=1)).strftime(partitoned_db_format) cursor.execute("DROP TABLE {}".format(table_to_drop)) UserErrorEntry.objects.filter(server_date__lt=max_age).delete() ForceCloseEntry.objects.filter(server_date__lt=max_age).delete() UserEntry.objects.filter(server_date__lt=max_age).delete()
Drop table for device report logs.
Drop table for device report logs.
Python
bsd-3-clause
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
5025bff2ca9a4f31a371ecbd9255b1fb92b9cc4d
kafka_influxdb/encoder/echo_encoder.py
kafka_influxdb/encoder/echo_encoder.py
class Encoder(object): @staticmethod def encode(msg): """ Don't change the message at all :param msg: """ return msg
try: # Test for mypy support (requires Python 3) from typing import Text except: pass class Encoder(object): @staticmethod def encode(msg): # type: (bytes) -> List[bytes] """ Don't change the message at all :param msg: """ return [msg]
Return a list of messages in echo encoder and add mypy type hints
Return a list of messages in echo encoder and add mypy type hints
Python
apache-2.0
mre/kafka-influxdb,mre/kafka-influxdb
2722a59aad0775f1bcd1e81232ff445b9012a2ae
ssim/compat.py
ssim/compat.py
"""Compatibility routines.""" from __future__ import absolute_import import sys try: import Image # pylint: disable=import-error,unused-import except ImportError: from PIL import Image # pylint: disable=unused-import try: import ImageOps # pylint: disable=import-error,unused-import except ImportError: from PIL import ImageOps # pylint: disable=unused-import if sys.version_info[0] > 2: basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name else: basestring = basestring # pylint: disable=invalid-name
"""Compatibility routines.""" from __future__ import absolute_import import sys try: import Image # pylint: disable=import-error,unused-import except ImportError: from PIL import Image # pylint: disable=unused-import try: import ImageOps # pylint: disable=import-error,unused-import except ImportError: from PIL import ImageOps # pylint: disable=unused-import if sys.version_info[0] > 2: basestring = (str, bytes) # pylint: disable=redefined-builtin,invalid-name else: # pylint: disable=redefined-variable-type basestring = basestring # pylint: disable=invalid-name
Add pylint to disable redefined variable.
Add pylint to disable redefined variable.
Python
mit
jterrace/pyssim
659659270ef067baf0edea5de5bb10fdab532eaa
run-tests.py
run-tests.py
#!/usr/bin/env python from __future__ import print_function from optparse import OptionParser from subprocess import Popen import os import sys def run_command(cmdline): proc = Popen(cmdline, shell=True) proc.communicate() return proc.returncode def main(): parser = OptionParser() parser.add_option( '-c', '--coverage', action='store_true', help='Measure code coverage') options, args = parser.parse_args() if args: parser.print_help() return 2 if run_command('which cram >/dev/null') != 0: print('Error: cram is not installed', file=sys.stderr) return 1 if options.coverage: if run_command('which coverage >/dev/null') != 0: print('Error: coverage is not installed', file=sys.stderr) return 1 if options.coverage: run_command('coverage erase') os.environ['COVERAGE'] = 'yes' os.environ['COVERAGE_FILE'] = os.path.abspath('.coverage') run_command('cram test') if options.coverage: run_command('coverage report -m') if __name__ == '__main__': sys.exit(main() or 0)
#!/usr/bin/env python from __future__ import print_function from optparse import OptionParser from subprocess import Popen import os import sys def run_command(cmdline): proc = Popen(cmdline, shell=True) proc.communicate() return proc.returncode def main(): parser = OptionParser() parser.add_option( '-c', '--coverage', action='store_true', help='Measure code coverage') options, args = parser.parse_args() if args: parser.print_help() return 2 if run_command('which cram >/dev/null') != 0: print('Error: cram is not installed', file=sys.stderr) return 1 if options.coverage: if run_command('which coverage >/dev/null') != 0: print('Error: coverage is not installed', file=sys.stderr) return 1 if options.coverage: run_command('coverage erase') os.environ['COVERAGE'] = 'yes' os.environ['COVERAGE_FILE'] = os.path.abspath('.coverage') if 'SALADIR' in os.environ: # Remove SALADIR from environ to avoid failing tests del os.environ['SALADIR'] run_command('cram test') if options.coverage: run_command('coverage report -m') if __name__ == '__main__': sys.exit(main() or 0)
Remove SALADIR from environment if present
tests: Remove SALADIR from environment if present
Python
mit
akheron/sala,akheron/sala
431720194c20dde7b19236d2302c0f9910fd7ea4
pseudorandom.py
pseudorandom.py
import os from flask import Flask, render_template from names import get_full_name app = Flask(__name__) @app.route("/") def index(): return render_template('index.html', name=get_full_name()) if __name__ == "__main__": port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
import os from flask import Flask, render_template, request from names import get_full_name app = Flask(__name__) @app.route("/") def index(): if request.headers.get('User-Agent', '')[:4].lower() == 'curl': return u"{0}\n".format(get_full_name()) else: return render_template('index.html', name=get_full_name()) if __name__ == "__main__": port = int(os.environ.get('PORT', 5000)) app.run(host='0.0.0.0', port=port)
Send just plaintext name if curl is used
Send just plaintext name if curl is used
Python
mit
treyhunner/pseudorandom.name,treyhunner/pseudorandom.name
411ae98889d3611151a6f94d661b86b1bbc5e026
apis/Google.Cloud.Speech.V1/synth.py
apis/Google.Cloud.Speech.V1/synth.py
import os from synthtool import shell from pathlib import Path # Parent of the script is the API-specific directory # Parent of the API-specific directory is the apis directory # Parent of the apis directory is the repo root root = Path(__file__).parent.parent.parent package = Path(__file__).parent.name shell.run( f'"C:\\Program Files\\Git\\bin\\bash.exe" generateapis.sh {package}', cwd = root, hide_output = False)
import sys from synthtool import shell from pathlib import Path # Parent of the script is the API-specific directory # Parent of the API-specific directory is the apis directory # Parent of the apis directory is the repo root root = Path(__file__).parent.parent.parent package = Path(__file__).parent.name bash = '/bin/bash' if sys.platform == 'win32': bash = '"C:\\Program Files\\Git\\bin\\bash.exe"' shell.run( f'{bash} generateapis.sh {package}', cwd = root, hide_output = False)
Use the right bash command based on platform
Use the right bash command based on platform
Python
apache-2.0
googleapis/google-cloud-dotnet,jskeet/google-cloud-dotnet,jskeet/google-cloud-dotnet,jskeet/google-cloud-dotnet,googleapis/google-cloud-dotnet,jskeet/google-cloud-dotnet,jskeet/gcloud-dotnet,googleapis/google-cloud-dotnet,jskeet/google-cloud-dotnet
a1f26386bec0c4d39bce77d0fd3975ae4b0930d0
apps/package/tests/test_handlers.py
apps/package/tests/test_handlers.py
from django.test import TestCase class TestRepoHandlers(TestCase): def test_repo_registry(self): from package.handlers import get_repo, supported_repos g = get_repo("github") self.assertEqual(g.title, "Github") self.assertEqual(g.url, "https://github.com") self.assertTrue("github" in supported_repos())
from django.test import TestCase class TestRepoHandlers(TestCase): def test_repo_registry(self): from package.handlers import get_repo, supported_repos g = get_repo("github") self.assertEqual(g.title, "Github") self.assertEqual(g.url, "https://github.com") self.assertTrue("github" in supported_repos()) self.assertRaises(ImportError, lambda: get_repo("xyzzy"))
Test what get_repo() does for unsupported repos
Test what get_repo() does for unsupported repos
Python
mit
nanuxbe/djangopackages,nanuxbe/djangopackages,pydanny/djangopackages,QLGu/djangopackages,miketheman/opencomparison,benracine/opencomparison,QLGu/djangopackages,cartwheelweb/packaginator,miketheman/opencomparison,audreyr/opencomparison,benracine/opencomparison,audreyr/opencomparison,QLGu/djangopackages,cartwheelweb/packaginator,nanuxbe/djangopackages,pydanny/djangopackages,pydanny/djangopackages,cartwheelweb/packaginator
ab91d525abb5bb1ef476f3aac2c034e50f85617a
src/apps/contacts/mixins.py
src/apps/contacts/mixins.py
from apps.contacts.models import BaseContact class ContactMixin(object): """ Would be used for adding contacts functionality to models with contact data. """ def get_contacts(self, is_primary=False): """ Returns dict with all contacts. Example: >> obj.get_contacts() << {'email': [], 'skype': []} :param is_primary: :return: """ subclasses = BaseContact.__subclasses__() results = {} for cls in subclasses: queryset = cls.objects.filter(employee_id=self.id) key, verbose = cls.CONTACT_EXTRA_DATA if is_primary: queryset = queryset.filter(is_primary=True) results.setdefault(key, queryset) return results
from apps.contacts.models import BaseContact class ContactMixin(object): """ Would be used for adding contacts functionality to models with contact data. """ def get_contacts(self, is_primary=False): """ Returns dict with all contacts. Example: >> obj.get_contacts() << {'email': [], 'skype': []} :param is_primary: bool Return only primary contacts. :return: dict """ subclasses = BaseContact.__subclasses__() results = {} for cls in subclasses: queryset = cls.objects.filter(employee_id=self.id) key, verbose = cls.CONTACT_EXTRA_DATA if is_primary: queryset = queryset.filter(is_primary=True) results.setdefault(key, queryset) return results
Fix description for contact mixin
Fix description for contact mixin
Python
mit
wis-software/office-manager
b05eacfa7f2a3fb653ec4a9653780d211245bfb1
pyvac/helpers/calendar.py
pyvac/helpers/calendar.py
import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = caldav.Principal(client, url) calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True
import logging import caldav from dateutil.relativedelta import relativedelta log = logging.getLogger(__file__) def addToCal(url, date_from, date_end, summary): """ Add entry in calendar to period date_from, date_end """ vcal_entry = """BEGIN:VCALENDAR VERSION:2.0 PRODID:Pyvac Calendar BEGIN:VEVENT SUMMARY:%s DTSTART;VALUE=DATE:%s DTEND;VALUE=DATE:%s END:VEVENT END:VCALENDAR """ client = caldav.DAVClient(url) principal = client.principal() calendars = principal.calendars() if not len(calendars): return False vcal_entry = vcal_entry % (summary, date_from.strftime('%Y%m%d'), (date_end + relativedelta(days=1)).strftime('%Y%m%d')) calendar = calendars[0] log.info('Using calendar %r' % calendar) log.info('Using entry: %s' % vcal_entry) event = caldav.Event(client, data=vcal_entry, parent=calendar).save() log.info('Event %s created' % event) url_obj = event.url return str(url_obj) def delFromCal(url, ics): """ Delete entry in calendar""" if not url: return False client = caldav.DAVClient(url) log.info('Deleting entry %r' % ics) client.delete(ics) return True
Use now 'oop' method from creating principal object, prevent 'path handling error' with baikal caldav server
Use now 'oop' method from creating principal object, prevent 'path handling error' with baikal caldav server
Python
bsd-3-clause
doyousoft/pyvac,sayoun/pyvac,doyousoft/pyvac,sayoun/pyvac,doyousoft/pyvac,sayoun/pyvac
d51d9cc67eca9566673e963e824dc335eb47a9af
recipy/utils.py
recipy/utils.py
import sys from .log import log_input, log_output def open(*args, **kwargs): """Built-in open replacement that logs input and output Workaround for issue #44. Patching `__builtins__['open']` is complicated, because many libraries use standard open internally, while we only want to log inputs and outputs that are opened explicitly by the user. The user can either use `recipy.open` (only requires `import recipy` at the top of the script), or add `from recipy import open` and just use `open`. If python 2 is used, and an `encoding` parameter is passed to this function, `codecs` is used to open the file with proper encoding. """ if 'mode' in kwargs.keys(): mode = kwargs['mode'] # Python 3 else: try: mode = args[1] # Python 2 except: mode = 'r' # Default (in Python 2) # open file for reading? for c in 'r+': if c in mode: log_input(args[0], 'recipy.open') # open file for writing? for c in 'wax+': if c in mode: log_output(args[0], 'recipy.open') if sys.version_info[0] == 2: if 'encoding' in kwargs.keys(): import codecs f = codecs.open(*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) return(f)
import six from .log import log_input, log_output def open(*args, **kwargs): """Built-in open replacement that logs input and output Workaround for issue #44. Patching `__builtins__['open']` is complicated, because many libraries use standard open internally, while we only want to log inputs and outputs that are opened explicitly by the user. The user can either use `recipy.open` (only requires `import recipy` at the top of the script), or add `from recipy import open` and just use `open`. If python 2 is used, and an `encoding` parameter is passed to this function, `codecs` is used to open the file with proper encoding. """ if six.PY3: mode = kwargs['mode'] f = __builtins__['open'](*args, **kwargs) else: try: mode = args[1] except: mode = 'r' if 'encoding' in kwargs.keys(): import codecs f = codecs.open(*args, **kwargs) else: f = __builtins__['open'](*args, **kwargs) # open file for reading? for c in 'r+': if c in mode: log_input(args[0], 'recipy.open') # open file for writing? for c in 'wax+': if c in mode: log_output(args[0], 'recipy.open') return(f)
Use six instead of sys.version_info
Use six instead of sys.version_info
Python
apache-2.0
recipy/recipy,recipy/recipy
9e1cf6ecf8104b38c85a00e973873cbfa7d78236
bytecode.py
bytecode.py
class BytecodeBase: def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False
class BytecodeBase: autoincrement = True # For jump def __init__(self): # Eventually might want to add subclassed bytecodes here # Though __subclasses__ works quite well pass def execute(self, machine): pass class Push(BytecodeBase): def __init__(self, data): self.data = data def execute(self, machine): machine.push(self.data) class Pop(BytecodeBase): def execute(self, machine): return machine.pop() class Add(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a+b) class Sub(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a-b) class Mul(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a*b) class Div(BytecodeBase): def execute(self, machine): a = machine.pop() b = machine.pop() machine.push(a/b) class Terminate(BytecodeBase): def execute(self, machine): machine.executing = False
Add autoincrement for jump in the future
Add autoincrement for jump in the future
Python
bsd-3-clause
darbaga/simple_compiler
2034c8280800291227232435786441bfb0edace0
tests/cli.py
tests/cli.py
import os from spec import eq_ from invoke import run from _utils import support # Yea, it's not really object-oriented, but whatever :) class CLI(object): "Command-line interface" # Yo dogfood, I heard you like invoking def basic_invocation(self): os.chdir(support) result = run("invoke -c integration print_foo") eq_(result.stdout, "foo\n") def implicit_task_module(self): # Contains tasks.py os.chdir(support + '/implicit/') # Doesn't specify --collection result = run("invoke foo") eq_(result.stdout, "Hm\n")
import os from spec import eq_, skip from invoke import run from _utils import support # Yea, it's not really object-oriented, but whatever :) class CLI(object): "Command-line interface" # Yo dogfood, I heard you like invoking def basic_invocation(self): os.chdir(support) result = run("invoke -c integration print_foo") eq_(result.stdout, "foo\n") def implicit_task_module(self): # Contains tasks.py os.chdir(support + '/implicit/') # Doesn't specify --collection result = run("invoke foo") eq_(result.stdout, "Hm\n") def boolean_args(self): cmd = "taskname --boolean" skip() def flag_then_space_then_value(self): cmd = "taskname --flag value" skip() def flag_then_equals_sign_then_value(self): cmd = "taskname --flag=value" skip() def short_boolean_flag(self): cmd = "taskname -f" skip() def short_flag_then_space_then_value(self): cmd = "taskname -f value" skip() def short_flag_then_equals_sign_then_value(self): cmd = "taskname -f=value" skip() def short_flag_with_adjacent_value(self): cmd = "taskname -fvalue" skip() def flag_value_then_task(self): cmd = "task1 -f notatask task2" skip() def flag_value_same_as_task_name(self): cmd = "task1 -f mytask mytask" skip() def complex_multitask_invocation(self): cmd = "-c integration task1 --bool_arg --val_arg=value task2 --val_arg othervalue" skip() def three_tasks_with_args(self): cmd = "task1 --task1_bool task2 --task2_arg task2_arg_value task3" skip()
Add common CLI invocation test stubs.
Add common CLI invocation test stubs. Doesn't go into positional args.
Python
bsd-2-clause
frol/invoke,alex/invoke,mkusz/invoke,mattrobenolt/invoke,mattrobenolt/invoke,kejbaly2/invoke,kejbaly2/invoke,pyinvoke/invoke,pfmoore/invoke,sophacles/invoke,pfmoore/invoke,mkusz/invoke,tyewang/invoke,pyinvoke/invoke,singingwolfboy/invoke,frol/invoke
2e0286632b9120fe6a788db4483911513a39fe04
fabfile.py
fabfile.py
from fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/api.freemusic.ninja" env.deploy_path = "/home/django/django_project" def deploy(): with cd(env.directory): run("git pull --rebase") sudo("pip3 install -r requirements.txt") sudo("python3 manage.py collectstatic --noinput", user='django') sudo("python3 manage.py migrate --noinput", user='django') run("rm -f {deploy_path}".format(deploy_path=env.deploy_path)) run("ln -s {project_path} {deploy_path}".format( project_path=env.directory, deploy_path=env.deploy_path)) run("service gunicorn restart") def dbshell(): with cd(env.directory): sudo("python3 manage.py dbshell", user='django') def shell(): with cd(env.directory): sudo("python3 manage.py shell", user='django') def migrate(): with cd(env.directory): sudo("python3 manage.py migrate", user='django') def gunicorn_restart(): run("service gunicorn restart")
from fabric.api import * # noqa env.hosts = [ '104.131.30.135', ] env.user = "root" env.directory = "/home/django/api.freemusic.ninja" env.deploy_path = "/home/django/django_project" def deploy(): with cd(env.directory): run("git reset --hard origin/master") sudo("pip3 install -r requirements.txt") sudo("python3 manage.py collectstatic --noinput", user='django') sudo("python3 manage.py migrate --noinput", user='django') run("rm -f {deploy_path}".format(deploy_path=env.deploy_path)) run("ln -s {project_path} {deploy_path}".format( project_path=env.directory, deploy_path=env.deploy_path)) run("service gunicorn restart") def dbshell(): with cd(env.directory): sudo("python3 manage.py dbshell", user='django') def shell(): with cd(env.directory): sudo("python3 manage.py shell", user='django') def migrate(): with cd(env.directory): sudo("python3 manage.py migrate", user='django') def gunicorn_restart(): run("service gunicorn restart")
Reset to upstream master instead of rebasing during deployment
Reset to upstream master instead of rebasing during deployment
Python
bsd-3-clause
FreeMusicNinja/api.freemusic.ninja
caf94786ca8c0bc9e3995da0a160c84921a3bfc6
fabfile.py
fabfile.py
from fabric.api import task, sudo, env, local from fabric.contrib.project import rsync_project from fabric.contrib.console import confirm @task def upload_docs(): target = "/var/www/paramiko.org" staging = "/tmp/paramiko_docs" sudo("mkdir -p %s" % staging) sudo("chown -R %s %s" % (env.user, staging)) sudo("rm -rf %s/*" % target) rsync_project(local_dir='docs/', remote_dir=staging, delete=True) sudo("cp -R %s/* %s/" % (staging, target)) @task def build_docs(): local("epydoc --no-private -o docs/ paramiko") @task def clean(): local("rm -rf build dist docs") local("rm -f MANIFEST *.log demos/*.log") local("rm -f paramiko/*.pyc") local("rm -f test.log") local("rm -rf paramiko.egg-info") @task def test(): local("python ./test.py") @task def release(): confirm("Only hit Enter if you remembered to update the version!") build_docs() local("python setup.py sdist register upload") upload_docs()
from fabric.api import task, sudo, env, local, hosts from fabric.contrib.project import rsync_project from fabric.contrib.console import confirm @task @hosts("paramiko.org") def upload_docs(): target = "/var/www/paramiko.org" staging = "/tmp/paramiko_docs" sudo("mkdir -p %s" % staging) sudo("chown -R %s %s" % (env.user, staging)) sudo("rm -rf %s/*" % target) rsync_project(local_dir='docs/', remote_dir=staging, delete=True) sudo("cp -R %s/* %s/" % (staging, target)) @task def build_docs(): local("epydoc --no-private -o docs/ paramiko") @task def clean(): local("rm -rf build dist docs") local("rm -f MANIFEST *.log demos/*.log") local("rm -f paramiko/*.pyc") local("rm -f test.log") local("rm -rf paramiko.egg-info") @task def test(): local("python ./test.py") @task def release(): confirm("Only hit Enter if you remembered to update the version!") build_docs() local("python setup.py sdist register upload") upload_docs()
Update doc upload task w/ static hostname
Update doc upload task w/ static hostname
Python
lgpl-2.1
torkil/paramiko,redixin/paramiko,SebastianDeiss/paramiko,fvicente/paramiko,zpzgone/paramiko,mirrorcoder/paramiko,reaperhulk/paramiko,jaraco/paramiko,rcorrieri/paramiko,paramiko/paramiko,CptLemming/paramiko,anadigi/paramiko,digitalquacks/paramiko,ameily/paramiko,selboo/paramiko,remram44/paramiko,varunarya10/paramiko,davidbistolas/paramiko,Automatic/paramiko,dlitz/paramiko,thusoy/paramiko,mhdaimi/paramiko,zarr12steven/paramiko,thisch/paramiko,esc/paramiko,toby82/paramiko,jorik041/paramiko,dorianpula/paramiko,mitsuhiko/paramiko,alex/paramiko
e0b7b6ccdd947324ac72b48a28d6c68c7e980d96
ibmcnx/doc/DataSources.py
ibmcnx/doc/DataSources.py
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = AdminConfig.list('DataSource', AdminConfig.getid('/Cell:cnxwas1Cell01/')) for db in dbs: print db # dbs = dbs.split('(')[0] # print dbs # dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check # # for db in dbs: # t1 = ibmcnx.functions.getDSId( db ) # AdminConfig.show( t1 ) # print '\n\n' # AdminConfig.showall( t1 ) # AdminConfig.showAttribute(t1,'statementCacheSize' ) # AdminConfig.showAttribute(t1,'[statementCacheSize]' )
###### # Check ExId (GUID) by Email through JDBC # # Author: Christoph Stoettner # Mail: christoph.stoettner@stoeps.de # Documentation: http://scripting101.stoeps.de # # Version: 2.0 # Date: 2014-06-04 # # License: Apache 2.0 # # Check ExId of a User in all Connections Applications import ibmcnx.functions dbs = AdminConfig.list('DataSource',AdminConfig.getid('/Cell:cnxwas1Cell01/')).splitlines() for db in dbs: print db # dbs = dbs.split('(')[0] # print dbs # dbs = ['FNOSDS', 'FNGCDDS', 'IBM_FORMS_DATA_SOURCE', 'activities', 'blogs', 'communities', 'dogear', 'files', 'forum', 'homepage', 'metrics', 'mobile', 'news', 'oauth provider', 'profiles', 'search', 'wikis'] # List of all databases to check # # for db in dbs: # t1 = ibmcnx.functions.getDSId( db ) # AdminConfig.show( t1 ) # print '\n\n' # AdminConfig.showall( t1 ) # AdminConfig.showAttribute(t1,'statementCacheSize' ) # AdminConfig.showAttribute(t1,'[statementCacheSize]' )
Create documentation of DataSource Settings
8: Create documentation of DataSource Settings Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
Python
apache-2.0
stoeps13/ibmcnx2,stoeps13/ibmcnx2
c6275896adb429fad7f8bebb74ce932739ecfb63
edx_shopify/views.py
edx_shopify/views.py
import copy, json from django.conf import settings from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from .utils import hmac_is_valid from .models import Order from .tasks import ProcessOrder @csrf_exempt @require_POST def order_create(request): # Load configuration conf = settings.WEBHOOK_SETTINGS['edx_shopify'] # Process request try: hmac = request.META['HTTP_X_SHOPIFY_HMAC_SHA256'] shop_domain = request.META['HTTP_X_SHOPIFY_SHOP_DOMAIN'] data = json.loads(request.body) except (KeyError, ValueError): return HttpResponse(status=400) if (not hmac_is_valid(conf['api_key'], request.body, hmac) or conf['shop_domain'] != shop_domain): return HttpResponse(status=403) # Record order order, created = Order.objects.get_or_create( id=data['id'], email=data['customer']['email'], first_name=data['customer']['first_name'], last_name=data['customer']['last_name']) # Process order if order.status == Order.UNPROCESSED: order.status = Order.PROCESSING ProcessOrder().apply_async(args=(data,)) return HttpResponse(status=200)
import copy, json from django.conf import settings from django.http import HttpResponse from django.views.decorators.csrf import csrf_exempt from django.views.decorators.http import require_POST from .utils import hmac_is_valid from .models import Order from .tasks import ProcessOrder @csrf_exempt @require_POST def order_create(request): # Load configuration conf = settings.WEBHOOK_SETTINGS['edx_shopify'] # Process request try: hmac = request.META['HTTP_X_SHOPIFY_HMAC_SHA256'] shop_domain = request.META['HTTP_X_SHOPIFY_SHOP_DOMAIN'] data = json.loads(request.body) except (KeyError, ValueError): return HttpResponse(status=400) if (not hmac_is_valid(conf['api_key'], request.body, hmac) or conf['shop_domain'] != shop_domain): return HttpResponse(status=403) # Record order order, created = Order.objects.get_or_create( id=data['id'], defaults={ 'email': data['customer']['email'], 'first_name': data['customer']['first_name'], 'last_name': data['customer']['last_name'] } ) # Process order if order.status == Order.UNPROCESSED: order.status = Order.PROCESSING ProcessOrder().apply_async(args=(data,)) return HttpResponse(status=200)
Use get_or_create correctly on Order
Use get_or_create correctly on Order
Python
agpl-3.0
hastexo/edx-shopify,fghaas/edx-shopify
cfdbe06da6e35f2cb166374cf249d51f18e1224e
pryvate/blueprints/packages/packages.py
pryvate/blueprints/packages/packages.py
"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype})
"""Package blueprint.""" import os import magic from flask import Blueprint, current_app, make_response, render_template blueprint = Blueprint('packages', __name__, url_prefix='/packages') @blueprint.route('') def foo(): return 'ok' @blueprint.route('/<package_type>/<letter>/<name>/<version>', methods=['GET', 'HEAD']) def packages(package_type, letter, name, version): """Get the contents of a package.""" filepath = os.path.join(current_app.config['BASEDIR'], name.lower(), version.lower()) if os.path.isfile(filepath): with open(filepath, 'rb') as egg: mimetype = magic.from_file(filepath, mime=True) contents = egg.read() return make_response(contents, 200, {'Content-Type': mimetype}) return make_response('Package not found', 404)
Return a 404 if the package was not found
Return a 404 if the package was not found
Python
mit
Dinoshauer/pryvate,Dinoshauer/pryvate
e1b0222c8a3ed39bf76af10484a94aa4cfe5adc8
googlesearch/templatetags/search_tags.py
googlesearch/templatetags/search_tags.py
import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) last_page = int(context['current_page']) + pages_to_show - 1 last_page = max_pages if last_page > max_pages else last_page prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context
import math from django import template from ..conf import settings register = template.Library() @register.inclusion_tag('googlesearch/_pagination.html', takes_context=True) def show_pagination(context, pages_to_show=10): max_pages = int(math.ceil(context['total_results'] / settings.GOOGLE_SEARCH_RESULTS_PER_PAGE)) prev_page = context['current_page'] - 1 next_page = context['current_page'] + 1 context.update({ 'pages': range(1, max_pages + 1), 'prev_page': prev_page if context['current_page'] - 1 > 0 else None, 'next_page': next_page if next_page < max_pages else None, }) return context
Remove last_page not needed anymore.
Remove last_page not needed anymore.
Python
mit
hzdg/django-google-search,hzdg/django-google-search
451a435ca051305517c79216d7ab9441939f4004
src/amr.py
src/amr.py
import dolfin as df def amr(mesh, m, DirichletBoundary, g, d): V = df.FunctionSpace(mesh, "CG", 1) # Define boundary condition bc = df.DirichletBC(V, g, DirichletBoundary()) # Define variational problem u = df.Function(V) v = df.TestFunction(V) E = df.grad(u) costheta = df.dot(m, E) sigma = 1/(1 + costheta**2) F = df.inner(sigma*df.grad(u), df.grad(v))*df.dx # Compute solution df.solve(F == 0, u, bc, solver_parameters={"newton_solver": {"relative_tolerance": 1e-6}}) # Plot solution and solution gradient df.plot(u, title="Solution") df.plot(sigma*df.grad(u), title="Solution gradient") df.interactive()
import dolfin as df def amr(mesh, m, DirichletBoundary, g, d, s0=1, alpha=1): V = df.FunctionSpace(mesh, "CG", 1) # Define boundary condition bc = df.DirichletBC(V, g, DirichletBoundary()) # Define variational problem u = df.Function(V) v = df.TestFunction(V) E = -df.grad(u) costheta = df.dot(m, E) sigma = s0/(1 + alpha*costheta**2) F = df.inner(sigma*df.grad(u), df.grad(v))*df.dx # Compute solution df.solve(F == 0, u, bc, solver_parameters={"newton_solver": {"relative_tolerance": 1e-6}}) # Plot solution and solution gradient df.plot(u, title="Solution") df.plot(sigma*df.grad(u), title="Solution gradient") df.interactive()
Add sigma0 and alpha AMR parameters to the function.
Add sigma0 and alpha AMR parameters to the function.
Python
bsd-2-clause
fangohr/fenics-anisotropic-magneto-resistance
9a2169e38374429db7792537e2c4c1a78281200d
src/application/models.py
src/application/models.py
""" models.py App Engine datastore models """ from google.appengine.ext import ndb class ExampleModel(ndb.Model): """Example Model""" example_name = ndb.StringProperty(required=True) example_description = ndb.TextProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) class SchoolModel(ndb.Model): name = ndb.StringProperty(required=True) place = ndb.StringProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) score = ndb.IntegerProperty()
""" models.py App Engine datastore models """ from google.appengine.ext import ndb class SchoolModel(ndb.Model): """"Basic Model"""" name = ndb.StringProperty(required=True) place = ndb.StringProperty(required=True) added_by = ndb.UserProperty() timestamp = ndb.DateTimeProperty(auto_now_add=True) score = ndb.IntegerProperty()
Add Docstrings and fix basic model
Add Docstrings and fix basic model
Python
mit
shashisp/reWrite-SITA,shashisp/reWrite-SITA,shashisp/reWrite-SITA
08ae805a943be3cdd5e92c050512374180b9ae35
indra/sources/geneways/geneways_api.py
indra/sources/geneways/geneways_api.py
""" This module provides a simplified API for invoking the Geneways input processor , which converts extracted information collected with Geneways into INDRA statements. See publication: Rzhetsky, Andrey, Ivan Iossifov, Tomohiro Koike, Michael Krauthammer, Pauline Kra, Mitzi Morris, Hong Yu et al. "GeneWays: a system for extracting, analyzing, visualizing, and integrating molecular pathway data." Journal of biomedical informatics 37, no. 1 (2004): 43-53. """ from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str from indra.sources.geneways.processor import GenewaysProcessor def process_geneways(search_path=None): """Reads in Geneways data and returns a list of statements. Parameters ---------- search_path : list a list of directories in which to search for Geneways data. Looks for these Geneways extraction data files: human_action.txt, human_actionmention.txt, human_symbols.txt. Omit this parameter to use the default search path. Returns ------- statements : list[indra.statements.Statement] A list of INDRA statements generated from the Geneways action mentions. """ if search_path is None: search_path = ['./data', '../data', '../../data', '~/data', '.'] processor = GenewaysProcessor(search_path) return processor.statements
""" This module provides a simplified API for invoking the Geneways input processor , which converts extracted information collected with Geneways into INDRA statements. See publication: Rzhetsky, Andrey, Ivan Iossifov, Tomohiro Koike, Michael Krauthammer, Pauline Kra, Mitzi Morris, Hong Yu et al. "GeneWays: a system for extracting, analyzing, visualizing, and integrating molecular pathway data." Journal of biomedical informatics 37, no. 1 (2004): 43-53. """ from __future__ import absolute_import, print_function, unicode_literals from builtins import dict, str import os from indra.sources.geneways.processor import GenewaysProcessor # Path to the INDRA data folder path_this = os.path.dirname(os.path.abspath(__file__)) data_folder = os.path.join(path_this, '../../../data') def process_geneways(input_folder=data_folder): """Reads in Geneways data and returns a list of statements. Parameters ---------- input_folder : Optional[str] A folder in which to search for Geneways data. Looks for these Geneways extraction data files: human_action.txt, human_actionmention.txt, human_symbols.txt. Omit this parameter to use the default input folder which is indra/data. Returns ------- gp : GenewaysProcessor A GenewaysProcessor object which contains a list of INDRA statements generated from the Geneways action mentions. """ gp = GenewaysProcessor(input_folder) return gp
Update API to look at one folder and return processor
Update API to look at one folder and return processor
Python
bsd-2-clause
pvtodorov/indra,johnbachman/indra,sorgerlab/indra,johnbachman/indra,pvtodorov/indra,pvtodorov/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/indra,bgyori/indra,bgyori/indra,sorgerlab/belpy,sorgerlab/indra,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,sorgerlab/belpy,bgyori/indra,johnbachman/belpy
69aa0ec7c79139167e7a2adce1e0effac960755a
flaskrst/__init__.py
flaskrst/__init__.py
# -*- coding: utf-8 -*- """ flask-rstblog ~~~~~~~~~~~~~ :copyright: (c) 2011 by Christoph Heer. :license: BSD, see LICENSE for more details. """ from flask import Flask, url_for app = Flask("flaskrst") @app.context_processor def inject_navigation(): navigation = [] for item in app.config.get('NAVIGATION', []): kwargs = item.copy() del kwargs['route'] del kwargs['name'] link = url_for(item['route'], **kwargs) navigation.append((link, item['name'])) return dict(navigation=navigation)
# -*- coding: utf-8 -*- """ flask-rstblog ~~~~~~~~~~~~~ :copyright: (c) 2011 by Christoph Heer. :license: BSD, see LICENSE for more details. """ from flask import Flask, url_for app = Flask("flaskrst") @app.context_processor def inject_navigation(): navigation = [] for item in app.config.get('NAVIGATION', []): if item.has_key('route') and item.has_key('label'): kwargs = item.copy() del kwargs['route'] del kwargs['label'] link = url_for(item['route'], **kwargs) navigation.append((link, item['label'])) elif item.has_key('url') and item.has_key('label'): navigation.append((item['url'], item['label'])) return dict(navigation=navigation)
Rename navigation config key name to label and add support for links to external sites over the url key name
Rename navigation config key name to label and add support for links to external sites over the url key name
Python
bsd-3-clause
jarus/flask-rst
5c21f105057f8c5d10721b6de2c5cf698668fd3c
src/events/admin.py
src/events/admin.py
from django.contrib import admin from .models import SponsoredEvent @admin.register(SponsoredEvent) class SponsoredEventAdmin(admin.ModelAdmin): fields = [ 'host', 'title', 'slug', 'category', 'language', 'abstract', 'python_level', 'detailed_description', 'recording_policy', 'slide_link', ] search_fields = ['title', 'abstract'] list_display = ['title', 'category', 'language', 'python_level'] list_filter = ['category', 'language', 'python_level'] prepopulated_fields = {'slug': ['title']}
from django.contrib import admin from .models import SponsoredEvent @admin.register(SponsoredEvent) class SponsoredEventAdmin(admin.ModelAdmin): fields = [ 'host', 'title', 'slug', 'category', 'language', 'abstract', 'python_level', 'detailed_description', 'recording_policy', 'slide_link', ] search_fields = ['title', 'abstract'] list_display = ['title', 'category', 'language', 'python_level'] list_filter = ['category', 'language', 'python_level'] prepopulated_fields = {'slug': ['title']} raw_id_fields = ['host']
Make host field raw ID instead of select
Make host field raw ID instead of select
Python
mit
pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016,pycontw/pycontw2016
74e8bf6574ce3658e1b276479c3b6ebec36844a4
kuhn_poker/agents/kuhn_random_agent.py
kuhn_poker/agents/kuhn_random_agent.py
import random import sys import acpc_python_client as acpc class KuhnRandomAgent(acpc.Agent): def __init__(self): super().__init__() def on_game_start(self, game): pass def on_next_turn(self, game, match_state, is_acting_player): if not is_acting_player: return print('%s: %s %s' % ( match_state.get_viewing_player(), self.is_fold_valid(), self.is_raise_valid() )) # Select between passing (fold or initial call) # or betting (raising or calling a bet) selected_action = random.randrange(2) if selected_action == 0 and self.is_fold_valid(): self.set_next_action(acpc.ActionType.FOLD) elif selected_action == 1 and self.is_raise_valid(): self.set_next_action(acpc.ActionType.RAISE) else: self.set_next_action(acpc.ActionType.CALL) def on_game_finished(self, game, match_state): pass if __name__ == "__main__": if len(sys.argv) < 4: print("Usage {game_file_path} {dealer_hostname} {dealer_port}") sys.exit(1) client = acpc.Client(sys.argv[1], sys.argv[2], sys.argv[3]) client.play(KuhnRandomAgent())
import random import sys import acpc_python_client as acpc class KuhnRandomAgent(acpc.Agent): def __init__(self): super().__init__() def on_game_start(self, game): pass def on_next_turn(self, game, match_state, is_acting_player): if not is_acting_player: return # Select between passing (fold or initial call) # or betting (raising or calling a bet) selected_action = random.randrange(2) if selected_action == 0 and self.is_fold_valid(): self.set_next_action(acpc.ActionType.FOLD) elif selected_action == 1 and self.is_raise_valid(): self.set_next_action(acpc.ActionType.RAISE) else: self.set_next_action(acpc.ActionType.CALL) def on_game_finished(self, game, match_state): pass if __name__ == "__main__": if len(sys.argv) < 4: print("Usage {game_file_path} {dealer_hostname} {dealer_port}") sys.exit(1) client = acpc.Client(sys.argv[1], sys.argv[2], sys.argv[3]) client.play(KuhnRandomAgent())
Remove unnecessary log from random agent
Remove unnecessary log from random agent
Python
mit
JakubPetriska/poker-cfr,JakubPetriska/poker-cfr
fe76abc03f7152f318712e1a233aad42f2e9870a
jsonfield/widgets.py
jsonfield/widgets.py
from django import forms from django.utils import simplejson as json import staticmedia class JSONWidget(forms.Textarea): def render(self, name, value, attrs=None): if value is None: value = "" if not isinstance(value, basestring): value = json.dumps(value, indent=2) return super(JSONWidget, self).render(name, value, attrs) class JSONSelectWidget(forms.SelectMultiple): pass class JSONTableWidget(JSONWidget): class Media: js = ( staticmedia.url('js/jquery.js'), staticmedia.url('js/jquery.tmpl.js'), staticmedia.url('js/json-table.js'), staticmedia.url('js/json-table-templates.js'), )
from django import forms from django.utils import simplejson as json from django.conf import settings class JSONWidget(forms.Textarea): def render(self, name, value, attrs=None): if value is None: value = "" if not isinstance(value, basestring): value = json.dumps(value, indent=2) return super(JSONWidget, self).render(name, value, attrs) class JSONSelectWidget(forms.SelectMultiple): pass class JSONTableWidget(JSONWidget): class Media: js = ( settings.STATICFILES_URL + 'js/jquery.js', settings.STATICFILES_URL + 'js/jquery.tmpl.js', settings.STATICFILES_URL + 'js/json-table.js', settings.STATICFILES_URL + 'js/json-table-templates.js', )
Use staticfiles instead of staticmedia
Use staticfiles instead of staticmedia
Python
bsd-3-clause
SideStudios/django-jsonfield,chrismeyersfsu/django-jsonfield
c13208dcc4fe1715db10d86e4dfd584c18f396fa
sympy/calculus/singularities.py
sympy/calculus/singularities.py
from sympy.solvers import solve from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solve(simplify(1/expr), sym)))
from sympy.solvers import solve from sympy.solvers.solveset import solveset from sympy.simplify import simplify def singularities(expr, sym): """ Finds singularities for a function. Currently supported functions are: - univariate real rational functions Examples ======== >>> from sympy.calculus.singularities import singularities >>> from sympy import Symbol >>> x = Symbol('x', real=True) >>> singularities(x**2 + x + 1, x) () >>> singularities(1/(x + 1), x) (-1,) References ========== .. [1] http://en.wikipedia.org/wiki/Mathematical_singularity """ if not expr.is_rational_function(sym): raise NotImplementedError("Algorithms finding singularities for" " non rational functions are not yet" " implemented") else: return tuple(sorted(solveset(simplify(1/expr), sym)))
Replace solve with solveset in sympy.calculus
Replace solve with solveset in sympy.calculus
Python
bsd-3-clause
skidzo/sympy,chaffra/sympy,pandeyadarsh/sympy,VaibhavAgarwalVA/sympy,abhiii5459/sympy,jbbskinny/sympy,aktech/sympy,lindsayad/sympy,kevalds51/sympy,Titan-C/sympy,hargup/sympy,yukoba/sympy,farhaanbukhsh/sympy,moble/sympy,emon10005/sympy,bukzor/sympy,sahmed95/sympy,mafiya69/sympy,kaushik94/sympy,VaibhavAgarwalVA/sympy,jbbskinny/sympy,cswiercz/sympy,moble/sympy,lindsayad/sympy,Titan-C/sympy,bukzor/sympy,jaimahajan1997/sympy,abhiii5459/sympy,sampadsaha5/sympy,saurabhjn76/sympy,yukoba/sympy,aktech/sympy,mcdaniel67/sympy,ChristinaZografou/sympy,wyom/sympy,postvakje/sympy,iamutkarshtiwari/sympy,wyom/sympy,yashsharan/sympy,ga7g08/sympy,debugger22/sympy,ahhda/sympy,grevutiu-gabriel/sympy,oliverlee/sympy,chaffra/sympy,Designist/sympy,yashsharan/sympy,Davidjohnwilson/sympy,drufat/sympy,skidzo/sympy,oliverlee/sympy,jerli/sympy,AkademieOlympia/sympy,debugger22/sympy,kaushik94/sympy,Shaswat27/sympy,VaibhavAgarwalVA/sympy,ChristinaZografou/sympy,iamutkarshtiwari/sympy,wyom/sympy,ahhda/sympy,mcdaniel67/sympy,atreyv/sympy,kaichogami/sympy,Davidjohnwilson/sympy,abhiii5459/sympy,postvakje/sympy,Arafatk/sympy,Titan-C/sympy,pandeyadarsh/sympy,cswiercz/sympy,wanglongqi/sympy,Shaswat27/sympy,drufat/sympy,maniteja123/sympy,rahuldan/sympy,jerli/sympy,Arafatk/sympy,iamutkarshtiwari/sympy,MechCoder/sympy,farhaanbukhsh/sympy,Arafatk/sympy,madan96/sympy,souravsingh/sympy,chaffra/sympy,wanglongqi/sympy,ChristinaZografou/sympy,madan96/sympy,shikil/sympy,Designist/sympy,sahmed95/sympy,ahhda/sympy,jbbskinny/sympy,Curious72/sympy,souravsingh/sympy,postvakje/sympy,lindsayad/sympy,kevalds51/sympy,sampadsaha5/sympy,jaimahajan1997/sympy,rahuldan/sympy,mcdaniel67/sympy,yukoba/sympy,kumarkrishna/sympy,souravsingh/sympy,MechCoder/sympy,ga7g08/sympy,bukzor/sympy,oliverlee/sympy,Curious72/sympy,grevutiu-gabriel/sympy,MechCoder/sympy,sahmed95/sympy,Vishluck/sympy,shikil/sympy,atreyv/sympy,emon10005/sympy,mafiya69/sympy,kaushik94/sympy,pandeyadarsh/sympy,Curious72/sympy,saurabhjn76/sympy,kaichogami/sympy,cswier
cz/sympy,aktech/sympy,sampadsaha5/sympy,maniteja123/sympy,atreyv/sympy,ga7g08/sympy,Davidjohnwilson/sympy,shikil/sympy,moble/sympy,AkademieOlympia/sympy,kaichogami/sympy,skidzo/sympy,emon10005/sympy,grevutiu-gabriel/sympy,Vishluck/sympy,hargup/sympy,debugger22/sympy,wanglongqi/sympy,Shaswat27/sympy,Vishluck/sympy,kevalds51/sympy,AkademieOlympia/sympy,farhaanbukhsh/sympy,Designist/sympy,rahuldan/sympy,maniteja123/sympy,kumarkrishna/sympy,jaimahajan1997/sympy,yashsharan/sympy,kumarkrishna/sympy,jerli/sympy,madan96/sympy,mafiya69/sympy,saurabhjn76/sympy,drufat/sympy,hargup/sympy
b71f3c726aa6bde4ab0e2b471c5cb9064abfb3fa
apps/webdriver_testing/api_v2/test_user_resources.py
apps/webdriver_testing/api_v2/test_user_resources.py
from apps.webdriver_testing.webdriver_base import WebdriverTestCase from apps.webdriver_testing import data_helpers from apps.webdriver_testing.data_factories import UserFactory class WebdriverTestCaseSubtitlesUpload(WebdriverTestCase): """TestSuite for uploading subtitles via the api. """ def setUp(self): WebdriverTestCase.setUp(self) self.user = UserFactory.create(username = 'user') data_helpers.create_user_api_key(self, self.user) def api_create_user(self, **kwargs): """Create a user via the api. Creating Users: POST /api2/partners/users/ """ create_url = 'users' create_data = {'username': None, 'email': None, 'password': 'password', 'first_name': None, 'last_name': None, 'create_login_token': None } create_data.update(kwargs) status, response = data_helpers.post_api_request(self, create_url, create_data) print status return response def test_create(self): new_user = {'username': 'newuser', 'email': 'newuser@example.com', 'first_name': 'New', 'last_name': 'User_1', } user_data = self.api_create_user(**new_user) users = user_data['objects'] print '#######' for x in users: print x['username']
from apps.webdriver_testing.webdriver_base import WebdriverTestCase from apps.webdriver_testing import data_helpers from apps.webdriver_testing.data_factories import UserFactory class WebdriverTestCaseSubtitlesUpload(WebdriverTestCase): """TestSuite for uploading subtitles via the api. """ def setUp(self): WebdriverTestCase.setUp(self) self.user = UserFactory.create(username = 'user') data_helpers.create_user_api_key(self, self.user) def api_create_user(self, **kwargs): """Create a user via the api. Creating Users: POST /api2/partners/users/ """ create_url = 'users/' create_data = {'username': None, 'email': None, 'password': 'password', 'first_name': None, 'last_name': None, 'create_login_token': None } create_data.update(kwargs) status, response = data_helpers.post_api_request(self, create_url, create_data) print status return response def test_create(self): new_user = {'username': 'newuser', 'email': 'newuser@example.com', 'first_name': 'New', 'last_name': 'User_1', } user_data = self.api_create_user(**new_user) print user_data['username']
Fix webdriver user creation bug
Fix webdriver user creation bug
Python
agpl-3.0
eloquence/unisubs,ofer43211/unisubs,eloquence/unisubs,eloquence/unisubs,pculture/unisubs,norayr/unisubs,pculture/unisubs,norayr/unisubs,wevoice/wesub,pculture/unisubs,ujdhesa/unisubs,ofer43211/unisubs,ujdhesa/unisubs,pculture/unisubs,ReachingOut/unisubs,ofer43211/unisubs,wevoice/wesub,ujdhesa/unisubs,wevoice/wesub,ReachingOut/unisubs,wevoice/wesub,ofer43211/unisubs,ReachingOut/unisubs,eloquence/unisubs,norayr/unisubs,ReachingOut/unisubs,norayr/unisubs,ujdhesa/unisubs
236ad637e05ab8ff48b7c169dd54228e48470e1b
mediacloud/mediawords/util/test_sql.py
mediacloud/mediawords/util/test_sql.py
from mediawords.util.sql import * import time import datetime def test_get_sql_date_from_epoch(): assert get_sql_date_from_epoch(int(time.time())) == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_sql_now(): assert sql_now() == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_get_epoch_from_sql_date(): assert get_epoch_from_sql_date('2016-10-11 10:34:24.598883+03') == 1476171264 def test_increment_day(): assert increment_day(date='2016-10-11', days=3) == '2016-10-14'
from mediawords.util.sql import * import time import datetime def test_get_sql_date_from_epoch(): assert get_sql_date_from_epoch(int(time.time())) == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') assert get_sql_date_from_epoch(0) == datetime.datetime.fromtimestamp(0).strftime('%Y-%m-%d %H:%M:%S') # noinspection PyTypeChecker assert get_sql_date_from_epoch('badger') == datetime.datetime.fromtimestamp(0).strftime('%Y-%m-%d %H:%M:%S') def test_sql_now(): assert sql_now() == datetime.datetime.today().strftime('%Y-%m-%d %H:%M:%S') def test_get_epoch_from_sql_date(): assert get_epoch_from_sql_date('2016-10-11 10:34:24.598883+03') == 1476171264 def test_increment_day(): assert increment_day(date='2016-10-11', days=3) == '2016-10-14'
Add some more unit tests for get_sql_date_from_epoch()
Add some more unit tests for get_sql_date_from_epoch()
Python
agpl-3.0
berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud
d2991a6385be74debf71eb8404e362c6027e6d50
molecule/default/tests/test_default.py
molecule/default/tests/test_default.py
import os import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') def test_command(host): assert host.command('i3 --version').rc == 0 assert host.command('pactl --version').rc == 0 assert host.command('Xorg -version').rc == 0
import os import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') def test_command(host): assert host.command('i3 --version').rc == 0 assert host.command('pactl --version').rc == 0
Remove redundant xorg command test
Remove redundant xorg command test
Python
mit
nephelaiio/ansible-role-i3,nephelaiio/ansible-role-i3
325902c169424ec76307efa71a2e4885180e5cbb
tests/integration/shell/call.py
tests/integration/shell/call.py
# -*- coding: utf-8 -*- """ tests.integration.shell.call ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio (pedro@algarvio.me)` :license: Apache 2.0, see LICENSE for more details. """ import sys # Import salt libs from saltunittest import TestLoader, TextTestRunner import integration from integration import TestDaemon class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn): _call_binary_ = 'salt-call' def test_default_output(self): out = self.run_call('test.fib 3') self.assertEqual( "local: !!python/tuple\n- [0, 1, 1, 2]", '\n'.join(out[:-3]) ) def test_text_output(self): out = self.run_call('--text-out test.fib 3') self.assertEqual("local: ([0, 1, 1, 2]", ''.join(out).rsplit(",", 1)[0]) if __name__ == "__main__": loader = TestLoader() tests = loader.loadTestsFromTestCase(CallTest) print('Setting up Salt daemons to execute tests') with TestDaemon(): runner = TextTestRunner(verbosity=1).run(tests) sys.exit(runner.wasSuccessful())
# -*- coding: utf-8 -*- """ tests.integration.shell.call ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :copyright: © 2012 UfSoft.org - :email:`Pedro Algarvio (pedro@algarvio.me)` :license: Apache 2.0, see LICENSE for more details. """ import sys # Import salt libs from saltunittest import TestLoader, TextTestRunner, skipIf import integration from integration import TestDaemon class CallTest(integration.ShellCase, integration.ShellCaseCommonTestsMixIn): _call_binary_ = 'salt-call' def test_default_output(self): out = self.run_call('test.fib 3') self.assertEqual( "local: !!python/tuple\n- [0, 1, 1, 2]", '\n'.join(out[:-3]) ) def test_text_output(self): out = self.run_call('--text-out test.fib 3') self.assertEqual("local: ([0, 1, 1, 2]", ''.join(out).rsplit(",", 1)[0]) @skipIf(sys.platform.startswith('win'), 'This test does not apply on Win') def test_user_delete_kw_output(self): ret = self.run_call('-d user.delete') self.assertIn( 'salt \'*\' user.delete name remove=True force=True', ''.join(ret) ) if __name__ == "__main__": loader = TestLoader() tests = loader.loadTestsFromTestCase(CallTest) print('Setting up Salt daemons to execute tests') with TestDaemon(): runner = TextTestRunner(verbosity=1).run(tests) sys.exit(runner.wasSuccessful())
Test to make sure we're outputting kwargs on the user.delete documentation.
Test to make sure we're outputting kwargs on the user.delete documentation.
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
981bac39056584ec9c16e5a8d0f7a972d7365a3f
tests/test_module_dispatcher.py
tests/test_module_dispatcher.py
import pytest from conftest import (POSITIVE_HOST_PATTERNS, NEGATIVE_HOST_PATTERNS) @pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS) def test_len(host_pattern, num_hosts, hosts): assert len(getattr(hosts, host_pattern)) == num_hosts @pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS) def test_contains(host_pattern, num_hosts, hosts): assert host_pattern in hosts.all assert host_pattern in hosts['all'] @pytest.mark.parametrize("host_pattern, num_hosts", NEGATIVE_HOST_PATTERNS) def test_not_contains(host_pattern, num_hosts, hosts): assert host_pattern not in hosts.all assert host_pattern not in hosts['all'] def test_ansible_module_error(hosts): '''Verify that AnsibleModuleError is raised when no such module exists.''' from pytest_ansible.errors import AnsibleModuleError with pytest.raises(AnsibleModuleError): # The following allows us to introspect the exception object try: hosts.all.a_module_that_most_certainly_does_not_exist() except AnsibleModuleError, e: assert e.message == "The module {0} was not found in configured module paths.".format("a_module_that_most_certainly_does_not_exist") raise else: pytest.fail("pytest_ansible.errors.AnsibleModuleError was not raised as expected")
import pytest from conftest import (POSITIVE_HOST_PATTERNS, NEGATIVE_HOST_PATTERNS) @pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS) def test_len(host_pattern, num_hosts, hosts): assert len(getattr(hosts, host_pattern)) == num_hosts @pytest.mark.parametrize("host_pattern, num_hosts", POSITIVE_HOST_PATTERNS) def test_contains(host_pattern, num_hosts, hosts): assert host_pattern in hosts.all assert host_pattern in hosts['all'] @pytest.mark.parametrize("host_pattern, num_hosts", NEGATIVE_HOST_PATTERNS) def test_not_contains(host_pattern, num_hosts, hosts): assert host_pattern not in hosts.all assert host_pattern not in hosts['all'] def test_ansible_module_error(hosts): '''Verify that AnsibleModuleError is raised when no such module exists.''' from pytest_ansible.errors import AnsibleModuleError with pytest.raises(AnsibleModuleError) as exc_info: hosts.all.a_module_that_most_certainly_does_not_exist() assert str(exc_info.value) == "The module {0} was not found in configured module paths.".format("a_module_that_most_certainly_does_not_exist")
Use more prefered exc_info inspection technique
Use more prefered exc_info inspection technique
Python
mit
jlaska/pytest-ansible
ba564c7e2cacc8609d52f03e501786be3c7c8f44
tests/config.py
tests/config.py
import sys sys.path.append("../ideascaly") from ideascaly.auth import AuthNonSSO from ideascaly.api import API import ConfigParser import unittest config = ConfigParser.ConfigParser() config.read('config') class IdeascalyTestCase(unittest.TestCase): def setUp(self): self.auth = create_auth() self.api = API(self.auth) self.api.community_url = config.get('test', 'community_url') def create_auth(): auth = AuthNonSSO(config.get('test', 'token')) return auth
import sys sys.path.append('../ideascaly') from ideascaly.auth import AuthNonSSO from ideascaly.api import API import unittest testing_community = 'fiveheads.ideascale.com' testing_token = '5b3326f8-50a5-419d-8f02-eef6a42fd61a' class IdeascalyTestCase(unittest.TestCase): def setUp(self): self.auth = create_auth() self.api = API(self.auth) self.api.community_url = testing_community def create_auth(): auth = AuthNonSSO(testing_token) return auth
Change the way used to read the testing information
Change the way used to read the testing information
Python
mit
joausaga/ideascaly
d07109e07e4d9fab488dfbbcf56fdfe18baa56ab
lib/python/plow/test/test_static.py
lib/python/plow/test/test_static.py
import unittest import manifest import plow class StaticModuletests(unittest.TestCase): def testFindJobs(self): plow.findJobs() if __name__ == "__main__": suite = unittest.TestLoader().loadTestsFromTestCase(StaticModuletests) unittest.TextTestRunner(verbosity=2).run(suite)
import unittest import manifest import plow class StaticModuletests(unittest.TestCase): def testFindJobs(self): plow.getJobs() def testGetGroupedJobs(self): result = [ {"id": 1, "parent":0, "name": "High"}, {"id": 2, "parent":1, "name": "Foo"} ] for p in result: print p if __name__ == "__main__": suite = unittest.TestLoader().loadTestsFromTestCase(StaticModuletests) unittest.TextTestRunner(verbosity=2).run(suite)
Set the column count value based on size of header list.
Set the column count value based on size of header list.
Python
apache-2.0
Br3nda/plow,Br3nda/plow,chadmv/plow,Br3nda/plow,chadmv/plow,chadmv/plow,Br3nda/plow,Br3nda/plow,chadmv/plow,chadmv/plow,chadmv/plow,chadmv/plow
189c7a7c982739cd7a3026e34a9969ea9278a12b
api/data/src/lib/middleware.py
api/data/src/lib/middleware.py
import os import re class SetBaseEnv(object): """ Figure out which port we are on if we are running and set it. So that the links will be correct. Not sure if we need this always... """ def __init__(self, get_response): self.get_response = get_response def __call__(self, request): if os.environ.get('HTTP_PORT'): request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT']) response = self.get_response(request) return response
import os class SetBaseEnv(object): """ Figure out which port we are on if we are running and set it. So that the links will be correct. Not sure if we need this always... """ def __init__(self, get_response): self.get_response = get_response def __call__(self, request): if os.environ.get('HTTP_PORT') and ':' not in request.META['HTTP_HOST']: request.META['HTTP_HOST'] = '{}:{}'.format(request.META['HTTP_HOST'], os.environ['HTTP_PORT']) response = self.get_response(request) return response
Fix so we can do :5000 queries from api container
Fix so we can do :5000 queries from api container
Python
mit
xeor/hohu,xeor/hohu,xeor/hohu,xeor/hohu
e0c70b2b20349b8f1c0f6df8cc641c3267a63a06
crypto.py
crypto.py
""" Sending a message: Encrypt your plaintext with encrypt_message Your id will serve as your public key Reading a message Use decrypt_message and validate contents """ from Crypto.PublicKey import RSA # Generates and writes byte string with object of RSA key object def create_key(): key = RSA.generate(2048) f = open('key.pem', 'w') f.write(key.exportKey('PEM')) f.close() # Reads an exported key-bytestring from file and returns an RSA key object def retrieve_key(): f = open('key.pem', 'r') key = RSA.importKey(f.read()) return key def get_public_bytestring(): key = retrieve_key() return key.publickey().exportKey() # Use own private key to decrypt broadcasted message def decrypt_message(message): key_obj = retrieve_key() return key_obj.decrypt(message) # Use given id to encrypt message def encrypt_message(key_string, message): key = RSA.importKey(key_string) return key.encrypt(message, 123)
""" Sending a message: Encrypt your plaintext with encrypt_message Your id will serve as your public key Reading a message Use decrypt_message and validate contents """ from Crypto.PublicKey import RSA # Generates and writes byte string with object of RSA key object def create_key(): key = RSA.generate(2048) with open('key.pem', 'w') as f: f.write(key.exportKey('PEM')) # Reads an exported key-bytestring from file and returns an RSA key object def retrieve_key(): with open('key.pem', 'r') as f: key = RSA.importKey(f.read()) return key def get_public_bytestring(): key = retrieve_key() return key.publickey().exportKey() # Use own private key to decrypt broadcasted message def decrypt_message(message): key = retrieve_key() return key.decrypt(message) # Use given id to encrypt message def encrypt_message(key_string, message): key = RSA.importKey(key_string) return key.encrypt(message, 123)
Use 'with ... as ...' for file opening. Standardize variable names.
Use 'with ... as ...' for file opening. Standardize variable names.
Python
mit
Tribler/decentral-market
f75e245f461e57cc868ee5452c88aea92b6681bf
chainer/functions/parameter.py
chainer/functions/parameter.py
import numpy from chainer import function class Parameter(function.Function): """Function that outputs its weight array. This is a parameterized function that takes no input and returns a variable holding a shallow copy of the parameter array. Args: array: Initial parameter array. """ parameter_names = 'W', gradient_names = 'gW', def __init__(self, array): self.W = array self.gW = numpy.empty_like(array) def forward(self, x): return self.W, def backward(self, x, gy): self.gW += gy[0] return ()
import numpy from chainer import function from chainer.utils import type_check class Parameter(function.Function): """Function that outputs its weight array. This is a parameterized function that takes no input and returns a variable holding a shallow copy of the parameter array. Args: array: Initial parameter array. """ parameter_names = 'W', gradient_names = 'gW', def __init__(self, array): self.W = array self.gW = numpy.empty_like(array) def check_type_forward(self, in_types): type_check.expect(in_types.size() == 0) def forward(self, x): return self.W, def backward(self, x, gy): self.gW += gy[0] return ()
Add typecheck to Parameter function
Add typecheck to Parameter function
Python
mit
t-abe/chainer,chainer/chainer,pfnet/chainer,jnishi/chainer,sou81821/chainer,ikasumi/chainer,tigerneil/chainer,delta2323/chainer,1986ks/chainer,chainer/chainer,yanweifu/chainer,ronekko/chainer,muupan/chainer,truongdq/chainer,chainer/chainer,muupan/chainer,okuta/chainer,jnishi/chainer,anaruse/chainer,hvy/chainer,cupy/cupy,cupy/cupy,truongdq/chainer,ktnyt/chainer,keisuke-umezawa/chainer,jfsantos/chainer,ytoyama/yans_chainer_hackathon,masia02/chainer,niboshi/chainer,jnishi/chainer,kikusu/chainer,AlpacaDB/chainer,woodshop/chainer,keisuke-umezawa/chainer,jnishi/chainer,ktnyt/chainer,cupy/cupy,elviswf/chainer,kiyukuta/chainer,tkerola/chainer,Kaisuke5/chainer,keisuke-umezawa/chainer,rezoo/chainer,niboshi/chainer,kuwa32/chainer,kikusu/chainer,wavelets/chainer,benob/chainer,okuta/chainer,AlpacaDB/chainer,hidenori-t/chainer,okuta/chainer,woodshop/complex-chainer,wkentaro/chainer,t-abe/chainer,okuta/chainer,keisuke-umezawa/chainer,wkentaro/chainer,hvy/chainer,cemoody/chainer,ktnyt/chainer,benob/chainer,niboshi/chainer,wkentaro/chainer,laysakura/chainer,niboshi/chainer,tscohen/chainer,hvy/chainer,ysekky/chainer,cupy/cupy,hvy/chainer,aonotas/chainer,chainer/chainer,sinhrks/chainer,ktnyt/chainer,kashif/chainer,wkentaro/chainer,sinhrks/chainer,minhpqn/chainer,umitanuki/chainer
3cf9473bdf1714460478b4cd36a54b09b2a57173
lib/feedeater/validate.py
lib/feedeater/validate.py
"""Validate GTFS""" import os import mzgtfs.feed import mzgtfs.validation import task class FeedEaterValidate(task.FeedEaterTask): def run(self): # Validate feeds self.log("===== Feed: %s ====="%self.feedid) feed = self.registry.feed(self.feedid) filename = self.filename or os.path.join(self.workdir, '%s.zip'%feed.onestop()) report = os.path.join(self.workdir, '%s.html'%feed.onestop()) self.log("Validating: %s"%filename) gtfsfeed = mzgtfs.feed.Feed(filename) validator = mzgtfs.validation.ValidationReport() # gtfsfeed.validate(validator) gtfsfeed.validate_feedvalidator(validator, report=report) # validator.report() self.log("Validation report:") if not validator.exceptions: self.log("No errors") for e in validator.exceptions: self.log("%s: %s"%(e.source, e.message)) if __name__ == "__main__": task = FeedEaterValidate.from_args() task.run()
"""Validate GTFS""" import os import mzgtfs.feed import mzgtfs.validation import task class FeedEaterValidate(task.FeedEaterTask): def __init__(self, *args, **kwargs): super(FeedEaterValidate, self).__init__(*args, **kwargs) self.feedvalidator = kwargs.get('feedvalidator') def parser(self): parser = super(FeedEaterValidate, self).parser() parser.add_argument( '--feedvalidator', help='Path to feedvalidator.py' ) return parser def run(self): # Validate feeds self.log("===== Feed: %s ====="%self.feedid) feed = self.registry.feed(self.feedid) filename = self.filename or os.path.join(self.workdir, '%s.zip'%feed.onestop()) report = os.path.join(self.workdir, '%s.html'%feed.onestop()) self.log("Validating: %s"%filename) gtfsfeed = mzgtfs.feed.Feed(filename) validator = mzgtfs.validation.ValidationReport() # gtfsfeed.validate(validator) gtfsfeed.validate_feedvalidator( validator, feedvalidator=self.feedvalidator, report=report, ) # validator.report() self.log("Validation report:") if not validator.exceptions: self.log("No errors") for e in validator.exceptions: self.log("%s: %s"%(e.source, e.message)) if __name__ == "__main__": task = FeedEaterValidate.from_args() task.run()
Add --feedvaldiator option to validator
Add --feedvaldiator option to validator
Python
mit
transitland/transitland-datastore,transitland/transitland-datastore,transitland/transitland-datastore,brechtvdv/transitland-datastore,brechtvdv/transitland-datastore,brechtvdv/transitland-datastore
d1174017c6b282aa1d808b784ffde8a3d3190472
fabfile.py
fabfile.py
# -*- coding: utf-8 -*- """Generic Fabric-commands which should be usable without further configuration""" from fabric.api import * from os.path import dirname, split, abspath import os import sys import glob # Hacking our way into __init__.py of current package current_dir = dirname(abspath(__file__)) sys_path, package_name = split(current_dir) sys.path.append(sys_path) __import__(package_name, globals(), locals(), [], -1) __package__ = package_name from . import * env.roledefs = hostinfo.load_roledefs()
# -*- coding: utf-8 -*- """Generic Fabric-commands which should be usable without further configuration""" from fabric.api import * from os.path import dirname, split, abspath import os import sys import glob # Hacking our way into __init__.py of current package current_dir = dirname(abspath(__file__)) sys_path, package_name = split(current_dir) sys.path.append(sys_path) __import__(package_name, globals(), locals(), [], -1) __package__ = package_name from . import * try: open("roledefs.pickle") env.roledefs = hostinfo.load_roledefs() except IOError: pass
Allow running tasks even if roledefs.pickle is missing
Allow running tasks even if roledefs.pickle is missing Signed-off-by: Samuli Seppänen <be49b59234361de284476e9a2215fb6477f46673@openvpn.net>
Python
bsd-2-clause
mattock/fabric,mattock/fabric
3f394e47841b2d9e49554b21c67b06a46f99f25c
celery_app.py
celery_app.py
# -*- encoding: utf-8 -*- import config import logging from celery.schedules import crontab from lazyblacksmith.app import create_app from lazyblacksmith.extension.celery_app import celery_app # disable / enable loggers we want logging.getLogger('pyswagger').setLevel(logging.ERROR) app = create_app(config) app.app_context().push() celery_app.init_app(app) #celery_app.conf.broker_url = config.broker_url celery_app.conf.beat_schedule.update({ 'character-task-spawner': { 'task': 'schedule.character_task_spawner', 'schedule': crontab(minute='*'), }, 'universe-task-spawner': { 'task': 'schedule.universe_task_spawner', 'schedule': crontab(minute='*/30'), }, }) celery_app.conf.imports = [ 'lazyblacksmith.tasks.task_spawner', 'lazyblacksmith.tasks.market.adjusted_price', 'lazyblacksmith.tasks.market.market_order', 'lazyblacksmith.tasks.industry.indexes', 'lazyblacksmith.tasks.character.skills', 'lazyblacksmith.tasks.character.blueprints', ]
# -*- encoding: utf-8 -*- import config import logging from celery.schedules import crontab from lazyblacksmith.app import create_app from lazyblacksmith.extension.celery_app import celery_app # disable / enable loggers we want logging.getLogger('pyswagger').setLevel(logging.ERROR) app = create_app(config) app.app_context().push() celery_app.init_app(app) #celery_app.conf.broker_url = config.broker_url celery_app.conf.beat_schedule.update({ 'character-task-spawner': { 'task': 'schedule.character_task_spawner', 'schedule': crontab(minute='*'), }, 'universe-task-spawner': { 'task': 'schedule.universe_task_spawner', 'schedule': crontab(minute='*/30'), }, }) celery_app.conf.imports = [ 'lazyblacksmith.tasks.task_spawner', 'lazyblacksmith.tasks.market.adjusted_price', 'lazyblacksmith.tasks.market.market_order', 'lazyblacksmith.tasks.industry.indexes', 'lazyblacksmith.tasks.character.skills', 'lazyblacksmith.tasks.character.blueprints', 'lazyblacksmith.tasks.corporation.blueprints', ]
Add corporation task in celery data
Add corporation task in celery data
Python
bsd-3-clause
Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith,Kyria/LazyBlacksmith
9d94a753c4824df210753996edaa9f7910df5fa8
tests/test_sample_app.py
tests/test_sample_app.py
import pytest @pytest.fixture def app(): import sys sys.path.append('.') from sample_app import create_app return create_app() @pytest.fixture def client(app): return app.test_client() def test_index(client): client.get('/')
import pytest @pytest.fixture def app(): import sys sys.path.append('.') from sample_app import create_app return create_app() @pytest.fixture def client(app): return app.test_client() def test_index(client): resp = client.get('/') assert resp.status == 200
Check for status code of 200 in sample app.
Check for status code of 200 in sample app.
Python
apache-2.0
JingZhou0404/flask-bootstrap,scorpiovn/flask-bootstrap,suvorom/flask-bootstrap,BeardedSteve/flask-bootstrap,ser/flask-bootstrap,suvorom/flask-bootstrap,victorbjorklund/flask-bootstrap,BeardedSteve/flask-bootstrap,ser/flask-bootstrap,livepy/flask-bootstrap,victorbjorklund/flask-bootstrap,dingocuster/flask-bootstrap,Coxious/flask-bootstrap,Coxious/flask-bootstrap,vishnugonela/flask-bootstrap,moha24/flask-bootstrap,eshijia/flask-bootstrap,dingocuster/flask-bootstrap,victorbjorklund/flask-bootstrap,eshijia/flask-bootstrap,vishnugonela/flask-bootstrap,JingZhou0404/flask-bootstrap,ser/flask-bootstrap,scorpiovn/flask-bootstrap,vishnugonela/flask-bootstrap,livepy/flask-bootstrap,JingZhou0404/flask-bootstrap,dingocuster/flask-bootstrap,livepy/flask-bootstrap,moha24/flask-bootstrap,Coxious/flask-bootstrap,BeardedSteve/flask-bootstrap,eshijia/flask-bootstrap,suvorom/flask-bootstrap,scorpiovn/flask-bootstrap,moha24/flask-bootstrap
f48afc99a7e7aa076aa27b33deda824b5509bab2
test_qt_helpers_qt5.py
test_qt_helpers_qt5.py
from __future__ import absolute_import, division, print_function import os import sys import pytest from mock import MagicMock # At the moment it is not possible to have PyQt5 and PyQt4 installed # simultaneously because one requires the Qt4 libraries while the other # requires the Qt5 libraries class TestQT5(object): def setup_class(cls): print('-' * 72) os.environ['QT_API'] = 'pyqt5' import qt_helpers as qt def _load_qt5(self): import qt_helpers as qt def test_main_import_qt5(self): self._load_qt5() from qt_helpers import QtCore from qt_helpers import QtGui from PyQt5 import QtCore as core, QtGui as gui assert QtCore is core assert QtGui is gui def test_load_ui_qt5(self): self._load_qt5() from qt_helpers import load_ui, get_qapp qpp = get_qapp() load_ui('test.ui') def test_submodule_import_qt5(self): self._load_qt5() from qt_helpers.QtGui import QMessageBox from qt_helpers.QtCore import Qt from PyQt5.QtWidgets import QMessageBox as qmb from PyQt5.QtCore import Qt as _qt assert qmb is QMessageBox assert _qt is Qt def test_submodule_import_pyside(self): self._load_pyside() from qt_helpers.QtGui import QMessageBox from qt_helpers.QtCore import Qt from PySide.QtGui import QMessageBox as qmb from PySide.QtCore import Qt as _qt assert qmb is QMessageBox assert _qt is Qt
from __future__ import absolute_import, division, print_function import os import sys import pytest from mock import MagicMock # At the moment it is not possible to have PyQt5 and PyQt4 installed # simultaneously because one requires the Qt4 libraries while the other # requires the Qt5 libraries class TestQT5(object): def setup_class(cls): os.environ['QT_API'] = 'pyqt5' import qt_helpers as qt def _load_qt5(self): import qt_helpers as qt def test_main_import_qt5(self): self._load_qt5() from qt_helpers import QtCore from qt_helpers import QtGui from PyQt5 import QtCore as core, QtGui as gui assert QtCore is core assert QtGui is gui # At the moment, PyQt5 does not run correctly on Travis so we can't run # this without causing an Abort Trap. # def test_load_ui_qt5(self): # self._load_qt5() # from qt_helpers import load_ui, get_qapp # qpp = get_qapp() # load_ui('test.ui') def test_submodule_import_qt5(self): self._load_qt5() from qt_helpers.QtGui import QMessageBox from qt_helpers.QtCore import Qt from PyQt5.QtWidgets import QMessageBox as qmb from PyQt5.QtCore import Qt as _qt assert qmb is QMessageBox assert _qt is Qt
Comment out problematic test for now
Comment out problematic test for now
Python
bsd-3-clause
glue-viz/qt-helpers
15fe43d0be3c665c09c898864bd2815b39fbc8a5
toolbox/config/common.py
toolbox/config/common.py
CURRENT_MIN_VERSION = 'v3.0' CURRENT_MAX_VERSION = 'v3.1' ACTIVE_REMOTE_BRANCHES = ['master', 'staging', 'demo'] DEFAULT_COMMAND_TIMEOUT = 60 * 60 CONTROLLER_PROTOCOL = 'controller' PROTOCOLS = {'udp', 'tcp', 'ssh', 'http', 'ws', CONTROLLER_PROTOCOL} CRP_TYPES = {'docker', 'gce', 'static'}
CURRENT_MIN_VERSION = 'v3.0' CURRENT_MAX_VERSION = 'v3.1' # Once the Next platform supports challenge versions this can be extended. ACTIVE_REMOTE_BRANCHES = ['master'] DEFAULT_COMMAND_TIMEOUT = 60 * 60 CONTROLLER_PROTOCOL = 'controller' PROTOCOLS = {'udp', 'tcp', 'ssh', 'http', 'ws', CONTROLLER_PROTOCOL} CRP_TYPES = {'docker', 'gce', 'static'}
Change v3 active branches to
Change v3 active branches to [master] Extend the list when it becomes relevant. The old platform shall use the legacy branch.
Python
apache-2.0
avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox,avatao-content/challenge-toolbox
6bcc15b6d018560ebc368efcfc2c2c7d435c7dcc
strictify-coqdep.py
strictify-coqdep.py
#!/usr/bin/env python2 import sys, subprocess import re if __name__ == '__main__': p = subprocess.Popen(sys.argv[1:], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() reg = re.compile(r'''Warning(: in file .*?,\s*required library .*? matches several files in path)''') if reg.search(stderr): sys.stderr.write(reg.sub(r'Error\1', stderr)) sys.exit(1) sys.stderr.write(stderr) sys.exit(p.returncode)
#!/usr/bin/env python3 import sys, subprocess import re if __name__ == '__main__': p = subprocess.Popen(sys.argv[1:], stderr=subprocess.PIPE) (stdout, stderr) = p.communicate() stderr = stderr.decode('utf-8') reg = re.compile(r'''Warning(: in file .*?,\s*required library .*? matches several files in path)''') if reg.search(stderr): sys.stderr.write(reg.sub(r'Error\1', stderr)) sys.exit(1) sys.stderr.write(stderr) sys.exit(p.returncode)
Switch from python2 to python3
Switch from python2 to python3 Closes #6
Python
mit
JasonGross/coq-scripts,JasonGross/coq-scripts
0415bc9e4a174b7cebb634a449473131fe16b3b2
bulbs/content/management/commands/reindex_content.py
bulbs/content/management/commands/reindex_content.py
from django.core.management.base import NoArgsCommand from bulbs.content.models import Content class Command(NoArgsCommand): help = 'Runs Content.index on all content.' def handle(self, **options): num_processed = 0 content_count = Content.objects.all().count() chunk_size = 10 while num_processed < content_count: for content in Content.objects.all()[num_processed:num_processed + chunk_size]: content.index() num_processed += 1 if not num_processed % 100: print 'Processed %d content items' % num_processed
from django.core.management.base import NoArgsCommand from bulbs.content.models import Content class Command(NoArgsCommand): help = 'Runs Content.index on all content.' def handle(self, **options): num_processed = 0 content_count = Content.objects.all().count() chunk_size = 10 while num_processed < content_count: for content in Content.objects.all().order_by('id')[num_processed:num_processed + chunk_size]: content.index() num_processed += 1 if not num_processed % 100: print 'Processed %d content items' % num_processed
Add ordering to queryset in reindex admin command
Add ordering to queryset in reindex admin command
Python
mit
theonion/django-bulbs,theonion/django-bulbs,pombredanne/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,pombredanne/django-bulbs,theonion/django-bulbs
a9a55f87abc0a26d41e3fa3091f2f2efad7a2543
autoencoder/encode.py
autoencoder/encode.py
import numpy as np from .network import autoencoder, get_encoder from .io import read_records, load_model def encode(input_file, output_file, log_dir): X = read_records(input_file) size = X.shape[1] model = load_model(log_dir) encoder = get_encoder(model) predictions = encoder.predict(X) np.savetxt(output_file, predictions) def encode_with_args(args): encode(input_file = args.dataset, output_file = args.outputfile, log_dir = args.logdir)
import numpy as np from .network import autoencoder, get_encoder from .io import read_records, load_model def encode(input_file, output_file, log_dir): X = read_records(input_file) size = X.shape[1] model = load_model(log_dir) assert model.input_shape[1] == size, \ 'Input size of data and pretrained model must be same' encoder = get_encoder(model) predictions = encoder.predict(X) np.savetxt(output_file, predictions) def encode_with_args(args): encode(input_file = args.dataset, output_file = args.outputfile, log_dir = args.logdir)
Check input dimensions of pretrained model and input file
Check input dimensions of pretrained model and input file
Python
apache-2.0
theislab/dca,theislab/dca,theislab/dca
68d7b3995c49abd8f7096f9498bdbddf6b696d81
back_office/models.py
back_office/models.py
from django.db import models from django.utils.translation import ugettext as _ from Django.contrib.auth.models import User FEMALE = 'F' MALE = 'M' class Teacher(models.Model): """ halaqat teachers informations """ GENDET_CHOICES = ( (MALE, _('Male')), (FEMALE, _('Female')), ) name = models.CharField(max_length=100, verbose_name=_('Name')) gender = models.CharField(max_length=1, verbose_name=_('Gender'), choices=GENDET_CHOICES) civil_id = models.CharField(max_length=12, verbose_name=_('Civil ID')) phone_number = models.CharField(max_length=15, verbose_name=_('Phone Number')) job_title = models.CharField(max_length=15, verbose_name=_('Title')) user = models.OneToOneField(to=User, related_name='teachers')
from django.db import models from django.utils.translation import ugettext as _ from Django.contrib.auth.models import User FEMALE = 'F' MALE = 'M' class Teacher(models.Model): """ halaqat teachers informations """ GENDET_CHOICES = ( (MALE, _('Male')), (FEMALE, _('Female')), ) name = models.CharField(max_length=100, verbose_name=_('Name')) gender = models.CharField(max_length=1, verbose_name=_('Gender'), choices=GENDET_CHOICES) civil_id = models.CharField(max_length=12, verbose_name=_('Civil ID')) phone_number = models.CharField(max_length=15, verbose_name=_('Phone Number')) job_title = models.CharField(max_length=15, verbose_name=_('Title')) enabled = models.BooleanField(default=True) user = models.OneToOneField(to=User, related_name='teachers') def enable(self): """ Enable teacher profile :return: """ self.enabled = True self.save() def disable(self): """ Disable teacher profile :return: """ self.enabled = False self.save()
Add enabled field to teacher model
Add enabled field to teacher model
Python
mit
EmadMokhtar/halaqat,EmadMokhtar/halaqat,EmadMokhtar/halaqat
7bdd06f568856c010a4eacb1e70c262fa4c3388c
bin/trigger_upload.py
bin/trigger_upload.py
#!/bin/env python # -*- coding: utf8 -*- """ Triggers an upload process with the specified raw.xz URL. Useful for manually triggering Fedimg jobs. """ import logging import logging.config import multiprocessing.pool import sys import fedmsg import fedmsg.config import fedimg import fedimg.services from fedimg.services.ec2 import EC2Service, EC2ServiceException import fedimg.uploader from fedimg.util import virt_types_from_url if len(sys.argv) != 2: print 'Usage: trigger_upload.py <rawxz_image_url>' sys.exit(1) logging.config.dictConfig(fedmsg.config.load_config()['logging']) log = logging.getLogger('fedmsg') upload_pool = multiprocessing.pool.ThreadPool(processes=4) url = sys.argv[1] fedimg.uploader.upload(upload_pool, [url])
#!/bin/env python # -*- coding: utf8 -*- """ Triggers an upload process with the specified raw.xz URL. """ import logging import logging.config import multiprocessing.pool import sys import fedmsg import fedmsg.config import fedimg import fedimg.services from fedimg.services.ec2 import EC2Service, EC2ServiceException import fedimg.uploader from fedimg.util import virt_types_from_url if len(sys.argv) != 3: print 'Usage: trigger_upload.py <rawxz_image_url> <compose_id>' sys.exit(1) logging.config.dictConfig(fedmsg.config.load_config()['logging']) log = logging.getLogger('fedmsg') upload_pool = multiprocessing.pool.ThreadPool(processes=4) url = sys.argv[1] compose_id = sys.argv[2] compose_meta = { 'compose_id': compose_id } fedimg.uploader.upload(upload_pool, [url], compose_meta=compose_meta)
Fix the manual upload trigger script
scripts: Fix the manual upload trigger script Signed-off-by: Sayan Chowdhury <5f0367a2b3b757615b57f51d912cf16f2c0ad827@gmail.com>
Python
agpl-3.0
fedora-infra/fedimg,fedora-infra/fedimg
b503a6e893d71b96b3737e567dde16f110db5fc7
src/prepare_turk_batch.py
src/prepare_turk_batch.py
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ """ import os import sys import csv import json import html def do_command(args): assert os.path.exists(args.input) writer = csv.writer(args.output) writer.writerow(["document"]) for fname in os.listdir(args.input): if not fname.endswith('.json'): continue with open(os.path.join(args.input, fname)) as f: doc = json.load(f) writer.writerow([html.escape(json.dumps(doc))]) if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description='') parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files") parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk") parser.set_defaults(func=do_command) #subparsers = parser.add_subparsers() #command_parser = subparsers.add_parser('command', help='' ) #command_parser.set_defaults(func=do_command) ARGS = parser.parse_args() if ARGS.func is None: parser.print_help() sys.exit(1) else: ARGS.func(ARGS)
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ """ import os import sys import csv import json import html def do_command(args): assert os.path.exists(args.input) writer = csv.writer(args.output) writer.writerow(["document"]) for i, fname in enumerate(os.listdir(args.input)): if not fname.endswith('.json'): continue with open(os.path.join(args.input, fname)) as f: doc = json.load(f) for j, (prompt, time_range) in enumerate(doc["prompts"]): doc["id"] = "doc-{}-{}".format(i,j) doc["prompt"] = prompt doc["recommendedMinWordCount"] = time_range[0] doc["recommendedMaxWordCount"] = time_range[1] writer.writerow([html.escape(json.dumps(doc))]) if __name__ == "__main__": import argparse parser = argparse.ArgumentParser(description='') parser.add_argument('-i', '--input', type=str, default='../data/pilot', help="Directory with JSON files") parser.add_argument('-o', '--output', type=argparse.FileType('w'), default=sys.stdout, help="A CSV to use with MTurk") parser.set_defaults(func=do_command) #subparsers = parser.add_subparsers() #command_parser = subparsers.add_parser('command', help='' ) #command_parser.set_defaults(func=do_command) ARGS = parser.parse_args() if ARGS.func is None: parser.print_help() sys.exit(1) else: ARGS.func(ARGS)
Prepare data with the new fiields and prompts
Prepare data with the new fiields and prompts
Python
mit
arunchaganty/briefly,arunchaganty/briefly,arunchaganty/briefly,arunchaganty/briefly
dfb79b9f148663617048a3c2a310b2a66a1c7103
marxbot.py
marxbot.py
from errbot import BotPlugin, botcmd, webhook class MarxBot(BotPlugin): """Your daily dose of Marx""" min_err_version = '1.6.0' @botcmd(split_args_with=None) def marx(self, message, args): return "what a guy"
from errbot import BotPlugin, botcmd, webhook import pytumblr class MarxBot(BotPlugin): """Your daily dose of Marx""" min_err_version = '1.6.0' tumblr_client = None def activate(self): super().activate() if self.config is None or self.config["consumer_key"] == "" or self.config["consumer_secret"] == "": self.warn_admins("MarxBot must be configured with OAuth consumer key/secret") return if self.config["oauth_token"] == "" or self.config["oauth_token_secret"] == "": self.warn_admins("MarxBot must be configured with OAuth token key/secret (for now)") return self.tumblr_client = pytumblr.TumblrRestClient( self.config["consumer_key"], self.config["consumer_secret"], self.config["oauth_token"], self.config["oauth_token_secret"]) def get_configuration_template(self): return {"consumer_key": "", "consumer_secret": "", "oauth_token": "", "oauth_token_secret": ""} @botcmd def marx(self, message, args): if self.tumblr_client is None: return "MarxBot must be configured and restarted to serve quotes." post = self.tumblr_client.posts("dailymarx", limit=1)['posts'][0] self['latest_post'] = post return str(post['text'])
Use the Tumblr API to get Marx quotes
Use the Tumblr API to get Marx quotes
Python
mit
AbigailBuccaneer/err-dailymarx
19354bd82a89383d795cdada8d6af78e8f12eed8
src/server/test_client.py
src/server/test_client.py
#!/usr/bin/env python # Echo client program import socket import sys from RemoteFunctionCaller import * from SocketNetworker import SocketNetworker HOST = 'localhost' # The remote host PORT = 8553 # The same port as used by the server s = None for res in socket.getaddrinfo(HOST, PORT, socket.AF_UNSPEC, socket.SOCK_STREAM): af, socktype, proto, canonname, sa = res try: s = socket.socket(af, socktype, proto) except OSError as msg: s = None continue try: s.connect(sa) except OSError as msg: s.close() s = None continue break if s is None: print('could not open socket') sys.exit(1) nw = SocketNetworker(s) caller = RemoteFunctionCaller(nw) try: caller.setData("test", "success") print(caller.getData("test", default="failish")) except TimeoutError: print("Timed out.") nw.close()
#!/usr/bin/env python # Echo client program import socket import sys from RemoteFunctionCaller import * from SocketNetworker import SocketNetworker HOST = 'localhost' # The remote host PORT = 8553 # The same port as used by the server s = None for res in socket.getaddrinfo(HOST, PORT, socket.AF_UNSPEC, socket.SOCK_STREAM): af, socktype, proto, canonname, sa = res try: s = socket.socket(af, socktype, proto) except OSError as msg: s = None continue try: s.connect(sa) except OSError as msg: s.close() s = None continue break if s is None: print('could not open socket') sys.exit(1) nw = SocketNetworker(s) caller = RemoteFunctionCaller(nw) try: print(caller.SharedClientDataStore__set("test", "success")) print(caller.SharedClientDtaStore__get("test", default="failish")) except TimeoutError: print("Timed out.") nw.close()
Update call method in test client
Update call method in test client
Python
mit
cnlohr/bridgesim,cnlohr/bridgesim,cnlohr/bridgesim,cnlohr/bridgesim
d097f773260d06b898ab70e99596a07b056a7cb3
ccdproc/__init__.py
ccdproc/__init__.py
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ The ccdproc package is a collection of code that will be helpful in basic CCD processing. These steps will allow reduction of basic CCD data as either a stand-alone processing or as part of a pipeline. """ # Affiliated packages may add whatever they like to this file, but # should keep this content at the top. # ---------------------------------------------------------------------------- from ._astropy_init import * # ---------------------------------------------------------------------------- # set up namespace, unless we are in setup... if not _ASTROPY_SETUP_: from .core import * from .ccddata import * from .combiner import *
# Licensed under a 3-clause BSD style license - see LICENSE.rst """ The ccdproc package is a collection of code that will be helpful in basic CCD processing. These steps will allow reduction of basic CCD data as either a stand-alone processing or as part of a pipeline. """ # Affiliated packages may add whatever they like to this file, but # should keep this content at the top. # ---------------------------------------------------------------------------- from ._astropy_init import * # ---------------------------------------------------------------------------- # set up namespace, unless we are in setup... if not _ASTROPY_SETUP_: from .core import * from .ccddata import * from .combiner import * from .image_collection import *
Add ImageFileCollection to ccdproc namespace
Add ImageFileCollection to ccdproc namespace
Python
bsd-3-clause
indiajoe/ccdproc,mwcraig/ccdproc,astropy/ccdproc,evertrol/ccdproc,crawfordsm/ccdproc,pulsestaysconstant/ccdproc
16b21e6e3ddf0e26cb1412bffbe2be4acca1deb6
app/readers/basereader.py
app/readers/basereader.py
from lxml import etree from app import formatting def get_namespace_from_top(fn, key='xmlns'): ac, el = next(etree.iterparse(fn)) return {'xmlns': el.nsmap[key]} def generate_tags_multiple_files(input_files, tag, ignore_tags, ns=None): """ Calls xmltag generator for multiple files. """ # Deprecate? for fn in input_files: return generate_xmltags(fn, tag, ignore_tags, ns) def generate_tags_multiple_files_strings(input_files, ns, tag, ignore_tags): """ Creates stringified xml output of elements with certain tag. """ for el in generate_tags_multiple_files(input_files, tag, ignore_tags, ns): yield formatting.string_and_clear(el, ns) def generate_xmltags(fn, tag, ignore_tags, ns=None): """ Base generator for percolator xml psm, peptide, protein output, as well as for mzML, mzIdentML. ignore_tags are the ones that are not cleared when met by parser. """ if ns is None: xmlns = '' else: xmlns = '{%s}' % ns['xmlns'] for ac, el in etree.iterparse(fn): if el.tag == '{0}{1}'.format(xmlns, tag): yield el elif el.tag in ['{0}{1}'.format(xmlns, x) for x in ignore_tags]: formatting.clear_el(el)
from lxml import etree import itertools from app import formatting def get_namespace_from_top(fn, key='xmlns'): ac, el = next(etree.iterparse(fn)) return {'xmlns': el.nsmap[key]} def generate_tags_multiple_files(input_files, tag, ignore_tags, ns=None): """ Calls xmltag generator for multiple files. """ return itertools.chain.from_iterable([generate_xmltags( fn, tag, ignore_tags, ns) for fn in input_files]) def generate_tags_multiple_files_strings(input_files, ns, tag, ignore_tags): """ Creates stringified xml output of elements with certain tag. """ for el in generate_tags_multiple_files(input_files, tag, ignore_tags, ns): yield formatting.string_and_clear(el, ns) def generate_xmltags(fn, tag, ignore_tags, ns=None): """ Base generator for percolator xml psm, peptide, protein output, as well as for mzML, mzIdentML. ignore_tags are the ones that are not cleared when met by parser. """ if ns is None: xmlns = '' else: xmlns = '{%s}' % ns['xmlns'] for ac, el in etree.iterparse(fn): if el.tag == '{0}{1}'.format(xmlns, tag): yield el elif el.tag in ['{0}{1}'.format(xmlns, x) for x in ignore_tags]: formatting.clear_el(el)
Return chained iterators instead of only first of multiple iterators
Return chained iterators instead of only first of multiple iterators
Python
mit
glormph/msstitch
7d8f291dea725c28e4d904a3195fde46a3418925
parafermions/tests/test_peschel_emery.py
parafermions/tests/test_peschel_emery.py
#!/usr/bin/env python """ Test the MPS class """ import unittest import numpy as np import parafermions as pf class Test(unittest.TestCase): def test_pe_degeneracy(self): # should initialise with all zeros N, l = 8, 0.2 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) assert(np.sum(d[1:11:2]-d[:11:2]) < 1e-10) N, l = 8, 1.0 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) assert((d[1]-d[0]) < 1e-15) assert(np.sum(d[1:11:2]-d[:11:2]) > 1e-2)
#!/usr/bin/env python """ Test the MPS class """ import unittest import numpy as np import parafermions as pf class Test(unittest.TestCase): def test_pe_degeneracy(self): # should initialise with all zeros N, l = 8, 0.0 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) # check that all eigenvalues are degenerate assert(np.sum(d[1:10:2]-d[:10:2]) < 1e-10) N, l = 8, 1.0 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) # check only the ground state eigenvalues are degenerate assert((d[1]-d[0]) < 1e-15) assert(np.sum(d[1:10:2]-d[:10:2]) > 1e-2)
Update slicing so that array sizes match
Update slicing so that array sizes match
Python
bsd-2-clause
nmoran/pf_resonances
5bc1731288b76978fa66acab7387a688cea76b4c
wallabag/wallabag_add.py
wallabag/wallabag_add.py
""" Module for adding new entries """ import re import api import conf def add(target_url, title=None, star=False, read=False): conf.load() valid_url = False if not re.compile("(?i)https?:\\/\\/.+").match(target_url): for protocol in "https://", "http://": if api.is_valid_url("{0}{1}".format(protocol, target_url)): target_url = "{0}{1}".format(protocol, target_url) valid_url = True break else: valid_url = api.is_valid_url(target_url) if not valid_url: print("Error: Invalid url to add.") print() exit(-1) try: request = api.api_add_entry(target_url, title, star, read) if(request.hasError()): print("Error: {0} - {1}".format(request.error_text, request.error_description)) exit(-1) else: print("Entry successfully added") exit(0) except api.OAuthException as e: print("Error: {0}".format(e.text)) print() exit(-1)
""" Module for adding new entries """ import re import api import conf import json def add(target_url, title=None, star=False, read=False): conf.load() valid_url = False if not re.compile("(?i)https?:\\/\\/.+").match(target_url): for protocol in "https://", "http://": if api.is_valid_url("{0}{1}".format(protocol, target_url)): target_url = "{0}{1}".format(protocol, target_url) valid_url = True break else: valid_url = api.is_valid_url(target_url) if not valid_url: print("Error: Invalid url to add.") print() exit(-1) try: request = api.api_entry_exists(target_url) if(request.hasError()): print("Error: {0} - {1}".format(request.error_text, request.error_description)) exit(-1) response = json.loads(request.response) print(response['exists']) if response['exists'] == True: print("The url was already saved.") exit(0) except api.OAuthException as e: print("Error: {0}".format(e.text)) print() exit(-1) try: request = api.api_add_entry(target_url, title, star, read) if(request.hasError()): print("Error: {0} - {1}".format(request.error_text, request.error_description)) exit(-1) else: print("Entry successfully added") exit(0) except api.OAuthException as e: print("Error: {0}".format(e.text)) print() exit(-1)
Check if an entry already exists before adding it
Check if an entry already exists before adding it
Python
mit
Nepochal/wallabag-cli