commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
7c5ce8e80bd4cf6d70288039673e7e597b19dd66 | fix problem with python 3.x | kivio/pysllo,kivio/python-structured-logging | pysllo/loggers/tracking_logger.py | pysllo/loggers/tracking_logger.py | # coding:utf-8
import logging
from .propagation_logger import PropagationLogger
from ..utils.tracer import Tracer, TraceContext
class TrackingLogger(PropagationLogger):
_tracer = Tracer()
_is_tracking_enable = False
def __init__(self, name, level=logging.NOTSET, propagation=False):
PropagationLogger.__init__(self, name, level, propagation)
self._trace_ctx = TraceContext(self)
@property
def trace(self):
return self._trace_ctx
@staticmethod
def _proper_extra(kwargs):
return kwargs
def enable_tracking(self, force_level=logging.DEBUG):
TrackingLogger._is_tracking_enable = True
self.force_level(force_level)
def _flush_tracer(self, reset_level_before=False, reset_level_after=False):
TrackingLogger._is_tracking_enable = False
if reset_level_before:
self.reset_level()
logs = TrackingLogger._tracer.dump_logs()
for log in logs:
level, msg, args, kwargs = log
self._log(level, msg, args, **kwargs)
if reset_level_after:
self.reset_level()
def disable_tracking(self):
self._flush_tracer(reset_level_before=True)
def exit_with_exc(self):
self._flush_tracer(reset_level_after=True)
def _log(self, level, msg, args, **kwargs):
kwargs = self._proper_extra(kwargs)
if TrackingLogger._is_tracking_enable:
TrackingLogger._tracer.log(level, msg, args, **kwargs)
else:
if self.isEnabledFor(level):
PropagationLogger._log(self, level, msg, args, **kwargs)
| # coding:utf-8
import logging
from .propagation_logger import PropagationLogger
from ..utils.tracer import Tracer, TraceContext
class TrackingLogger(PropagationLogger):
_tracer = Tracer()
_is_tracking_enable = False
def __init__(self, name, level=logging.NOTSET, propagation=False):
PropagationLogger.__init__(self, name, level, propagation)
self._trace_ctx = TraceContext(self)
@property
def trace(self):
return self._trace_ctx
@staticmethod
def _proper_extra(kwargs):
return kwargs
def enable_tracking(self, force_level=logging.DEBUG):
TrackingLogger._is_tracking_enable = True
self.force_level(force_level)
def _flush_tracer(self, reset_level_before=False, reset_level_after=False):
TrackingLogger._is_tracking_enable = False
if reset_level_before:
self.reset_level()
logs = TrackingLogger._tracer.dump_logs()
map(lambda log: self.log(*log), logs)
if reset_level_after:
self.reset_level()
def disable_tracking(self):
self._flush_tracer(reset_level_before=True)
def exit_with_exc(self):
self._flush_tracer(reset_level_after=True)
def _log(self, level, msg, args, **kwargs):
kwargs = self._proper_extra(kwargs)
if TrackingLogger._is_tracking_enable:
TrackingLogger._tracer.log(level, msg, args, **kwargs)
else:
PropagationLogger._log(self, level, msg, args, **kwargs)
| bsd-3-clause | Python |
c2e4f9055666043afee888f8feab69978da4bc07 | Add positional arg for specifying the emote name. | d6e/emotion | emote/emote.py | emote/emote.py | """ A simple CLI tool for quickly copying common emoticon/emoji to your
clipboard. """
import pyperclip
import json
import sys
import argparse
# TODO: Read from an env var and a harcoded .dotfile
with open("mapping.json") as f:
emotes = json.load(f)
def parse_arguments():
parser = argparse.ArgumentParser(description=sys.modules[__name__].__doc__)
parser.add_argument('-l','--list', action="store_true",
help="List all available emotes.")
parser.add_argument("name", type=str, help="The name of the emote.")
# Print help if no cli args are specified.
if len(sys.argv) < 2:
parser.print_help()
sys.exit(0)
return parser.parse_args()
def list_emotes():
print [e for e in emotes.keys()]
print [e for e in emotes.values()]
def main():
args = parse_arguments()
if args.list:
list_emotes()
if args.name:
try:
print emotes[args.name]
except KeyError:
print("That emote does not exist. You can see all existing emotes "
"with the command: `emote -l`.")
if __name__ == "__main__":
main()
| """ A simple CLI tool for quickly copying common emoticon/emoji to your
clipboard. """
import pyperclip
import json
import sys
import argparse
with open("mapping.json") as f:
emotes = json.load(f)
def parse_arguments():
parser = argparse.ArgumentParser(
description=sys.modules[__name__].__doc__,
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('-l','--list', action="store_true",
help="List all available emotes.")
# Print help if no cli args are specified.
if len(sys.argv) < 2:
parser.print_help()
return parser.parse_args()
def list_emotes():
print [e for e in emotes.keys()]
print [e for e in emotes.values()]
def main():
args = parse_arguments()
if args.list:
list_emotes()
if __name__ == "__main__":
main()
| mit | Python |
2663ddc50405de8baf45fddab6ca414b124e758a | Create a BaseCustomException | Didero/DideRobot | CustomExceptions.py | CustomExceptions.py | class BaseCustomException(Exception):
"""
An abstract class that other custom exceptions should inherit from. Don't instantiate this directly
"""
def __init__(self, displayMessage):
"""
Create a new exception
:param displayMessage: An optional user-facing message, that will also be used as the exception's string representation
"""
super(BaseCustomException, self).__init__(displayMessage)
self.displayMessage = displayMessage
def __str__(self):
if self.displayMessage:
return self.displayMessage
return super(BaseCustomException, self).__str__()
def __repr__(self):
if self.displayMessage:
return "<{}> {}".format(self.__class__.__name__, self.displayMessage)
return super(BaseCustomException, self).__repr__()
class CommandException(BaseCustomException):
"""
This custom exception can be thrown by commands when something goes wrong during execution.
The parameter is a message sent to the source that called the command (a channel or a user)
"""
def __init__(self, displayMessage=None, shouldLogError=True):
"""
Create a new CommandException, to be thrown when something goes wrong during Command execution
:param displayMessage: An optional message to display to the IRC chat the bot is in
:param shouldLogError: Whether this exception should be logged to the program log. This is useful if it's a problem that needs to be solved, but can be set to False if it's a user input error
"""
super(CommandException, self).__init__(displayMessage)
self.shouldLogError = shouldLogError
class CommandInputException(CommandException):
"""
This custom exception can be raised when the input to some module or command is invalid or can't be parsed.
It is a more specific implementation of the CommandException, that doesn't log itself to the logfile
"""
def __init__(self, displayMessage):
"""
Create a new InputException. The display message will be shown to the user
:param displayMessage: The message to show to the user that called the command. This message should explain how the input should be correctly formatted
"""
super(CommandInputException, self).__init__(displayMessage, False)
| class CommandException(Exception):
"""
This custom exception can be thrown by commands when something goes wrong during execution.
The parameter is a message sent to the source that called the command (a channel or a user)
"""
def __init__(self, displayMessage=None, shouldLogError=True):
"""
Create a new CommandException, to be thrown when something goes wrong during Command execution
:param displayMessage: An optional message to display to the IRC chat the bot is in
:param shouldLogError: Whether this exception should be logged to the program log. This is useful if it's a problem that needs to be solved, but can be set to False if it's a user input error
"""
self.displayMessage = displayMessage
self.shouldLogError = shouldLogError
def __str__(self):
return self.displayMessage
class CommandInputException(CommandException):
"""
This custom exception can be raised when the input to some module or command is invalid or can't be parsed.
It is a more specific implementation of the CommandException, that doesn't log itself to the logfile
"""
def __init__(self, displayMessage):
"""
Create a new InputException. The display message will be shown to the user
:param displayMessage: The message to show to the user that called the command. This message should explain how the input should be correctly formatted
"""
super(CommandInputException, self).__init__(displayMessage, False)
| mit | Python |
41492440dccd2458c88cf008249dd9b3ef2db25c | Fix skipping in test/test_issue200.py | RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib,RDFLib/rdflib | test/test_issue200.py | test/test_issue200.py | #!/usr/bin/env python
import os
import rdflib
import unittest
import pytest
try:
from os import fork
from os import pipe
except ImportError:
pytestmark = pytest.mark.skip(
reason="No os.fork() and/or os.pipe() on this platform, skipping"
)
class TestRandomSeedInFork(unittest.TestCase):
def test_bnode_id_differs_in_fork(self):
"""Checks that os.fork()ed child processes produce a
different sequence of BNode ids from the parent process.
"""
r, w = os.pipe() # these are file descriptors, not file objects
pid = os.fork()
if pid:
pb1 = rdflib.term.BNode()
os.close(w) # use os.close() to close a file descriptor
r = os.fdopen(r) # turn r into a file object
txt = r.read()
os.waitpid(pid, 0) # make sure the child process gets cleaned up
r.close()
else:
os.close(r)
w = os.fdopen(w, "w")
cb = rdflib.term.BNode()
w.write(cb)
w.close()
os._exit(0)
assert txt != str(
pb1
), "Parent process BNode id: " + "%s, child process BNode id: %s" % (
txt,
str(pb1),
)
if __name__ == "__main__":
unittest.main()
| #!/usr/bin/env python
import os
import rdflib
import unittest
import pytest
try:
from os import fork
from os import pipe
except ImportError:
pytest.skip("No os.fork() and/or os.pipe() on this platform, skipping")
class TestRandomSeedInFork(unittest.TestCase):
def test_bnode_id_differs_in_fork(self):
"""Checks that os.fork()ed child processes produce a
different sequence of BNode ids from the parent process.
"""
r, w = os.pipe() # these are file descriptors, not file objects
pid = os.fork()
if pid:
pb1 = rdflib.term.BNode()
os.close(w) # use os.close() to close a file descriptor
r = os.fdopen(r) # turn r into a file object
txt = r.read()
os.waitpid(pid, 0) # make sure the child process gets cleaned up
r.close()
else:
os.close(r)
w = os.fdopen(w, "w")
cb = rdflib.term.BNode()
w.write(cb)
w.close()
os._exit(0)
assert txt != str(
pb1
), "Parent process BNode id: " + "%s, child process BNode id: %s" % (
txt,
str(pb1),
)
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | Python |
b0f5faa5d5ca18bbecbaa3ba16e629840fa63364 | Update template to have correct docstring format. | brenns10/social,brenns10/social | templates/template.py | templates/template.py | """
**ReplaceMe**
[Describe what your subclass does here.]
- In this bullet point, describe what your account matches.
- Here, describe what your account expands to.
- Demonstrate use on command line: ``key:value``.
"""
import re
#import requests
#from lxml import html
from . import Account
_URL_RE = re.compile(r'https?://(www.)?ReplaceMe.com/(?P<username>\w+)/?')
class ReplaceMeAccount(Account):
def __init__(self, username=None, url=None, **_):
if username is not None:
self._username = username
elif url is not None:
match = _URL_RE.fullmatch(url)
if match:
self._username = match.group('username')
else:
raise ValueError('No username match.')
else:
raise ValueError('No usable parameters')
def expand(self, info):
return [] # TODO: fill out the expand....
@staticmethod
def match(**options):
return (
'url' in options
and _URL_RE.fullmatch(options['url'])
)
@staticmethod
def shortname():
return 'ReplaceMe'
def __str__(self):
return 'ReplaceMeAccount(username=%r)' % self._username
def __hash__(self):
return hash(self._username)
def __eq__(self, other):
return type(other) is ReplaceMeAccount and self._username == other._username
| """ReplaceMe abstraction."""
import re
#import requests
#from lxml import html
from . import Account
_URL_RE = re.compile(r'https?://(www.)?ReplaceMe.com/(?P<username>\w+)/?')
class ReplaceMeAccount(Account):
def __init__(self, username=None, url=None, **_):
if username is not None:
self._username = username
elif url is not None:
match = _URL_RE.fullmatch(url)
if match:
self._username = match.group('username')
else:
raise ValueError('No username match.')
else:
raise ValueError('No usable parameters')
def expand(self, info):
"""
Return a generator of "breadcrumbs".
The info parameter is a dictionary you can read and update with
information about the person you're searching for. So you can scrape
names, birthdays, etc, and stick them into the dict! Creepy, right?
"""
return [] # TODO: fill out the expand....
@staticmethod
def match(**options):
"""
Return truthy if the breadcrumbs would match this type of account.
"""
return (
'url' in options
and _URL_RE.fullmatch(options['url'])
)
@staticmethod
def shortname():
"""
The name used on the CLI so that you don't have to type the class name.
"""
return 'ReplaceMe'
def __str__(self):
return 'ReplaceMeAccount(username=%r)' % self._username
def __hash__(self):
return hash(self._username)
def __eq__(self, other):
"""
If you want the search to terminate, make sure this is right!
"""
return type(other) is ReplaceMeAccount and self._username == other._username
| bsd-3-clause | Python |
5825204f6a4abe8f22bf3d2c22e2d8ba78f3c340 | fix qibuild foreach | aldebaran/qibuild,dmerejkowsky/qibuild,dmerejkowsky/qibuild,aldebaran/qibuild,aldebaran/qibuild,aldebaran/qibuild,dmerejkowsky/qibuild,dmerejkowsky/qibuild,dmerejkowsky/qibuild | python/qibuild/actions/foreach.py | python/qibuild/actions/foreach.py | ## Copyright (c) 2012 Aldebaran Robotics. All rights reserved.
## Use of this source code is governed by a BSD-style license that can be
## found in the COPYING file.
"""Run the same command on each buildable project.
Use -- to separate qibuild arguments from the arguments of the command.
For instance
qibuild --ignore-errors -- ls -l
"""
import qibuild.log
import qisrc
import qibuild
def configure_parser(parser):
"""Configure parser for this action """
qibuild.parsers.worktree_parser(parser)
parser.add_argument("command", metavar="COMMAND", nargs="+")
parser.add_argument("--ignore-errors", action="store_true", help="continue on error")
def do(args):
"""Main entry point"""
qiwt = qisrc.open_worktree(args.worktree)
logger = qibuild.log.get_logger(__name__)
for project in qiwt.buildable_projects:
logger.info("Running `%s` for %s", " ".join(args.command), project.src)
try:
qibuild.command.call(args.command, cwd=project.path)
except qibuild.command.CommandFailedException, err:
if args.ignore_errors:
logger.error(str(err))
continue
else:
raise
| ## Copyright (c) 2012 Aldebaran Robotics. All rights reserved.
## Use of this source code is governed by a BSD-style license that can be
## found in the COPYING file.
"""Run the same command on each buildable project.
Use -- to separate qibuild arguments from the arguments of the command.
For instance
qibuild --ignore-errors -- ls -l
"""
import qibuild.log
import qisrc
import qibuild
def configure_parser(parser):
"""Configure parser for this action """
qibuild.parsers.worktree_parser(parser)
parser.add_argument("command", metavar="COMMAND", nargs="+")
parser.add_argument("--ignore-errors", action="store_true", help="continue on error")
def do(args):
"""Main entry point"""
qiwt = qisrc.open_worktree(args.worktree)
logger = qibuild.log.get_logger(__name__)
for project in qiwt.buildable_projects:
logger.info("Running `%s` for %s", " ".join(args.command), project.name)
try:
qibuild.command.call(args.command, cwd=project.path)
except qibuild.command.CommandFailedException, err:
if args.ignore_errors:
logger.error(str(err))
continue
else:
raise
| bsd-3-clause | Python |
a4ee97af97ccbbe519941fef02638d46fea8a0ff | include the stream ERROR message on sys.exit | MSLNZ/msl-package-manager | conftest.py | conftest.py | import io
import sys
import logging
from msl.package_manager import (
pypi,
github,
utils,
)
stream = io.BytesIO()
handler = logging.StreamHandler(stream=stream)
orig_level = int(utils.log.level)
utils.set_log_level(logging.ERROR)
utils.log.addHandler(handler)
if not pypi():
pypi(update_cache=True)
value = stream.getvalue()
if value:
sys.exit('Cannot update PyPI cache\n{}'.format(value))
if not github():
github(update_cache=True)
value = stream.getvalue()
if value:
sys.exit('Cannot update GitHub cache\n{}'.format(value))
utils.log.removeHandler(handler)
utils.set_log_level(orig_level)
stream.close()
| import io
import sys
import logging
from msl.package_manager import (
pypi,
github,
utils,
)
stream = io.BytesIO()
handler = logging.StreamHandler(stream=stream)
orig_level = int(utils.log.level)
utils.set_log_level(logging.ERROR)
utils.log.addHandler(handler)
if not pypi():
pypi(update_cache=True)
if stream.getbuffer():
sys.exit('Cannot update PyPI cache')
if not github():
github(update_cache=True)
if stream.getbuffer():
sys.exit('Cannot update GitHub cache')
utils.log.removeHandler(handler)
utils.set_log_level(orig_level)
stream.close()
| mit | Python |
0ddecd41ddb569bb07ab07c1c474df605236d7b0 | Bump version to 2.7.dev | zwadar/pyqode.python,pyQode/pyqode.python,pyQode/pyqode.python | pyqode/python/__init__.py | pyqode/python/__init__.py | # -*- coding: utf-8 -*-
"""
pyqode.python is an extension of pyqode.core that brings support
for the python programming language. It provides a set of additional modes and
panels for the frontend and a series of worker for the backend (code
completion, documentation lookups, code linters, and so on...).
"""
__version__ = '2.7.dev0'
| # -*- coding: utf-8 -*-
"""
pyqode.python is an extension of pyqode.core that brings support
for the python programming language. It provides a set of additional modes and
panels for the frontend and a series of worker for the backend (code
completion, documentation lookups, code linters, and so on...).
"""
__version__ = '2.6.3'
| mit | Python |
9fa74daaace9c39ee6664526d89436d6918f4f79 | make return results consistent, test return results in other files | mitar/pychecker,mitar/pychecker | test_input/test17.py | test_input/test17.py | 'doc'
class X:
'should get a warning for returning value from __init__'
def __init__(self):
print 'howdy'
return 1
class Y:
'should get a warning for returning value from __init__'
def __init__(self, x):
if x == 0 :
return 0
if x == 1 :
return 53
return None
class Z:
'should not get a warning'
def __init__(self, x):
return
| 'doc'
class X:
'should get a warning for returning value from __init__'
def __init__(self):
print 'howdy'
return 1
class Y:
'should get a warning for returning value from __init__'
def __init__(self, x):
if x == 0 :
return 0
if x == 1 :
return []
return None
class Z:
'should not get a warning'
def __init__(self, x):
return
| bsd-3-clause | Python |
7203d989b2c126b23b44b61f9a2a980d4e7eacdd | Refactor build_update_query | AntoineToubhans/MongoTs | mongots/query.py | mongots/query.py | from datetime import datetime
AGGREGATION_KEYS = [
'',
'months.{month}.',
'months.{month}.days.{day}.',
'months.{month}.days.{day}.hours.{hour}.',
]
DATETIME_KEY = 'datetime'
def build_filter_query(timestamp, tags=None):
filters = tags or {}
filters[DATETIME_KEY] = datetime(timestamp.year, 1, 1)
return filters
def build_update_query(value, timestamp):
datetime_args = {
'month': str(timestamp.month - 1), # Array index: range from 0 to 11
'day': str(timestamp.day - 1), # Array index: range from 0 to 27 / 28 / 29 or 30
'hour': str(timestamp.hour), # range from 0 to 23
}
inc_keys = [
key.format(**datetime_args)
for key in AGGREGATION_KEYS
]
inc_update = {
'%s%s' % (inc_key, aggregate_type): value if aggregate_type is "sum" else 1
for inc_key in inc_keys
for aggregate_type in ['count', 'sum']
}
return {
'$inc': inc_update,
}
| from datetime import datetime
AGGR_MONTH_KEY = 'months'
AGGR_DAY_KEY = 'days'
AGGR_HOUR_KEY = 'hours'
DATETIME_KEY = 'datetime'
def build_filter_query(timestamp, tags=None):
filters = tags or {}
filters[DATETIME_KEY] = datetime(timestamp.year, 1, 1)
return filters
def build_update_query(value, timestamp):
month = str(timestamp.month - 1) # Array index: range from 0 to 11
day = str(timestamp.day - 1) # Array index: range from 0 to 27 / 28 / 29 or 30
hour = str(timestamp.hour) # range from 0 to 23
base_inc_keys = [
''.join([]),
''.join([AGGR_MONTH_KEY, '.', month, '.']),
''.join([AGGR_MONTH_KEY, '.', month, '.', AGGR_DAY_KEY, '.', day, '.']),
''.join([AGGR_MONTH_KEY, '.', month, '.', AGGR_DAY_KEY, '.', day, '.', AGGR_HOUR_KEY, '.', hour, '.']),
]
inc_update = {
'%s%s' % (base_inc_key, aggregate_type): value if aggregate_type is "sum" else 1
for base_inc_key in base_inc_keys
for aggregate_type in ['count', 'sum']
}
return {
'$inc': inc_update,
}
| mit | Python |
503fdc12533eed77c49e9fdb0cc855b9bfaa1449 | Correct exit code check for existing db | wfxiang08/changes,bowlofstew/changes,dropbox/changes,dropbox/changes,wfxiang08/changes,wfxiang08/changes,dropbox/changes,bowlofstew/changes,bowlofstew/changes,dropbox/changes,wfxiang08/changes,bowlofstew/changes | conftest.py | conftest.py | import os
import sys
root = os.path.abspath(os.path.join(os.path.dirname(__file__)))
if root not in sys.path:
sys.path.insert(0, root)
from alembic.config import Config
from alembic import command
alembic_cfg = Config(os.path.join(root, 'alembic.ini'))
# force model registration
from changes.config import create_app, db
app, app_context, connection, transaction = None, None, None, None
from flask_sqlalchemy import _SignallingSession
class SignallingSession(_SignallingSession):
def __init__(self, db, autocommit=False, autoflush=False, **options):
self.app = db.get_app()
self._model_changes = {}
bind = options.pop('bind', db.engine)
super(_SignallingSession, self).__init__(
autocommit=autocommit,
autoflush=autoflush,
bind=bind,
binds=db.get_binds(self.app), **options)
def pytest_sessionstart(session):
global app, app_context, connection, transaction
app = create_app(
TESTING=True,
SQLALCHEMY_DATABASE_URI='postgresql://localhost/test_changes',
)
app_context = app.test_request_context()
app_context.push()
# 9.1 does not support --if-exists
if os.system("psql -l | grep 'test_changes'") == 0:
assert not os.system('dropdb test_changes')
assert not os.system('createdb -E utf-8 test_changes')
command.upgrade(alembic_cfg, 'head')
db.session = db.create_scoped_session({
'autoflush': True,
})
connection = db.engine.connect()
transaction = connection.begin()
def pytest_sessionfinish():
transaction.rollback()
connection.close()
# TODO: mock session commands
def pytest_runtest_setup(item):
item.__sqla_transaction = db.session.begin_nested()
def pytest_runtest_teardown(item):
item.__sqla_transaction.rollback()
| import os
import sys
root = os.path.abspath(os.path.join(os.path.dirname(__file__)))
if root not in sys.path:
sys.path.insert(0, root)
from alembic.config import Config
from alembic import command
alembic_cfg = Config(os.path.join(root, 'alembic.ini'))
# force model registration
from changes.config import create_app, db
app, app_context, connection, transaction = None, None, None, None
from flask_sqlalchemy import _SignallingSession
class SignallingSession(_SignallingSession):
def __init__(self, db, autocommit=False, autoflush=False, **options):
self.app = db.get_app()
self._model_changes = {}
bind = options.pop('bind', db.engine)
super(_SignallingSession, self).__init__(
autocommit=autocommit,
autoflush=autoflush,
bind=bind,
binds=db.get_binds(self.app), **options)
def pytest_sessionstart(session):
global app, app_context, connection, transaction
app = create_app(
TESTING=True,
SQLALCHEMY_DATABASE_URI='postgresql://localhost/test_changes',
)
app_context = app.test_request_context()
app_context.push()
# 9.1 does not support --if-exists
if os.system("psql -l | grep 'test_changes'"):
assert not os.system('dropdb test_changes')
assert not os.system('createdb -E utf-8 test_changes')
command.upgrade(alembic_cfg, 'head')
db.session = db.create_scoped_session({
'autoflush': True,
})
connection = db.engine.connect()
transaction = connection.begin()
def pytest_sessionfinish():
transaction.rollback()
connection.close()
# TODO: mock session commands
def pytest_runtest_setup(item):
item.__sqla_transaction = db.session.begin_nested()
def pytest_runtest_teardown(item):
item.__sqla_transaction.rollback()
| apache-2.0 | Python |
dc79eff39cb97ea9a57be5eee8ac3ac225029d85 | Fix pam test. | kgiusti/gofer,credativ/gofer,jortel/gofer,jortel/gofer,kgiusti/gofer,credativ/gofer | test/unit/test_pam.py | test/unit/test_pam.py | # Copyright (c) 2014 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
import PAM as _PAM
from unittest import TestCase
from mock import patch
from gofer.pam import Query, PAM
USERID = 'test-user'
PASSWORD = 'test-password'
class TestQuery(TestCase):
def test_init(self):
query = Query(USERID, PASSWORD)
self.assertEqual(query.userid, USERID)
self.assertEqual(query.password, PASSWORD)
def test_call(self):
query_list = [
(None, _PAM.PAM_PROMPT_ECHO_ON),
(None, _PAM.PAM_PROMPT_ECHO_OFF)
]
query = Query(USERID, PASSWORD)
result = query(None, query_list)
self.assertEqual(result[0], (USERID, 0))
self.assertEqual(result[1], (PASSWORD, 0))
class TestPAM(TestCase):
def test_init(self):
self.assertEqual(PAM.SERVICE, 'passwd')
@patch('gofer.pam.Query')
@patch('gofer.pam._PAM.pam')
def test_authenticate(self, _pam, _query):
pam = PAM()
# default service
pam.authenticate(USERID, PASSWORD)
_pam().start.assert_called_with(PAM.SERVICE, USERID, _query())
_pam().authenticate.assert_called_with()
# specified service
pam.authenticate(USERID, PASSWORD, service='ssh')
_pam().start.assert_called_with('ssh', USERID, _query())
_pam().authenticate.assert_called_with()
| # Copyright (c) 2014 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public
# License as published by the Free Software Foundation; either version
# 2 of the License (GPLv2) or (at your option) any later version.
# There is NO WARRANTY for this software, express or implied,
# including the implied warranties of MERCHANTABILITY,
# NON-INFRINGEMENT, or FITNESS FOR A PARTICULAR PURPOSE. You should
# have received a copy of GPLv2 along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
import PAM as _PAM
from unittest import TestCase
from mock import patch
from gofer.pam import Query, PAM
USERID = 'test-user'
PASSWORD = 'test-password'
class TestQuery(TestCase):
def test_init(self):
query = Query(USERID, PASSWORD)
self.assertEqual(query.user, USERID)
self.assertEqual(query.password, PASSWORD)
def test_call(self):
query_list = [
(None, _PAM.PAM_PROMPT_ECHO_ON),
(None, _PAM.PAM_PROMPT_ECHO_OFF)
]
query = Query(USERID, PASSWORD)
result = query(None, query_list)
self.assertEqual(result[0], (USERID, 0))
self.assertEqual(result[1], (PASSWORD, 0))
class TestPAM(TestCase):
def test_init(self):
self.assertEqual(PAM.SERVICE, 'passwd')
@patch('gofer.pam.Query')
@patch('gofer.pam._PAM.pam')
def test_authenticate(self, _pam, _query):
pam = PAM()
# default service
pam.authenticate(USERID, PASSWORD)
_pam().start.assert_called_with(PAM.SERVICE, USERID, _query())
_pam().authenticate.assert_called_with()
# specified service
pam.authenticate(USERID, PASSWORD, service='ssh')
_pam().start.assert_called_with('ssh', USERID, _query())
_pam().authenticate.assert_called_with()
| lgpl-2.1 | Python |
a3ed049169fbd1f8cb4bab39b9aad5701d933dc4 | Correct documentation cross-reference | textbook/aslack | aslack/utils.py | aslack/utils.py | """Utility functionality."""
import os
from aiohttp import web_exceptions
API_TOKEN_ENV = 'SLACK_API_TOKEN'
"""The environment variable to store the user's API token in."""
class FriendlyError(Exception):
"""Exception with friendlier error messages.
Notes:
The ``err_msg`` is resolved in :py:data:`EXPECTED_ERRORS`,
or passed through as-is if not found there.
Arguments:
err_msg (:py:class:`str`): The error message to attempt to
resolve.
*args (:py:class:`tuple`): Any additional positional arguments.
"""
EXPECTED_ERRORS = {}
"""Friendly messages for expected errors."""
def __init__(self, err_msg, *args):
super().__init__(self.EXPECTED_ERRORS.get(err_msg, err_msg), *args)
def raise_for_status(response):
"""Raise an appropriate error for a given response.
Arguments:
response (:py:class:`aiohttp.ClientResponse`): The API response.
Raises:
:py:class:`aiohttp.web_exceptions.HTTPException`: The appropriate
error for the response's status.
"""
for err_name in web_exceptions.__all__:
err = getattr(web_exceptions, err_name)
if err.status_code == response.status:
payload = dict(
headers=response.headers,
reason=response.reason,
)
if issubclass(err, web_exceptions._HTTPMove):
raise err(response.headers['Location'], **payload)
raise err(**payload)
def get_api_token():
"""Allow the user to enter their API token.
Note:
The token is added to the environment using the variable defined
in :py:attr:`API_TOKEN_ENV`.
Returns:
:py:class:`str`: The user's API token.
"""
token = os.getenv(API_TOKEN_ENV)
if token:
return token
template = ('Enter your API token (this will be stored '
'as {} for future use): ').format(API_TOKEN_ENV)
token = input(template)
os.environ[API_TOKEN_ENV] = token
return token
| """Utility functionality."""
import os
from aiohttp import web_exceptions
API_TOKEN_ENV = 'SLACK_API_TOKEN'
"""The environment variable to store the user's API token in."""
class FriendlyError(Exception):
"""Exception with friendlier error messages.
Notes:
The ``err_msg`` is resolved in :py:attr:`EXPECTED_ERRORS`,
or passed through as-is if not found there.
Arguments:
err_msg (:py:class:`str`): The error message to attempt to
resolve.
*args (:py:class:`tuple`): Any additional positional arguments.
"""
EXPECTED_ERRORS = {}
"""Friendly messages for expected errors."""
def __init__(self, err_msg, *args):
super().__init__(self.EXPECTED_ERRORS.get(err_msg, err_msg), *args)
def raise_for_status(response):
"""Raise an appropriate error for a given response.
Arguments:
response (:py:class:`aiohttp.ClientResponse`): The API response.
Raises:
:py:class:`aiohttp.web_exceptions.HTTPException`: The appropriate
error for the response's status.
"""
for err_name in web_exceptions.__all__:
err = getattr(web_exceptions, err_name)
if err.status_code == response.status:
payload = dict(
headers=response.headers,
reason=response.reason,
)
if issubclass(err, web_exceptions._HTTPMove):
raise err(response.headers['Location'], **payload)
raise err(**payload)
def get_api_token():
"""Allow the user to enter their API token.
Note:
The token is added to the environment using the variable defined
in :py:attr:`API_TOKEN_ENV`.
Returns:
:py:class:`str`: The user's API token.
"""
token = os.getenv(API_TOKEN_ENV)
if token:
return token
template = ('Enter your API token (this will be stored '
'as {} for future use): ').format(API_TOKEN_ENV)
token = input(template)
os.environ[API_TOKEN_ENV] = token
return token
| isc | Python |
c74fa1fd7f99cf331a884a4a86f606b8dcfb0eb8 | Revert "Quit fix release". Back to normal versioning. | saltstack/pytest-logging | pytest_logging/version.py | pytest_logging/version.py | # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Pedro Algarvio (pedro@algarvio.me)`
:copyright: © 2015 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
pytest_logging.version
~~~~~~~~~~~~~~~~~~~~~~
pytest logging plugin version information
'''
# Import Python Libs
from __future__ import absolute_import
__version_info__ = (2015, 11, 3)
__version__ = '{0}.{1}.{2}'.format(*__version_info__)
| # -*- coding: utf-8 -*-
'''
:codeauthor: :email:`Pedro Algarvio (pedro@algarvio.me)`
:copyright: © 2015 by the SaltStack Team, see AUTHORS for more details.
:license: Apache 2.0, see LICENSE for more details.
pytest_logging.version
~~~~~~~~~~~~~~~~~~~~~~
pytest logging plugin version information
'''
# Import Python Libs
from __future__ import absolute_import
__version_info__ = (2015, 11, 3, 1)
__version__ = '{0}.{1}.{2}.{3}'.format(*__version_info__)
| apache-2.0 | Python |
ddeffc09ce1eab426fe46129bead712059f93f45 | Remove DEBUG=False that's not needed anymore | rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org | docker/settings/web.py | docker/settings/web.py | from .docker_compose import DockerBaseSettings
class WebDevSettings(DockerBaseSettings):
pass
WebDevSettings.load_settings(__name__)
| from .docker_compose import DockerBaseSettings
class WebDevSettings(DockerBaseSettings):
# Needed to serve 404 pages properly
# NOTE: it may introduce some strange behavior
DEBUG = False
WebDevSettings.load_settings(__name__)
| mit | Python |
1e12b00a6ca73c5b2071d5ab9aa3e4d1bbea64a0 | Add custom WebRequestException | Didero/DideRobot | CustomExceptions.py | CustomExceptions.py | class BaseCustomException(Exception):
"""
An abstract class that other custom exceptions should inherit from. Don't instantiate this directly
"""
def __init__(self, displayMessage):
"""
Create a new exception
:param displayMessage: An optional user-facing message, that will also be used as the exception's string representation
"""
super(BaseCustomException, self).__init__(displayMessage)
self.displayMessage = displayMessage
def __str__(self):
if self.displayMessage:
return self.displayMessage
return super(BaseCustomException, self).__str__()
def __repr__(self):
if self.displayMessage:
return "<{}> {}".format(self.__class__.__name__, self.displayMessage)
return super(BaseCustomException, self).__repr__()
class CommandException(BaseCustomException):
"""
This custom exception can be thrown by commands when something goes wrong during execution.
The parameter is a message sent to the source that called the command (a channel or a user)
"""
def __init__(self, displayMessage=None, shouldLogError=True):
"""
Create a new CommandException, to be thrown when something goes wrong during Command execution
:param displayMessage: An optional message to display to the IRC chat the bot is in
:param shouldLogError: Whether this exception should be logged to the program log. This is useful if it's a problem that needs to be solved, but can be set to False if it's a user input error
"""
super(CommandException, self).__init__(displayMessage)
self.shouldLogError = shouldLogError
class CommandInputException(CommandException):
"""
This custom exception can be raised when the input to some module or command is invalid or can't be parsed.
It is a more specific implementation of the CommandException, that doesn't log itself to the logfile
"""
def __init__(self, displayMessage):
"""
Create a new InputException. The display message will be shown to the user
:param displayMessage: The message to show to the user that called the command. This message should explain how the input should be correctly formatted
"""
super(CommandInputException, self).__init__(displayMessage, False)
class SettingException(BaseCustomException):
"""
This custom exception gets thrown when something is wrong with a bot setting
"""
pass
class WebRequestException(BaseCustomException):
"""
This custom exception gets thrown when a web request goes wrong, either through timeout, a missing API key, or something else
"""
pass
| class BaseCustomException(Exception):
"""
An abstract class that other custom exceptions should inherit from. Don't instantiate this directly
"""
def __init__(self, displayMessage):
"""
Create a new exception
:param displayMessage: An optional user-facing message, that will also be used as the exception's string representation
"""
super(BaseCustomException, self).__init__(displayMessage)
self.displayMessage = displayMessage
def __str__(self):
if self.displayMessage:
return self.displayMessage
return super(BaseCustomException, self).__str__()
def __repr__(self):
if self.displayMessage:
return "<{}> {}".format(self.__class__.__name__, self.displayMessage)
return super(BaseCustomException, self).__repr__()
class CommandException(BaseCustomException):
"""
This custom exception can be thrown by commands when something goes wrong during execution.
The parameter is a message sent to the source that called the command (a channel or a user)
"""
def __init__(self, displayMessage=None, shouldLogError=True):
"""
Create a new CommandException, to be thrown when something goes wrong during Command execution
:param displayMessage: An optional message to display to the IRC chat the bot is in
:param shouldLogError: Whether this exception should be logged to the program log. This is useful if it's a problem that needs to be solved, but can be set to False if it's a user input error
"""
super(CommandException, self).__init__(displayMessage)
self.shouldLogError = shouldLogError
class CommandInputException(CommandException):
"""
This custom exception can be raised when the input to some module or command is invalid or can't be parsed.
It is a more specific implementation of the CommandException, that doesn't log itself to the logfile
"""
def __init__(self, displayMessage):
"""
Create a new InputException. The display message will be shown to the user
:param displayMessage: The message to show to the user that called the command. This message should explain how the input should be correctly formatted
"""
super(CommandInputException, self).__init__(displayMessage, False)
class SettingException(BaseCustomException):
"""
This custom exception gets thrown when something is wrong with a bot setting
"""
pass
| mit | Python |
01e1900a139d7525a0803d5a160a9d91210fe219 | Add logging output when called from command line | aguinane/csvtokmz | csv2kmz/csv2kmz.py | csv2kmz/csv2kmz.py | import os
import argparse
import logging
from buildkmz import create_kmz_from_csv
def main():
""" Build file as per user inputs
"""
logging.basicConfig(format='%(asctime)s %(levelname)s: %(message)s', level=logging.INFO)
args = get_cmd_args()
iPath = args.input
sPath = args.styles
oDir = args.output
create_kmz_from_csv(iPath,sPath,oDir)
def get_cmd_args():
"""Get, process and return command line arguments to the script
"""
help_description = '''
CSVtoKMZ
Converts a parsed .csv file to a .kmz Google Earth overlay.
'''
help_epilog = ''
parser = argparse.ArgumentParser(description=help_description,
formatter_class=argparse.RawTextHelpFormatter,
epilog=help_epilog)
parser.add_argument('-o','--output', help='Specify alternate output directory',
default='../output/')
parser.add_argument('-s','--styles', help='Specify location of settings for point styles',
default='settings/styles.json')
parser.add_argument('-i','--input', help='Specify file to convert',
default='data/Example.csv')
return parser.parse_args()
if __name__ == '__main__':
main()
| import os
import argparse
from buildkmz import create_kmz_from_csv
def main():
""" Build file as per user inputs
"""
args = get_cmd_args()
iPath = args.input
sPath = args.styles
oDir = args.output
create_kmz_from_csv(iPath,sPath,oDir)
def get_cmd_args():
"""Get, process and return command line arguments to the script
"""
help_description = '''
CSVtoKMZ
Converts a parsed .csv file to a .kmz Google Earth overlay.
'''
help_epilog = ''
parser = argparse.ArgumentParser(description=help_description,
formatter_class=argparse.RawTextHelpFormatter,
epilog=help_epilog)
parser.add_argument('-o','--output', help='Specify alternate output directory',
default='../output/')
parser.add_argument('-s','--styles', help='Specify location of settings for point styles',
default='settings/styles.json')
parser.add_argument('-i','--input', help='Specify file to convert',
default='data/Example.csv')
return parser.parse_args()
if __name__ == '__main__':
main()
| mit | Python |
a48151f5188484002e025c3861cb0bd770a17357 | FIX type policy back comp. | ingadhoc/sale,ingadhoc/sale,ingadhoc/sale,ingadhoc/sale | sale_order_type_invoice_policy/models/sale_order_line.py | sale_order_type_invoice_policy/models/sale_order_line.py | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import api, models, _
from openerp.exceptions import ValidationError
class SaleOrderLine(models.Model):
_inherit = 'sale.order.line'
# .invoice_policy (no hacemos depends en el .invoice_policy para que si
# lo cambiamos mas adelante no reprosese todas las ventas)
@api.depends('order_id.type_id')
def _get_to_invoice_qty(self):
"""
Modificamos la funcion original para sobre escribir con la policy
del sale type si es que viene definida distinta de by product
"""
super(SaleOrderLine, self)._get_to_invoice_qty()
for line in self:
# igual que por defecto, si no en estos estados, no hay a facturar
if line.order_id.state not in ['sale', 'done']:
continue
type_policy = line.order_id.type_id.invoice_policy
# if by product, dont overwrite invoice qty
# si no hay type_policy puede ser por ordenes que antes de instalar
# sale_order_type que no tienen type
if not type_policy or type_policy == 'by_product':
continue
# elif type_policy == 'delivery':
# if order, we force ordered qty
# por ahora las dos opciones que quedan son prepaid y order
# y ambas funcionan como order una vez configramada
elif type_policy in ['order', 'prepaid']:
line.qty_to_invoice = (
line.product_uom_qty - line.qty_returned -
line.qty_invoiced)
else:
raise ValidationError(_(
'Invoicing Policy %s not implemented!' % type_policy))
| # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in module root
# directory
##############################################################################
from openerp import api, models, _
from openerp.exceptions import ValidationError
class SaleOrderLine(models.Model):
_inherit = 'sale.order.line'
# .invoice_policy (no hacemos depends en el .invoice_policy para que si
# lo cambiamos mas adelante no reprosese todas las ventas)
@api.depends('order_id.type_id')
def _get_to_invoice_qty(self):
"""
Modificamos la funcion original para sobre escribir con la policy
del sale type si es que viene definida distinta de by product
"""
super(SaleOrderLine, self)._get_to_invoice_qty()
for line in self:
# igual que por defecto, si no en estos estados, no hay a facturar
if line.order_id.state not in ['sale', 'done']:
continue
type_policy = line.order_id.type_id.invoice_policy
# if by product, dont overwrite invoice qty
if type_policy == 'by_product':
continue
# elif type_policy == 'delivery':
# if order, we force ordered qty
# por ahora las dos opciones que quedan son prepaid y order
# y ambas funcionan como order una vez configramada
elif type_policy in ['order', 'prepaid']:
line.qty_to_invoice = (
line.product_uom_qty - line.qty_returned -
line.qty_invoiced)
else:
raise ValidationError(_(
'Invoicing Policy %s not implemented!' % type_policy))
| agpl-3.0 | Python |
ab6e0d5d51b781258a38dbf0e41d82b06a7005f4 | bump for testing tag | brentp/cyvcf2,brentp/cyvcf2,brentp/cyvcf2 | cyvcf2/__init__.py | cyvcf2/__init__.py | from .cyvcf2 import (VCF, Variant, Writer, r_ as r_unphased, par_relatedness,
par_het)
Reader = VCFReader = VCF
__version__ = "0.30.0"
| from .cyvcf2 import (VCF, Variant, Writer, r_ as r_unphased, par_relatedness,
par_het)
Reader = VCFReader = VCF
__version__ = "0.20.9"
| mit | Python |
c9484d0c07543c687820bbb2485ea202647ec232 | use self defined logging module instead of standlib logging | Andy-hpliu/AirtestX,NetEaseGame/AutomatorX,Andy-hpliu/AirtestX,Andy-hpliu/AirtestX,codeskyblue/AutomatorX,codeskyblue/AutomatorX,NetEaseGame/AutomatorX,NetEaseGame/AutomatorX,NetEaseGame/ATX,NetEaseGame/AutomatorX,Andy-hpliu/AirtestX,NetEaseGame/ATX,NetEaseGame/ATX,codeskyblue/AutomatorX,Andy-hpliu/AirtestX,codeskyblue/AutomatorX,NetEaseGame/AutomatorX,NetEaseGame/ATX,NetEaseGame/ATX,codeskyblue/AutomatorX | atx/logutils.py | atx/logutils.py | #!/usr/bin/env python
# coding: utf-8
import inspect
import logging
import os
import sys
import time
import threading
import datetime
class Logger(object):
__alias = {
'WARNING': 'WARN',
'CRITICAL': 'FATAL'
}
def __init__(self, name=None, level=logging.INFO):
if name is None:
name = '-'
self._name = name
self._level = level
self._lock = threading.Lock()
def _write(self, s):
self._lock.acquire()
sys.stdout.write(s.rstrip() + '\n')
self._lock.release()
def setLevel(self, level):
'''
set format level
Args:
- level: for example, logging.INFO
'''
self._level = level
return self
def _level_write(self, level, format, *args):
if level < self._level:
return
levelname = logging.getLevelName(level)
message = format % args
frame, filename, line_number, function_name, lines, index = inspect.stack()[2]
props = dict(
asctime=datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')[:-3],
name=self._name,
filename=os.path.basename(filename),
lineno=line_number,
message=message.encode('utf-8'),
)
props['levelname'] = Logger.__alias.get(levelname, levelname)
output = '{asctime} {levelname:<5s} [{name}:{lineno:>4}] {message}'.format(**props)
self._write(output)
def debug(self, *args, **kwargs):
self._level_write(logging.DEBUG, *args, **kwargs)
def info(self, *args, **kwargs):
self._level_write(logging.INFO, *args, **kwargs)
def warn(self, *args, **kwargs):
self._level_write(logging.WARN, *args, **kwargs)
def error(self, *args, **kwargs):
self._level_write(logging.ERROR, *args, **kwargs)
def fatal(self, *args, **kwargs):
self._level_write(logging.FATAL, *args, **kwargs)
raise SystemExit(1)
def getLogger(name, level=logging.INFO):
# logger = logging.getLogger(name)
# ch = logging.StreamHandler()
# fmt = "%(asctime)s %(levelname)-8.8s [%(name)s:%(lineno)4s] %(message)s"
# ch.setFormatter(logging.Formatter(fmt))
# ch.setLevel(level)
# logger.handlers = [ch]
return Logger(name, level=level)
if __name__ == '__main__':
log = getLogger('test')
log.debug("Should not see it.")
log.setLevel(logging.DEBUG)
log.setLevel(logging.DEBUG)
log.info("This is info message")
log.debug("This is debug message")
log = getLogger('test')
log.warn("This is warning message")
log.error("This is error message")
log.fatal("This is fatal message") | #!/usr/bin/env python
# coding: utf-8
import logging
def getLogger(name, init=True, level=logging.INFO):
logger = logging.getLogger(name)
ch = logging.StreamHandler()
fmt = "%(asctime)s %(levelname)-8.8s [%(name)s:%(lineno)4s] %(message)s"
ch.setFormatter(logging.Formatter(fmt))
ch.setLevel(level)
logger.handlers = [ch]
return logger
if __name__ == '__main__':
log = getLogger('test')
log.setLevel(logging.DEBUG)
log.info("Hello")
log.debug("dd Hello")
log = getLogger('test')
log.warn("dd Hello") | apache-2.0 | Python |
32710f1f28773b36df6788b236256c9f4fbb52a1 | add v2020.6.20 (#18060) | LLNL/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,iulian787/spack,iulian787/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-certifi/package.py | var/spack/repos/builtin/packages/py-certifi/package.py | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyCertifi(PythonPackage):
"""Certifi: A carefully curated collection of Root Certificates for validating
the trustworthiness of SSL certificates while verifying the identity of TLS
hosts."""
homepage = "http://certifi.io/"
url = "https://pypi.io/packages/source/c/certifi/certifi-2020.6.20.tar.gz"
import_modules = ['certifi']
version('2020.6.20', sha256='5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3')
version('2019.9.11', sha256='e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50')
version('2019.6.16', sha256='945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695')
version('2019.3.9', sha256='b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae')
version('2017.4.17', sha256='f7527ebf7461582ce95f7a9e03dd141ce810d40590834f4ec20cddd54234c10a')
version('2017.1.23', sha256='81877fb7ac126e9215dfb15bfef7115fdc30e798e0013065158eed0707fd99ce')
depends_on('py-setuptools', type='build')
| # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyCertifi(PythonPackage):
"""Certifi: A carefully curated collection of Root Certificates for validating
the trustworthiness of SSL certificates while verifying the identity of TLS
hosts."""
homepage = "http://certifi.io/"
url = "https://pypi.io/packages/source/c/certifi/certifi-2019.6.16.tar.gz"
import_modules = ['certifi']
version('2019.9.11', sha256='e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50')
version('2019.6.16', sha256='945e3ba63a0b9f577b1395204e13c3a231f9bc0223888be653286534e5873695')
version('2019.3.9', sha256='b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae')
version('2017.4.17', sha256='f7527ebf7461582ce95f7a9e03dd141ce810d40590834f4ec20cddd54234c10a')
version('2017.1.23', sha256='81877fb7ac126e9215dfb15bfef7115fdc30e798e0013065158eed0707fd99ce')
depends_on('py-setuptools', type='build')
| lgpl-2.1 | Python |
2a1b63c66d7721940346ddead8330a6060cdcd2e | Fix several errors that cause script to end prematurely | aerovolts/python-scripts | redditscrape.py | redditscrape.py | #!/usr/bin/env python
__author__ = "Patrick Guelcher"
__copyright__ = "(C) 2016 Patrick Guelcher"
__license__ = "MIT"
__version__ = "3.0"
"""
Scrapes the list of provided subreddits for images and downloads them to a local directory
"""
import os, praw, wget, urllib.error
# Configuration
root_path = 'scrape' # Download folder (Default: scrape)
sub_list = ['vexillology', 'mapporn', 'pics'] # Subreddit list
post_limit = 15 # Sumbission limit to check and download
user_agent = 'Image Scraper 3.0 by /u/aeroblitz' # Use your own reddit username
# Do not edit beyond this comment
def main():
create_folders()
def create_folders():
os.mkdir(root_path)
for sub in sub_list:
os.mkdir(os.path.join(root_path,str(sub)))
download_images()
def download_images():
u = praw.Reddit(user_agent=user_agent)
for sub in sub_list:
post_list = u.get_subreddit(sub).get_hot(limit=post_limit)
path = root_path + '/' + sub
for post in post_list:
if post.url is not None:
file_name = post.url
extension = post.url[-4:]
if extension == '.jpg' or extension == '.png':
print ("\n" + post.url)
try:
wget.download(post.url, path)
except (IndexError, urllib.error.HTTPError):
print ("\n" + "Error Downloading -- Skipping Image")
pass
else:
pass
else:
pass
else:
continue
else:
print("\n" + "\n" + "Scrape Completed." + "\n")
if __name__ == '__main__':
main()
| #!/usr/bin/env python
__author__ = "Patrick Guelcher"
__copyright__ = "(C) 2016 Patrick Guelcher"
__license__ = "MIT"
__version__ = "2.1"
"""
Scrapes the list of provided subreddits for images and downloads them to a local directory
"""
import os
import praw
import wget
import urllib.error
# Configuration
root_path = 'scrape' # Download folder (Default: scrape)
sub_list = ['vexillology', 'mapporn', 'pics'] # Subreddit list
post_limit = 50 # Sumbission limit to check and download
user_agent = 'Image Scraper 2.1 by /u/aeroblitz' # Use your own reddit username
# Do not edit beyond this comment
def main():
create_folders()
def create_folders():
os.mkdir(root_path)
for sub in sub_list:
os.mkdir(os.path.join(root_path,str(sub)))
download_images()
def download_images():
u = praw.Reddit(user_agent=user_agent)
for sub in sub_list:
post_list = u.get_subreddit(sub).get_hot(limit=post_limit)
path = root_path + '/' + sub
for post in post_list:
if post.url is not None:
file_name = post.url
extension = post.url[-4:]
if extension == '.jpg' or extension == '.png':
print (post.url)
wget.download(post.url, path)
else:
pass
else:
pass
else:
continue
else:
print("Scrape Completed.")
if __name__ == '__main__':
main()
| mit | Python |
984c395e3f43764a4d8125aea7556179bb4766dd | Remove the doc that describes the setup. Setup is automated now | moritzschaefer/luigi,kalaidin/luigi,riga/luigi,foursquare/luigi,dylanjbarth/luigi,Dawny33/luigi,harveyxia/luigi,graingert/luigi,slvnperron/luigi,harveyxia/luigi,sahitya-pavurala/luigi,hadesbox/luigi,rayrrr/luigi,humanlongevity/luigi,Tarrasch/luigi,Magnetic/luigi,percyfal/luigi,torypages/luigi,stroykova/luigi,theoryno3/luigi,leafjungle/luigi,slvnperron/luigi,vine/luigi,Houzz/luigi,SeedScientific/luigi,stephenpascoe/luigi,fw1121/luigi,LamCiuLoeng/luigi,ivannotes/luigi,kalaidin/luigi,dhruvg/luigi,meyerson/luigi,ViaSat/luigi,jw0201/luigi,anyman/luigi,bowlofstew/luigi,kalaidin/luigi,Yoone/luigi,stephenpascoe/luigi,SkyTruth/luigi,dlstadther/luigi,dstandish/luigi,ZhenxingWu/luigi,theoryno3/luigi,Dawny33/luigi,PeteW/luigi,ZhenxingWu/luigi,mbruggmann/luigi,aeron15/luigi,foursquare/luigi,penelopy/luigi,laserson/luigi,h3biomed/luigi,wakamori/luigi,adaitche/luigi,kevhill/luigi,ContextLogic/luigi,slvnperron/luigi,dstandish/luigi,samepage-labs/luigi,huiyi1990/luigi,rayrrr/luigi,meyerson/luigi,ContextLogic/luigi,bmaggard/luigi,penelopy/luigi,torypages/luigi,slvnperron/luigi,hellais/luigi,linearregression/luigi,ehdr/luigi,springcoil/luigi,neilisaac/luigi,lungetech/luigi,spotify/luigi,Tarrasch/luigi,PeteW/luigi,dstandish/luigi,javrasya/luigi,dhruvg/luigi,torypages/luigi,wakamori/luigi,jw0201/luigi,rayrrr/luigi,springcoil/luigi,lungetech/luigi,laserson/luigi,h3biomed/luigi,realgo/luigi,mfcabrera/luigi,jw0201/luigi,joeshaw/luigi,oldpa/luigi,soxofaan/luigi,republic-analytics/luigi,ChrisBeaumont/luigi,rizzatti/luigi,graingert/luigi,linsomniac/luigi,penelopy/luigi,casey-green/luigi,DomainGroupOSS/luigi,neilisaac/luigi,altaf-ali/luigi,belevtsoff/luigi,gpoulin/luigi,edx/luigi,laserson/luigi,anyman/luigi,altaf-ali/luigi,DomainGroupOSS/luigi,dhruvg/luigi,adaitche/luigi,PeteW/luigi,huiyi1990/luigi,walkers-mv/luigi,dkroy/luigi,alkemics/luigi,JackDanger/luigi,moandcompany/luigi,altaf-ali/luigi,dlstadther/luigi,jamesmcm/luigi,tuulos/luigi,edx/luigi,vine/luigi,walkers-mv/luigi,anyman/luigi,linearregression/luigi,jamesmcm/luigi,glenndmello/luigi,JackDanger/luigi,moritzschaefer/luigi,thejens/luigi,soxofaan/luigi,kalaidin/luigi,stroykova/luigi,neilisaac/luigi,dkroy/luigi,ViaSat/luigi,17zuoye/luigi,anyman/luigi,ehdr/luigi,samuell/luigi,samuell/luigi,graingert/luigi,vine/luigi,belevtsoff/luigi,SkyTruth/luigi,upworthy/luigi,upworthy/luigi,lichia/luigi,joeshaw/luigi,fabriziodemaria/luigi,neilisaac/luigi,humanlongevity/luigi,wakamori/luigi,percyfal/luigi,Dawny33/luigi,lichia/luigi,ViaSat/luigi,hadesbox/luigi,dylanjbarth/luigi,17zuoye/luigi,Houzz/luigi,PeteW/luigi,joeshaw/luigi,samepage-labs/luigi,meyerson/luigi,ChrisBeaumont/luigi,ehdr/luigi,LamCiuLoeng/luigi,ZhenxingWu/luigi,dylanjbarth/luigi,Magnetic/luigi,theoryno3/luigi,samepage-labs/luigi,dhruvg/luigi,theoryno3/luigi,mfcabrera/luigi,kevhill/luigi,h3biomed/luigi,DomainGroupOSS/luigi,17zuoye/luigi,fabriziodemaria/luigi,leafjungle/luigi,drincruz/luigi,javrasya/luigi,LamCiuLoeng/luigi,mbruggmann/luigi,cpcloud/luigi,dstandish/luigi,lichia/luigi,ViaSat/luigi,fw1121/luigi,dylanjbarth/luigi,hadesbox/luigi,bmaggard/luigi,ivannotes/luigi,oldpa/luigi,Tarrasch/luigi,lichia/luigi,soxofaan/luigi,tuulos/luigi,linsomniac/luigi,realgo/luigi,qpxu007/luigi,qpxu007/luigi,bmaggard/luigi,moritzschaefer/luigi,graingert/luigi,springcoil/luigi,foursquare/luigi,soxofaan/luigi,riga/luigi,springcoil/luigi,spotify/luigi,realgo/luigi,upworthy/luigi,ivannotes/luigi,SeedScientific/luigi,percyfal/luigi,republic-analytics/luigi,pkexcellent/luigi,ehdr/luigi,walkers-mv/luigi,thejens/luigi,humanlongevity/luigi,rizzatti/luigi,ChrisBeaumont/luigi,ThQ/luigi,drincruz/luigi,fabriziodemaria/luigi,samepage-labs/luigi,glenndmello/luigi,vine/luigi,republic-analytics/luigi,ContextLogic/luigi,mfcabrera/luigi,republic-analytics/luigi,jamesmcm/luigi,Wattpad/luigi,mbruggmann/luigi,ThQ/luigi,rizzatti/luigi,bowlofstew/luigi,hadesbox/luigi,linsomniac/luigi,stephenpascoe/luigi,dlstadther/luigi,belevtsoff/luigi,stroykova/luigi,upworthy/luigi,stroykova/luigi,moritzschaefer/luigi,casey-green/luigi,leafjungle/luigi,SkyTruth/luigi,casey-green/luigi,dlstadther/luigi,JackDanger/luigi,moandcompany/luigi,moandcompany/luigi,Houzz/luigi,ivannotes/luigi,lungetech/luigi,javrasya/luigi,Tarrasch/luigi,mfcabrera/luigi,DomainGroupOSS/luigi,spotify/luigi,humanlongevity/luigi,wakamori/luigi,linearregression/luigi,qpxu007/luigi,moandcompany/luigi,hellais/luigi,17zuoye/luigi,bowlofstew/luigi,riga/luigi,pkexcellent/luigi,altaf-ali/luigi,harveyxia/luigi,riga/luigi,h3biomed/luigi,samuell/luigi,ChrisBeaumont/luigi,kevhill/luigi,rayrrr/luigi,LamCiuLoeng/luigi,penelopy/luigi,sahitya-pavurala/luigi,aeron15/luigi,samuell/luigi,rizzatti/luigi,jw0201/luigi,SkyTruth/luigi,alkemics/luigi,Dawny33/luigi,bmaggard/luigi,linearregression/luigi,alkemics/luigi,SeedScientific/luigi,ThQ/luigi,sahitya-pavurala/luigi,jamesmcm/luigi,dkroy/luigi,oldpa/luigi,lungetech/luigi,Wattpad/luigi,dkroy/luigi,aeron15/luigi,gpoulin/luigi,thejens/luigi,Magnetic/luigi,JackDanger/luigi,belevtsoff/luigi,sahitya-pavurala/luigi,pkexcellent/luigi,casey-green/luigi,walkers-mv/luigi,Yoone/luigi,glenndmello/luigi,Magnetic/luigi,qpxu007/luigi,drincruz/luigi,Wattpad/luigi,hellais/luigi,ZhenxingWu/luigi,glenndmello/luigi,huiyi1990/luigi,linsomniac/luigi,edx/luigi,aeron15/luigi,adaitche/luigi,fw1121/luigi,thejens/luigi,meyerson/luigi,fw1121/luigi,SeedScientific/luigi,kevhill/luigi,fabriziodemaria/luigi,harveyxia/luigi,foursquare/luigi,percyfal/luigi,laserson/luigi,leafjungle/luigi,tuulos/luigi,drincruz/luigi,bowlofstew/luigi,oldpa/luigi,spotify/luigi,tuulos/luigi,joeshaw/luigi,gpoulin/luigi,pkexcellent/luigi,gpoulin/luigi,Yoone/luigi,adaitche/luigi,huiyi1990/luigi,ThQ/luigi,torypages/luigi,hellais/luigi,stephenpascoe/luigi,edx/luigi,cpcloud/luigi,ContextLogic/luigi,javrasya/luigi,realgo/luigi,mbruggmann/luigi,Houzz/luigi,alkemics/luigi,Yoone/luigi | test/_mysqldb_test.py | test/_mysqldb_test.py | import mysql.connector
from luigi.contrib.mysqldb import MySqlTarget
import unittest
host = 'localhost'
port = 3306
database = 'luigi_test'
username = None
password = None
table_updates = 'table_updates'
def _create_test_database():
con = mysql.connector.connect(user=username,
password=password,
host=host,
port=port,
autocommit=True)
con.cursor().execute('CREATE DATABASE IF NOT EXISTS %s' % database)
_create_test_database()
target = MySqlTarget(host, database, username, password, '', 'update_id')
class MySqlTargetTest(unittest.TestCase):
def test_touch_and_exists(self):
drop()
self.assertFalse(target.exists(),
'Target should not exist before touching it')
target.touch()
self.assertTrue(target.exists(),
'Target should exist after touching it')
def drop():
con = target.connect(autocommit=True)
con.cursor().execute('DROP TABLE IF EXISTS %s' % table_updates)
| '''
$ mysql
Welcome to the MySQL monitor. Commands end with ; or \g.
Your MySQL connection id is 211
Server version: 5.6.15 Homebrew
Copyright (c) 2000, 2013, Oracle and/or its affiliates. All rights reserved.
Oracle is a registered trademark of Oracle Corporation and/or its
affiliates. Other names may be trademarks of their respective
owners.
Type 'help;' or '\h' for help. Type '\c' to clear the current input statement.
mysql> create database luigi;
Query OK, 1 row affected (0.00 sec)
'''
import mysql.connector
from luigi.contrib.mysqldb import MySqlTarget
import unittest
host = 'localhost'
port = 3306
database = 'luigi_test'
username = None
password = None
table_updates = 'table_updates'
def _create_test_database():
con = mysql.connector.connect(user=username,
password=password,
host=host,
port=port,
autocommit=True)
con.cursor().execute('CREATE DATABASE IF NOT EXISTS %s' % database)
_create_test_database()
target = MySqlTarget(host, database, username, password, '', 'update_id')
class MySqlTargetTest(unittest.TestCase):
def test_touch_and_exists(self):
drop()
self.assertFalse(target.exists(),
'Target should not exist before touching it')
target.touch()
self.assertTrue(target.exists(),
'Target should exist after touching it')
def drop():
con = target.connect(autocommit=True)
con.cursor().execute('DROP TABLE IF EXISTS %s' % table_updates)
| apache-2.0 | Python |
ec7605f522268fe008a1438433f986e9e5243d35 | Update TODO for periodogram. | cournape/talkbox,cournape/talkbox | scikits/talkbox/spectral/basic.py | scikits/talkbox/spectral/basic.py | import numpy as np
from scipy.fftpack import fft, ifft
def periodogram(x, nfft=None, fs=1):
"""Compute the periodogram of the given signal, with the given fft size.
Parameters
----------
x : array-like
input signal
nfft : int
size of the fft to compute the periodogram. If None (default), the
length of the signal is used. if nfft > n, the signal is 0 padded.
fs : float
Sampling rate. By default, is 1 (normalized frequency. e.g. 0.5 is the
Nyquist limit).
Returns
-------
pxx : array-like
The psd estimate.
fgrid : array-like
Frequency grid over which the periodogram was estimated.
Notes
-----
Only real signals supported for now.
Returns the one-sided version of the periodogram.
Discrepency with matlab: matlab compute the psd in unit of power / radian /
sample, and we compute the psd in unit of power / sample: to get the same
result as matlab, just multiply the result from talkbox by 2pi"""
# TODO: this is basic to the point of being useless:
# - support Daniel smoothing
# - support windowing
# - trend/mean handling
# - one-sided vs two-sided
# - plot
# - support complex input
x = np.atleast_1d(x)
n = x.size
if x.ndim > 1:
raise ValueError("Only rank 1 input supported for now.")
if not np.isrealobj(x):
raise ValueError("Only real input supported for now.")
if not nfft:
nfft = n
if nfft < n:
raise ValueError("nfft < signal size not supported yet")
pxx = np.abs(fft(x, nfft)) ** 2
if nfft % 2 == 0:
pn = nfft / 2 + 1
else:
pn = (nfft + 1 )/ 2
fgrid = np.linspace(0, fs * 0.5, pn)
return pxx[:pn] / (n * fs), fgrid
| import numpy as np
from scipy.fftpack import fft, ifft
def periodogram(x, nfft=None, fs=1):
"""Compute the periodogram of the given signal, with the given fft size.
Parameters
----------
x: array-like
input signal
nfft: int
size of the fft to compute the periodogram. If None (default), the
length of the signal is used. if nfft > n, the signal is 0 padded.
fs: float
Sampling rate. By default, is 1 (normalized frequency. e.g. 0.5 is the
Nyquist limit).
Returns
-------
pxx: array-like
The psd estimate.
fgrid: array-like
Frequency grid over which the periodogram was estimated.
Notes
-----
Only real signals supported for now.
Returns the one-sided version of the periodogram.
Discrepency with matlab: matlab compute the psd in unit of power / radian /
sample, and we compute the psd in unit of power / sample: to get the same
result as matlab, just multiply the result from talkbox by 2pi"""
# TODO: this is basic to the point of being useless:
# - support windowing
# - normalization/frequency unit + definition
# - one-sided vs two-sided
# - plot
# - support complex input
# - trend/mean handling
x = np.atleast_1d(x)
n = x.size
if x.ndim > 1:
raise ValueError("Only rank 1 input supported for now.")
if not np.isrealobj(x):
raise ValueError("Only real input supported for now.")
if not nfft:
nfft = n
if nfft < n:
raise ValueError("nfft < signal size not supported yet")
pxx = np.abs(fft(x, nfft)) ** 2
if nfft % 2 == 0:
pn = nfft / 2 + 1
else:
pn = (nfft + 1 )/ 2
fgrid = np.linspace(0, fs * 0.5, pn)
return pxx[:pn] / (n * fs), fgrid
| mit | Python |
5f4fdb23767f1cc04dc133497b866dfa9feeb7f9 | fix pep8 issue | raygomez/python-exercise-4 | exercise4-5.py | exercise4-5.py | from __future__ import print_function
__author__ = 'ragomez'
class MultiplesOf7(object):
def __init__(self, n):
self.n = n
self.num = 0
def next(self):
self.num += 7
if self.num < self.n:
return self.num
else:
raise StopIteration
def __iter__(self):
return self
limit = 100
for number in MultiplesOf7(limit):
print(number)
| from __future__ import print_function
__author__ = 'ragomez'
class MultiplesOf7(object):
def __init__(self, n):
self.n = n
self.num = 0
def next(self):
self.num += 7
if self.num < self.n:
return self.num
else:
raise StopIteration
def __iter__(self):
return self
limit = 100
for number in MultiplesOf7(limit):
print(number) | mit | Python |
763f67c0a1099aacf0346ad8e7b9cd9be6cf4ccd | Update headers for Roboconf | aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments,aswinpj/Pygments | pygments/lexers/roboconf.py | pygments/lexers/roboconf.py | # -*- coding: utf-8 -*-
"""
pygments.lexers.roboconf
~~~~~~~~~~~~~~~~~~~~~~~~
Lexers for Roboconf DSL.
:copyright: Copyright 2006-2015 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words, bygroups, re, include
from pygments.token import *
__all__ = ['RoboconfGraphLexer', 'RoboconfInstancesLexer']
class RoboconfGraphLexer(RegexLexer):
"""
Lexer for `Roboconf <http://roboconf.net/en/roboconf.html>`_ graph files.
.. versadded:: 2.1
"""
name = 'Roboconf Graph'
aliases = ['roboconf-graph']
filenames = ['*.graph']
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
# Skip white spaces
(r'\s+', Text),
# There is one operator
(r'=',Operator),
# Keywords
(words(('facet', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
(words(('installer', 'extends', 'exports', 'imports', 'facets', 'children'), suffix=r'\s*:?', prefix=r'\b'), Name),
# Comments
(r'#.*\n', Comment),
# Default
(r'[^#]', Text),
(r'.*\n', Text)
]
}
class RoboconfInstancesLexer(RegexLexer):
"""
Lexer for `Roboconf <http://roboconf.net/en/roboconf.html>`_ instances files.
.. versadded:: 2.1
"""
name = 'Roboconf Instances'
aliases = ['roboconf-instances']
filenames = ['*.instances']
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
# Skip white spaces
(r'\s+', Text),
# Keywords
(words(('instance of', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
(words(('name', 'count'), suffix=r's*:?', prefix=r'\b'), Name),
(r'\s*[\w.-]+\s*:', Name),
# Comments
(r'#.*\n', Comment),
# Default
(r'[^#]', Text),
(r'.*\n', Text)
]
}
| from pygments.lexer import RegexLexer, words, bygroups, re, include
from pygments.token import *
__all__ = ['RoboconfGraphLexer', 'RoboconfInstancesLexer']
class RoboconfGraphLexer(RegexLexer):
name = 'Roboconf Graph'
aliases = ['roboconf-graph']
filenames = ['*.graph']
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
# Skip white spaces
(r'\s+', Text),
# There is one operator
(r'=',Operator),
# Keywords
(words(('facet', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
(words(('installer', 'extends', 'exports', 'imports', 'facets', 'children'), suffix=r'\s*:?', prefix=r'\b'), Name),
# Comments
(r'#.*\n', Comment),
# Default
(r'[^#]', Text),
(r'.*\n', Text)
]
}
class RoboconfInstancesLexer(RegexLexer):
name = 'Roboconf Instances'
aliases = ['roboconf-instances']
filenames = ['*.instances']
flags = re.IGNORECASE | re.MULTILINE
tokens = {
'root': [
# Skip white spaces
(r'\s+', Text),
# Keywords
(words(('instance of', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
(words(('name', 'count'), suffix=r's*:?', prefix=r'\b'), Name),
(r'\s*[\w.-]+\s*:', Name),
# Comments
(r'#.*\n', Comment),
# Default
(r'[^#]', Text),
(r'.*\n', Text)
]
}
| bsd-2-clause | Python |
905cd14dfa64f1a0aa0674fe064dd0cc6986692f | Update experiServe.py | ChristinaHammer/Client_Database | experiServe.py | experiServe.py | """experiServe.py
Developer: Noelle Todd
Last visit: July 15, 2014
This file tests the creation of a simple Python server,
printing to a web page, html forms, and basic I/O.
"""
import cgi
from wsgiref.simple_server import make_server
from io import
def get_form_values(post_str): pass
#This function retrieves the information submitted to the form.
#form_values = {item.split("=")[0]: item.split[1] for item in post_str.split("&")}
#return form_values
def hullo_world_app(environ, start_response):
output = StringIO()
status = "200 OK" #HTTP status
headers = [("Content-type", "text/html; charset=utf-8")]
start_response(status, headers)
#checks for POST instead of GET (more reliable)
if environ['REQUEST_METHOD'] == 'POST':
size = int(environ['CONTENT_LENGTH'])
post_str = environ['wsgi.input'].read(size)
print(post_str, "<p>", file=output)
print("""
<html>
<head>
<title>Provisions Database</title>
</head>
<body>
<table width="100%" border="0" cellpadding="10" cellspacing="5"
background=#f5f5dc>
<tr> <!--means table row-->
<td colspan="2" bgcolor="#deb887">
<h1><center>Provisions Food Pantry</center></h1>
</td> <!--td means table cell-->
</tr>
<tr valign="top">
<!--cell one-->
<td bgcolor="#fafad2" width="30%">
This will hold the
select box, new client
button, and search bar.
</td>
<!--cell two-->
<td bgcolor="#f0f8ff" width="70%">
<form method="POST">
<h3>Visitor Information</h3>
First name: <input type="text" name="first" size=16/>
Last name: <input type="text" name="last" size=16/>
Phone: <input type="text" name="phone" size=16/>
<br>
DOB:
<input type="int" name="month" size=2 maxlength=2/>
<input type="text" name="day" size=2 maxlength=2/>
<input type="text" name="year" size=4 maxlength=4/>
Age: ?
<hr/>
<h3>Household Information</h3>
Street: <input type="text" name="street"/>
Apt: <input type="text" name="apt" size=3/>
<br>
City: <input type="text" name="city" value="Troy" size=16/>
State: <input type="text" name="state" value="NY" size=2 maxlength=2/>
Zip: <input type="int" name="zip" size=5/>
<hr/>
<h3>Visit Information</h3>
date of last visit, etc.
<hr/>
<h3>Household Members</h3>
selection box thing?
<input type="submit" value="submit" />
</form>
</td>
</tr>
</table>
</body>
</html> """, file=output)
value = output.getvalue()
byt = bytes(value, "ascii")
return [byt]
httpd = make_server('', 8000, hullo_world_app)
print("Serving on port 8000...")
httpd.serve_forever()
| """experiServe.py
This file tests the creation of a simple Python server,
printing to a web page, html forms, and basic I/O.
"""
import cgi
from wsgiref.simple_server import make_server
from io import
def get_form_values(post_str): pass
#This function retrieves the information submitted to the form.
#form_values = {item.split("=")[0]: item.split[1] for item in post_str.split("&")}
#return form_values
def hullo_world_app(environ, start_response):
output = StringIO()
status = "200 OK" #HTTP status
headers = [("Content-type", "text/html; charset=utf-8")]
start_response(status, headers)
#Create title
print("<title>Provisions Database</title>", file=output)
print("<h1>Hullo, World!</h1>", file=output)
#checks for POST instead of GET (more reliable)
if environ['REQUEST_METHOD'] == 'POST':
size = int(environ['CONTENT_LENGTH'])
post_str = environ['wsgi.input'].read(size)
print(post_str, "<p>", file=output)
print('<form action="/cgi-bin/hello_post.cgi" method="POST">'
'First name: <input type="text" name="first_name" />'
'<br>'
'Last name: <input type="text" name="last_name" />'
'<input type="submit" value="submit" />'
'</form>',
file=output)
value = output.getvalue()
byt = bytes(value, "ascii")
return [byt]
httpd = make_server('', 8000, hullo_world_app)
print("Serving on port 8000...")
httpd.serve_forever()
| mit | Python |
8f80ac1f6331661932a552b38d0b4377b3cd3408 | add support for custom types | lnmds/jose | ext/playing.py | ext/playing.py | import logging
import json
import asyncio
import random
import discord
from discord.ext import commands
from .common import Cog
MINUTE = 60
PL_MIN = 3 * MINUTE
PL_MAX = 10 * MINUTE
log = logging.getLogger(__name__)
class PlayingStatus(Cog):
"""Playing status shit"""
def __init__(self, bot):
super().__init__(bot)
self.rotate_task = None
self.phrases = json.load(open('./playing_status.json', 'r'))
async def on_ready(self):
# don't fuck up
if self.rotate_task is not None:
return
self.rotate_task = self.bot.loop.create_task(self.rotate_loop())
async def rotate(self):
"""Get a random playing status and use it"""
msg = random.choice(self.phrases)
g_type = 0
if isinstance(msg, list):
g_type, msg = msg
fmt = f'{msg} | v{self.JOSE_VERSION} | {self.bot.config.prefix}help'
log.info('game: type=%d v=%r', g_type, fmt)
await self.bot.change_presence(game=discord.Game(type=g_type,
name=fmt))
async def rotate_loop(self):
try:
while True:
await self.rotate()
await asyncio.sleep(random.randint(PL_MIN, PL_MAX))
except asyncio.CancelledError:
pass
@commands.command(name='rotate')
@commands.is_owner()
async def _rotate(self, ctx):
"""Rotate playing status"""
await self.rotate()
await ctx.send('done!')
def setup(bot):
bot.add_cog(PlayingStatus(bot))
| import logging
import json
import asyncio
import random
import discord
from discord.ext import commands
from .common import Cog
MINUTE = 60
PL_MIN = 3 * MINUTE
PL_MAX = 10 * MINUTE
log = logging.getLogger(__name__)
class PlayingStatus(Cog):
"""Playing status shit"""
def __init__(self, bot):
super().__init__(bot)
self.rotate_task = None
self.phrases = json.load(open('./playing_status.json', 'r'))
async def on_ready(self):
# don't fuck up
if self.rotate_task is not None:
return
self.rotate_task = self.bot.loop.create_task(self.rotate_loop())
async def rotate(self):
"""Get a random playing status and use it"""
msg = random.choice(self.phrases)
fmt = f'{msg} | v{self.JOSE_VERSION} | {self.bot.config.prefix}help'
log.info('Setting playing to %r', fmt)
await self.bot.change_presence(game=discord.Game(name=fmt))
async def rotate_loop(self):
try:
while True:
await self.rotate()
await asyncio.sleep(random.randint(PL_MIN, PL_MAX))
except asyncio.CancelledError:
pass
@commands.command(name='rotate')
@commands.is_owner()
async def _rotate(self, ctx):
"""Rotate playing status"""
await self.rotate()
await ctx.send('done!')
def setup(bot):
bot.add_cog(PlayingStatus(bot))
| mit | Python |
bdc674762536eee22d2c8c01ebfc1d98f2d46013 | Add a test for SAM checker | jackstanek/BotBot,jackstanek/BotBot | tests/test_checker.py | tests/test_checker.py | import pytest
import os, stat
from botbot import checker, problems, checks
# Tests for Checker class methods
def test_checker_register_accept_single_function():
c = checker.Checker()
c.register(lambda: print("Hello world!"))
assert len(c.checks) == 1
def test_checker_register_accept_function_list():
c = checker.Checker()
# Function list
f = list()
f.append(lambda : print("Hello world!"))
f.append(lambda i : i + i)
c.register(f)
# Tests for checking functions
def test_symlink_checker_same_directory(tmpdir):
prev = tmpdir.chdir()
f = tmpdir.join('file.txt')
f.write('')
os.symlink(f.basename, 'link')
assert not checker.is_link(f.basename)
assert checker.is_link('link')
prev.chdir()
def test_symlink_checker_link_in_lower_directory(tmpdir):
prev = tmpdir.chdir()
f = tmpdir.join('file.txt')
f.write('')
os.mkdir('newdir')
os.symlink(f.basename, os.path.join('newdir', 'link'))
assert checker.is_link(os.path.join('newdir', 'link'))
assert not checker.is_link(f.basename)
prev.chdir()
def test_is_fastq(tmpdir):
prev = tmpdir.chdir()
bad = tmpdir.join('bad.fastq')
bad.write('')
os.symlink(bad.basename, 'good.fastq')
assert checks.is_fastq('bad.fastq') == problems.PROB_FILE_IS_FASTQ
assert checks.is_fastq('good.fastq') == problems.PROB_NO_PROBLEM
def test_sam_detection(tmpdir):
prev = tmpdir.chdir()
bad = tmpdir.join('bad.sam')
bad.write('')
# Check raw file
assert checks.sam_should_compress('bad.sam') == problems.PROB_SAM_SHOULD_COMPRESS
bam = tmpdir.join('bad.bam')
bam.write('')
assert checks.sam_should_compress('bad.sam') == problems.PROB_SAM_AND_BAM_EXIST
prev.chdir()
def test_permission_checker(tmpdir):
# Create a test file
p = tmpdir.join("bad_permissions.txt")
p.write('')
prev = tmpdir.chdir()
# Change its permissions a bunch... maybe this is too expensive?
for m in range(0o300, 0o700, 0o010):
p.chmod(m)
prob = checks.has_permission_issues(os.path.abspath(p.basename))
if not bool(0o040 & m): # octal Unix permission for 'group readable'
assert prob == problems.PROB_FILE_NOT_GRPRD
else:
assert prob == problems.PROB_NO_PROBLEM
prev.chdir()
| import pytest
import os, stat
from botbot import checker, problems, checks
# Tests for Checker class methods
def test_checker_register_accept_single_function():
c = checker.Checker()
c.register(lambda: print("Hello world!"))
assert len(c.checks) == 1
def test_checker_register_accept_function_list():
c = checker.Checker()
# Function list
f = list()
f.append(lambda : print("Hello world!"))
f.append(lambda i : i + i)
c.register(f)
# Tests for checking functions
def test_symlink_checker_same_directory(tmpdir):
prev = tmpdir.chdir()
f = tmpdir.join('file.txt')
f.write('')
os.symlink(f.basename, 'link')
assert not checker.is_link(f.basename)
assert checker.is_link('link')
prev.chdir()
def test_symlink_checker_link_in_lower_directory(tmpdir):
prev = tmpdir.chdir()
f = tmpdir.join('file.txt')
f.write('')
os.mkdir('newdir')
os.symlink(f.basename, os.path.join('newdir', 'link'))
assert checker.is_link(os.path.join('newdir', 'link'))
assert not checker.is_link(f.basename)
prev.chdir()
def test_is_fastq(tmpdir):
prev = tmpdir.chdir()
bad = tmpdir.join('bad.fastq')
os.symlink(bad.basename, 'good.fastq')
assert checks.is_fastq('bad.fastq') == problems.PROB_FILE_IS_FASTQ
assert checks.is_fastq('good.fastq') == problems.PROB_NO_PROBLEM
def test_permission_checker(tmpdir):
# Create a test file
p = tmpdir.join("bad_permissions.txt")
p.write('')
prev = tmpdir.chdir()
# Change its permissions a bunch... maybe this is too expensive?
for m in range(0o300, 0o700, 0o010):
p.chmod(m)
prob = checks.has_permission_issues(os.path.abspath(p.basename))
if not bool(0o040 & m): # octal Unix permission for 'group readable'
assert prob == problems.PROB_FILE_NOT_GRPRD
else:
assert prob == problems.PROB_NO_PROBLEM
prev.chdir()
| mit | Python |
ca1489e1fca85f52a53fc1bd1f69938879752598 | test incorrrect data file intput | chfw/moban,chfw/moban | tests/test_context.py | tests/test_context.py | import os
from nose.tools import eq_
from moban.plugins import Context
def test_context():
context = Context(os.path.join("tests", "fixtures"))
data = context.get_data("simple.yaml")
eq_(data["simple"], "yaml")
def test_environ_variables():
test_var = "TEST_ENVIRONMENT_VARIABLE"
test_value = "am I found"
os.environ[test_var] = test_value
context = Context(os.path.join("tests", "fixtures"))
data = context.get_data("simple.yaml")
eq_(data[test_var], test_value)
def test_json_data_overrides_environ_variables():
test_var = "TEST_ENVIRONMENT_VARIABLE"
test_value = "am I found"
os.environ[test_var] = test_value
context = Context(os.path.join("tests", "fixtures"))
data = context.get_data("simple.json")
eq_(data[test_var], test_value)
def test_unknown_data_file():
test_var = "TEST_ENVIRONMENT_VARIABLE"
test_value = "am I found"
os.environ[test_var] = test_value
context = Context(os.path.join("tests", "fixtures"))
data = context.get_data("unknown.data")
eq_(data[test_var], test_value)
| import os
from nose.tools import eq_
from moban.plugins import Context
def test_context():
context = Context(os.path.join("tests", "fixtures"))
data = context.get_data("simple.yaml")
eq_(data["simple"], "yaml")
def test_environ_variables():
test_var = "TEST_ENVIRONMENT_VARIABLE"
test_value = "am I found"
os.environ[test_var] = test_value
context = Context(os.path.join("tests", "fixtures"))
data = context.get_data("simple.yaml")
eq_(data[test_var], test_value)
def test_json_data_overrides_environ_variables():
test_var = "TEST_ENVIRONMENT_VARIABLE"
test_value = "am I found"
os.environ[test_var] = test_value
context = Context(os.path.join("tests", "fixtures"))
data = context.get_data("simple.json")
eq_(data[test_var], test_value)
| mit | Python |
04310fb40af60a18ba3a9f15e26fae34c96b0fda | improve devices tests | TheGhouls/oct,TheGhouls/oct,karec/oct,TheGhouls/oct,karec/oct | tests/test_devices.py | tests/test_devices.py | import unittest
from multiprocessing import Process
from oct.core.devices import forwarder, streamer
from oct.utilities.run_device import start_device, run_device
class DummyArgs:
device = 'forwarder'
frontend = 0
backend = 0
class DevicesTest(unittest.TestCase):
def test_forwarder(self):
"""Should be able to start forwarder correctly
"""
p = Process(target=forwarder, args=(0, 0))
p.start()
p.join(timeout=1)
p.terminate()
def test_streamer(self):
"""Should be able to start streamer
"""
p = Process(target=streamer, args=(0, 0))
p.start()
p.join(timeout=1)
p.terminate()
def test_start_device_function(self):
"""Should be able to start device with start_device function
"""
p = Process(target=start_device, args=('streamer', 0, 0))
p.start()
p.join(timeout=1)
p.terminate()
def test_run_device_function(self):
"""Should be able start device with run_device function
"""
p = Process(target=start_device, args=('forwarder', 0, 0))
p.start()
p.join(timeout=1)
p.terminate()
| import unittest
from multiprocessing import Process
from oct.core.devices import forwarder, streamer
class DevicesTest(unittest.TestCase):
def test_forwarder(self):
"""Should be able to start forwarder correctly
"""
p = Process(target=forwarder, args=(0, 0))
p.start()
p.join(timeout=2)
p.terminate()
def test_streamer(self):
"""Should be able to start streamer
"""
p = Process(target=streamer, args=(0, 0))
p.start()
p.join(timeout=2)
p.terminate()
| mit | Python |
ceea1a28dc1b4d753c691cdb1884be4c5c992572 | Test file erros fixed | Parkayun/wsgit,acuros/wsgit | tests/test_environ.py | tests/test_environ.py | #-*-coding:utf8-*-
'''Tests for Environ object'''
import unittest
from wsgit.request import Environ
def environ(request_parameters={}, meta={}):
return Environ({'parameters':request_parameters, 'meta':meta}).get_dict()
class TestEnviron(unittest.TestCase):
def test_request_method(self):
self.assertEqual(environ({})['REQUEST_METHOD'], 'MOBILE')
def test_reqeust_uri(self):
self.assertEqual(environ({})['REQUEST_URI'], None)
self.assertEqual(environ({'url':'/'})['REQUEST_URI'], '/')
self.assertEqual(environ({'url':'/foo/bar/'})['REQUEST_URI'],
'/foo/bar/')
self.assertEqual(environ({'url': '/foo/bar/?foo=bar'})['REQUEST_URI'],
'/foo/bar/?foo=bar')
def test_path_info(self):
self.assertEqual(environ({}).get('PATH_INFO'), None)
self.assertEqual(environ({'url':'/'})['PATH_INFO'], '/')
self.assertEqual(environ({'url': '/foo/bar/?foo=bar'})['PATH_INFO'],
'/foo/bar/')
def test_query_string(self):
self.assertEqual(environ({}).get('QUERY_STRING'), None)
self.assertEqual(environ({'url':'/'}).get('QUERY_STRING'), '')
self.assertEqual(environ({'url': '/foo/bar/?foo=bar'})['QUERY_STRING'],
'foo=bar')
self.assertEqual(environ({'url': '/?foo=bar&foo2=bar2'})['QUERY_STRING'],
'foo=bar&foo2=bar2')
def test_remote_addr(self):
self.assertEqual(Environ({}).get_dict().get('REMOTE_ADDR'), None)
self.assertEqual(environ(meta={'ip':'127.0.0.1'})['REMOTE_ADDR'],
'127.0.0.1')
def test_remote_port(self):
self.assertEqual(Environ({}).get_dict().get('REMOTE_PORT'), None)
self.assertEqual(environ(meta={'port':10295})['REMOTE_PORT'], 10295)
| #-*-coding:utf8-*-
'''Tests for Environ object'''
import unittest
from wsgit.request import Environ
def environ(request_parameters, meta={}):
return Environ({'parameters':request_parameters, 'meta':meta}).get_dict()
class TestEnviron(unittest.TestCase):
def test_request_method(self):
self.assertEqual(environ({})['REQUEST_METHOD'], 'MOBILE')
def test_reqeust_uri(self):
self.assertEqual(environ({})['REQUEST_URI'], None)
self.assertEqual(environ({'uri':'/'})['REQUEST_URI'], '/')
self.assertEqual(environ({'uri':'/foo/bar/'})['REQUEST_URI'],
'/foo/bar/')
self.assertEqual(environ({'url': '/foo/bar/?foo=bar'}['REQUEST_URI'],
'/foo/bar/?foo=bar'))
def test_path_info(self):
self.assertEqual(environ({}).get('PATH_INFO'), None)
self.assertEqual(environ({'url':'/'})['PATH_INFO'], '/')
self.assertEqual(environ({'url': '/foo/bar/?foo=bar'})['PATH_INFO'],
'/foo/bar/')
self.assertEqual(environ({'url':'/foo//'})['PATH_INFO'], '/foo/')
def test_query_string(self):
self.assertEqual(environ({}).get('QUERY_STRING'), None)
self.assertEqual(environ({'url':'/'}).get('QUERY_STRING'), '')
self.assertEqual(environ({'url': '/foo/bar/?foo=bar'})['PATH_INFO'],
'foo=bar')
self.assertEqual(environ({'url': '/?foo=bar&foo2=bar2'})['PATH_INFO'],
'foo=bar&foo2=bar2')
def test_remote_addr(self):
self.assertEqual(Environ({}).get_dict().get('REMOTE_ADDR'), None)
self.assertEqual(environ(meta={'ip':'127.0.0.1'})['REMOTE_ADDR'],
'127.0.0.1')
def test_remote_port(self):
self.assertEqual(Environ({}).get_dict().get('REMOTE_PORT'), None)
self.assertEqual(environ(meta={'port':10295})['REMOTE_PORT'], '10295')
| mit | Python |
7e8e39290276bd6530e3e379bb3f47eaaf059b63 | Add loop to database connection. It blocks until mysql is up | p4u/projecte_frigos,p4u/projecte_frigos,p4u/projecte_frigos,p4u/projecte_frigos | database.py | database.py | import MySQLdb
CONFIG_FILE="/var/www/monitor/config.py"
class database(object):
def __init__(self):
config = {}
execfile(CONFIG_FILE,config)
while not self._connect(config): print("Retrying database connection")
def _connect(self,config):
try:
self.db = MySQLdb.connect(config["host"],config["user"],config["password"],config["database"])
self.db.autocommit(True)
return True
except:
print("Cannot connect to MYSQL")
return False
def insert(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("insert into " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def update(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("update from " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def select(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("select " + txt)
result = dbc.fetchall()
except Exception as e:
print(e)
result = None
dbc.close()
return result
| import MySQLdb
CONFIG_FILE="/var/www/monitor/config.py"
class database(object):
def __init__(self):
config = {}
execfile(CONFIG_FILE,config)
self.db = MySQLdb.connect(config["host"],config["user"],config["password"],config["database"])
self.db.autocommit(True)
def insert(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("insert into " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def update(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("update from " + txt)
dbc.close()
self.db.commit()
except Exception as e:
print(e)
return False
return True
def select(self,txt):
dbc = self.db.cursor()
try:
dbc.execute("select " + txt)
result = dbc.fetchall()
except Exception as e:
print(e)
result = None
dbc.close()
return result
| agpl-3.0 | Python |
27fc8b4bcc65a5d1fb63b1032a9a81de540daf88 | Update tests using examples from SVG spec | nvictus/svgpath2mpl | tests/test_parser.py | tests/test_parser.py | import matplotlib as mpl
import matplotlib.pyplot as plt
from svgpath2mpl import parse_path
PATH_DATA = {
'triangle01': {
'd': "M 100 100 L 300 100 L 200 300 z",
'fill': "red",
'stroke': "blue",
'stroke-width': 3,
},
'cubic01': {
'd': "M100,200 C100,100 250,100 250,200 S400,300 400,200",
'fill': "none",
'stroke': "red",
'stroke-width': 5,
},
'quad01a': {
'd': "M200,300 Q400,50 600,300 T1000,300",
'fill': "none",
'stroke': "red",
'stroke-width': 5,
},
'quad01b': {
'd': "M200,300 L400,50 L600,300 L800,550 L1000,300",
'fill': "none",
'stroke': "#888888",
'stroke-width': 2,
},
'arcs01a': {
'd': "M300,200 h-150 a150,150 0 1,0 150,-150 z",
'fill': "red",
'stroke': "blue",
'stroke-width': 5,
},
'arcs01b': {
'd': "M275,175 v-150 a150,150 0 0,0 -150,150 z",
'fill': "yellow",
'stroke': "blue",
'stroke-width': 5,
},
'arcs01c': {
'd': "M600,350 l 50,-25 a25,25 -30 0,1 50,-25 l 50,-25 "
"a25,50 -30 0,1 50,-25 l 50,-25 "
"a25,75 -30 0,1 50,-25 l 50,-25 "
"a25,100 -30 0,1 50,-25 l 50,-25",
'fill': "none",
'stroke': "red",
'stroke-width': 5,
},
'arcs02': {
'd': "M 125,75 a100,50 0 ?,? 100,50",
'fill': "none",
'stroke': "red",
'stroke-width': 6,
},
}
def test_parse_path():
d = "M300,200 h-150 a150,150 0 1,0 150,-150 z"
fill = "red"
stroke = "blue"
stroke_width = 5
path = parse_path(d)
patch = mpl.patches.PathPatch(path, facecolor=fill, edgecolor=stroke, linewidth=stroke_width)
fig = plt.figure(figsize=(12, 5.25))
ax = fig.add_subplot(111)
ax.add_patch(patch)
ax.set_aspect(1)
ax.set_xlim([0, 1200])
ax.set_ylim([0, 400])
| import matplotlib as mpl
import matplotlib.pyplot as plt
from svgpath2mpl import parse_path
d = "M300,200 h-150 a150,150 0 1,0 150,-150 z"
fill = "red"
stroke = "blue"
stroke_width = 5
def test_parse_path():
path = parse_path(d)
patch = mpl.patches.PathPatch(path, facecolor=fill, edgecolor=stroke, linewidth=stroke_width)
fig = plt.figure(figsize=(12, 5.25))
ax = fig.add_subplot(111)
ax.add_patch(patch)
ax.set_aspect(1)
ax.set_xlim([0, 1200])
ax.set_ylim([0, 400])
| bsd-3-clause | Python |
cc486de5b7d705f31f05e34e77e698fbec953d4f | change port | kevinburke/local-servers,kevinburke/local-servers | plist.py | plist.py | import argparse
import os
import subprocess
from jinja2 import Template
GODOC_DEFAULT_PORT = 6061
parser = argparse.ArgumentParser(description='Produce configurable plist files')
parser.add_argument('template', help='The location of the plist template')
parser.add_argument('--port', help='Which port the service should listen on')
current_directory = os.path.dirname(os.path.abspath(__file__))
args = parser.parse_args()
if args.template == "go":
port = args.port or GODOC_DEFAULT_PORT
template = Template(open('templates/godoc.plist.template').read())
try:
godoc_binary = subprocess.check_output(["which", "godoc"]).strip()
except subprocess.CalledProcessError as e:
print "\nCould not find godoc on path! Install godoc to use the godoc server\n"
raise
print template.render(godoc_binary=godoc_binary, godoc_port=port)
if args.template == "nginx":
try:
nginx_binary = subprocess.check_output(["which", "nginx"]).strip()
except subprocess.CalledProcessError as e:
print "\nCould not find godoc on path! Install godoc to use the godoc server\n"
raise
template = Template(open('templates/nginx.plist.template').read())
print template.render(nginx_binary=nginx_binary,
current_directory=current_directory)
if args.template == "ipython":
home = subprocess.check_output("echo $HOME", shell=True).strip()
template = Template(open('templates/ipython.plist.template').read())
print template.render(home=home,
current_directory=current_directory)
| import argparse
import os
import subprocess
from jinja2 import Template
GODOC_DEFAULT_PORT = 6060
parser = argparse.ArgumentParser(description='Produce configurable plist files')
parser.add_argument('template', help='The location of the plist template')
parser.add_argument('--port', help='Which port the service should listen on')
current_directory = os.path.dirname(os.path.abspath(__file__))
args = parser.parse_args()
if args.template == "go":
port = args.port or GODOC_DEFAULT_PORT
template = Template(open('templates/godoc.plist.template').read())
try:
godoc_binary = subprocess.check_output(["which", "godoc"]).strip()
except subprocess.CalledProcessError as e:
print "\nCould not find godoc on path! Install godoc to use the godoc server\n"
raise
print template.render(godoc_binary=godoc_binary, godoc_port=port)
if args.template == "nginx":
try:
nginx_binary = subprocess.check_output(["which", "nginx"]).strip()
except subprocess.CalledProcessError as e:
print "\nCould not find godoc on path! Install godoc to use the godoc server\n"
raise
template = Template(open('templates/nginx.plist.template').read())
print template.render(nginx_binary=nginx_binary,
current_directory=current_directory)
if args.template == "ipython":
home = subprocess.check_output("echo $HOME", shell=True).strip()
template = Template(open('templates/ipython.plist.template').read())
print template.render(home=home,
current_directory=current_directory)
| mit | Python |
bb3009a2f5ced069ffa8d3dec967d69bd7254483 | fix test_data_plugin to be more reliable | 20c/vodka,20c/vodka | tests/test_plugin.py | tests/test_plugin.py | import unittest
import time
import vodka.plugins
import vodka.data
import vodka.storage
import vodka
@vodka.plugin.register("test")
class PluginA(vodka.plugins.PluginBase):
pass
@vodka.plugin.register("timed_test")
class TimedPlugin(vodka.plugins.TimedPlugin):
def init(self):
self.counter = 0
def work(self):
self.counter += 1
@vodka.plugin.register("data_test")
class DataPlugin(vodka.plugins.DataPlugin):
def work(self):
data = {"data": [], "ts": time.time()}
return super().work(data)
class TestPlugin(unittest.TestCase):
def test_get_plugin_by_name(self):
expected = vodka.plugin.get_instance({"type": "test", "name": "a"})
plugin = vodka.plugins.get_plugin_by_name("a")
self.assertEqual(plugin, expected)
def test_get_plugin_class(self):
self.assertEqual(PluginA, vodka.plugins.get_plugin_class("test"))
class TestTimedPlugin(unittest.TestCase):
def test_run(self):
plugin = vodka.plugin.get_instance({"type": "timed_test", "interval": 0.01})
vodka.start(thread_workers=[plugin])
time.sleep(1)
plugin.stop()
self.assertGreater(plugin.counter, 1)
class TestDataPlugin(unittest.TestCase):
def test_run(self):
vodka.data.data_types.instantiate_from_config(
[
{
"type": "data_test",
"handlers": [{"type": "store", "container": "list", "limit": 10}],
}
]
)
plugin = vodka.plugin.get_instance(
{"type": "data_test", "interval": 0.01, "data": "data_test"}
)
vodka.start(thread_workers=[plugin])
time.sleep(1)
self.assertEqual(len(vodka.storage.get("data_test")), 10)
for item in vodka.storage.get("data_test"):
self.assertEqual("data" in item, True)
self.assertEqual("ts" in item, True)
| import unittest
import time
import vodka.plugins
import vodka.data
import vodka.storage
import vodka
@vodka.plugin.register("test")
class PluginA(vodka.plugins.PluginBase):
pass
@vodka.plugin.register("timed_test")
class TimedPlugin(vodka.plugins.TimedPlugin):
def init(self):
self.counter = 0
def work(self):
self.counter += 1
@vodka.plugin.register("data_test")
class DataPlugin(vodka.plugins.DataPlugin):
def work(self):
data = {"data": [], "ts": time.time()}
return super().work(data)
class TestPlugin(unittest.TestCase):
def test_get_plugin_by_name(self):
expected = vodka.plugin.get_instance({"type": "test", "name": "a"})
plugin = vodka.plugins.get_plugin_by_name("a")
self.assertEqual(plugin, expected)
def test_get_plugin_class(self):
self.assertEqual(PluginA, vodka.plugins.get_plugin_class("test"))
class TestTimedPlugin(unittest.TestCase):
def test_run(self):
plugin = vodka.plugin.get_instance({"type": "timed_test", "interval": 0.01})
vodka.start(thread_workers=[plugin])
time.sleep(1)
plugin.stop()
self.assertGreater(plugin.counter, 1)
class TestDataPlugin(unittest.TestCase):
def test_run(self):
vodka.data.data_types.instantiate_from_config(
[
{
"type": "data_test",
"handlers": [{"type": "store", "container": "list", "limit": 10}],
}
]
)
plugin = vodka.plugin.get_instance(
{"type": "data_test", "interval": 0.01, "data": "data_test"}
)
vodka.start(thread_workers=[plugin])
time.sleep(0.3)
self.assertEqual(len(vodka.storage.get("data_test")), 10)
for item in vodka.storage.get("data_test"):
self.assertEqual("data" in item, True)
self.assertEqual("ts" in item, True)
| apache-2.0 | Python |
66b1c6ff5acde60fd40c7832786abb38ff40a6fe | use more idiomatic IDAPython APIs for enum Segments | williballenthin/python-idb | tests/test_issue29.py | tests/test_issue29.py | import os.path
import idb
def test_issue29():
'''
demonstrate GetManyBytes can retrieve the entire .text section
see github issue #29 for the backstory.
'''
cd = os.path.dirname(__file__)
idbpath = os.path.join(cd, 'data', 'issue29', 'issue29.i64')
with idb.from_file(idbpath) as db:
api = idb.IDAPython(db)
seg = api.idc.FirstSeg()
while seg:
name = api.idc.SegName(seg)
start = api.idc.SegStart(seg)
end = api.idc.SegEnd(seg)
if name == '.text':
# should not fail at address 0x180072200
textBytes = api.idc.GetManyBytes(start, end - start)
assert len(textBytes) == end - start
seg = api.idc.NextSeg(seg)
| import os.path
import idb
def test_issue29():
'''
demonstrate GetManyBytes can retrieve the entire .text section
see github issue #29 for the backstory.
'''
cd = os.path.dirname(__file__)
idbpath = os.path.join(cd, 'data', 'issue29', 'issue29.i64')
with idb.from_file(idbpath) as db:
api = idb.IDAPython(db)
segments = idb.analysis.Segments(db).segments
segStrings = idb.analysis.SegStrings(db).strings
for seg in segments.values():
name = segStrings[seg.name_index]
segLen = seg.endEA - seg.startEA
if name == '.text':
# should not fail at address 0x180072200
textBytes = api.idc.GetManyBytes(seg.startEA, segLen)
assert len(textBytes) == segLen
break
| apache-2.0 | Python |
a48f651435d212907cb34164470a9028ba161300 | Add a test for vasp_raman.T | raman-sc/VASP,raman-sc/VASP | test/test_vasp_raman.py | test/test_vasp_raman.py | # -*- coding: utf-8 -*-
import os
import time
import unittest
import vasp_raman
class VaspRamanTester(unittest.TestCase):
def testT(self):
m = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
mref = [[1, 4, 7], [2, 5, 8], [3, 6, 9]]
mres = vasp_raman.T(m)
for i in range(len(m)):
self.assertSequenceEqual(mref[i], mres[i])
| # -*- coding: utf-8 -*-
import os
import time
import unittest
import vasp_raman
class VaspRamanTester(unittest.TestCase):
def testMAT_m_VEC(self):
self.assertTrue(False)
| mit | Python |
2866efdfffe802755a9acc624af1610349359cb3 | Enable a loop test | chrivers/pyjaco,chrivers/pyjaco,buchuki/pyjaco,mattpap/py2js,chrivers/pyjaco,qsnake/py2js,mattpap/py2js,buchuki/pyjaco,qsnake/py2js,buchuki/pyjaco | tests/test_run_js.py | tests/test_run_js.py | import os
from test_compile_js import (f1, f2, f3, f3b, f3c, f3d, f3e, f4, f5, ifs1,
ifs2, ifs3, ifs4, loop1, tuple1)
def test(func, run):
run_file = "/tmp/run.js"
defs = open("defs.js").read()
with open(run_file, "w") as f:
f.write(defs)
f.write("\n")
f.write(str(func))
f.write("\n")
f.write(run)
r = os.system('js -f %s' % run_file)
assert r == 0
test(f1, "assert(f1(3) == 3);")
test(f1, "assert(f1(3) != 4);")
test(f2, "assert(f2(3) == 8);")
test(f3, "assert(f3(3) == -1);")
test(f3b, "assert(f3b(3) == -1);")
#test(f3c, "assert(f3c(3) == -1);")
test(f3d, "assert(f3d(3) == 20);")
test(f3e, "assert(f3e(3) == 9);")
test(f4, "assert(f4(true) == 5);")
test(f4, "assert(f4(false) == 6);")
test(f5, "assert(f5(true) == 2);")
test(f5, "assert(f5(false) == 0);")
test(ifs1, "ifs1(1);")
test(ifs2, "ifs2(1);")
test(ifs3, "ifs3(1);")
test(ifs4, "ifs4(1);")
#test(tuple1, "assert(tuple1(3) == 12);")
test(loop1, "assert(loop1(4) == 6);")
| import os
from test_compile_js import (f1, f2, f3, f3b, f3c, f3d, f3e, f4, f5, ifs1,
ifs2, ifs3, ifs4, loop1, tuple1)
def test(func, run):
run_file = "/tmp/run.js"
defs = open("defs.js").read()
with open(run_file, "w") as f:
f.write(defs)
f.write("\n")
f.write(str(func))
f.write("\n")
f.write(run)
r = os.system('js -f %s' % run_file)
assert r == 0
test(f1, "assert(f1(3) == 3);")
test(f1, "assert(f1(3) != 4);")
test(f2, "assert(f2(3) == 8);")
test(f3, "assert(f3(3) == -1);")
test(f3b, "assert(f3b(3) == -1);")
#test(f3c, "assert(f3c(3) == -1);")
test(f3d, "assert(f3d(3) == 20);")
test(f3e, "assert(f3e(3) == 9);")
test(f4, "assert(f4(true) == 5);")
test(f4, "assert(f4(false) == 6);")
test(f5, "assert(f5(true) == 2);")
test(f5, "assert(f5(false) == 0);")
test(ifs1, "ifs1(1);")
test(ifs2, "ifs2(1);")
test(ifs3, "ifs3(1);")
test(ifs4, "ifs4(1);")
#test(tuple1, "assert(tuple1(3) == 12);")
#test(loop1, "assert(loop1(4) == 0);")
# this is the correct result:
#test(loop1, "assert(loop1(4) == 6);")
| mit | Python |
dc0c56445a40161484e30985e8baf43086088b48 | add import test and author check | yoon-gu/Mozart | tests/test_sample.py | tests/test_sample.py | import unittest
class TestStocMethods(unittest.TestCase):
def test_upper(self):
self.assertEqual('foo'.upper(), 'FOO')
def test_isupper(self):
self.assertTrue('FOO'.isupper())
self.assertFalse('Foo'.isupper())
def test_test(self):
self.assertTrue(True)
def test_import(self):
import mozart as mz
self.assertTrue(True)
def test_authors(self):
import mozart as mz
authors = ('Yoon-gu Hwang <yz0624@gmail.com>', 'Dong-Wook Shin <dwshin.yonsei@gmail.com>', 'Ji-Yeon Suh <suh91919@naver.com>')
self.assertEqual(mz.__author__, authors) | import unittest
import mozart as mz
import numpy as np
class TestStocMethods(unittest.TestCase):
def test_upper(self):
self.assertEqual('foo'.upper(), 'FOO')
def test_isupper(self):
self.assertTrue('FOO'.isupper())
self.assertFalse('Foo'.isupper())
def test_test(self):
self.assertTrue(True) | mit | Python |
927e42b9230008b700a1cf63ee518c4217668f38 | Update test_get_music, includig test for one dimensional array. | DataSounds/DataSounds | tests/test_sounds.py | tests/test_sounds.py | #!/usr/bin/env python
import numpy as np
from DataSounds.sounds import build_scale, note_number, note_name, get_music
def test_build_scale_major():
scale = build_scale('C', 'major', 1)
assert scale == 'c d e f g a b'.split()
def test_build_scale_minor():
scale = build_scale('A', 'minor', 1)
assert scale == 'a b c d e f g'.split()
def test_build_scale_pentatonic():
scale = build_scale('C', 'pentatonic', 1)
assert scale == ['c', 'd', 'e', 'g', 'a']
def test_build_scale_pentatonic_two_octaves():
scale = build_scale('D', 'pentatonic', 2)
assert scale == ['d', 'e', 'f#', 'a', 'b', "d'", "e'", "f#'", "a'", "b'"]
def test_note_number():
scale = build_scale('C', 'major', 1)
assert all(note_number([1, 2, 3, 4], scale) == [0, 2, 4, 6])
def test_build_note_name():
scale = build_scale('C', 'major', 1)
notes = note_number([1, 2, 3, 4, np.nan], scale)
assert [note_name(x, scale) for x in notes] == ['c', 'e', 'g', 'b', 'r']
def test_get_music():
series = [np.random.rand(24).reshape(2, 12), np.random.rand(24)]
for item in series:
m = get_music(item, key='D', mode='pentatonic',
octaves=2, instruments=[0, 23])
# TODO: how to test this? m is a StringIO containing a MIDI file.
# Load the MIDI file and check what was generated?
| #!/usr/bin/env python
import numpy as np
from DataSounds.sounds import build_scale, note_number, note_name, get_music
def test_build_scale_major():
scale = build_scale('C', 'major', 1)
assert scale == 'c d e f g a b'.split()
def test_build_scale_minor():
scale = build_scale('A', 'minor', 1)
assert scale == 'a b c d e f g'.split()
def test_build_scale_pentatonic():
scale = build_scale('C', 'pentatonic', 1)
assert scale == ['c', 'd', 'e', 'g', 'a']
def test_build_scale_pentatonic_two_octaves():
scale = build_scale('D', 'pentatonic', 2)
assert scale == ['d', 'e', 'f#', 'a', 'b', "d'", "e'", "f#'", "a'", "b'"]
def test_note_number():
scale = build_scale('C', 'major', 1)
assert all(note_number([1, 2, 3, 4], scale) == [0, 2, 4, 6])
def test_build_note_name():
scale = build_scale('C', 'major', 1)
notes = note_number([1, 2, 3, 4, np.nan], scale)
assert [note_name(x, scale) for x in notes] == ['c', 'e', 'g', 'b', 'r']
def test_get_music():
series = np.random.rand(24).reshape(2, 12)
m = get_music(series, key='D', mode='pentatonic',
octaves=2, instruments=[0, 23])
# TODO: how to test this? m is a StringIO containing a MIDI file.
# Load the MIDI file and check what was generated?
| bsd-3-clause | Python |
e082a8ba7509d4bb1988c0d1a5778cbd74211b15 | Add log | liupeirong/azure-quickstart-templates,pateixei/azure-quickstart-templates,hlmstone/stone-china-azure-quickstart-templates,sidkri/azure-quickstart-templates,cr0550ver/azure-quickstart-templates,philon-msft/azure-quickstart-templates,cerdmann-pivotal/azure-quickstart-templates,hrboyceiii/azure-quickstart-templates,matheusbertuco/azure-quickstart-templates,mumian/azure-quickstart-templates,Teodelas/azure-quickstart-templates,Kegeruneku/azure-quickstart-templates,tagliateller/azure-quickstart-templates,arsenvlad/azure-quickstart-templates,CharlPels/azure-quickstart-templates,jmspring/azure-quickstart-templates,YidingZhou/azure-quickstart-templates,JF6/azure-quickstart-templates,ishtein/azure-public,jumbucks/azure-quickstart-templates,willhighland/azure-quickstart-templates,xtremedata/azure-quickstart-templates,jimdial/azure-quickstart-templates,SunBuild/azure-quickstart-templates,iouri-s/azure-quickstart-templates,SunBuild/azure-quickstart-templates,akurmi/azure-quickstart-templates,rkotti/azure-quickstart-templates,ninarn/azure-quickstart-templates,tibor19/azure-quickstart-templates,ishtein/azure-public,MCKLMT/azure-quickstart-templates,jmservera/azure-quickstart-templates,251744647/azure-quickstart-templates,SvenLauterbach/azure-quickstart-templates,cerdmann-pivotal/azure-quickstart-templates,zechariahks/azure-quickstart-templates,spcrux/azure-quickstart-templates,jimdial/azure-quickstart-templates,zhongyi-zhang/azure-quickstart-templates,benofben/azure-quickstart-templates,andrewelizondo/azure-quickstart-templates,ned1313/azure-quickstart-templates,rkotti/azure-quickstart-templates,rihannin/azure-template,zuojihong/azure-quickstart-templates,dipakmsft/azure-quickstart-templates,Thorlandus/azure-quickstart-templates,zhongyi-zhang/azure-quickstart-templates,m1028639/azure-quickstart-templates,VIJAYDUSI/azure-quickstart-templates,neudesic/azure-quickstart-templates,rivierni/azure-quickstart-templates,sbtron/azure-quickstart-templates,MSBrett/azure-quickstart-templates,arroyc/azure-quickstart-templates,sabbour/azure-quickstart-templates,singhkays/azure-quickstart-templates,jreid143/azure-quickstart-templates,saschagottfried/azure-quickstart-templates,iouri-s/azure-quickstart-templates,hlmstone/stone-china-azure-quickstart-templates,AsraFatima/XenApp-Marketplace,sebastus/azure-quickstart-templates,SudhakaraReddyEvuri/azure-quickstart-templates,iwooden/azure-quickstart-templates,zhongyi-zhang/azure-quickstart-templates,nilaydshah/azure-quickstart-templates,introp-software/azure-quickstart-templates,irwinwilliams/azure-quickstart-templates,smithab/azure-quickstart-templates,SudhakaraReddyEvuri/azure-quickstart-templates,stepsic-microsoft-com/azure-quickstart-templates,maniSbindra/azure-quickstart-templates,satyarapelly/azure-quickstart-templates,moisedo/azure-quickstart-templates,vglafirov/azure-quickstart-templates,grwilson/azure-quickstart-templates,transcanada/azure-quickstart-templates,JF6/azure-quickstart-templates,irwinwilliams/azure-quickstart-templates,iwooden/azure-quickstart-templates,iouri-s/azure-quickstart-templates,gbowerman/azure-quickstart-templates,hlmstone/stone-china-azure-quickstart-templates,lukehoban/azure-quickstart-templates,scrypter/azure-quickstart-templates,hongbincao/azure-quickstart-templates,m1028639/azure-quickstart-templates,timfpark/azure-quickstart-templates,chenriksson/azure-quickstart-templates,forensiclogic/azure-quickstart-templates,robotechredmond/azure-quickstart-templates,slapointe/azure-quickstart-templates,ritazh/azure-quickstart-templates,Undo1/azure-quickstart-templates,eissi/azure-quickstart-templates,sgoings/azure-quickstart-templates,svk2/azure-quickstart-templates,ttmc/azure-quickstart-templates,xiaoyingLJ/azure-quickstart-templates,MSSedusch/azure-quickstart-templates,stevenlivz/azure-quickstart-templates,eosiowy/azure-quickstart-templates,arsenvlad/azure-quickstart-templates,stepsic-microsoft-com/azure-quickstart-templates,johndowns/azure-quickstart-templates,asheniam/azure-quickstart-templates,irwins/azure-quickstart-templates,tibor19/azure-quickstart-templates,tibor19/azure-quickstart-templates,shzhai/azure-quickstart-templates,mukulkgupta/azure-quickstart-templates,nzthiago/azure-quickstart-templates,telmosampaio/azure-quickstart-templates,MSBrett/azure-quickstart-templates,eshaparmar/azure-quickstart-templates,travismc1/azure-quickstart-templates,stevenlivz/azure-quickstart-templates,willhighland/azure-quickstart-templates,cerdmann-pivotal/azure-quickstart-templates,SunBuild/azure-quickstart-templates,bdanse/azure-quickstart-templates,andrewelizondo/azure-quickstart-templates,sazeesha/azure-quickstart-templates,samhodgkinson/azure-quickstart-templates,jmspring/azure-quickstart-templates,iamshital/azure-quickstart-templates,ALM-Rangers/azure-quickstart-templates,olandese/azure-quickstart-templates,marleyg/azure-quickstart-templates,apachipa/Azure-JSON-Custom,tibor19/azure-quickstart-templates,philon-msft/azure-quickstart-templates,emondek/azure-quickstart-templates,pelagos/azure-quickstart-templates,CharlPels/azure-quickstart-templates,Thorlandus/azure-quickstart-templates,bingosummer/azure-quickstart-templates,tagliateller/azure-quickstart-templates,devopsteamberlin/azure-quickstart-templates,ninarn/azure-quickstart-templates,sedouard/azure-quickstart-templates,uday31in/azure-quickstart-templates,rkotti/azure-quickstart-templates,iwooden/azure-quickstart-templates,jasonbw/azure-quickstart-templates,ExchMaster/azure-quickstart-templates,samhodgkinson/azure-quickstart-templates,netwmr01/azure-quickstart-templates,olandese/azure-quickstart-templates,transcanada/azure-quickstart-templates,jmservera/azure-quickstart-templates,benjal/azure-quickstart-templates,jmspring/azure-quickstart-templates,MSSedusch/azure-quickstart-templates,SudhakaraReddyEvuri/azure-quickstart-templates,benjal/azure-quickstart-templates,marleyg/azure-quickstart-templates,kotzenjh/DCOS-JSON,Teodelas/azure-quickstart-templates,puneetsaraswat/azure-quickstart-templates,kirpasingh/azure-quickstart-templates,klondon71/azure-quickstart-templates,evertonmc/azure-quickstart-templates,hglkrijger/azure-quickstart-templates,bhummerstone/azure-quickstart-templates,anthony-murphy/azure-quickstart-templates,simongdavies/azure-quickstart-templates,singhkay/azure-quickstart-templates,ChackDan/azure-quickstart-templates,kenazk/azure-quickstart-templates,AsraFatima/azure-quickstart-templates,rgardler/azure-quickstart-templates,jamesdixon/azure-quickstart-templates,mukulkgupta/azure-quickstart-templates,gatneil/azure-quickstart-templates,VIJAYDUSI/azure-quickstart-templates,xiaoyingLJ/azure-quickstart-templates,adhurwit/azure-quickstart-templates,puneetsaraswat/azure-quickstart-templates,marleyg/azure-quickstart-templates,robklausems/azure-quickstart-templates,MahendraAgrawal/azure-quickstart-templates,iamshital/azure-quickstart-templates,anhowe/azure-quickstart-templates,pelagos/azure-quickstart-templates,sedouard/azure-quickstart-templates,apachipa/Azure-JSON-Custom,haritshah33/azuretemplates,robotechredmond/azure-quickstart-templates,seanbamsft/azure-quickstart-templates,madhana/azure-quickstart-templates,neudesic/azure-quickstart-templates,moisedo/azure-quickstart-templates,steved0x/azure-quickstart-templates,artemharutyunyan/azure-quickstart-templates,slapointe/azure-quickstart-templates,RuudBorst/azure-quickstart-templates,rgardler/azure-quickstart-templates,maniSbindra/azure-quickstart-templates,bhummerstone/azure-quickstart-templates,singhkays/azure-quickstart-templates,SudhakaraReddyEvuri/azure-quickstart-templates,MSBrett/azure-quickstart-templates,maniSbindra/azure-quickstart-templates,krnese/azure-quickstart-templates,vicperdana/azure-quickstart-templates,Volkanco/azure-quickstart-templates,ALM-Rangers/azure-quickstart-templates,gossion/azure-quickstart-templates,tracsman/azure-quickstart-templates,ukinahan/azure,ChackDan/azure-quickstart-templates,rarsan/azure-quickstart-templates,robotechredmond/azure-quickstart-templates,cavanes/azure-quickstart-templates,satyarapelly/azure-quickstart-templates,xiaoyingLJ/azure-quickstart-templates,cdavid/azure-quickstart-templates,MSSedusch/azure-quickstart-templates,ChackDan/azure-quickstart-templates,alibaloch/azure-quickstart-templates,eissi/azure-quickstart-templates,irwinwilliams/azure-quickstart-templates,robklausems/azure-quickstart-templates,tcsatheesh/azure-quickstart-templates,hrboyceiii/azure-quickstart-templates,jasonbw/azure-quickstart-templates,adhurwit/azure-quickstart-templates,jreid143/azure-quickstart-templates,grandhiramesh/azure-quickstart-templates,ToruMakabe/azure-quickstart-templates,johndowns/azure-quickstart-templates,mukulkgupta/azure-quickstart-templates,saschagottfried/azure-quickstart-templates,amitsriva/azure-quickstart-templates,andyliuliming/azure-quickstart-templates,spcrux/azure-quickstart-templates,hausdorff/azure-quickstart-templates,beni55/azure-quickstart-templates,ne-msft/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,gossion/azure-quickstart-templates,jreid143/azure-quickstart-templates,realcodywburns/azure-quickstart-templates,sunbinzhu/azure-quickstart-templates,mathieu-benoit/azure-quickstart-templates,oignatenko/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,jmservera/azure-quickstart-templates,netwmr01/azure-quickstart-templates,tomya/mytemplates,Thorlandus/azure-quickstart-templates,AvyanConsultingCorp/azure-quickstart-templates,ShawInnes/azure-quickstart-templates,pcgeek86/azure-quickstart-templates,tagliateller/azure-quickstart-templates,rayapa/azure-quickstart-templates,moisedo/azure-quickstart-templates,forensiclogic/azure-quickstart-templates,BorisB2015/azure-quickstart-templates,Quetzacoalt91/azure-quickstart-templates,georgewallace/azure-quickstart-templates,garimakhulbe/azure-quickstart-templates,iwooden/azure-quickstart-templates,uday31in/azure-quickstart-templates,adamnovak/azure-quickstart-templates,jmspring/azure-quickstart-templates,VybavaRamadoss/samples,Quetzacoalt91/azure-quickstart-templates,yuezh/azure-quickstart-templates,ne-msft/azure-quickstart-templates,simongdavies/azure-quickstart-templates,Alan-AcutePath/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,hglkrijger/azure-quickstart-templates,jackyjngwn/azure-quickstart-templates,BorisB2015/azure-quickstart-templates,Constellation-Solutions/azure-quickstart-templates,zuojihong/azure-quickstart-templates,introp-software/azure-quickstart-templates,neudesic/azure-quickstart-templates,pateixei/azure-quickstart-templates,eissi/azure-quickstart-templates,travismc1/azure-quickstart-templates,moisedo/azure-quickstart-templates,krnese/azure-quickstart-templates,asheniam/azure-quickstart-templates,alinefr/azure-quickstart-templates,mmarch/azure-quickstart-templates,blockapps/azure-quickstart-templates,ALM-Rangers/azure-quickstart-templates,felixrieseberg/azure-quickstart-templates,RuudBorst/azure-quickstart-templates,haritshah33/azuretemplates,bharathsreenivas/azure-quickstart-templates,rgardler/azure-quickstart-templates,simongdavies/azure-quickstart-templates,samhodgkinson/azure-quickstart-templates,chenriksson/azure-quickstart-templates,SunBuild/azure-quickstart-templates,YidingZhou/azure-quickstart-templates,bmoore-msft/azure-quickstart-templates,willhighland/azure-quickstart-templates,jv1992/pqr,251744647/azure-quickstart-templates,beni55/azure-quickstart-templates,ToruMakabe/azure-quickstart-templates,jimlane/azure-quickstart-templates,alinefr/azure-quickstart-templates,ShawInnes/azure-quickstart-templates,ishtein/azure-public,robert-bakker/azure-quickstart-templates,alinefr/azure-quickstart-templates,tibor19/azure-quickstart-templates,mumian/azure-quickstart-templates,OzGitele/azure-quickstart-templates,grwilson/azure-quickstart-templates,haritshah33/azuretemplates,AsraFatima/azure-quickstart-templates,hausdorff/azure-quickstart-templates,tobyscales/azure-quickstart-templates,mcastany/azure-quickstart-templates,svk2/azure-quickstart-templates,mrkeng/azure-quickstart-templates,sunbinzhu/azure-quickstart-templates,scrypter/azure-quickstart-templates,AlekseiPolkovnikov/azure-quickstart-templates,liupeirong/azure-quickstart-templates,anhowe/azure-quickstart-templates,gossion/azure-quickstart-templates,matheusbertuco/azure-quickstart-templates,YidingZhou/azure-quickstart-templates,johndowns/azure-quickstart-templates,ExchMaster/azure-quickstart-templates,OzGitele/azure-quickstart-templates,ezubatov/azure-quickstart-templates,forensiclogic/azure-quickstart-templates,krkhan/azure-quickstart-templates,felixrieseberg/azure-quickstart-templates,Envera/azure-quickstart-templates,251744647/azure-quickstart-templates,mrkeng/azure-quickstart-templates,ShubhaVijayasarathy/azure-quickstart-templates,steved0x/azure-quickstart-templates,grandhiramesh/azure-quickstart-templates,AvyanConsultingCorp/azure-quickstart-templates,kenazk/azure-quickstart-templates,jimlane/azure-quickstart-templates,akurmi/azure-quickstart-templates,rsponholtz/azure-quickstart-templates,Volkanco/azure-quickstart-templates,Ercenk/azure-quickstart-templates,ChackDan/azure-quickstart-templates,sazeesha/azure-quickstart-templates,realcodywburns/azure-quickstart-templates,Teodelas/azure-quickstart-templates,georgewallace/azure-quickstart-templates,CJRocK/AzureSQLalwaysOn,beni55/azure-quickstart-templates,sebastus/azure-quickstart-templates,mabsimms/azure-quickstart-templates,arsenvlad/azure-quickstart-templates,AlekseiPolkovnikov/azure-quickstart-templates,Azure/azure-quickstart-templates,SunBuild/azure-quickstart-templates,forensiclogic/azure-quickstart-templates,singhkays/azure-quickstart-templates,stepsic-microsoft-com/azure-quickstart-templates,spcrux/azure-quickstart-templates,cerdmann-pivotal/azure-quickstart-templates,satyarapelly/azure-quickstart-templates,xiaoyingLJ/azure-quickstart-templates,emondek/azure-quickstart-templates,CalCof/azure-quickstart-templates,alvadb/azure-quickstart-templates,sbtron/azure-quickstart-templates,shzhai/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,asheniam/azure-quickstart-templates,andyliuliming/azure-quickstart-templates,spcrux/azure-quickstart-templates,liupeirong/azure-quickstart-templates,jv1992/pqr,haritshah33/azuretemplates,pdiniz13/azure-quickstart-templates,Supraconductor/azure-quickstart-templates,willhighland/azure-quickstart-templates,Envera/azure-quickstart-templates,samhodgkinson/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,tracsman/azure-quickstart-templates,irwinwilliams/azure-quickstart-templates,ahmetalpbalkan/azure-quickstart-templates,sabbour/azure-quickstart-templates,introp-software/azure-quickstart-templates,neudesic/azure-quickstart-templates,pateixei/azure-quickstart-templates,evertonmc/azure-quickstart-templates,jamesdixon/azure-quickstart-templates,jackyjngwn/azure-quickstart-templates,hausdorff/azure-quickstart-templates,eissi/azure-quickstart-templates,alibaloch/azure-quickstart-templates,Supraconductor/azure-quickstart-templates,krnese/azure-quickstart-templates,tfitzmac/azure-quickstart-templates,maneta/azure-quickstart-templates,AvyanConsultingCorp/azure-quickstart-templates,nzthiago/azure-quickstart-templates,smartpcr/azure-quickstart-templates,philon-msft/azure-quickstart-templates,hongbincao/azure-quickstart-templates,garimakhulbe/azure-quickstart-templates,honcao/azure-quickstart-templates,jv1992/pqr,jeffwilcox/azure-quickstart-templates,iouri-s/azure-quickstart-templates,adamnovak/azure-quickstart-templates,smithab/azure-quickstart-templates,gabrtv/azure-quickstart-templates,jv1992/pqr,klondon71/azure-quickstart-templates,jackyjngwn/azure-quickstart-templates,travismc1/azure-quickstart-templates,yuezh/azure-quickstart-templates,ttmc/azure-quickstart-templates,bharathsreenivas/azure-quickstart-templates,MSSedusch/azure-quickstart-templates,Undo1/azure-quickstart-templates,benofben/azure-quickstart-templates,bdanse/azure-quickstart-templates,SvenLauterbach/azure-quickstart-templates,VIJAYDUSI/azure-quickstart-templates,CJRocK/AzureSQLalwaysOn,hrboyceiii/azure-quickstart-templates,willhighland/azure-quickstart-templates,irwinwilliams/azure-quickstart-templates,amitsriva/azure-quickstart-templates,eissi/azure-quickstart-templates,MahendraAgrawal/azure-quickstart-templates,transcanada/azure-quickstart-templates,knithinc/azure-quickstart-templates,samhodgkinson/azure-quickstart-templates,MahendraAgrawal/azure-quickstart-templates,rlfmendes/azure-quickstart-templates,Jaganod/azure-quickstart-templates,jreid143/azure-quickstart-templates,kotzenjh/DCOS-JSON,rivierni/azure-quickstart-templates,chenriksson/azure-quickstart-templates,timfpark/azure-quickstart-templates,vicperdana/azure-quickstart-templates,iamshital/azure-quickstart-templates,Envera/azure-quickstart-templates,mmarch/azure-quickstart-templates,bcdev-/azure-quickstart-templates,castilhoa/azure,neonquest/azure-quickstart-templates,juvchan/azure-quickstart-templates,svk2/azure-quickstart-templates,gbowerman/azure-quickstart-templates,jwendl/azure-quickstart-templates,robrankin/azure-quickstart-templates,knithinc/azure-quickstart-templates,anweiss/azure-quickstart-templates,Nepomuceno/azure-quickstart-templates,bmoore-msft/azure-quickstart-templates,nzthiago/azure-quickstart-templates,Quetzacoalt91/azure-quickstart-templates,simongdavies/azure-quickstart-templates,aarsan/azure-quickstart-templates,mabsimms/azure-quickstart-templates,slapointe/azure-quickstart-templates,bhummerstone/azure-quickstart-templates,grandhiramesh/azure-quickstart-templates,kirpasingh/azure-quickstart-templates,Jaganod/azure-quickstart-templates,hlmstone/stone-china-azure-quickstart-templates,tomya/mytemplates,sidkri/azure-quickstart-templates,adhurwit/azure-quickstart-templates,iouri-s/azure-quickstart-templates,alibaloch/azure-quickstart-templates,beni55/azure-quickstart-templates,arroyc/azure-quickstart-templates,nilaydshah/azure-quickstart-templates,zhongyi-zhang/azure-quickstart-templates,bwanner/azure-quickstart-templates,tagliateller/azure-quickstart-templates,realcodywburns/azure-quickstart-templates,simongdavies/azure-quickstart-templates,singhkay/azure-quickstart-templates,sazeesha/azure-quickstart-templates,liupeirong/azure-quickstart-templates,sidkri/azure-quickstart-templates,MSSedusch/azure-quickstart-templates,juvchan/azure-quickstart-templates,lizzha/azure-quickstart-templates,bwanner/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,daltskin/azure-quickstart-templates,iamshital/azure-quickstart-templates,gossion/azure-quickstart-templates,MSSedusch/azure-quickstart-templates,sunbinzhu/azure-quickstart-templates,sunbinzhu/azure-quickstart-templates,MSBrett/azure-quickstart-templates,felixrieseberg/azure-quickstart-templates,devopsteamberlin/azure-quickstart-templates,Kegeruneku/azure-quickstart-templates,bingosummer/azure-quickstart-templates,satyarapelly/azure-quickstart-templates,mathieu-benoit/azure-quickstart-templates,rivierni/azure-quickstart-templates,eshaparmar/azure-quickstart-templates,CalCof/azure-quickstart-templates,bwanner/azure-quickstart-templates,tfitzmac/azure-quickstart-templates,samhodgkinson/azure-quickstart-templates,Azure/azure-quickstart-templates,sunbinzhu/azure-quickstart-templates,ALM-Rangers/azure-quickstart-templates,grandhiramesh/azure-quickstart-templates,cavanes/azure-quickstart-templates,sabbour/azure-quickstart-templates,anweiss/azure-quickstart-templates,squillace/azure-quickstart-templates,AsraFatima/XenApp-Marketplace,kenazk/azure-quickstart-templates,CalCof/azure-quickstart-templates,krkhan/azure-quickstart-templates,BedeGaming/azure-quickstart-templates,robotechredmond/azure-quickstart-templates,ShubhaVijayasarathy/azure-quickstart-templates,eosiowy/azure-quickstart-templates,smithab/azure-quickstart-templates,jimdial/azure-quickstart-templates,vglafirov/azure-quickstart-templates,arsenvlad/azure-quickstart-templates,Envera/azure-quickstart-templates,rayapa/azure-quickstart-templates,Supraconductor/azure-quickstart-templates,grwilson/azure-quickstart-templates,benofben/azure-quickstart-templates,steved0x/azure-quickstart-templates,travismc1/azure-quickstart-templates,Azure/azure-quickstart-templates,madhana/azure-quickstart-templates,eissi/azure-quickstart-templates,AsraFatima/XenApp-Marketplace,andyliuliming/azure-quickstart-templates,tfitzmac/azure-quickstart-templates,ytechie/azure-quickstart-templates,hrboyceiii/azure-quickstart-templates,evertonmc/azure-quickstart-templates,adamnovak/azure-quickstart-templates,jasonbw/azure-quickstart-templates,jackyjngwn/azure-quickstart-templates,bcdev-/azure-quickstart-templates,SunBuild/azure-quickstart-templates,ahmetalpbalkan/azure-quickstart-templates,eosiowy/azure-quickstart-templates,rarsan/azure-quickstart-templates,ttmc/azure-quickstart-templates,MSBrett/azure-quickstart-templates,Envera/azure-quickstart-templates,netwmr01/azure-quickstart-templates,MahendraAgrawal/azure-quickstart-templates,robrankin/azure-quickstart-templates,Supraconductor/azure-quickstart-templates,jwendl/azure-quickstart-templates,klondon71/azure-quickstart-templates,bcdev-/azure-quickstart-templates,Ercenk/azure-quickstart-templates,Teodelas/azure-quickstart-templates,zuojihong/azure-quickstart-templates,hglkrijger/azure-quickstart-templates,sazeesha/azure-quickstart-templates,rsponholtz/azure-quickstart-templates,haritshah33/azuretemplates,seanbamsft/azure-quickstart-templates,puneetsaraswat/azure-quickstart-templates,RuudBorst/azure-quickstart-templates,marleyg/azure-quickstart-templates,tagliateller/azure-quickstart-templates,bharathsreenivas/azure-quickstart-templates,krkhan/azure-quickstart-templates,krnese/azure-quickstart-templates,mmarch/azure-quickstart-templates,zechariahks/azure-quickstart-templates,sidkri/azure-quickstart-templates,jimdial/azure-quickstart-templates,nilaydshah/azure-quickstart-templates,knithinc/azure-quickstart-templates,Thorlandus/azure-quickstart-templates,ne-msft/azure-quickstart-templates,JF6/azure-quickstart-templates,singhkays/azure-quickstart-templates,AbelHu/azure-quickstart-templates,jwendl/azure-quickstart-templates,bhummerstone/azure-quickstart-templates,introp-software/azure-quickstart-templates,krkhan/azure-quickstart-templates,ttmc/azure-quickstart-templates,introp-software/azure-quickstart-templates,rsponholtz/azure-quickstart-templates,benofben/azure-quickstart-templates,alexstoddard/azure-quickstart-templates,iwooden/azure-quickstart-templates,harijayms/azure-quickstart-templates,haritshah33/azuretemplates,benofben/azure-quickstart-templates,realcodywburns/azure-quickstart-templates,sebastus/azure-quickstart-templates,ytechie/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,steved0x/azure-quickstart-templates,vicperdana/azure-quickstart-templates,sgoings/azure-quickstart-templates,BedeGaming/azure-quickstart-templates,xiaoyingLJ/azure-quickstart-templates,vglafirov/azure-quickstart-templates,lukehoban/azure-quickstart-templates,iouri-s/azure-quickstart-templates,singhkays/azure-quickstart-templates,ezubatov/azure-quickstart-templates,ExchMaster/azure-quickstart-templates,gbowerman/azure-quickstart-templates,squillace/azure-quickstart-templates,olandese/azure-quickstart-templates,MahendraAgrawal/azure-quickstart-templates,ritazh/azure-quickstart-templates,andykillinger/azure-quickstart-templates,jmspring/azure-quickstart-templates,robotechredmond/azure-quickstart-templates,rlfmendes/azure-quickstart-templates,ned1313/azure-quickstart-templates,benofben/azure-quickstart-templates,maneta/azure-quickstart-templates,andykillinger/azure-quickstart-templates,Nepomuceno/azure-quickstart-templates,RuudBorst/azure-quickstart-templates,cerdmann-pivotal/azure-quickstart-templates,simongdavies/hongbincaoazure,singhkays/azure-quickstart-templates,iamshital/azure-quickstart-templates,bhummerstone/azure-quickstart-templates,anthony-murphy/azure-quickstart-templates,alvadb/azure-quickstart-templates,BorisB2015/azure-quickstart-templates,anthony-murphy/azure-quickstart-templates,jimdial/azure-quickstart-templates,gossion/azure-quickstart-templates,ninarn/azure-quickstart-templates,mmarch/azure-quickstart-templates,AlekseiPolkovnikov/azure-quickstart-templates,asheniam/azure-quickstart-templates,olandese/azure-quickstart-templates,jackyjngwn/azure-quickstart-templates,Volkanco/azure-quickstart-templates,nzthiago/azure-quickstart-templates,travismc1/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,jwendl/azure-quickstart-templates,BorisB2015/azure-quickstart-templates,VIJAYDUSI/azure-quickstart-templates,zhongyi-zhang/azure-quickstart-templates,akurmi/azure-quickstart-templates,rnithish/MyOpenGit,anweiss/azure-quickstart-templates,MSSedusch/azure-quickstart-templates,ne-msft/azure-quickstart-templates,castilhoa/azure,ishtein/azure-public,rnithish/MyOpenGit,cdavid/azure-quickstart-templates,saschagottfried/azure-quickstart-templates,puneetsaraswat/azure-quickstart-templates,smartpcr/azure-quickstart-templates,irwinwilliams/azure-quickstart-templates,pdiniz13/azure-quickstart-templates,SudhakaraReddyEvuri/azure-quickstart-templates,ishtein/azure-public,CJRocK/AzureSQLalwaysOn,SunBuild/azure-quickstart-templates,juvchan/azure-quickstart-templates,saschagottfried/azure-quickstart-templates,moisedo/azure-quickstart-templates,jumbucks/azure-quickstart-templates,tracsman/azure-quickstart-templates,jarobey/azure-quickstart-templates,ritazh/azure-quickstart-templates,garimakhulbe/azure-quickstart-templates,jmservera/azure-quickstart-templates,georgewallace/azure-quickstart-templates,liupeirong/azure-quickstart-templates,iwooden/azure-quickstart-templates,knithinc/azure-quickstart-templates,ukinahan/azure,alexstoddard/azure-quickstart-templates,seanbamsft/azure-quickstart-templates,anweiss/azure-quickstart-templates,rgardler/azure-quickstart-templates,tobyscales/azure-quickstart-templates,ShubhaVijayasarathy/azure-quickstart-templates,rsponholtz/azure-quickstart-templates,lizzha/azure-quickstart-templates,robrankin/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,mathieu-benoit/azure-quickstart-templates,ShawInnes/azure-quickstart-templates,scrypter/azure-quickstart-templates,hongbincao/azure-quickstart-templates,jumbucks/azure-quickstart-templates,jv1992/pqr,Quetzacoalt91/azure-quickstart-templates,grwilson/azure-quickstart-templates,tfitzmac/azure-quickstart-templates,jeffwilcox/azure-quickstart-templates,Kegeruneku/azure-quickstart-templates,timfpark/azure-quickstart-templates,knithinc/azure-quickstart-templates,squillace/azure-quickstart-templates,sbtron/azure-quickstart-templates,SudhakaraReddyEvuri/azure-quickstart-templates,MrQbit/azure-quickstart-templates,gbowerman/azure-quickstart-templates,dipakmsft/azure-quickstart-templates,VybavaRamadoss/samples,Constellation-Solutions/azure-quickstart-templates,hlmstone/stone-china-azure-quickstart-templates,sivaedupuganti/azure-quickstart-templates,georgewallace/azure-quickstart-templates,zrahui/azure-quickstart-templates,netwmr01/azure-quickstart-templates,andyliuliming/azure-quickstart-templates,mathieu-benoit/azure-quickstart-templates,robotechredmond/azure-quickstart-templates,AsraFatima/azure-quickstart-templates,neudesic/azure-quickstart-templates,simongdavies/hongbincaoazure,mcastany/azure-quickstart-templates,irwins/azure-quickstart-templates,neudesic/azure-quickstart-templates,anhowe/azure-quickstart-templates,stevenlivz/azure-quickstart-templates,Volkanco/azure-quickstart-templates,grandhiramesh/azure-quickstart-templates,bmoore-msft/azure-quickstart-templates,netwmr01/azure-quickstart-templates,simongdavies/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,irwins/azure-quickstart-templates,jasonbw/azure-quickstart-templates,uday31in/azure-quickstart-templates,ttmc/azure-quickstart-templates,johndowns/azure-quickstart-templates,CJRocK/AzureSQLalwaysOn,ShubhaVijayasarathy/azure-quickstart-templates,maneta/azure-quickstart-templates,bhummerstone/azure-quickstart-templates,MSBrett/azure-quickstart-templates,Nepomuceno/azure-quickstart-templates,krkhan/azure-quickstart-templates,rsponholtz/azure-quickstart-templates,zuojihong/azure-quickstart-templates,anhowe/azure-quickstart-templates,ToruMakabe/azure-quickstart-templates,AbelHu/azure-quickstart-templates,rlfmendes/azure-quickstart-templates,chenriksson/azure-quickstart-templates,jmspring/azure-quickstart-templates,ne-msft/azure-quickstart-templates,netwmr01/azure-quickstart-templates,irwinwilliams/azure-quickstart-templates,tibor19/azure-quickstart-templates,rlfmendes/azure-quickstart-templates,jmspring/azure-quickstart-templates,bdanse/azure-quickstart-templates,gabrtv/azure-quickstart-templates,takekazuomi/azure-quickstart-templates,introp-software/azure-quickstart-templates,xiaoyingLJ/azure-quickstart-templates,yuezh/azure-quickstart-templates,honcao/azure-quickstart-templates,pateixei/azure-quickstart-templates,realcodywburns/azure-quickstart-templates,nzthiago/azure-quickstart-templates,ttmc/azure-quickstart-templates,amitsriva/azure-quickstart-templates,gbowerman/azure-quickstart-templates,aarsan/azure-quickstart-templates,hglkrijger/azure-quickstart-templates,kenazk/azure-quickstart-templates,uday31in/azure-quickstart-templates,benjal/azure-quickstart-templates,blockapps/azure-quickstart-templates,simongdavies/azure-quickstart-templates,liupeirong/azure-quickstart-templates,knithinc/azure-quickstart-templates,xtremedata/azure-quickstart-templates,robert-bakker/azure-quickstart-templates,Nepomuceno/azure-quickstart-templates,singhkay/azure-quickstart-templates,anhowe/azure-quickstart-templates,cr0550ver/azure-quickstart-templates,jarobey/azure-quickstart-templates,mathieu-benoit/azure-quickstart-templates,mabsimms/azure-quickstart-templates,tagliateller/azure-quickstart-templates,ChackDan/azure-quickstart-templates,Volkanco/azure-quickstart-templates,telmosampaio/azure-quickstart-templates,hglkrijger/azure-quickstart-templates,ExchMaster/azure-quickstart-templates,AbelHu/azure-quickstart-templates,sazeesha/azure-quickstart-templates,cr0550ver/azure-quickstart-templates,BedeGaming/azure-quickstart-templates,nzthiago/azure-quickstart-templates,hglkrijger/azure-quickstart-templates,rkotti/azure-quickstart-templates,bdanse/azure-quickstart-templates,svk2/azure-quickstart-templates,sunbinzhu/azure-quickstart-templates,realcodywburns/azure-quickstart-templates,CharlPels/azure-quickstart-templates,zrahui/azure-quickstart-templates,sunbinzhu/azure-quickstart-templates,anthony-murphy/azure-quickstart-templates,MSSedusch/azure-quickstart-templates,bmoore-msft/azure-quickstart-templates,sedouard/azure-quickstart-templates,akurmi/azure-quickstart-templates,ahmetalpbalkan/azure-quickstart-templates,castilhoa/azure,Jaganod/azure-quickstart-templates,bhummerstone/azure-quickstart-templates,daltskin/azure-quickstart-templates,robotechredmond/azure-quickstart-templates,rnithish/MyOpenGit,rayapa/azure-quickstart-templates,kdyoung18872/azure-quickstart-templates,castilhoa/azure,JF6/azure-quickstart-templates,simongdavies/azure-quickstart-templates,mukulkgupta/azure-quickstart-templates,kirpasingh/azure-quickstart-templates,olandese/azure-quickstart-templates,Kegeruneku/azure-quickstart-templates,sebastus/azure-quickstart-templates,matheusbertuco/azure-quickstart-templates,simongdavies/azure-quickstart-templates,sivaedupuganti/azure-quickstart-templates,pcgeek86/azure-quickstart-templates,jackyjngwn/azure-quickstart-templates,Alan-AcutePath/azure-quickstart-templates,OzGitele/azure-quickstart-templates,xtremedata/azure-quickstart-templates,robotechredmond/azure-quickstart-templates,daltskin/azure-quickstart-templates,ttmc/azure-quickstart-templates,rlfmendes/azure-quickstart-templates,gatneil/azure-quickstart-templates,Nepomuceno/azure-quickstart-templates,rivierni/azure-quickstart-templates,arroyc/azure-quickstart-templates,BorisB2015/azure-quickstart-templates,zechariahks/azure-quickstart-templates,sazeesha/azure-quickstart-templates,kotzenjh/DCOS-JSON,scrypter/azure-quickstart-templates,matheusbertuco/azure-quickstart-templates,SudhakaraReddyEvuri/azure-quickstart-templates,SvenLauterbach/azure-quickstart-templates,jreid143/azure-quickstart-templates,liupeirong/azure-quickstart-templates,rivierni/azure-quickstart-templates,CalCof/azure-quickstart-templates,ritazh/azure-quickstart-templates,pcgeek86/azure-quickstart-templates,tobyscales/azure-quickstart-templates,georgewallace/azure-quickstart-templates,ezubatov/azure-quickstart-templates,kotzenjh/DCOS-JSON,puneetsaraswat/azure-quickstart-templates,CalCof/azure-quickstart-templates,sidkri/azure-quickstart-templates,zechariahks/azure-quickstart-templates,satyarapelly/azure-quickstart-templates,arroyc/azure-quickstart-templates,jasonbw/azure-quickstart-templates,Volkanco/azure-quickstart-templates,AlekseiPolkovnikov/azure-quickstart-templates,willhighland/azure-quickstart-templates,travismc1/azure-quickstart-templates,mukulkgupta/azure-quickstart-templates,satyarapelly/azure-quickstart-templates,Ercenk/azure-quickstart-templates,Alan-AcutePath/azure-quickstart-templates,tracsman/azure-quickstart-templates,jmservera/azure-quickstart-templates,ytechie/azure-quickstart-templates,spcrux/azure-quickstart-templates,robert-bakker/azure-quickstart-templates,bcdev-/azure-quickstart-templates,neonquest/azure-quickstart-templates,bganapa/azure-quickstart-templates,bganapa/azure-quickstart-templates,jwendl/azure-quickstart-templates,maneta/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,eshaparmar/azure-quickstart-templates,gatneil/azure-quickstart-templates,CharlPels/azure-quickstart-templates,zrahui/azure-quickstart-templates,Teodelas/azure-quickstart-templates,zhongyi-zhang/azure-quickstart-templates,alexstoddard/azure-quickstart-templates,OzGitele/azure-quickstart-templates,jreid143/azure-quickstart-templates,bmoore-msft/azure-quickstart-templates,ishtein/azure-public,zechariahks/azure-quickstart-templates,MrQbit/azure-quickstart-templates,gossion/azure-quickstart-templates,dipakmsft/azure-quickstart-templates,jarobey/azure-quickstart-templates,bingosummer/azure-quickstart-templates,MSBrett/azure-quickstart-templates,philon-msft/azure-quickstart-templates,dmakogon/azure-quickstart-templates,bhummerstone/azure-quickstart-templates,telmosampaio/azure-quickstart-templates,bingosummer/azure-quickstart-templates,simongdavies/hongbincaoazure,tobyscales/azure-quickstart-templates,benjal/azure-quickstart-templates,knithinc/azure-quickstart-templates,tracsman/azure-quickstart-templates,ALM-Rangers/azure-quickstart-templates,stevenlivz/azure-quickstart-templates,jimdial/azure-quickstart-templates,tibor19/azure-quickstart-templates,matt1883/azure-quickstart-templates,MrQbit/azure-quickstart-templates,svk2/azure-quickstart-templates,alinefr/azure-quickstart-templates,forensiclogic/azure-quickstart-templates,seanbamsft/azure-quickstart-templates,jimdial/azure-quickstart-templates,matt1883/azure-quickstart-templates,richstep/azure-quickstart-templates,rsponholtz/azure-quickstart-templates,blockapps/azure-quickstart-templates,bwanner/azure-quickstart-templates,AlekseiPolkovnikov/azure-quickstart-templates,sabbour/azure-quickstart-templates,transcanada/azure-quickstart-templates,robert-bakker/azure-quickstart-templates,RuudBorst/azure-quickstart-templates,tfitzmac/azure-quickstart-templates,andyliuliming/azure-quickstart-templates,bingosummer/azure-quickstart-templates,irwins/azure-quickstart-templates,lizzha/azure-quickstart-templates,harijayms/azure-quickstart-templates,artemharutyunyan/azure-quickstart-templates,puneetsaraswat/azure-quickstart-templates,rarsan/azure-quickstart-templates,stepsic-microsoft-com/azure-quickstart-templates,cavanes/azure-quickstart-templates,matt1883/azure-quickstart-templates,maniSbindra/azure-quickstart-templates,ytechie/azure-quickstart-templates,jimlane/azure-quickstart-templates,anhowe/azure-quickstart-templates,jeffwilcox/azure-quickstart-templates,krkhan/azure-quickstart-templates,johndowns/azure-quickstart-templates,olandese/azure-quickstart-templates,sedouard/azure-quickstart-templates,bingosummer/azure-quickstart-templates,sebastus/azure-quickstart-templates,mathieu-benoit/azure-quickstart-templates,juvchan/azure-quickstart-templates,AbelHu/azure-quickstart-templates,nilaydshah/azure-quickstart-templates,grwilson/azure-quickstart-templates,neonquest/azure-quickstart-templates,zechariahks/azure-quickstart-templates,AvyanConsultingCorp/azure-quickstart-templates,alinefr/azure-quickstart-templates,dipakmsft/azure-quickstart-templates,Jaganod/azure-quickstart-templates,Nepomuceno/azure-quickstart-templates,adamnovak/azure-quickstart-templates,Azure/azure-quickstart-templates,AvyanConsultingCorp/azure-quickstart-templates,robklausems/azure-quickstart-templates,pdiniz13/azure-quickstart-templates,vicperdana/azure-quickstart-templates,mathieu-benoit/azure-quickstart-templates,bwanner/azure-quickstart-templates,mumian/azure-quickstart-templates,smartpcr/azure-quickstart-templates,AvyanConsultingCorp/azure-quickstart-templates,mumian/azure-quickstart-templates,xtremedata/azure-quickstart-templates,AbelHu/azure-quickstart-templates,gbowerman/azure-quickstart-templates,artemharutyunyan/azure-quickstart-templates,akurmi/azure-quickstart-templates,rkotti/azure-quickstart-templates,CharlPels/azure-quickstart-templates,lukehoban/azure-quickstart-templates,rlfmendes/azure-quickstart-templates,mrkeng/azure-quickstart-templates,zrahui/azure-quickstart-templates,robklausems/azure-quickstart-templates,richstep/azure-quickstart-templates,cdavid/azure-quickstart-templates,Jaganod/azure-quickstart-templates,tcsatheesh/azure-quickstart-templates,jwendl/azure-quickstart-templates,daltskin/azure-quickstart-templates,krnese/azure-quickstart-templates,gbowerman/azure-quickstart-templates,Alan-AcutePath/azure-quickstart-templates,bingosummer/azure-quickstart-templates,grwilson/azure-quickstart-templates,MahendraAgrawal/azure-quickstart-templates,robotechredmond/azure-quickstart-templates,matt1883/azure-quickstart-templates,amitsriva/azure-quickstart-templates,ukinahan/azure,cerdmann-pivotal/azure-quickstart-templates,rarsan/azure-quickstart-templates,sivaedupuganti/azure-quickstart-templates,tcsatheesh/azure-quickstart-templates,Nepomuceno/azure-quickstart-templates,hongbincao/azure-quickstart-templates,jmservera/azure-quickstart-templates,sgoings/azure-quickstart-templates,nilaydshah/azure-quickstart-templates,jarobey/azure-quickstart-templates,rarsan/azure-quickstart-templates,adhurwit/azure-quickstart-templates,tracsman/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,jmspring/azure-quickstart-templates,asheniam/azure-quickstart-templates,ne-msft/azure-quickstart-templates,sidkri/azure-quickstart-templates,bingosummer/azure-quickstart-templates,richstep/azure-quickstart-templates,rgardler/azure-quickstart-templates,kdyoung18872/azure-quickstart-templates,chenriksson/azure-quickstart-templates,mmarch/azure-quickstart-templates,eshaparmar/azure-quickstart-templates,m1028639/azure-quickstart-templates,madhana/azure-quickstart-templates,cr0550ver/azure-quickstart-templates,lizzha/azure-quickstart-templates,cdavid/azure-quickstart-templates,lizzha/azure-quickstart-templates,iamshital/azure-quickstart-templates,gossion/azure-quickstart-templates,sunbinzhu/azure-quickstart-templates,smartpcr/azure-quickstart-templates,xiaoyingLJ/azure-quickstart-templates,jamesdixon/azure-quickstart-templates,andykillinger/azure-quickstart-templates,introp-software/azure-quickstart-templates,eosiowy/azure-quickstart-templates,introp-software/azure-quickstart-templates,Alan-AcutePath/azure-quickstart-templates,SunBuild/azure-quickstart-templates,sebastus/azure-quickstart-templates,sazeesha/azure-quickstart-templates,asheniam/azure-quickstart-templates,tomya/mytemplates,Volkanco/azure-quickstart-templates,Nepomuceno/azure-quickstart-templates,arroyc/azure-quickstart-templates,richstep/azure-quickstart-templates,madhana/azure-quickstart-templates,jv1992/pqr,pcgeek86/azure-quickstart-templates,jeffwilcox/azure-quickstart-templates,vglafirov/azure-quickstart-templates,mumian/azure-quickstart-templates,lizzha/azure-quickstart-templates,smartpcr/azure-quickstart-templates,chenriksson/azure-quickstart-templates,jumbucks/azure-quickstart-templates,honcao/azure-quickstart-templates,mumian/azure-quickstart-templates,Constellation-Solutions/azure-quickstart-templates,mumian/azure-quickstart-templates,bmoore-msft/azure-quickstart-templates,shzhai/azure-quickstart-templates,gossion/azure-quickstart-templates,bganapa/azure-quickstart-templates,ShubhaVijayasarathy/azure-quickstart-templates,slapointe/azure-quickstart-templates,honcao/azure-quickstart-templates,bwanner/azure-quickstart-templates,mumian/azure-quickstart-templates,AbelHu/azure-quickstart-templates,rgardler/azure-quickstart-templates,CharlPels/azure-quickstart-templates,xiaoyingLJ/azure-quickstart-templates,bcdev-/azure-quickstart-templates,ShubhaVijayasarathy/azure-quickstart-templates,bharathsreenivas/azure-quickstart-templates,AvyanConsultingCorp/azure-quickstart-templates,vglafirov/azure-quickstart-templates,jasonbw/azure-quickstart-templates,gatneil/azure-quickstart-templates,singhkay/azure-quickstart-templates,rnithish/MyOpenGit,moisedo/azure-quickstart-templates,mukulkgupta/azure-quickstart-templates,nilaydshah/azure-quickstart-templates,Envera/azure-quickstart-templates,tfitzmac/azure-quickstart-templates,arroyc/azure-quickstart-templates,krnese/azure-quickstart-templates,klondon71/azure-quickstart-templates,andykillinger/azure-quickstart-templates,slapointe/azure-quickstart-templates,ninarn/azure-quickstart-templates,ned1313/azure-quickstart-templates,rgardler/azure-quickstart-templates,dmakogon/azure-quickstart-templates,harijayms/azure-quickstart-templates,rarsan/azure-quickstart-templates,AvyanConsultingCorp/azure-quickstart-templates,neonquest/azure-quickstart-templates,MCKLMT/azure-quickstart-templates,tibor19/azure-quickstart-templates,251744647/azure-quickstart-templates,alinefr/azure-quickstart-templates,aarsan/azure-quickstart-templates,pateixei/azure-quickstart-templates,devopsteamberlin/azure-quickstart-templates,puneetsaraswat/azure-quickstart-templates,andyliuliming/azure-quickstart-templates,VybavaRamadoss/samples,Kegeruneku/azure-quickstart-templates,gabrtv/azure-quickstart-templates,dmakogon/azure-quickstart-templates,mumian/azure-quickstart-templates,pateixei/azure-quickstart-templates,Jaganod/azure-quickstart-templates,uday31in/azure-quickstart-templates,hlmstone/stone-china-azure-quickstart-templates,johndowns/azure-quickstart-templates,simongdavies/hongbincaoazure,andrewelizondo/azure-quickstart-templates,rayapa/azure-quickstart-templates,emondek/azure-quickstart-templates,ChackDan/azure-quickstart-templates,Teodelas/azure-quickstart-templates,CJRocK/AzureSQLalwaysOn,dmakogon/azure-quickstart-templates,YidingZhou/azure-quickstart-templates,emondek/azure-quickstart-templates,bganapa/azure-quickstart-templates,apachipa/Azure-JSON-Custom,kdyoung18872/azure-quickstart-templates,BorisB2015/azure-quickstart-templates,kirpasingh/azure-quickstart-templates,svk2/azure-quickstart-templates,smithab/azure-quickstart-templates,sidkri/azure-quickstart-templates,slapointe/azure-quickstart-templates,CJRocK/AzureSQLalwaysOn,alvadb/azure-quickstart-templates,artemharutyunyan/azure-quickstart-templates,pdiniz13/azure-quickstart-templates,CalCof/azure-quickstart-templates,alvadb/azure-quickstart-templates,arsenvlad/azure-quickstart-templates,andrewelizondo/azure-quickstart-templates,jimdial/azure-quickstart-templates,Azure/azure-quickstart-templates,arsenvlad/azure-quickstart-templates,grandhiramesh/azure-quickstart-templates,apachipa/Azure-JSON-Custom,alibaloch/azure-quickstart-templates,ToruMakabe/azure-quickstart-templates,rnithish/MyOpenGit,RuudBorst/azure-quickstart-templates,Constellation-Solutions/azure-quickstart-templates,smartpcr/azure-quickstart-templates,forensiclogic/azure-quickstart-templates,sivaedupuganti/azure-quickstart-templates,mcastany/azure-quickstart-templates,telmosampaio/azure-quickstart-templates,Azure/azure-quickstart-templates,ALM-Rangers/azure-quickstart-templates,uday31in/azure-quickstart-templates,rkotti/azure-quickstart-templates,akurmi/azure-quickstart-templates,pelagos/azure-quickstart-templates,ezubatov/azure-quickstart-templates,vglafirov/azure-quickstart-templates,sebastus/azure-quickstart-templates,Alan-AcutePath/azure-quickstart-templates,Undo1/azure-quickstart-templates,mmarch/azure-quickstart-templates,blockapps/azure-quickstart-templates,knithinc/azure-quickstart-templates,rnithish/MyOpenGit,lizzha/azure-quickstart-templates,Kegeruneku/azure-quickstart-templates | microbosh-setup/setup_devbox.py | microbosh-setup/setup_devbox.py | from Utils.WAAgentUtil import waagent
import Utils.HandlerUtil as Util
import commands
import os
import re
import json
waagent.LoggerInit('/var/log/waagent.log','/dev/stdout')
hutil = Util.HandlerUtility(waagent.Log, waagent.Error, "bosh-deploy-script")
hutil.do_parse_context("enable")
settings= hutil.get_public_settings()
from subprocess import call
call("mkdir -p ./bosh",shell=True)
call("mkdir -p ./bosh/.ssh",shell=True)
for f in ['micro_bosh.yml','deploy_micro_bosh.sh','micro_cf.yml']:
if not os.path.exists(f):
continue
with open (f,"r") as tmpfile:
content = tmpfile.read()
for i in settings.keys():
if i == 'fileUris':
continue
content=re.compile(re.escape("#"+i+"#"), re.IGNORECASE).sub(settings[i],content)
with open (os.path.join('bosh',f),"w") as tmpfile:
tmpfile.write(content)
with open (os.path.join('bosh','settings'),"w") as tmpfile:
tmpfile.write(json.dumps(settings, indent=4, sort_keys=True))
call("sh create_cert.sh >> ./bosh/micro_bosh.yml",shell=True)
call("chmod 700 myPrivateKey.key",shell=True)
call("chmod 744 ./bosh/deploy_micro_bosh.sh",shell=True)
call("cp myPrivateKey.key ./bosh/.ssh/bosh.key",shell=True)
call("cp -r ./bosh/* /home/"+settings['username'],shell=True)
call("chown -R "+settings['username']+" "+"/home/"+settings['username'],shell=True)
call("rm -r /tmp; mkdir /mnt/tmp; ln -s /mnt/tmp /tmp; chmod 777 /mnt/tmp ;chmod 777 /tmp", shell=True)
call("mkdir /mnt/bosh_install; cp install_bosh_client.sh /mnt/bosh_install; cd /mnt/bosh_install ; sh install_bosh_client.sh >install.log 2>&1;",shell=True)
exit(0)
| from Utils.WAAgentUtil import waagent
import Utils.HandlerUtil as Util
import commands
import os
import re
import json
waagent.LoggerInit('/var/log/waagent.log','/dev/stdout')
hutil = Util.HandlerUtility(waagent.Log, waagent.Error, "bosh-deploy-script")
hutil.do_parse_context("enable")
settings= hutil.get_public_settings()
from subprocess import call
call("mkdir -p ./bosh",shell=True)
call("mkdir -p ./bosh/.ssh",shell=True)
for f in ['micro_bosh.yml','deploy_micro_bosh.sh','micro_cf.yml']:
if not os.path.exists(f):
continue
with open (f,"r") as tmpfile:
content = tmpfile.read()
for i in settings.keys():
if i == 'fileUris':
continue
content=re.compile(re.escape("#"+i+"#"), re.IGNORECASE).sub(settings[i],content)
with open (os.path.join('bosh',f),"w") as tmpfile:
tmpfile.write(content)
with open (os.path.join('bosh','settings'),"w") as tmpfile:
tmpfile.write(json.dumps(settings, indent=4, sort_keys=True))
call("sh create_cert.sh >> ./bosh/micro_bosh.yml",shell=True)
call("chmod 700 myPrivateKey.key",shell=True)
call("chmod 744 ./bosh/deploy_micro_bosh.sh",shell=True)
call("cp myPrivateKey.key ./bosh/.ssh/bosh.key",shell=True)
call("cp -r ./bosh/* /home/"+settings['username'],shell=True)
call("chown -R "+settings['username']+" "+"/home/"+settings['username'],shell=True)
call("rm -r /tmp; mkdir /mnt/tmp; ln -s /mnt/tmp /tmp; chmod 777 /mnt/tmp ;chmod 777 /tmp", shell=True)
call("mkdir /mnt/bosh_install; cp install_bosh_client.sh /mnt/bosh_install; cd /mnt/bosh_install ; sh install_bosh_client.sh;",shell=True)
exit(0)
| mit | Python |
68ac2d95f339a7a1daf644170fab1c15ed0406c4 | Update main module file | bow/pytest-pipeline | pytest_pipeline/__init__.py | pytest_pipeline/__init__.py | # -*- coding: utf-8 -*-
"""
pytest_pipeline
~~~~~~~~~~~~~~~
Pytest plugin for functional testing of data analysis pipelines.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
RELEASE = False
__version_info__ = ("0", "2", "0")
__version__ = ".".join(__version_info__)
__version__ += "-dev" if not RELEASE else ""
__author__ = "Wibowo Arindrarto"
__contact__ = "bow@bow.web.id"
__homepage__ = "https://github.com/bow/pytest-pipeline"
# so we can keep the info above for setup.py
try:
from .core import PipelineRun
except ImportError:
pass
| # -*- coding: utf-8 -*-
"""
pytest_pipeline
~~~~~~~~~~~~~~~
Pytest plugin for functional testing of data analysis pipelines.
:copyright: (c) 2014 Wibowo Arindrarto <bow@bow.web.id>
:license: BSD
"""
RELEASE = False
__version_info__ = ("0", "2", "0")
__version__ = ".".join(__version_info__)
__version__ += "-dev" if not RELEASE else ""
__author__ = "Wibowo Arindrarto"
__contact__ = "bow@bow.web.id"
__homepage__ = "https://github.com/bow/pytest-pipeline"
# so we can keep the info above for setup.py
try:
from .core import PipelineRun, PipelineTest
except ImportError:
pass
| bsd-3-clause | Python |
0c863aaabee5350396184f0cd8636feaeeb21552 | Refactor velocity constraint calculations in Ex 3.10. | jcrist/pydy,oliverlee/pydy,jcrist/pydy,Shekharrajak/pydy,Shekharrajak/pydy,oliverlee/pydy,skidzo/pydy,jcrist/pydy,jcrist/pydy,skidzo/pydy,jcrist/pydy,Shekharrajak/pydy,jcrist/pydy,Shekharrajak/pydy,jcrist/pydy,skidzo/pydy,oliverlee/pydy,skidzo/pydy | Kane1985/Chapter2/Ex3.10.py | Kane1985/Chapter2/Ex3.10.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Exercise 3.10 from Kane 1985."""
from __future__ import division
from sympy import cancel, collect, expand_trig, solve, symbols, trigsimp
from sympy import sin, cos
from sympy.physics.mechanics import ReferenceFrame, Point
from sympy.physics.mechanics import dot, dynamicsymbols, msprint
q1, q2, q3, q4, q5, q6, q7 = q = dynamicsymbols('q1:8')
u1, u2, u3, u4, u5, u6, u7 = u = dynamicsymbols('q1:8', level=1)
r, theta, b = symbols('r θ b', real=True, positive=True)
# define reference frames
R = ReferenceFrame('R') # fixed race rf, let R.z point upwards
A = R.orientnew('A', 'axis', [q7, R.z]) # rf that rotates with S* about R.z
# B.x, B.z are parallel with face of cone, B.y is perpendicular
B = A.orientnew('B', 'axis', [-theta, A.x])
S = ReferenceFrame('S')
S.set_ang_vel(A, u1*A.x + u2*A.y + u3*A.z)
C = ReferenceFrame('C')
C.set_ang_vel(A, u4*B.x + u5*B.y + u6*B.z)
# define points
pO = Point('O')
pS_star = pO.locatenew('S*', b*A.y)
pS_hat = pS_star.locatenew('S^', -r*B.y) # S^ touches the cone
pS1 = pS_star.locatenew('S1', -r*A.z) # S1 touches horizontal wall of the race
pS2 = pS_star.locatenew('S2', r*A.y) # S2 touches vertical wall of the race
pO.set_vel(R, 0)
pS_star.v2pt_theory(pO, R, A)
pS1.v2pt_theory(pS_star, R, S)
pS2.v2pt_theory(pS_star, R, S)
# Since S is rolling against R, v_S1_R = 0, v_S2_R = 0.
vc = [dot(p.vel(R), basis) for p in [pS1, pS2] for basis in R]
pO.set_vel(C, 0)
pS_star.v2pt_theory(pO, C, A)
pS_hat.v2pt_theory(pS_star, C, S)
# Since S is rolling against C, v_S^_C = 0.
# Cone has only angular velocity in R.z direction.
vc += [dot(pS_hat.vel(C), basis).subs(vc_map) for basis in A]
vc += [dot(C.ang_vel_in(R), basis) for basis in [R.x, R.y]]
vc_map = solve(vc, u)
# Pure rolling between S and C, dot(ω_C_S, B.y) = 0.
b_val = solve([dot(C.ang_vel_in(S), B.y).subs(vc_map).simplify()], b)[0][0]
print('b = {0}'.format(msprint(collect(cancel(expand_trig(b_val)), r))))
b_expected = r*(1 + sin(theta))/(cos(theta) - sin(theta))
assert trigsimp(b_val - b_expected) == 0
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Exercise 3.10 from Kane 1985."""
from __future__ import division
from sympy import cancel, collect, expand_trig, solve, symbols, trigsimp
from sympy import sin, cos
from sympy.physics.mechanics import ReferenceFrame, Point
from sympy.physics.mechanics import dot, dynamicsymbols, msprint
q1, q2, q3, q4, q5, q6, q7 = q = dynamicsymbols('q1:8')
u1, u2, u3, u4, u5, u6, u7 = u = dynamicsymbols('q1:8', level=1)
r, theta, b = symbols('r θ b', real=True, positive=True)
# define reference frames
R = ReferenceFrame('R') # fixed race rf, let R.z point upwards
A = R.orientnew('A', 'axis', [q7, R.z]) # rf that rotates with S* about R.z
# B.x, B.z are parallel with face of cone, B.y is perpendicular
B = A.orientnew('B', 'axis', [-theta, A.x])
S = ReferenceFrame('S')
S.set_ang_vel(A, u1*A.x + u2*A.y + u3*A.z)
C = ReferenceFrame('C')
C.set_ang_vel(A, u4*B.x + u5*B.y + u6*B.z)
# define points
pO = Point('O')
pS_star = pO.locatenew('S*', b*A.y)
pS_hat = pS_star.locatenew('S^', -r*B.y) # S^ touches the cone
pS1 = pS_star.locatenew('S1', -r*A.z) # S1 touches horizontal wall of the race
pS2 = pS_star.locatenew('S2', r*A.y) # S2 touches vertical wall of the race
pO.set_vel(R, 0)
pS_star.v2pt_theory(pO, R, A)
pS1.v2pt_theory(pS_star, R, S)
pS2.v2pt_theory(pS_star, R, S)
# Since S is rolling against R, v_S1_R = 0, v_S2_R = 0.
vc = [dot(p.vel(R), basis) for p in [pS1, pS2] for basis in R]
vc_map = solve(vc, [u1, u2, u3])
pO.set_vel(C, 0)
pS_star.v2pt_theory(pO, C, A)
pS_hat.v2pt_theory(pS_star, C, S)
# Since S is rolling against C, v_S^_C = 0.
# Cone has only angular velocity in R.z direction.
vc2 = [dot(pS_hat.vel(C), basis).subs(vc_map) for basis in A]
vc2 += [dot(C.ang_vel_in(R), basis) for basis in [R.x, R.y]]
vc_map = dict(vc_map.items() + solve(vc2, [u4, u5, u6]).items())
# Pure rolling between S and C, dot(ω_C_S, B.y) = 0.
b_val = solve([dot(C.ang_vel_in(S), B.y).subs(vc_map).simplify()], b)[0][0]
print('b = {0}'.format(msprint(collect(cancel(expand_trig(b_val)), r))))
b_expected = r*(1 + sin(theta))/(cos(theta) - sin(theta))
assert trigsimp(b_val - b_expected) == 0
| bsd-3-clause | Python |
e26ae2fc4baac83856158f4e41149ab3a8bb86a7 | return config if factory default is restored | twhtanghk/docker.esp8266,twhtanghk/docker.esp8266,twhtanghk/docker.esp8266,twhtanghk/docker.esp8266 | python/config/controller.py | python/config/controller.py | import picoweb
from config import model
from util import notFound
import logging
logger = logging.getLogger(__name__)
def get(req, res):
yield from picoweb.jsonify(res, model.load())
def set(req, res):
yield from req.read_form_data()
cfg = model.load()
for key, value in req.form.items():
cfg[key] = value
model.save(cfg)
yield from get(req, res)
def reset(req, res):
yield from picoweb.jsonify(res, {})
model.reset()
def factory(req, res):
model.factory()
from wlan.ap import controller
yield from controller.get(req, res)
def method(req, res):
ret = {
'GET': get,
'PUT': set
}
logger.info('{0} {1}'.format(req.method, req.path))
yield from ret.get(req.method, notFound)(req, res)
app = picoweb.WebApp(__name__)
app.route('/')(method)
app.route('/reset')(reset)
app.route('/factory')(factory)
| import picoweb
from config import model
from util import notFound
import logging
logger = logging.getLogger(__name__)
def get(req, res):
yield from picoweb.jsonify(res, model.load())
def set(req, res):
yield from req.read_form_data()
cfg = model.load()
for key, value in req.form.items():
cfg[key] = value
model.save(cfg)
yield from get(req, res)
def reset(req, res):
yield from picoweb.jsonify(res, {})
model.reset()
def factory(req, res):
model.factory()
yield from picoweb.jsonify(res, {})
def method(req, res):
ret = {
'GET': get,
'PUT': set
}
logger.info('{0} {1}'.format(req.method, req.path))
yield from ret.get(req.method, notFound)(req, res)
app = picoweb.WebApp(__name__)
app.route('/')(method)
app.route('/reset')(reset)
app.route('/factory')(factory)
| mit | Python |
0355e131b3afd1cd59baf78d3457f3268c297259 | Use typecheck in command | weblabdeusto/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,zstars/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,zstars/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,zstars/weblabdeusto,morelab/weblabdeusto,morelab/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,weblabdeusto/weblabdeusto,porduna/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,morelab/weblabdeusto,porduna/weblabdeusto,weblabdeusto/weblabdeusto,zstars/weblabdeusto,porduna/weblabdeusto,morelab/weblabdeusto,zstars/weblabdeusto | server/src/weblab/data/command.py | server/src/weblab/data/command.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
from voodoo.representable import Representable
from voodoo.checker import typecheck
class Command(object):
__metaclass__ = Representable
@typecheck((basestring, typecheck.NONE))
def __init__(self, commandstring):
self.commandstring = commandstring
def get_command_string(self):
return self.commandstring
def __cmp__(self, other):
if isinstance(other, Command):
return cmp(self.commandstring, other.commandstring)
return -1
def to_dict(self):
return {'commandstring': self.commandstring}
class NullCommand(Command):
def __init__(self):
super(NullCommand, self).__init__(None)
| #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2005-2009 University of Deusto
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
#
# This software consists of contributions made by many individuals,
# listed below:
#
# Author: Pablo Orduña <pablo@ordunya.com>
from voodoo.representable import Representable
class Command(object):
__metaclass__ = Representable
def __init__(self, commandstring):
self.commandstring = commandstring
def get_command_string(self):
return self.commandstring
def __cmp__(self, other):
if isinstance(other, Command):
return cmp(self.commandstring, other.commandstring)
return -1
def to_dict(self):
return {'commandstring': self.commandstring}
class NullCommand(Command):
def __init__(self):
super(NullCommand, self).__init__(None)
| bsd-2-clause | Python |
56c42a359eb9e1e765b3ea610a4c6a37fbc2c812 | fix failed unittests in travis CI | yunstanford/sanic-transmute | tests/test_parsing.py | tests/test_parsing.py | import json
def test_parsing_path_parameters(app):
request, response = app.test_client.get(
'/api/v1/user/yun',
)
assert response.status == 200
user = response.text
assert json.loads(user) == "yun"
def test_parsing_parameters_optional(app):
request, response = app.test_client.get(
'/api/v1/env/',
)
assert response.status == 200
exist = response.text
assert json.loads(exist) == False
def test_parsing_parameters_optional_with_value(app):
request, response = app.test_client.get(
'/api/v1/env/?exist=True',
)
assert response.status == 200
exist = response.text
assert json.loads(exist) == True
def test_parsing_query_parameters(app):
request, response = app.test_client.get(
'/multiply?left=3&right=4',
)
assert response.status == 200
result = response.text
assert json.loads(result) == 12
| import pytest
@pytest.mark.asyncio
async def test_parsing_path_parameters(app):
request, response = app.test_client.get(
'/api/v1/user/yun',
)
assert response.status == 200
user = await response.json()
assert user == "yun"
@pytest.mark.asyncio
async def test_parsing_parameters_optional(app):
request, response = app.test_client.get(
'/api/v1/env/',
)
assert response.status == 200
user = await response.json()
assert user == False
@pytest.mark.asyncio
async def test_parsing_parameters_optional_with_value(app):
request, response = app.test_client.get(
'/api/v1/env/?exist=True',
)
assert response.status == 200
user = await response.json()
assert user == True
@pytest.mark.asyncio
async def test_parsing_query_parameters(app):
request, response = app.test_client.get(
'/multiply?left=3&right=4',
)
assert response.status == 200
result = await response.json()
assert result == 12
| mit | Python |
ab464dfc6db6736bb116408c1978546cc9d8c93d | update test to respect update of batch2 | jepegit/cellpy,jepegit/cellpy | tests/test_batch2.py | tests/test_batch2.py | import pytest
import tempfile
import logging
from cellpy import log
from cellpy import prms
import cellpy.utils.batch_engines as batch_engines
from . import fdv
log.setup_logging(default_level=logging.DEBUG)
@pytest.fixture()
def clean_dir():
new_path = tempfile.mkdtemp()
return new_path
def test_initial():
print(batch_engines)
print(dir(batch_engines))
def test_base_exporter():
base_exporter = batch_engines.BaseExporter()
base_exporter._assign_engine(batch_engines.cycles_engine)
base_exporter._assign_dumper(batch_engines.csv_dumper)
def test_base_journal():
base_journal = batch_engines.BaseJournal()
def test_base_experiment():
base_experiment = batch_engines.BaseExperiment()
| import pytest
import tempfile
import logging
from cellpy import log
from cellpy import prms
import cellpy.utils.batch_engines as batch_engines
from . import fdv
log.setup_logging(default_level=logging.DEBUG)
@pytest.fixture()
def clean_dir():
new_path = tempfile.mkdtemp()
return new_path
def test_initial():
print(batch_engines)
print(dir(batch_engines))
def test_base_exporter():
base_exporter = batch_engines.BaseExporter()
base_exporter._assign_engine()
base_exporter._assign_dumper()
def test_base_journal():
base_journal = batch_engines.BaseJournal()
base_journal.from_db()
base_journal.from_file("experiment_001.json")
base_journal.to_file("experiment_001.json")
base_journal.generate_file_name()
base_journal.look_for_file()
def test_base_experiment():
base_experiment = batch_engines.BaseExperiment()
| mit | Python |
d2ab5077e78f58fbe4c059c561553f4b40514bbc | Fix test | dylanaraps/pywal,dylanaraps/pywal,dylanaraps/pywal | tests/test_colors.py | tests/test_colors.py | """Test imagemagick functions."""
import unittest
from pywal import colors
class TestGenColors(unittest.TestCase):
"""Test the gen_colors functions."""
def test_gen_colors(self):
"""> Generate a colorscheme."""
result = colors.get("tests/test_files/test.jpg")
self.assertEqual(result["colors"]["color0"], "#0D191B")
if __name__ == "__main__":
unittest.main()
| """Test imagemagick functions."""
import unittest
from pywal import colors
class TestGenColors(unittest.TestCase):
"""Test the gen_colors functions."""
def test_gen_colors(self):
"""> Generate a colorscheme."""
result = colors.get("tests/test_files/test.jpg")
self.assertEqual(result["colors"]["color0"], "#0F191A")
if __name__ == "__main__":
unittest.main()
| mit | Python |
c99bbc0a30dca8aaa72a4c79543400d9fbf97ebb | Fix failing bash completion function test signature. | pallets/click,mitsuhiko/click | tests/test_compat.py | tests/test_compat.py | import click
import pytest
if click.__version__ >= '3.0':
def test_legacy_callbacks(runner):
def legacy_callback(ctx, value):
return value.upper()
@click.command()
@click.option('--foo', callback=legacy_callback)
def cli(foo):
click.echo(foo)
with pytest.warns(Warning, match='Invoked legacy parameter callback'):
result = runner.invoke(cli, ['--foo', 'wat'])
assert result.exit_code == 0
assert 'WAT' in result.output
def test_bash_func_name():
from click._bashcomplete import get_completion_script
script = get_completion_script('foo-bar baz_blah', '_COMPLETE_VAR', 'bash').strip()
assert script.startswith('_foo_barbaz_blah_completion()')
assert '_COMPLETE_VAR=complete $1' in script
| import click
import pytest
if click.__version__ >= '3.0':
def test_legacy_callbacks(runner):
def legacy_callback(ctx, value):
return value.upper()
@click.command()
@click.option('--foo', callback=legacy_callback)
def cli(foo):
click.echo(foo)
with pytest.warns(Warning, match='Invoked legacy parameter callback'):
result = runner.invoke(cli, ['--foo', 'wat'])
assert result.exit_code == 0
assert 'WAT' in result.output
def test_bash_func_name():
from click._bashcomplete import get_completion_script
script = get_completion_script('foo-bar baz_blah', '_COMPLETE_VAR').strip()
assert script.startswith('_foo_barbaz_blah_completion()')
assert '_COMPLETE_VAR=complete $1' in script
| bsd-3-clause | Python |
5d1e84609daf0a149b725b78e35b7e92b67c2627 | Improve the documentation for command line string parsing. | google/fiddle | fiddle/absl_flags/example/example.py | fiddle/absl_flags/example/example.py | # coding=utf-8
# Copyright 2022 The Fiddle-Config Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""An example demonstrating Fiddle and absl_flags.
Run this example with the following command:
```sh
python3 -m fiddle.absl_flags.example.example \
--fdl_config=simple \
--fiddler=swap_weight_and_bias \
--fdl.model.b=0.73
--fdl.data.filename='"other.txt"' # Alt syntax: --fdl.data.filename=\"b.txt\"
```
"""
from typing import Sequence
from absl import app
import fiddle as fdl
from fiddle import absl_flags
from fiddle import printing
from fiddle.absl_flags.example import configs
def main(argv: Sequence[str]) -> None:
if argv:
raise ValueError('Unexpected CLI arguments.')
cfg = absl_flags.create_buildable_from_flags(configs)
print(printing.as_str_flattened(cfg))
runner = fdl.build(cfg)
runner.run()
if __name__ == '__main__':
app.run(main, flags_parser=absl_flags.flags_parser)
| # coding=utf-8
# Copyright 2022 The Fiddle-Config Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""An example demonstrating Fiddle and absl_flags.
Run this example with the following command:
```sh
python3 -m fiddle.absl_flags.example.example \
--fdl_config=simple \
--fiddler=swap_weight_and_bias \
--fdl.model.b=0.73
--fdl.data.filename=\"other.txt\"
```
"""
from typing import Sequence
from absl import app
import fiddle as fdl
from fiddle import absl_flags
from fiddle import printing
from fiddle.absl_flags.example import configs
def main(argv: Sequence[str]) -> None:
if argv:
raise ValueError('Unexpected CLI arguments.')
cfg = absl_flags.create_buildable_from_flags(configs)
print(printing.as_str_flattened(cfg))
runner = fdl.build(cfg)
runner.run()
if __name__ == '__main__':
app.run(main, flags_parser=absl_flags.flags_parser)
| apache-2.0 | Python |
23022049c06efa5cc82a317df55d5d4b5d78b9ce | add some test | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana | test/test_bedtools.py | test/test_bedtools.py | import os
from sequana import bedtools, sequana_data
from easydev import TempFile
def test_genomecov():
filename = sequana_data("test_bedcov.bed", "testing")
mydata = bedtools.GenomeCov(filename)
# This requires to call other method before
for chrom in mydata:
chrom.moving_average(n=501)
chrom.running_median(n=501, circular=True)
chrom.running_median(n=501, circular=False)
chrom.compute_zscore()
chrom.get_low_coverage()
high_cov = chrom.get_high_coverage()
high_cov.merge_region(3)
with TempFile(suffix='.png') as fh:
chrom.plot_coverage(filename=fh.name)
with TempFile(suffix='.png') as fh:
chrom.plot_hist_zscore(filename=fh.name)
with TempFile(suffix='.png') as fh:
chrom.plot_hist_normalized_coverage(filename=fh.name)
len(chrom)
print(chrom)
chrom.get_size()
chrom.get_mean_cov()
chrom.get_var_coef()
| import os
from sequana import bedtools, sequana_data
from easydev import TempFile
def test_genomecov():
filename = sequana_data("test_bedcov.bed", "testing")
mydata = bedtools.GenomeCov(filename)
# This requires to call other method before
for chrom in mydata:
chrom.moving_average(n=501)
chrom.running_median(n=501, circular=True)
chrom.running_median(n=501, circular=False)
chrom.compute_zscore()
chrom.get_low_coverage()
high_cov = chrom.get_high_coverage()
high_cov.merge_region(3)
with TempFile(suffix='.png') as fh:
chrom.plot_coverage(filename=fh.name)
with TempFile(suffix='.png') as fh:
chrom.plot_hist_zscore(filename=fh.name)
with TempFile(suffix='.png') as fh:
chrom.plot_hist_normalized_coverage(filename=fh.name)
len(chrom)
print(chrom)
| bsd-3-clause | Python |
a33139252622492426a35169fdd5139a76b93d10 | Check that binarry files are skipped | dmerejkowsky/replacer | test/test_replacer.py | test/test_replacer.py | import replacer
import path
import pytest
@pytest.fixture
def test_path(tmpdir, monkeypatch):
tmp_path = path.Path(tmpdir)
this_path = path.Path(__file__).parent
src = this_path.joinpath("test_path")
dest = tmp_path.joinpath("test_path")
src.copytree(dest)
monkeypatch.chdir(dest)
return dest
def assert_replaced(filename):
as_path = path.Path(filename)
if replacer.is_binary(as_path):
assert b"new" in as_path.bytes()
else:
assert "new" in as_path.text()
def assert_not_replaced(filename):
as_path = path.Path(filename)
if replacer.is_binary(as_path):
assert b"old" in as_path.bytes()
else:
assert "old" in as_path.text()
def ensure_matching_file(src, binary=False):
src = path.Path(src)
if src.parent:
src.parent.makedirs_p()
if binary:
src.write_bytes(b"MAGIC\0old\xca\xff\xee")
else:
src.write_text("this is old")
def test_help(capsys):
with pytest.raises(SystemExit) as e:
replacer.main(["--help"])
stdout, _ = capsys.readouterr()
assert "usage" in stdout
assert(e.value.code) == 0
def test_replace_in_files(capsys, test_path):
replacer.main(["old", "new"])
stdout, _ = capsys.readouterr()
assert "top.txt" in stdout
assert "other.txt" not in stdout
# Dry-run: files should not have changed:
assert_not_replaced("top.txt")
# Now re-run with --go
replacer.main(["old", "new", "--go"])
assert_replaced("top.txt")
def test_hidden(test_path):
replacer.main(["old", "new", "--go"])
assert_not_replaced(".hidden/hidden.txt")
replacer.main(["old", "new", "--go", "--no-skip-hidden"])
assert_replaced(".hidden/hidden.txt")
def test_include(test_path):
replacer.main(["old", "new", "--go", "--include", "*.txt"])
assert_replaced("top.txt")
assert_not_replaced("b_dir/file.noext")
def test_exclude_extension(test_path):
replacer.main(["old", "new", "--go", "--exclude", "*.txt"])
assert_not_replaced("top.txt")
assert_replaced("b_dir/file.noext")
def test_exclude_directory(test_path):
one = "node_modules/one.js"
two = "packages/foo/node_modules/two.js"
for f in one, two:
ensure_matching_file(f)
replacer.main(["old", "new", "--go", "--exclude", "node_modules/*"])
assert_not_replaced(one)
assert_not_replaced(two)
def test_skip_binaries(test_path):
ensure_matching_file("foo.exe", binary=True)
replacer.main(["old", "new"])
assert_not_replaced("foo.exe")
| import replacer
import path
import pytest
@pytest.fixture
def test_path(tmpdir, monkeypatch):
tmp_path = path.Path(tmpdir)
this_path = path.Path(__file__).parent
src = this_path.joinpath("test_path")
dest = tmp_path.joinpath("test_path")
src.copytree(dest)
monkeypatch.chdir(dest)
return dest
def assert_replaced(filename):
assert "new" in path.Path(filename).text()
def assert_not_replaced(filename):
assert "old" in path.Path(filename).text()
def ensure_matching_file(src):
src = path.Path(src)
src.parent.makedirs_p()
src.write_text("this is old")
def test_help(capsys):
with pytest.raises(SystemExit) as e:
replacer.main(["--help"])
stdout, _ = capsys.readouterr()
assert "usage" in stdout
assert(e.value.code) == 0
def test_replace_in_files(capsys, test_path):
replacer.main(["old", "new"])
stdout, _ = capsys.readouterr()
assert "top.txt" in stdout
assert "other.txt" not in stdout
# Dry-run: files should not have changed:
assert_not_replaced("top.txt")
# Now re-run with --go
replacer.main(["old", "new", "--go"])
assert_replaced("top.txt")
def test_hidden(test_path):
replacer.main(["old", "new", "--go"])
assert_not_replaced(".hidden/hidden.txt")
replacer.main(["old", "new", "--go", "--no-skip-hidden"])
assert_replaced(".hidden/hidden.txt")
def test_include(test_path):
replacer.main(["old", "new", "--go", "--include", "*.txt"])
assert_replaced("top.txt")
assert_not_replaced("b_dir/file.noext")
def test_exclude_extension(test_path):
replacer.main(["old", "new", "--go", "--exclude", "*.txt"])
assert_not_replaced("top.txt")
assert_replaced("b_dir/file.noext")
def test_exclude_directory(test_path):
one = "node_modules/one.js"
two = "packages/foo/node_modules/two.js"
for f in one, two:
ensure_matching_file(f)
replacer.main(["old", "new", "--go", "--exclude", "node_modules/*"])
assert_not_replaced(one)
assert_not_replaced(two)
| bsd-3-clause | Python |
24929f857d865ab9a2251545b5fc7d26634394ec | update init | dariusbakunas/rawdisk | rawdisk/plugins/__init__.py | rawdisk/plugins/__init__.py | # -*- coding: utf-8 -*-
__all__ = ['categories', 'plugin_manager', 'filesystems']
from . import categories
from . import plugin_manager
from . import filesystems
| # -*- coding: utf-8 -*-
__all__ = ['categories', 'manager', 'filesystems']
from . import categories
from . import manager
from . import filesystems
| bsd-3-clause | Python |
cca0f026188da9906666d0fbdc2658fb8277e2d3 | Update openssl to version 1.0.2h | rnixx/kivy-ios,kivy/kivy-ios,rnixx/kivy-ios,tonibagur/kivy-ios,cbenhagen/kivy-ios,cbenhagen/kivy-ios,kivy/kivy-ios,kivy/kivy-ios,tonibagur/kivy-ios | recipes/openssl/__init__.py | recipes/openssl/__init__.py | from toolchain import Recipe, shprint
from os.path import join
import sh
arch_mapper = {'i386': 'darwin-i386-cc',
'x86_64': 'darwin64-x86_64-cc',
'armv7': 'iphoneos-cross',
'arm64': 'iphoneos-cross'}
class OpensslRecipe(Recipe):
version = "1.0.2h"
url = "http://www.openssl.org/source/openssl-{version}.tar.gz"
libraries = ["libssl.a", "libcrypto.a"]
include_dir = "include"
include_per_arch = True
def build_arch(self, arch):
options_iphoneos = (
"-isysroot {}".format(arch.sysroot),
"-DOPENSSL_THREADS",
"-D_REENTRANT",
"-DDSO_DLFCN",
"-DHAVE_DLFCN_H",
"-fomit-frame-pointer",
"-fno-common",
"-O3"
)
build_env = arch.get_env()
target = arch_mapper[arch.arch]
shprint(sh.env, _env=build_env)
sh.perl(join(self.build_dir, "Configure"),
target,
_env=build_env)
if target == 'iphoneos-cross':
sh.sed("-ie", "s!^CFLAG=.*!CFLAG={} {}!".format(build_env['CFLAGS'],
" ".join(options_iphoneos)),
"Makefile")
sh.sed("-ie", "s!static volatile sig_atomic_t intr_signal;!static volatile intr_signal;! ",
"crypto/ui/ui_openssl.c")
else:
sh.sed("-ie", "s!^CFLAG=!CFLAG={} !".format(build_env['CFLAGS']),
"Makefile")
shprint(sh.make, "clean")
shprint(sh.make, "-j4", "build_libs")
recipe = OpensslRecipe()
| from toolchain import Recipe, shprint
from os.path import join
import sh
arch_mapper = {'i386': 'darwin-i386-cc',
'x86_64': 'darwin64-x86_64-cc',
'armv7': 'iphoneos-cross',
'arm64': 'iphoneos-cross'}
class OpensslRecipe(Recipe):
version = "1.0.2g"
url = "http://www.openssl.org/source/openssl-{version}.tar.gz"
libraries = ["libssl.a", "libcrypto.a"]
include_dir = "include"
include_per_arch = True
def build_arch(self, arch):
options_iphoneos = (
"-isysroot {}".format(arch.sysroot),
"-DOPENSSL_THREADS",
"-D_REENTRANT",
"-DDSO_DLFCN",
"-DHAVE_DLFCN_H",
"-fomit-frame-pointer",
"-fno-common",
"-O3"
)
build_env = arch.get_env()
target = arch_mapper[arch.arch]
shprint(sh.env, _env=build_env)
sh.perl(join(self.build_dir, "Configure"),
target,
_env=build_env)
if target == 'iphoneos-cross':
sh.sed("-ie", "s!^CFLAG=.*!CFLAG={} {}!".format(build_env['CFLAGS'],
" ".join(options_iphoneos)),
"Makefile")
sh.sed("-ie", "s!static volatile sig_atomic_t intr_signal;!static volatile intr_signal;! ",
"crypto/ui/ui_openssl.c")
else:
sh.sed("-ie", "s!^CFLAG=!CFLAG={} !".format(build_env['CFLAGS']),
"Makefile")
shprint(sh.make, "clean")
shprint(sh.make, "-j4", "build_libs")
recipe = OpensslRecipe()
| mit | Python |
1fbbf29a49a6ac0482b28038e07fb2d90048c8fb | fix typo, add missing "g" | sjh/python | debug_decorator.py | debug_decorator.py | #!/usr/bin/env python
#_*_ coding:utf8 _*_
import subprocess
def debug_decorator(func, *args, **kwargs):
def inner_debug_decorator(*args, **kwargs):
try:
import ipdb
except ImportError as e_:
print e_
subprocess.call(["pip", "install", "ipdb"])
print 'decorating'
import ipdb
ipdb.set_trace()
return func(*args, **kwargs)
return inner_debug_decorator
@debug_decorator
def print_function(input):
a = 1
b = 3
print 'I need some debugging decorating, input = {}'.format(a + b + input)
if __name__ == '__main__':
print_function(10)
| #!/usr/bin/env python
#_*_ coding:utf8 _*_
import subprocess
def debug_decorator(func, *args, **kwargs):
def inner_debug_decorator(*args, **kwargs):
try:
import ipdb
except ImportError as e_:
print e_
subprocess.call(["pip", "install", "ipdb"])
print 'decorating'
import ipdb
ipdb.set_trace()
return func(*args, **kwargs)
return inner_debug_decorator
@debug_decorator
def print_function(input):
a = 1
b = 3
print 'I need some debuggin decorating, input = {}'.format(a + b + input)
if __name__ == '__main__':
print_function(10)
| apache-2.0 | Python |
86ac763b95e6d0742a434a273d598d64e437c75a | add debug_mode | andersbll/deeppy | deeppy/__init__.py | deeppy/__init__.py | __version__ = '0.1.dev'
import os
import logging
debug_mode = os.getenv('DEEPPY_DEBUG', '')
debug_mode = None if debug_mode == '' else debug_mode.lower()
from . import dataset
from . import expr
from . import misc
from . import model
from .autoencoder.autoencoder import Autoencoder, DenoisingAutoencoder
from .autoencoder.stacked_autoencoder import StackedAutoencoder
from .base import bool_, int_, float_
from .feedforward.activation_layers import (
Activation, LeakyReLU, ParametricReLU, ReLU, Sigmoid, Softmax, Softplus,
Tanh
)
from .feedforward.neural_network import NeuralNetwork
from .feedforward.layers import Affine
from .feedforward.dropout_layers import Dropout
from .feedforward.convnet_layers import (
Convolution, Flatten, Pool, LocalContrastNormalization,
LocalResponseNormalization
)
from .filler import (
AutoFiller, CopyFiller, ConstantFiller, NormalFiller, UniformFiller
)
from .input import Input, SupervisedInput
from .loss import SoftmaxCrossEntropy, BinaryCrossEntropy, MeanSquaredError
from .parameter import Parameter
from .preprocess.scalers import StandardScaler, UniformScaler
from .siamese.input import SiameseInput, SupervisedSiameseInput
from .siamese.loss import ContrastiveLoss
from .siamese.siamese_network import SiameseNetwork
from .train.annealers import ZeroAnnealer, DecayAnnealer, GammaAnnealer
from .train.learn_rules import Adam, Momentum, RMSProp
from .train.gradient_descent import GradientDescent
log = logging.getLogger(__name__)
if debug_mode is not None:
log.info('DeepPy in debug mode: %s' % debug_mode)
| from . import dataset
from . import expr
from . import misc
from . import model
from .autoencoder.autoencoder import Autoencoder, DenoisingAutoencoder
from .autoencoder.stacked_autoencoder import StackedAutoencoder
from .base import bool_, int_, float_
from .feedforward.activation_layers import (
Activation, LeakyReLU, ParametricReLU, ReLU, Sigmoid, Softmax, Softplus,
Tanh
)
from .feedforward.neural_network import NeuralNetwork
from .feedforward.layers import Affine
from .feedforward.dropout_layers import Dropout
from .feedforward.convnet_layers import (
Convolution, Flatten, Pool, LocalContrastNormalization,
LocalResponseNormalization
)
from .filler import (
AutoFiller, CopyFiller, ConstantFiller, NormalFiller, UniformFiller
)
from .input import Input, SupervisedInput
from .loss import SoftmaxCrossEntropy, BinaryCrossEntropy, MeanSquaredError
from .parameter import Parameter
from .preprocess.scalers import StandardScaler, UniformScaler
from .siamese.input import SiameseInput, SupervisedSiameseInput
from .siamese.loss import ContrastiveLoss
from .siamese.siamese_network import SiameseNetwork
from .train.annealers import ZeroAnnealer, DecayAnnealer, GammaAnnealer
from .train.learn_rules import Adam, Momentum, RMSProp
from .train.gradient_descent import GradientDescent
__version__ = '0.1.dev'
| mit | Python |
c9b639e5d77916a91d2d74de041f16bab73fd9e3 | Use github project url | hbrunn/l10n-netherlands | l10n_nl_postcodeapi/__openerp__.py | l10n_nl_postcodeapi/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2013-2015 Therp BV (<http://therp.nl>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Integration with PostcodeApi.nu',
'summary': 'Autocomplete Dutch addresses using PostcodeApi.nu',
'description': '''
Auto-completion for Dutch addresses
===================================
This module contains integration of the excellent and free address completion
service 'PostcodeAPI', using the Python API library by Stefan Jansen (included
in this module). The service allows lookups by zip code and house number,
providing street name and city. The lookups will be triggered in the partner
form views when a zip code or house number is entered or modified. Only
Dutch addresses (which is assumed to include addresses with no country) are
auto-completed.
More info about the lookup service here: http://www.postcodeapi.nu/
Home of the Python API library: https://github.com/steffex/pyPostcode
Dependencies
============
This module depends on the module partner_street_number, which will split
up the street field into separate fields for street name and number.
Configuration
=============
Please enter the API key that you request from PostcodeAPI into the system
parameter 'l10n_nl_postcodeapi.apikey'
Provinces are autocompleted if a country state with the exact name is found in
the system. A CSV file with the Dutch provinces is included in the data
directory, but not loaded by default. You can import the file manually.
Compatibility
=============
This module is compatible with OpenERP 7.0.
''',
'version': '0.1',
'author': 'Therp BV',
'category': 'Localization',
'website': 'https://github.com/OCA/l10n-netherlands',
'license': 'AGPL-3',
'depends': ['partner_street_number'],
'data': [
'data/ir_config_parameter.xml',
],
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2013-2015 Therp BV (<http://therp.nl>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Integration with PostcodeApi.nu',
'summary': 'Autocomplete Dutch addresses using PostcodeApi.nu',
'description': '''
Auto-completion for Dutch addresses
===================================
This module contains integration of the excellent and free address completion
service 'PostcodeAPI', using the Python API library by Stefan Jansen (included
in this module). The service allows lookups by zip code and house number,
providing street name and city. The lookups will be triggered in the partner
form views when a zip code or house number is entered or modified. Only
Dutch addresses (which is assumed to include addresses with no country) are
auto-completed.
More info about the lookup service here: http://www.postcodeapi.nu/
Home of the Python API library: https://github.com/steffex/pyPostcode
Dependencies
============
This module depends on the module partner_street_number, which will split
up the street field into separate fields for street name and number.
Configuration
=============
Please enter the API key that you request from PostcodeAPI into the system
parameter 'l10n_nl_postcodeapi.apikey'
Provinces are autocompleted if a country state with the exact name is found in
the system. A CSV file with the Dutch provinces is included in the data
directory, but not loaded by default. You can import the file manually.
Compatibility
=============
This module is compatible with OpenERP 7.0.
''',
'version': '0.1',
'author': 'Therp BV',
'category': 'Usability',
'website': 'https://therp.nl',
'license': 'AGPL-3',
'depends': ['partner_street_number'],
'data': [
'data/ir_config_parameter.xml',
],
}
| agpl-3.0 | Python |
d1332ced773a62396b5d6bd3d49aae1122901629 | test code.. | rabitt/mir_eval,urinieto/mir_eval,mrgloom/mir_eval,rabitt/mir_eval,bmcfee/mir_eval,faroit/mir_eval,bmcfee/mir_eval,craffel/mir_eval,mrgloom/mir_eval,craffel/mir_eval,urinieto/mir_eval,faroit/mir_eval | tests/test_melody.py | tests/test_melody.py | # CREATED: 4/15/14 9:42 AM by Justin Salamon <justin.salamon@nyu.edu>
'''
Unit tests for mir_eval.melody
'''
import numpy as np
import os, sys
sys.path.append('../evaluators')
import melody_eval
def test_melody_functions():
songs = ['daisy1','daisy2','daisy3','daisy4','jazz1','jazz2','jazz3','jazz4','midi1','midi2','midi3','midi4','opera_fem2','opera_fem4','opera_male3','opera_male5','pop1','pop2','pop3','pop4']
refpath = 'data/melody/mirex2011/adc2004_ref/'
estpath = 'data/melody/mirex2011/adc2004_SG2/'
resultspath = 'data/melody/mirex2011/adc2004_results/SG2_per_track_results_mapped.csv'
# create results dictionary
results = np.loadtxt(resultspath, dtype='string', delimiter=',')
keys = results[0]
results_dict = {}
for i in range(1,len(results)):
value_dict = {}
for k in range(1,len(keys)):
value_dict[keys[k]] = results[i][k]
results_dict[results[i][0]] = value_dict
hop = 0.01
for song in songs:
print song
reffile = os.path.join(refpath, song + "REF.txt")
estfile = os.path.join(estpath, song + "_mel.txt")
M = melody_eval.evaluate(reffile, estfile, hop)
# compare results
for metric in M.keys():
mirex_result = float(results_dict[song + '.wav'][metric])
mireval_result = M[metric]
diff = np.abs(mirex_result - mireval_result)
if diff > 0.01:
print "\t%s: %.3f [mx:%.3f me:%.3f]" % (metric, diff, mirex_result, mireval_result)
| # CREATED: 4/15/14 9:42 AM by Justin Salamon <justin.salamon@nyu.edu>
'''
Unit tests for mir_eval.melody
'''
import numpy as np
import mir_eval
def test_melody_functions():
songs = ['daisy1','daisy2','daisy3','daisy4','jazz1','jazz2','jazz3','jazz4','midi1','midi2','midi3','midi4','opera_fem2','opera_fem4','opera_male3','opera_male5','pop1','pop2','pop3','pop4']
refpath = 'data/melody/mirex2011/adc2004_ref/'
estpath = 'data/melody/mirex2011/adc2004_SG2/'
resultspath = 'data/mirex2011/adc2004_results/SG2_per_track_results_mapped.csv'
for song in songs:
# Load in an example beat annotation
reference_beats = np.genfromtxt('data/beat/reference.beats')
# Load in an example beat tracker output
estimated_beats = np.genfromtxt('data/beat/estimated.beats')
# Trim the first 5 seconds off
reference_beats = mir_eval.beat.trim_beats(reference_beats)
estimated_beats = mir_eval.beat.trim_beats(estimated_beats)
# Load in reference scores computed with the beat eval toolbox
bet_scores = pickle.load(open('data/beat/bet_scores.pickle'))
# List of functions in mir_eval.beat
functions = {'f_measure':mir_eval.beat.f_measure,
'cemgil':mir_eval.beat.cemgil,
'goto':mir_eval.beat.goto,
'p_score':mir_eval.beat.p_score,
'continuity':mir_eval.beat.continuity,
'information_gain':mir_eval.beat.information_gain}
# Check each function output against beat evaluation toolbox
for name, function in functions.items():
my_score = function(reference_beats, estimated_beats)
their_score = bet_scores[name]
assert np.allclose(my_score, their_score) | mit | Python |
515f81994fae8bc455d67e5faeb8fe4c5598156b | fix licensing in depend_filter.py | philippedeswert/dsme,philippedeswert/dsme,philippedeswert/dsme,spiiroin/dsme,spiiroin/dsme | libiphb/depend_filter.py | libiphb/depend_filter.py | #! /usr/bin/env python
# =============================================================================
# File: depend_filter.py
#
# Copyright (C) 2007-2010 Nokia. All rights reserved.
#
# Author: Simo Piiroinen <simo.piiroinen@nokia.com>
#
# This file is part of Dsme.
#
# Dsme is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License
# version 2.1 as published by the Free Software Foundation.
#
# Dsme is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with Dsme. If not, see <http://www.gnu.org/licenses/>.
#
# -----------------------------------------------------------------------------
#
# History:
#
# 05-Dec-2007 Simo Piiroinen
# - initial version
# =============================================================================
# gcc -MM filters out only standard includes, which
# does not cover glib etc headers ... so we filter
# out all dependencies with absolute path
import sys,os
DEST = None
args = sys.argv[1:]
args.reverse()
while args:
a = args.pop()
k,v = a[:2],a[2:]
if k in "-d":
DEST = v or args.pop()
else:
print>>sys.stderr, "Unknown option: %s" % a
sys.exit(1)
def dep_compare(a,b):
return cmp(a.count("/"),b.count("/")) or cmp(a,b)
def dep_filter(deps):
src, hdr = [], {}
for dep in deps:
if dep.endswith(".c"):
src.append(dep)
elif dep.startswith("/"):
continue
elif not dep in hdr:
hdr[dep] = None
hdr = hdr.keys()
hdr.sort(dep_compare)
return src + hdr
for line in sys.stdin.read().replace("\\\n", " ").split("\n"):
if not ':' in line:
continue
dest,srce = line.split(":",1)
if DEST:
dest = os.path.basename(dest)
dest = os.path.join(DEST, dest)
srce = dep_filter(srce.split())
print '%s: %s\n' % (dest, " \\\n ".join(srce))
| #! /usr/bin/env python
# =============================================================================
# File: depend_filter.py
#
# Copyright (C) 2007 Nokia. All rights reserved.
#
# Author: Simo Piiroinen <simo.piiroinen@nokia.com>
#
# -----------------------------------------------------------------------------
#
# History:
#
# 05-Dec-2007 Simo Piiroinen
# - initial version
# =============================================================================
# gcc -MM filters out only standard includes, which
# does not cover glib etc headers ... so we filter
# out all dependencies with absolute path
import sys,os
DEST = None
args = sys.argv[1:]
args.reverse()
while args:
a = args.pop()
k,v = a[:2],a[2:]
if k in "-d":
DEST = v or args.pop()
else:
print>>sys.stderr, "Unknown option: %s" % a
sys.exit(1)
def dep_compare(a,b):
return cmp(a.count("/"),b.count("/")) or cmp(a,b)
def dep_filter(deps):
src, hdr = [], {}
for dep in deps:
if dep.endswith(".c"):
src.append(dep)
elif dep.startswith("/"):
continue
elif not dep in hdr:
hdr[dep] = None
hdr = hdr.keys()
hdr.sort(dep_compare)
return src + hdr
for line in sys.stdin.read().replace("\\\n", " ").split("\n"):
if not ':' in line:
continue
dest,srce = line.split(":",1)
if DEST:
dest = os.path.basename(dest)
dest = os.path.join(DEST, dest)
srce = dep_filter(srce.split())
print '%s: %s\n' % (dest, " \\\n ".join(srce))
| lgpl-2.1 | Python |
3fa1ea04bff36b03633008df8bfdacb083700a2b | add app.warn to init_values to make tests pass | sloria/sphinx-issues | test_sphinx_issues.py | test_sphinx_issues.py | # -*- coding: utf-8 -*-
from tempfile import mkdtemp
from shutil import rmtree
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock
from sphinx.application import Sphinx
from sphinx_issues import (
issue_role,
user_role,
setup as issues_setup
)
import pytest
@pytest.yield_fixture(params=[
# Parametrize config
{'issues_github_path': 'sloria/marshmallow'},
{'issues_uri': 'https://github.com/sloria/marshmallow/issues/{issue}'}
])
def app(request):
src, doctree, confdir, outdir = [mkdtemp() for _ in range(4)]
Sphinx._log = lambda self, message, wfile, nonl=False: None
app = Sphinx(
srcdir=src,
confdir=None,
outdir=outdir,
doctreedir=doctree,
buildername='html',
)
issues_setup(app)
# Stitch together as the sphinx app init() usually does w/ real conf files
app.config._raw_config = request.param
app.config.init_values(app.warn)
yield app
[rmtree(x) for x in (src, doctree, confdir, outdir)]
@pytest.fixture()
def inliner(app):
return Mock(document=Mock(settings=Mock(env=Mock(app=app))))
def test_issue_role(inliner):
result = issue_role(
name=None,
rawtext='',
text='42',
lineno=None,
inliner=inliner
)
link = result[0][0]
assert link.astext() == '#42'
assert link.attributes['refuri'] == 'https://github.com/sloria/marshmallow/issues/42'
def test_issue_role_multiple(inliner):
result = issue_role(
name=None,
rawtext='',
text='42,43',
inliner=inliner,
lineno=None,
)
link1 = result[0][0]
assert link1.astext() == '#42'
assert link1.attributes['refuri'] == 'https://github.com/sloria/marshmallow/issues/42'
sep = result[0][1]
assert sep.astext() == ', '
link2 = result[0][2]
assert link2.astext() == '#43'
assert link2.attributes['refuri'] == 'https://github.com/sloria/marshmallow/issues/43'
def test_user_role(inliner):
result = user_role(
name=None,
rawtext='',
text='sloria',
inliner=inliner,
lineno=None
)
link = result[0][0]
assert link.astext() == '@sloria'
assert link.attributes['refuri'] == 'https://github.com/sloria'
| # -*- coding: utf-8 -*-
from tempfile import mkdtemp
from shutil import rmtree
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock
from sphinx.application import Sphinx
from sphinx_issues import (
issue_role,
user_role,
setup as issues_setup
)
import pytest
@pytest.yield_fixture(params=[
# Parametrize config
{'issues_github_path': 'sloria/marshmallow'},
{'issues_uri': 'https://github.com/sloria/marshmallow/issues/{issue}'}
])
def app(request):
src, doctree, confdir, outdir = [mkdtemp() for _ in range(4)]
Sphinx._log = lambda self, message, wfile, nonl=False: None
app = Sphinx(
srcdir=src,
confdir=None,
outdir=outdir,
doctreedir=doctree,
buildername='html',
)
issues_setup(app)
# Stitch together as the sphinx app init() usually does w/ real conf files
app.config._raw_config = request.param
app.config.init_values()
yield app
[rmtree(x) for x in (src, doctree, confdir, outdir)]
@pytest.fixture()
def inliner(app):
return Mock(document=Mock(settings=Mock(env=Mock(app=app))))
def test_issue_role(inliner):
result = issue_role(
name=None,
rawtext='',
text='42',
lineno=None,
inliner=inliner
)
link = result[0][0]
assert link.astext() == '#42'
assert link.attributes['refuri'] == 'https://github.com/sloria/marshmallow/issues/42'
def test_issue_role_multiple(inliner):
result = issue_role(
name=None,
rawtext='',
text='42,43',
inliner=inliner,
lineno=None,
)
link1 = result[0][0]
assert link1.astext() == '#42'
assert link1.attributes['refuri'] == 'https://github.com/sloria/marshmallow/issues/42'
sep = result[0][1]
assert sep.astext() == ', '
link2 = result[0][2]
assert link2.astext() == '#43'
assert link2.attributes['refuri'] == 'https://github.com/sloria/marshmallow/issues/43'
def test_user_role(inliner):
result = user_role(
name=None,
rawtext='',
text='sloria',
inliner=inliner,
lineno=None
)
link = result[0][0]
assert link.astext() == '@sloria'
assert link.attributes['refuri'] == 'https://github.com/sloria'
| mit | Python |
18e95204eb7feef40cd4d61c6fbe3ca021b96129 | Add identify_regions step to image_problem. | PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge | src/puzzle/problems/image/image_problem.py | src/puzzle/problems/image/image_problem.py | """
Steps:
2. For each color band
1. Compute components
2. Identify components
3. Erase identified components from parent
"""
import numpy as np
from data.image import image
from puzzle.constraints.image import decompose_constraints, \
identify_regions_constraints, prepare_image_constraints
from puzzle.problems import problem
from puzzle.steps.image import decompose, identify_regions, prepare_image
class ImageProblem(problem.Problem):
_source_image: image.Image
_prepare_image: prepare_image.PrepareImage
_identify_regions: identify_regions.IdentifyRegions
_decompose: decompose.Decompose
def __init__(self, name: str, data: np.ndarray, *args, **kwargs) -> None:
super(ImageProblem, self).__init__(name, data, *args, **kwargs)
# Fork to preserve a pristine original in "parent".
self._source_image = image.Image(data).fork()
self._prepare_image = prepare_image.PrepareImage(
self._source_image,
prepare_image_constraints.PrepareImageConstraints())
self._identify_regions = identify_regions.IdentifyRegions(
self._prepare_image,
identify_regions_constraints.IdentifyRegionsConstraints())
self._decompose = decompose.Decompose(
self._identify_regions,
decompose_constraints.DecomposeConstraints())
self._solutions_generator.depends_on(self._decompose)
@staticmethod
def score(data: problem.ProblemData) -> float:
if not isinstance(data, np.ndarray):
return 0
if data.dtype == np.uint8:
return 1
return .5
def __str__(self) -> str:
return '<image data>'
def _solve(self) -> dict:
return {}
| """
Steps:
2. For each color band
1. Compute components
2. Identify components
3. Erase identified components from parent
"""
import numpy as np
from data.image import image
from puzzle.constraints.image import decompose_constraints, \
prepare_image_constraints
from puzzle.problems import problem
from puzzle.steps.image import decompose, prepare_image
class ImageProblem(problem.Problem):
_source_image: image.Image
_prepare_image: prepare_image.PrepareImage
def __init__(self, name: str, data: np.ndarray, *args, **kwargs) -> None:
super(ImageProblem, self).__init__(name, data, *args, **kwargs)
self._source_image = image.Image(data)
self._prepare_image = prepare_image.PrepareImage(
self._source_image,
prepare_image_constraints.PrepareImageConstraints())
self._decompose = decompose.Decompose(
self._prepare_image,
decompose_constraints.DecomposeConstraints())
self._solutions_generator.depends_on(self._decompose)
@staticmethod
def score(data: problem.ProblemData) -> float:
if not isinstance(data, np.ndarray):
return 0
if data.dtype == np.uint8:
return 1
return .5
def __str__(self) -> str:
return '<image data>'
def _solve(self) -> dict:
return {}
| mit | Python |
efdbbc0cf6e3cab2d7073615b831d2421e97c2c3 | add sympy_theanify function | MBALearnsToCode/Helpy,MBALearnsToCode/Helpy | HelpyFuncs/SymPy.py | HelpyFuncs/SymPy.py | from copy import copy
from numpy import allclose, array, float32
from sympy import Atom, Expr, Float, Integer, sympify
from sympy.core.numbers import NegativeOne, One, Zero
from sympy.matrices import Matrix
from sympy.printing.theanocode import theano_function
FLOAT_TYPES = float, Float, float32, Integer, NegativeOne, One, Zero, Matrix
def is_non_atomic_sympy_expr(obj):
return hasattr(obj, 'doit') and not isinstance(obj, Atom)
def sympy_to_float(sympy_number_or_matrix):
if isinstance(sympy_number_or_matrix, FLOAT_TYPES):
return float(sympy_number_or_matrix)
else:
return array(sympy_number_or_matrix.tolist(), dtype=float)
def sympy_allclose(*sympy_matrices, **kwargs):
if len(sympy_matrices) == 2:
return allclose(sympy_to_float(sympy_matrices[0]), sympy_to_float(sympy_matrices[1]), **kwargs)
else:
for i in range(1, len(sympy_matrices)):
if not sympy_allclose(sympy_matrices[0], sympy_matrices[i], **kwargs):
return False
return True
def sympy_xreplace(obj, xreplace___dict={}):
if hasattr(obj, 'xreplace'):
return obj.xreplace(xreplace___dict)
elif isinstance(obj, tuple):
return tuple(sympy_xreplace(item, xreplace___dict) for item in obj)
elif isinstance(obj, list):
return [sympy_xreplace(item, xreplace___dict) for item in obj]
elif isinstance(obj, set):
return set(sympy_xreplace(item, xreplace___dict) for item in obj)
elif isinstance(obj, frozenset):
return frozenset(sympy_xreplace(item, xreplace___dict) for item in obj)
elif hasattr(obj, 'keys'):
obj = obj.copy()
for k, v in obj.items():
obj[k] = sympy_xreplace(v, xreplace___dict)
return obj
else:
return copy(obj)
def sympy_theanify(sympy_expr, symbols=()):
if isinstance(sympy_expr, Expr):
if not symbols:
symbols = sympy_expr.free_symbols
return theano_function(symbols, [sympy_expr])
else:
return lambda **kwargs: sympy_expr
def sympy_eval_by_theano(sympy_expr, symbols=(), **kwargs):
return sympy_theanify(sympy_expr, symbols)(**kwargs)
| from copy import copy
from numpy import allclose, array, atleast_2d, float32
from sympy import Atom, Float, Integer
from sympy.core.numbers import NegativeOne, One, Zero
from sympy.matrices import Matrix
from sympy.printing.theanocode import theano_function
FLOAT_TYPES = float, Float, float32, Integer, NegativeOne, One, Zero, Matrix
def is_non_atomic_sympy_expr(obj):
return hasattr(obj, 'doit') and not isinstance(obj, Atom)
def sympy_to_float(sympy_number_or_matrix):
if isinstance(sympy_number_or_matrix, FLOAT_TYPES):
return float(sympy_number_or_matrix)
else:
return array(sympy_number_or_matrix.tolist(), dtype=float)
def numpy_vector(a, dtype=float32):
v = array(a, dtype=dtype)
if v.ndim == 1:
v = atleast_2d(v).T
return v
def sympy_vector(a, dtype=float32):
return Matrix(numpy_vector(a, dtype=dtype))
def sympy_allclose(*sympy_matrices, **kwargs):
if len(sympy_matrices) == 2:
return allclose(sympy_to_float(sympy_matrices[0]), sympy_to_float(sympy_matrices[1]), **kwargs)
else:
for i in range(1, len(sympy_matrices)):
if not sympy_allclose(sympy_matrices[0], sympy_matrices[i], **kwargs):
return False
return True
def sympy_xreplace(obj, xreplace___dict={}):
if hasattr(obj, 'xreplace'):
return obj.xreplace(xreplace___dict)
elif isinstance(obj, tuple):
return tuple(sympy_xreplace(item, xreplace___dict) for item in obj)
elif isinstance(obj, list):
return [sympy_xreplace(item, xreplace___dict) for item in obj]
elif isinstance(obj, set):
return set(sympy_xreplace(item, xreplace___dict) for item in obj)
elif isinstance(obj, frozenset):
return frozenset(sympy_xreplace(item, xreplace___dict) for item in obj)
elif hasattr(obj, 'keys'):
obj = obj.copy()
for k, v in obj.items():
obj[k] = sympy_xreplace(v, xreplace___dict)
return obj
else:
return copy(obj)
def sympy_eval_by_theano(sympy_expr, symbols=[], **kwargs):
return theano_function(symbols, [sympy_expr])(**kwargs)
| mit | Python |
22a1e02efae195ef8f93a34eedfc28a8d9bb40ba | Add many tests for prompt.query_yes_no(). | kkujawinski/cookiecutter,utek/cookiecutter,foodszhang/cookiecutter,audreyr/cookiecutter,alex/cookiecutter,letolab/cookiecutter,venumech/cookiecutter,dajose/cookiecutter,venumech/cookiecutter,michaeljoseph/cookiecutter,atlassian/cookiecutter,willingc/cookiecutter,pjbull/cookiecutter,vincentbernat/cookiecutter,sp1rs/cookiecutter,dajose/cookiecutter,moi65/cookiecutter,benthomasson/cookiecutter,cguardia/cookiecutter,ramiroluz/cookiecutter,Springerle/cookiecutter,audreyr/cookiecutter,0k/cookiecutter,atlassian/cookiecutter,Vauxoo/cookiecutter,letolab/cookiecutter,takeflight/cookiecutter,lucius-feng/cookiecutter,sp1rs/cookiecutter,agconti/cookiecutter,moi65/cookiecutter,alex/cookiecutter,drgarcia1986/cookiecutter,lgp171188/cookiecutter,lgp171188/cookiecutter,nhomar/cookiecutter,nhomar/cookiecutter,hackebrot/cookiecutter,Springerle/cookiecutter,luzfcb/cookiecutter,drgarcia1986/cookiecutter,jhermann/cookiecutter,lucius-feng/cookiecutter,cguardia/cookiecutter,kkujawinski/cookiecutter,tylerdave/cookiecutter,terryjbates/cookiecutter,vincentbernat/cookiecutter,cichm/cookiecutter,foodszhang/cookiecutter,janusnic/cookiecutter,stevepiercy/cookiecutter,vintasoftware/cookiecutter,michaeljoseph/cookiecutter,cichm/cookiecutter,ionelmc/cookiecutter,janusnic/cookiecutter,pjbull/cookiecutter,hackebrot/cookiecutter,vintasoftware/cookiecutter,christabor/cookiecutter,benthomasson/cookiecutter,christabor/cookiecutter,ramiroluz/cookiecutter,willingc/cookiecutter,Vauxoo/cookiecutter,luzfcb/cookiecutter,0k/cookiecutter,ionelmc/cookiecutter,jhermann/cookiecutter,agconti/cookiecutter,stevepiercy/cookiecutter,utek/cookiecutter,terryjbates/cookiecutter,tylerdave/cookiecutter,takeflight/cookiecutter | tests/test_prompt.py | tests/test_prompt.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_prompt
--------------
Tests for `cookiecutter.prompt` module.
"""
import unittest
from unittest.mock import patch
from cookiecutter import prompt
# class TestPrompt(unittest.TestCase):
# def test_prompt_for_config(self):
# context = {"cookiecutter": {"full_name": "Your Name",
# "email": "you@example.com"}}
# TODO: figure out how to mock input with pexpect or something
# prompt.prompt_for_config(context)
class TestQueryAnswers(unittest.TestCase):
@patch('builtins.input', lambda: 'y')
def test_query_y(self):
answer = prompt.query_yes_no("Blah?")
self.assertTrue(answer)
@patch('builtins.input', lambda: 'ye')
def test_query_ye(self):
answer = prompt.query_yes_no("Blah?")
self.assertTrue(answer)
@patch('builtins.input', lambda: 'yes')
def test_query_yes(self):
answer = prompt.query_yes_no("Blah?")
self.assertTrue(answer)
@patch('builtins.input', lambda: 'n')
def test_query_n(self):
answer = prompt.query_yes_no("Blah?")
self.assertFalse(answer)
@patch('builtins.input', lambda: 'no')
def test_query_n(self):
answer = prompt.query_yes_no("Blah?")
self.assertFalse(answer)
# @patch('builtins.input', lambda: 'junk')
# def test_query_junk(self):
# answer = prompt.query_yes_no("Blah?")
# self.assertTrue(answer)
class TestQueryDefaults(unittest.TestCase):
@patch('builtins.input', lambda: 'y')
def test_query_y_none_default(self):
answer = prompt.query_yes_no("Blah?", default=None)
self.assertTrue(answer)
@patch('builtins.input', lambda: 'n')
def test_query_n_none_default(self):
answer = prompt.query_yes_no("Blah?", default=None)
self.assertFalse(answer)
@patch('builtins.input', lambda: '')
def test_query_no_default(self):
answer = prompt.query_yes_no("Blah?", default='no')
self.assertFalse(answer)
@patch('builtins.input', lambda: 'junk')
def test_query_bad_default(self):
self.assertRaises(ValueError, prompt.query_yes_no, "Blah?", default='yn')
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_prompt
--------------
Tests for `cookiecutter.prompt` module.
"""
import unittest
from cookiecutter import prompt
class TestPrompt(unittest.TestCase):
def test_prompt_for_config(self):
context = {"cookiecutter": {"full_name": "Your Name",
"email": "you@example.com"}}
# TODO: figure out how to mock input with pexpect or something
# prompt.prompt_for_config(context)
| bsd-3-clause | Python |
2df00cb8d570ce4994ab1a1acfa5b307ae824f5b | Remove empty folder | savex/spectra | Janitor/__init__.py | Janitor/__init__.py | import utils
utils = utils
logger, logger_api = utils.logger.setup_loggers(
"janitor"
)
| apache-2.0 | Python | |
b851f7ed12abaad2c583a2f5397e38006cf6f5a9 | Add first unit test | pshchelo/ironic-python-heartbeater | ironic_python_heartbeater/tests/unit/test_ironic_python_heartbeater.py | ironic_python_heartbeater/tests/unit/test_ironic_python_heartbeater.py | import unittest
import mock
from ironic_python_heartbeater import ironic_python_heartbeater as iph
class IronicPythonHeartbeaterTestCase(unittest.TestCase):
def test__parse_kernel_cmdline(self):
fake_kernel_opts = "spam=ham foo=bar"
expected_opts = {'spam': 'ham', 'foo': 'bar'}
with mock.patch.object(iph, 'open',
new=mock.mock_open(read_data=fake_kernel_opts)):
self.assertEqual(expected_opts, iph._parse_kernel_cmdline())
| import unittest
class IronicPythonHeartbeaterTestCase(unittest.TestCase):
def test_dummy(self):
self.assertTrue(True)
| apache-2.0 | Python |
4db72e1b4f81132079554dab1ce464363522df02 | Make TIME_ZONE the same as myuw production. | fanglinfang/myuw,uw-it-aca/myuw,fanglinfang/myuw,uw-it-aca/myuw,uw-it-aca/myuw,fanglinfang/myuw,uw-it-aca/myuw | travis-ci/settings.py | travis-ci/settings.py | """
Django settings for project project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'south', 'compressor', 'restclients', 'templatetag_handlebars',
'myuw_mobile', 'userservice', 'django_client_logger',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.RemoteUserMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.RemoteUserBackend',
# 'django.contrib.auth.backends.ModelBackend',
)
ROOT_URLCONF = 'travis-ci.urls'
WSGI_APPLICATION = 'travis-ci.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Los_Angeles'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
| """
Django settings for project project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'south', 'compressor', 'restclients', 'templatetag_handlebars',
'myuw_mobile', 'userservice', 'django_client_logger',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.RemoteUserMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.RemoteUserBackend',
# 'django.contrib.auth.backends.ModelBackend',
)
ROOT_URLCONF = 'travis-ci.urls'
WSGI_APPLICATION = 'travis-ci.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
| apache-2.0 | Python |
eed78d3a671aee0fcc0760f15087085f2918da6c | Add "localhost" in the allowed hosts for testing purposes | ExCiteS/geokey-epicollect,ExCiteS/geokey-epicollect | travis_ci/settings.py | travis_ci/settings.py | """GeoKey settings."""
from geokey.core.settings.dev import *
DEFAULT_FROM_EMAIL = 'no-reply@travis-ci.org'
ACCOUNT_EMAIL_VERIFICATION = 'optional'
SECRET_KEY = 'xxxxxxxxxxxxxxxxxxxxxxxxx'
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'geokey',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
ALLOWED_HOSTS = ['localhost']
INSTALLED_APPS += (
'geokey_epicollect',
)
STATIC_URL = '/static/'
MEDIA_ROOT = normpath(join(dirname(dirname(abspath(__file__))), 'assets'))
MEDIA_URL = '/assets/'
WSGI_APPLICATION = 'wsgi.application'
| """GeoKey settings."""
from geokey.core.settings.dev import *
DEFAULT_FROM_EMAIL = 'no-reply@travis-ci.org'
ACCOUNT_EMAIL_VERIFICATION = 'optional'
SECRET_KEY = 'xxxxxxxxxxxxxxxxxxxxxxxxx'
DATABASES = {
'default': {
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'NAME': 'geokey',
'USER': 'postgres',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
INSTALLED_APPS += (
'geokey_epicollect',
)
STATIC_URL = '/static/'
MEDIA_ROOT = normpath(join(dirname(dirname(abspath(__file__))), 'assets'))
MEDIA_URL = '/assets/'
WSGI_APPLICATION = 'wsgi.application'
| mit | Python |
a62032bdcfbbf129535ce92a3f0659cfecfbee37 | Update main.py | Python-IoT/Smart-IoT-Planting-System,Python-IoT/Smart-IoT-Planting-System | device/src/main.py | device/src/main.py | #This is the file executing while STM32 MCU bootup, and in this file,
#it will call other functions to fullfill the project.
import pyb
from pyb import Pin
from pyb import Timer
import micropython
#Import light intensity needed module
import LightIntensity
import time
micropython.alloc_emergency_exception_buf(100)
print('pin init')
Pin('Y11',Pin.OUT_PP).low() #GND
Pin('Y9',Pin.OUT_PP).high() #VCC
#LED shining regularly(using timer) to indicate the program is running correctly
tim1 = Timer(1, freq=1)
tim1.callback(lambda t: pyb.LED(1).toggle())
if __name__=='__main__':
while True:
print('Smart IoT Plant System-Device')
print(LightIntensity.readLight())
time.sleep(2)
#send on-line message to gateway to notifiy and obtain own data from gateway's database
###reference begin###
"""
import pyb
from pyb import Pin
from ds18x20 import DS18X20
from pyb import Timer
import micropython
micropython.alloc_emergency_exception_buf(100)
tempValue = 0
print('pin init')
Pin('Y11',Pin.OUT_PP).low() #GND
Pin('Y9',Pin.OUT_PP).high() #VCC
def displayTemp(t):
print('Current Temperature:')
print(tempValue)
tim1 = Timer(1)
tim1.callback(displayTemp)
tim1.init(freq=1/5)
if __name__=='__main__':
print('Smart IoT Plant System')
DQ=DS18X20(Pin('Y10')) #DQ
while True:
tempValue = DQ.read_temp()
###reference end###
"""
"""
Waiting for LoRa message from gateway.
from pyb import UART
u4 = UART(4,9600)
u4.init(9600, bits=8, parity=None, stop=1)
while True:
if(u4.any() > 0):
print('reading....')
receive = u4.read()
print(receive)
u4.write(receive)
"""
| #This is the file executing while STM32 MCU bootup, and in this file,
#it will call other functions to fullfill the project.
import pyb
from pyb import Pin
from pyb import Timer
import micropython
#Import light intensity needed module
import LightIntensity
import time
micropython.alloc_emergency_exception_buf(100)
print('pin init')
Pin('Y11',Pin.OUT_PP).low() #GND
Pin('Y9',Pin.OUT_PP).high() #VCC
#LED shining regularly(using timer) to indicate the program is running correctly
tim1 = Timer(1, freq=1)
tim1.callback(lambda t: pyb.LED(1).toggle())
if __name__=='__main__':
while True:
print('Smart IoT Plant System-Device')
print LightIntensity.readLight()
time.sleep(2)
#send on-line message to gateway to notifiy and obtain own data from gateway's database
###reference begin###
"""
import pyb
from pyb import Pin
from ds18x20 import DS18X20
from pyb import Timer
import micropython
micropython.alloc_emergency_exception_buf(100)
tempValue = 0
print('pin init')
Pin('Y11',Pin.OUT_PP).low() #GND
Pin('Y9',Pin.OUT_PP).high() #VCC
def displayTemp(t):
print('Current Temperature:')
print(tempValue)
tim1 = Timer(1)
tim1.callback(displayTemp)
tim1.init(freq=1/5)
if __name__=='__main__':
print('Smart IoT Plant System')
DQ=DS18X20(Pin('Y10')) #DQ
while True:
tempValue = DQ.read_temp()
###reference end###
"""
"""
Waiting for LoRa message from gateway.
from pyb import UART
u4 = UART(4,9600)
u4.init(9600, bits=8, parity=None, stop=1)
while True:
if(u4.any() > 0):
print('reading....')
receive = u4.read()
print(receive)
u4.write(receive)
"""
| mit | Python |
0a3566d764815af2512104b248024d53ecbbc890 | Fix merge conflicts | galxy25/safeDriver | distance.py | distance.py | #http://www.pyimagesearch.com/2014/08/04/opencv-python-color-detection/
import numpy as np
import cv2
#import os
#Function to compute the ratio of black pixels to non black pixels
def countBlackPixels(grayImg):
height=grayImg.shape[0]
width=grayImg.shape[1]
size = width * height
return (size - cv2.countNonZero(grayImg)) / float(size)
# define rgb range for red colors
red = [[17, 15, 100], [50, 56, 200]]
# create NumPy arrays from the boundaries
lower = np.array(red[0], dtype = "uint8")
upper = np.array(red[1], dtype = "uint8")
#Set our capture object to dev0, assuming we only have one camera running
cap=cv2.VideoCapture(0)
#set width and height so we reduce our image size
cap.set(4,320)
cap.set(5,240)
#Variables to stop our loop...in reality we want
#this to run as long as the power is on
ret=True
BlackRatio=1.0
#Loop to process frames from running video device
while(ret):
preRatio=BlackRatio
#Capture the image from our device
ret, image =cap.read()
# find the colors within the specified boundaries and apply
# the mask
mask = cv2.inRange(image, lower, upper)
output = cv2.bitwise_and(image, image, mask = mask)
#Convert the image to gray scale so we can count number of non black pixels
gray=cv2.cvtColor(output, cv2.COLOR_BGR2GRAY)
#Count the number of non black pixels
BlackRatio=countBlackPixels(gray)
#First figure out
if BlackRatio > preRatio:
print (BlackRatio)
else:
print("No extra red in this frame!")
#Release the device at then end
cap.release()
| #http://www.pyimagesearch.com/2014/08/04/opencv-python-color-detection/
import numpy as np
import cv2
#import os
#Function to compute the ratio of black pixels to non black pixels
def countBlackPixels(grayImg):
height=grayImg.shape[0]
width=grayImg.shape[1]
size = width * height
return (size - cv2.countNonZero(grayImg)) / float(size)
# define rgb range for red colors
red = [[17, 15, 100], [50, 56, 200]]
# create NumPy arrays from the boundaries
lower = np.array(red[0], dtype = "uint8")
upper = np.array(red[1], dtype = "uint8")
#Set our capture object to dev0, assuming we only have one camera running
cap=cv2.VideoCapture(0)
#set width and height so we reduce our image size
cap.set(4,320)
cap.set(5,240)
#Variables to stop our loop...in reality we want
#this to run as long as the power is on
ret=True
BlackRatio=1.0
#Loop to process frames from running video device
while(ret):
preRatio=BlackRatio
#Capture the image from our device
ret, image =cap.read()
# find the colors within the specified boundaries and apply
# the mask
mask = cv2.inRange(image, lower, upper)
output = cv2.bitwise_and(image, image, mask = mask)
#Convert the image to gray scale so we can count number of non black pixels
gray=cv2.cvtColor(output, cv2.COLOR_BGR2GRAY)
#Count the number of non black pixels
BlackRatio=countBlackPixels(gray)
#First figure out
if BlackRatio > preRatio:
print (BlackRatio)
else:
print("No extra red in this frame!")
#Release the device at then end
cap.release() | mit | Python |
54980f619dd055b5797db131f47a3805f4fe4050 | Test images.data_uri directly | mwilliamson/python-mammoth | tests/images_tests.py | tests/images_tests.py | import io
from hamcrest import assert_that, contains, has_properties
from nose.tools import istest
import mammoth
@istest
def inline_is_available_as_alias_of_img_element():
assert mammoth.images.inline is mammoth.images.img_element
@istest
def data_uri_encodes_images_in_base64():
image_bytes = b"abc"
image = mammoth.documents.Image(
alt_text=None,
content_type="image/jpeg",
open=lambda: io.BytesIO(image_bytes),
)
result = mammoth.images.data_uri(image)
assert_that(result, contains(
has_properties(attributes={"src": "data:image/jpeg;base64,YWJj"}),
))
| from nose.tools import istest
import mammoth
@istest
def inline_is_available_as_alias_of_img_element():
assert mammoth.images.inline is mammoth.images.img_element
| bsd-2-clause | Python |
0018919168315e8a893e10c56a58f45ec797c750 | Update style formatting. | dgilland/flask-alchy | flask_alchy.py | flask_alchy.py | """Integrate alchy with Flask SQLAlchemy
"""
__version__ = '0.4.0'
__author__ = 'Derrick Gilland <dgilland@gmail.com>'
from flask_sqlalchemy import SQLAlchemy
from alchy import make_declarative_base, QueryModel, ManagerMixin
class Alchy(SQLAlchemy, ManagerMixin):
"""Flask extension that integrates alchy with Flask-SQLAlchemy."""
def __init__(self,
app=None,
use_native_unicode=True,
session_options=None,
Model=None,
metadata=None):
if session_options is None:
session_options = {}
session_options.setdefault('query_cls', QueryModel)
self.Model = Model
super(Alchy, self).__init__(
app, use_native_unicode, session_options, metadata=metadata)
self.Query = session_options['query_cls']
def make_declarative_base(self, metadata=None):
"""Override parent function with alchy's"""
return make_declarative_base(self.session,
Model=self.Model,
metadata=metadata)
def __getattr__(self, attr):
"""Delegate all other attributes to self.session"""
return getattr(self.session, attr)
| """Integrate alchy with Flask SQLAlchemy
"""
__version__ = '0.4.0'
__author__ = 'Derrick Gilland <dgilland@gmail.com>'
from flask_sqlalchemy import SQLAlchemy
from alchy import make_declarative_base, QueryModel, ManagerMixin
class Alchy(SQLAlchemy, ManagerMixin):
"""Flask extension that integrates alchy with Flask-SQLAlchemy."""
def __init__(self,
app=None,
use_native_unicode=True,
session_options=None,
Model=None,
metadata=None):
if session_options is None:
session_options = {}
session_options.setdefault('query_cls', QueryModel)
self.Model = Model
super(Alchy, self).__init__(
app, use_native_unicode, session_options, metadata=metadata)
self.Query = session_options['query_cls']
def make_declarative_base(self, metadata=None):
"""Override parent function with alchy's"""
return make_declarative_base(self.session, Model=self.Model,
metadata=metadata)
def __getattr__(self, attr):
"""Delegate all other attributes to self.session"""
return getattr(self.session, attr)
| mit | Python |
460d6184d432f2786c24d7526811cb4d245fa7e9 | add require python-dateutil | moodpulse/l2,moodpulse/l2,moodpulse/l2,moodpulse/l2,moodpulse/l2 | directory/admin.py | directory/admin.py | from django.contrib import admin
import directory.models as models
class ResAdmin(admin.ModelAdmin):
list_filter = ('podrazdeleniye', 'groups', 'hide')
list_display = ('title', 'podrazdeleniye',)
list_display_links = ('title',)
class RefAdmin(admin.ModelAdmin):
list_filter = ('fraction',)
list_display = ('title', 'fraction', 'ref_m', 'ref_f', 'about')
list_display_links = ('title',)
class RefFractions(admin.ModelAdmin):
list_display = ('title', 'research', 'podr',)
list_display_links = ('title', 'research', 'podr',)
list_filter = ('research__podrazdeleniye',)
def podr(self, obj):
return obj.research.podrazdeleniye
podr.short_description = "лаборатория"
podr.admin_order_fiels = 'podr'
admin.site.register(models.ResearchGroup)
admin.site.register(models.Researches, ResAdmin)
admin.site.register(models.ParaclinicInputGroups)
admin.site.register(models.ParaclinicInputField)
admin.site.register(models.References, RefAdmin)
admin.site.register(models.ResultVariants)
admin.site.register(models.MaterialVariants)
admin.site.register(models.Fractions, RefFractions)
admin.site.register(models.Absorption)
admin.site.register(models.ReleationsFT)
admin.site.register(models.AutoAdd)
| from django.contrib import admin
import directory.models as models
class ResAdmin(admin.ModelAdmin):
list_filter = ('podrazdeleniye', 'groups', 'hide')
list_display = ('title', 'podrazdeleniye',)
list_display_links = ('title',)
class RefAdmin(admin.ModelAdmin):
list_filter = ('fraction',)
list_display = ('title', 'fraction', 'ref_m', 'ref_f', 'about')
list_display_links = ('title',)
admin.site.register(models.ResearchGroup)
admin.site.register(models.Researches, ResAdmin)
admin.site.register(models.ParaclinicInputGroups)
admin.site.register(models.ParaclinicInputField)
admin.site.register(models.References, RefAdmin)
admin.site.register(models.ResultVariants)
admin.site.register(models.MaterialVariants)
admin.site.register(models.Fractions)
admin.site.register(models.Absorption)
admin.site.register(models.ReleationsFT)
admin.site.register(models.AutoAdd)
| mit | Python |
c9d55abceb4b2dd7189e29a8a9d6cecb9094ad91 | add admin prefix to forum data handling | hacklabr/django-discussion,hacklabr/django-discussion,hacklabr/django-discussion | discussion/urls.py | discussion/urls.py | # -*- coding: utf-8 -*-
from django.conf.urls import url, include
from django.views.generic import TemplateView
from django.views.generic.base import RedirectView
from discussion.views import (CategoryViewSet, ForumViewSet, ForumSearchViewSet, TopicTypeaheadViewSet, TopicViewSet, CommentViewSet, TagViewSet, TopicPageViewSet,
TopicNotificationViewSet, TopicLikeViewSet, CommentLikeViewSet, TopicFileViewSet, CommentFileViewSet, ContentFileViewSet, TopicReadViewSet, ForumView,
ForumCreateView, ForumListView, ForumUpdateView, ForumDeleteView)
from rest_framework import routers
router = routers.SimpleRouter(trailing_slash=False)
router.register(r'category', CategoryViewSet)
router.register(r'forum', ForumViewSet)
router.register(r'topic', TopicViewSet)
router.register(r'topic_page', TopicPageViewSet)
router.register(r'comment', CommentViewSet)
router.register(r'tag', TagViewSet)
router.register(r'topic-notification', TopicNotificationViewSet)
router.register(r'comment_like', CommentLikeViewSet)
router.register(r'comment-file', CommentFileViewSet)
router.register(r'topic_like', TopicLikeViewSet)
router.register(r'topic-file', TopicFileViewSet)
router.register(r'content-file', ContentFileViewSet)
router.register(r'topic-read', TopicReadViewSet)
router.register(r'search', ForumSearchViewSet)
router.register(r'typeahead', TopicTypeaheadViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^$', ForumView.as_view(), name='forum'),
url(r'^admin/forum-create$', ForumCreateView.as_view(), name='forum-create'),
url(r'^admin$', ForumListView.as_view(), name='forum-list'),
url(r'^admin/forum-update/(?P<pk>[-a-zA-Z0-9_]+)$', ForumUpdateView.as_view(), name='forum-update'),
url(r'^admin/forum-delete/(?P<pk>[-a-zA-Z0-9_]+)$', ForumDeleteView.as_view(), name='forum-delete'),
url(r'^topic/(?:#(?P<topic_id>[-a-zA-Z0-9_]+))?$', TemplateView.as_view(template_name="forum-topic.html")),
url(r'^topic/new/', TemplateView.as_view(template_name="forum-new-topic.html")),
url(r'^home/$', RedirectView.as_view(url='/discussion/', permanent=False), name='forum-home'),
]
| # -*- coding: utf-8 -*-
from django.conf.urls import url, include
from django.views.generic import TemplateView
from django.views.generic.base import RedirectView
from discussion.views import (CategoryViewSet, ForumViewSet, ForumSearchViewSet, TopicTypeaheadViewSet, TopicViewSet, CommentViewSet, TagViewSet, TopicPageViewSet,
TopicNotificationViewSet, TopicLikeViewSet, CommentLikeViewSet, TopicFileViewSet, CommentFileViewSet, ContentFileViewSet, TopicReadViewSet, ForumView,
ForumCreateView, ForumListView, ForumUpdateView, ForumDeleteView)
from rest_framework import routers
router = routers.SimpleRouter(trailing_slash=False)
router.register(r'category', CategoryViewSet)
router.register(r'forum', ForumViewSet)
router.register(r'topic', TopicViewSet)
router.register(r'topic_page', TopicPageViewSet)
router.register(r'comment', CommentViewSet)
router.register(r'tag', TagViewSet)
router.register(r'topic-notification', TopicNotificationViewSet)
router.register(r'comment_like', CommentLikeViewSet)
router.register(r'comment-file', CommentFileViewSet)
router.register(r'topic_like', TopicLikeViewSet)
router.register(r'topic-file', TopicFileViewSet)
router.register(r'content-file', ContentFileViewSet)
router.register(r'topic-read', TopicReadViewSet)
router.register(r'search', ForumSearchViewSet)
router.register(r'typeahead', TopicTypeaheadViewSet)
urlpatterns = [
url(r'^api/', include(router.urls)),
url(r'^$', ForumView.as_view(), name='forum'),
url(r'^forum-create$', ForumCreateView.as_view(), name='forum-create'),
url(r'^forum-list$', ForumListView.as_view(), name='forum-list'),
url(r'^forum-update/(?P<pk>[-a-zA-Z0-9_]+)$', ForumUpdateView.as_view(), name='forum-update'),
url(r'^forum-delete/(?P<pk>[-a-zA-Z0-9_]+)$', ForumDeleteView.as_view(), name='forum-delete'),
url(r'^topic/(?:#(?P<topic_id>[-a-zA-Z0-9_]+))?$', TemplateView.as_view(template_name="forum-topic.html")),
url(r'^topic/new/', TemplateView.as_view(template_name="forum-new-topic.html")),
url(r'^home/$', RedirectView.as_view(url='/discussion/', permanent=False), name='forum-home'),
]
| agpl-3.0 | Python |
51eab09eefe5d187b904b31f17a573960ae909d0 | Store oauth token for later usage | zen4ever/django-linked-accounts,zen4ever/django-linked-accounts | linked_accounts/views.py | linked_accounts/views.py | from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.generic.simple import direct_to_template
import django.contrib.auth as auth
from linked_accounts.models import LinkedAccount
from linked_accounts.utils import get_profile
from linked_accounts.forms import RegisterForm
LINKED_ACCOUNTS_ID_SESSION = getattr(
settings,
'LINKED_ACCOUNTS_ID_SESSION',
'_linked_acccount_id'
)
class AuthCallback(object):
def __call__(self, request, access, token):
next = request.POST.get('next', settings.LOGIN_REDIRECT_URL)
service = access.service
if request.user.is_authenticated():
profile = get_profile(service=service, token=token)
if not profile.user:
profile.user = request.user
profile.save()
access.persist(request.user,
token,
identifier="auth")
else:
profile = auth.authenticate(service=service, token=token)
if profile.user:
auth.login(request, profile.user)
access.persist(profile.user,
token,
identifier="auth")
else:
request.session[LINKED_ACCOUNTS_ID_SESSION] = profile.id
return HttpResponseRedirect(
reverse('linked_accounts_register') + "?next=%s" % next
)
return HttpResponseRedirect(next)
def register(request, template_name="linked_accounts/registration.html"):
next = request.REQUEST.get('next', settings.LOGIN_REDIRECT_URL)
try:
profile_id = request.session[LINKED_ACCOUNTS_ID_SESSION]
profile = LinkedAccount.objects.get(id=profile_id)
except (KeyError, LinkedAccount.DoesNotExist):
return HttpResponseRedirect(next)
if request.method == "POST":
form = RegisterForm(request.POST)
if form.is_valid():
user = form.save(profile)
auth.login(request, user)
user.backend = "linked_accounts.backends.LinkedAccountsBackend"
return HttpResponseRedirect(next)
else:
form = RegisterForm()
return direct_to_template(
request,
template_name,
{'form': form, 'profile': profile}
)
| from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.views.generic.simple import direct_to_template
import django.contrib.auth as auth
from linked_accounts.models import LinkedAccount
from linked_accounts.utils import get_profile
from linked_accounts.forms import RegisterForm
LINKED_ACCOUNTS_ID_SESSION = getattr(
settings,
'LINKED_ACCOUNTS_ID_SESSION',
'_linked_acccount_id'
)
class AuthCallback(object):
def __call__(self, request, access, token):
next = request.POST.get('next', settings.LOGIN_REDIRECT_URL)
service = access.service
if request.user.is_authenticated():
profile = get_profile(service=service, token=token)
if not profile.user:
profile.user = request.user
profile.save()
else:
profile = auth.authenticate(service=service, token=token)
if profile.user:
auth.login(request, profile.user)
else:
request.session[LINKED_ACCOUNTS_ID_SESSION] = profile.id
return HttpResponseRedirect(
reverse('linked_accounts_register') + "?next=%s" % next
)
return HttpResponseRedirect(next)
def register(request, template_name="linked_accounts/registration.html"):
next = request.REQUEST.get('next', settings.LOGIN_REDIRECT_URL)
try:
profile_id = request.session[LINKED_ACCOUNTS_ID_SESSION]
profile = LinkedAccount.objects.get(id=profile_id)
except (KeyError, LinkedAccount.DoesNotExist):
return HttpResponseRedirect(next)
if request.method == "POST":
form = RegisterForm(request.POST)
if form.is_valid():
user = form.save(profile)
auth.login(request, user)
user.backend = "linked_accounts.backends.LinkedAccountsBackend"
return HttpResponseRedirect(next)
else:
form = RegisterForm()
return direct_to_template(
request,
template_name,
{'form': form, 'profile': profile}
)
| mit | Python |
d28bf14c672dd0a4aef1d76523f76670af561486 | Fix checkpoint access rule facts example (#50870) | thaim/ansible,thaim/ansible | lib/ansible/modules/network/checkpoint/checkpoint_access_rule_facts.py | lib/ansible/modules/network/checkpoint/checkpoint_access_rule_facts.py | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: checkpoint_access_rule_facts
short_description: Get access rules objects facts on Checkpoint over Web Services API
description:
- Get access rules objects facts on Checkpoint devices.
All operations are performed over Web Services API.
version_added: "2.8"
author: "Ansible by Red Hat (@rcarrillocruz)"
options:
name:
description:
- Name of the access rule. If not provided, UID is required.
type: str
uid:
description:
- UID of the access rule. If not provided, name is required.
type: str
layer:
description:
- Layer the access rule is attached to.
required: True
type: str
"""
EXAMPLES = """
- name: Get access rule facts
checkpoint_access_rule_facts:
layer: Network
name: "Drop attacker"
"""
RETURN = """
ansible_facts:
description: The checkpoint access rule object facts.
returned: always.
type: list
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.six.moves.urllib.error import HTTPError
import json
def get_access_rule(module, connection):
name = module.params['name']
uid = module.params['uid']
layer = module.params['layer']
if uid:
payload = {'uid': uid, 'layer': layer}
elif name:
payload = {'name': name, 'layer': layer}
code, response = connection.send_request('/web_api/show-access-rule', payload)
return code, response
def main():
argument_spec = dict(
name=dict(type='str'),
uid=dict(type='str'),
layer=dict(type='str', required=True),
)
module = AnsibleModule(argument_spec=argument_spec)
connection = Connection(module._socket_path)
code, response = get_access_rule(module, connection)
if code == 200:
module.exit_json(ansible_facts=dict(checkpoint_access_rules=response))
else:
module.fail_json(msg='Checkpoint device returned error {0} with message {1}'.format(code, response))
if __name__ == '__main__':
main()
| #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = """
---
module: checkpoint_access_rule_facts
short_description: Get access rules objects facts on Checkpoint over Web Services API
description:
- Get access rules objects facts on Checkpoint devices.
All operations are performed over Web Services API.
version_added: "2.8"
author: "Ansible by Red Hat (@rcarrillocruz)"
options:
name:
description:
- Name of the access rule. If not provided, UID is required.
type: str
uid:
description:
- UID of the access rule. If not provided, name is required.
type: str
layer:
description:
- Layer the access rule is attached to.
required: True
type: str
"""
EXAMPLES = """
- name: Create access rule
checkpoint_access_rule_facts:
layer: Network
name: "Drop attacker"
"""
RETURN = """
ansible_facts:
description: The checkpoint access rule object facts.
returned: always.
type: list
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.six.moves.urllib.error import HTTPError
import json
def get_access_rule(module, connection):
name = module.params['name']
uid = module.params['uid']
layer = module.params['layer']
if uid:
payload = {'uid': uid, 'layer': layer}
elif name:
payload = {'name': name, 'layer': layer}
code, response = connection.send_request('/web_api/show-access-rule', payload)
return code, response
def main():
argument_spec = dict(
name=dict(type='str'),
uid=dict(type='str'),
layer=dict(type='str', required=True),
)
module = AnsibleModule(argument_spec=argument_spec)
connection = Connection(module._socket_path)
code, response = get_access_rule(module, connection)
if code == 200:
module.exit_json(ansible_facts=dict(checkpoint_access_rules=response))
else:
module.fail_json(msg='Checkpoint device returned error {0} with message {1}'.format(code, response))
if __name__ == '__main__':
main()
| mit | Python |
9e0291475f955bd7d5a35e9bcfdb8a39f3514313 | add persons to admin | Colorless-Green-Ideas/tinylibrary,Colorless-Green-Ideas/tinylibrary,Colorless-Green-Ideas/tinylibrary | tinylibrary/admin.py | tinylibrary/admin.py | from django.contrib import admin
# Register your models here.
from tinylibrary.models import Book, Person
admin.site.register((Book, Person)) | from django.contrib import admin
# Register your models here.
from tinylibrary.models import Book
admin.site.register((Book)) | agpl-3.0 | Python |
a98a20fd089563b6551bc4f2a08d0ff3d2c60083 | test for existance | peterorum/functal,peterorum/functal,peterorum/functal,peterorum/functal,peterorum/functal | titles/get-tweets.py | titles/get-tweets.py | #!/usr/bin/python3
import os
import random
import time
# import re
import sys
# import json
import pprint
import twitter
# import pudb
# pu.db
import pymongo
client = pymongo.MongoClient(os.getenv('mongo_functal'))
# --- get_tweets
def get_tweets(topic):
print('topic : ' + topic)
db = client['topics']
tweets = db['tweets']
search_results = twit.search.tweets(q=topic, lang='en', result_type='popular')
# print('search_results')
# pp.pprint(search_results)
# 'user': tweet['user']['name']
texts = [{'_id': tweet['id_str'], 'text': tweet['text'], 'topic': topic}
for tweet in search_results['statuses'] if topic in tweet['text'] and tweets.find({'_id': tweet['id_str']}).count() == 0]
print('tweets: ' + str(len(texts)))
if len(texts) > 0:
pp.pprint(texts)
# store
# ignore dup key error
try:
result = tweets.insert(texts, {'ordered': False})
print(str(len(result)) + ' tweets inserted')
except pymongo.errors.PyMongoError:
print(type(e))
print(e)
#--- global
pp = pprint.PrettyPrinter(indent=4)
auth = twitter.oauth.OAuth(os.environ['token'], os.environ['token_secret'], os.environ[
'consumer_key'], os.environ['consumer_secret'])
twit = twitter.Twitter(auth=auth)
#--- main
def main():
topics = ["red", "orange", "yellow", "green", "blue", "purple",
"pink", "triangle", "square", "circle", "arrow", "asterisk", "wavy", "star",
"sunset", "gold", "golden"]
while True:
try:
get_tweets(topics[random.randint(0, len(topics) - 1)])
except Exception as e:
print(type(e))
print(e)
finally:
time.sleep(60)
#--- run
main()
| #!/usr/bin/python3
import os
import random
import time
# import re
import sys
# import json
import pprint
import twitter
# import pudb
# pu.db
import pymongo
client = pymongo.MongoClient(os.getenv('mongo_functal'))
# --- get_tweets
def get_tweets(topic):
print('topic : ' + topic)
try:
search_results = twit.search.tweets(q=topic, lang='en', result_type='popular')
# print('search_results')
# pp.pprint(search_results)
# 'user': tweet['user']['name']
texts = [{'_id': tweet['id_str'], 'text': tweet['text'], 'topic': topic}
for tweet in search_results['statuses'] if topic in tweet['text']]
print('tweets: ' + str(len(texts)))
if len(texts) > 0:
pp.pprint(texts)
# store
db = client['topics']
tweets = db['tweets']
# ignore dup key error
try:
result = tweets.insert(texts, {'ordered': False})
print(str(len(result)) + ' tweets inserted')
except pymongo.errors.DuplicateKeyError as e:
print('db error')
print(e)
except Exception as e:
print('api error')
print(type(e))
print(e)
#--- global
pp = pprint.PrettyPrinter(indent=4)
auth = twitter.oauth.OAuth(os.environ['token'], os.environ['token_secret'], os.environ[
'consumer_key'], os.environ['consumer_secret'])
twit = twitter.Twitter(auth=auth)
#--- main
def main():
topics = ["red", "orange", "yellow", "green", "blue", "purple",
"pink", "triangle", "square", "circle", "arrow", "asterisk", "wavy", "star"]
while True:
try:
get_tweets(topics[random.randint(0, len(topics) - 1)])
finally:
time.sleep(60)
#--- run
main()
| mit | Python |
d3b7dc39f0be8d62a9c996f64ef49fd062fbe507 | Fix pep8 in anpy-cli | regardscitoyens/anpy,regardscitoyens/anpy | bin/anpy-cli.py | bin/anpy-cli.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import json
import click
import requests
import attr
from pathlib import Path
from anpy.service import AmendementSearchService
from anpy.parsing.question_parser import parse_question
from anpy.parsing.amendement_parser import parse_amendement
from anpy.parsing.dossier_legislatif_parser import parse_dossier_legislatif
from anpy.parsing.json_utils import json_dumps
sys.path.append(str(Path(__file__).absolute().parents[1]))
@click.group()
def cli():
pass
@cli.command()
@click.argument('id-dossier')
@click.option('--id-examen')
@click.option('--limit', default=100)
def show_amendements_order(id_dossier, id_examen, limit):
results = AmendementSearchService().get_order(
idDossierLegislatif=id_dossier, idExamen=id_examen, rows=limit)
print('Nombre d\'amendements : {}'.format(len(results)))
print('Ordre des ammendements : {}'.format((','.join(results))))
@cli.command()
@click.option('--start-date')
@click.option('--end-date')
@click.option('--numero')
@click.option('--rows', default=100)
def show_amendements_summary(start_date, end_date, numero, rows):
iterator = AmendementSearchService().iterator(rows=rows,
dateDebut=start_date,
dateFin=end_date,
numAmend=numero)
for result in iterator:
print(json.dumps(attr.asdict(result), indent=4, sort_keys=True,
ensure_ascii=False))
@cli.command()
@click.argument('url')
def show_amendement(url):
print('Amendement : {}'.format(url))
print(json.dumps(parse_amendement(url, requests.get(url).content).__dict__,
indent=4, sort_keys=True, ensure_ascii=False))
@cli.command()
@click.argument('url')
def show_question(url):
question_html = requests.get(url + '/vue/xml').content
parsed_data = parse_question(url, question_html)
print(json.dumps(parsed_data, indent=4, sort_keys=True,
ensure_ascii=False))
@cli.command()
@click.argument('url')
def show_dossier(url):
html = requests.get(url).content
parsed_data = parse_dossier_legislatif(url, html)
print(json_dumps(parsed_data, indent=4, sort_keys=True,
ensure_ascii=False))
if __name__ == '__main__':
cli()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import json
import click
import requests
import attr
from pathlib import Path
from anpy.service import AmendementSearchService
from anpy.parsing.question_parser import parse_question
from anpy.parsing.amendement_parser import parse_amendement
from anpy.parsing.dossier_legislatif_parser import parse_dossier_legislatif
from anpy.parsing.json_utils import json_dumps
sys.path.append(str(Path(__file__).absolute().parents[1]))
@click.group()
def cli():
pass
@cli.command()
@click.argument('id-dossier')
@click.option('--id-examen')
@click.option('--limit', default=100)
def show_amendements_order(id_dossier, id_examen, limit):
results = AmendementSearchService().get_order(
idDossierLegislatif=id_dossier, idExamen=id_examen, rows=limit)
print('Nombre d\'amendements : {}'.format(len(results)))
print('Ordre des ammendements : {}'.format((','.join(results))))
@cli.command()
@click.option('--start-date')
@click.option('--end-date')
@click.option('--numero')
@click.option('--rows', default=100)
def show_amendements_summary(start_date, end_date, numero, rows):
iterator = AmendementSearchService().iterator(rows=rows, dateDebut=start_date,
dateFin=end_date,
numAmend=numero)
for result in iterator:
print(json.dumps(attr.asdict(result), indent=4, sort_keys=True,
ensure_ascii=False))
@cli.command()
@click.argument('url')
def show_amendement(url):
print('Amendement : {}'.format(url))
print(json.dumps(parse_amendement(url, requests.get(url).content).__dict__,
indent=4, sort_keys=True, ensure_ascii=False))
@cli.command()
@click.argument('url')
def show_question(url):
question_html = requests.get(url + '/vue/xml').content
parsed_data = parse_question(url, question_html)
print(json.dumps(parsed_data, indent=4, sort_keys=True,
ensure_ascii=False))
@cli.command()
@click.argument('url')
def show_dossier(url):
html = requests.get(url).content
parsed_data = parse_dossier_legislatif(url, html)
print(json_dumps(parsed_data, indent=4, sort_keys=True,
ensure_ascii=False))
if __name__ == '__main__':
cli()
| mit | Python |
9866fc27d6dd56f0493a22fcddd53b25cfe8da2d | fix db dict | amboycharlie/Child-Friendly-LCMS,amboycharlie/Child-Friendly-LCMS,django-leonardo/django-leonardo,amboycharlie/Child-Friendly-LCMS,django-leonardo/django-leonardo,django-leonardo/django-leonardo,amboycharlie/Child-Friendly-LCMS,django-leonardo/django-leonardo | tests/local_settings.py | tests/local_settings.py | from __future__ import absolute_import, unicode_literals
import sys
import os
SITE_ID = 1
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'memory:',
'TEST_NAME': 'test_db:',
}
}
try:
import mysql # noqa
except Exception:
pass
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'django_leonardo',
'USER': 'travis',
}
}
MIGRATION_MODULES = {
'web': 'notmigrations',
'media': 'notmigrations',
}
try:
import psycopg2 # noqa
except Exception as e:
raise e
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'django_leonardo',
'USER': 'postgres',
}
}
# monkeypath postgres tests
MIGRATION_MODULES['dbtemplates'] = 'notmigrations'
MIGRATION_MODULES['sites'] = 'notmigrations'
MIGRATION_MODULES['contenttypes'] = 'notmigrations'
MIGRATION_MODULES['auth'] = 'notmigrations'
MIGRATION_MODULES['reversion'] = 'notmigrations'
MEDIA_URL = '/media/'
STATIC_URL = '/static/'
BASEDIR = os.path.dirname(__file__)
MEDIA_ROOT = os.path.join(BASEDIR, 'media/')
STATIC_ROOT = os.path.join(BASEDIR, 'static/')
SECRET_KEY = 'supersikret'
USE_TZ = True
ROOT_URLCONF = 'testapp.urls'
LANGUAGES = (('en', 'English'), ('cs', 'Czech'))
LEONARDO_MODULE_AUTO_INCLUDE = False
APPS = [
'testapp',
'leaonrdo_theme_bootswatch',
]
| from __future__ import absolute_import, unicode_literals
import sys
import os
SITE_ID = 1
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'memory:',
'TEST_NAME': 'test_db:',
}
}
try:
import mysql # noqa
except Exception:
pass
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'django_leonardo',
'USER': 'travis',
}
}
MIGRATION_MODULES = {
'web': 'notmigrations',
'media': 'notmigrations',
}
try:
import psycopg2 # noqa
except Exception as e:
raise e
else:
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'postgres',
}
}
# monkeypath postgres tests
MIGRATION_MODULES['dbtemplates'] = 'notmigrations'
MIGRATION_MODULES['sites'] = 'notmigrations'
MIGRATION_MODULES['contenttypes'] = 'notmigrations'
MIGRATION_MODULES['auth'] = 'notmigrations'
MIGRATION_MODULES['reversion'] = 'notmigrations'
MEDIA_URL = '/media/'
STATIC_URL = '/static/'
BASEDIR = os.path.dirname(__file__)
MEDIA_ROOT = os.path.join(BASEDIR, 'media/')
STATIC_ROOT = os.path.join(BASEDIR, 'static/')
SECRET_KEY = 'supersikret'
USE_TZ = True
ROOT_URLCONF = 'testapp.urls'
LANGUAGES = (('en', 'English'), ('cs', 'Czech'))
LEONARDO_MODULE_AUTO_INCLUDE = False
APPS = [
'testapp',
'leaonrdo_theme_bootswatch',
]
| apache-2.0 | Python |
b2841940dd1d5b891eed3817552447bb60a930af | Add a return of success flag | abhisheksugam/Climate_Police | Climate_Police/tests/pollution_map.py | Climate_Police/tests/pollution_map.py | import plotly.offline as py
py.init_notebook_mode()
from pre_process import pre_process
def pollution_map(df, source, year, option='Mean'):
# Pre-processes the pollution data so that it can be plotted by plotly.
df2 = pre_process(df, source, year, option)
#scl = [[0.0, 'rgb(242,240,247)'],[0.2, 'rgb(218,218,235)'],[0.4, 'rgb(188,189,220)'],\
#[0.6, 'rgb(158,154,200)'],[0.8, 'rgb(117,107,177)'],[1.0, 'rgb(84,39,143)']]
data = [ dict(
type='choropleth',
#colorscale = scl,
autocolorscale = True,
locations = df2.index,
z = df2[source+' '+option].astype(float),
locationmode = 'USA-states',
text = df2['text'],
marker = dict(
line = dict (
color = 'rgb(255,255,255)',
width = 2
) ),
colorbar = dict(
title = df.loc[0, source+' Units'])
) ]
layout = dict(
title = year+' US '+source+' level by state<br>(Hover for details)',
geo = dict(
scope='usa',
projection=dict( type='albers usa' ),
showlakes = True,
lakecolor = 'rgb(255, 255, 255)'),
)
fig = dict( data=data, layout=layout )
py.iplot( fig, filename='us-pollution-map' )
plotSuccessful = "Pollution map plotted."
return fig, plotSuccessful
| import plotly.offline as py
py.init_notebook_mode()
from pre_process import pre_process
def pollution_map(df, source, year, option='Mean'):
# Pre-processes the pollution data so that it can be plotted by plotly.
df2 = pre_process(df, source, year, option)
#scl = [[0.0, 'rgb(242,240,247)'],[0.2, 'rgb(218,218,235)'],[0.4, 'rgb(188,189,220)'],\
#[0.6, 'rgb(158,154,200)'],[0.8, 'rgb(117,107,177)'],[1.0, 'rgb(84,39,143)']]
data = [ dict(
type='choropleth',
#colorscale = scl,
autocolorscale = True,
locations = df2.index,
z = df2[source+' '+option].astype(float),
locationmode = 'USA-states',
text = df2['text'],
marker = dict(
line = dict (
color = 'rgb(255,255,255)',
width = 2
) ),
colorbar = dict(
title = df.loc[0, source+' Units'])
) ]
layout = dict(
title = year+' US '+source+' level by state<br>(Hover for details)',
geo = dict(
scope='usa',
projection=dict( type='albers usa' ),
showlakes = True,
lakecolor = 'rgb(255, 255, 255)'),
)
fig = dict( data=data, layout=layout )
py.iplot( fig, filename='us-pollution-map' )
return fig
| mit | Python |
a0702eb488ffd1eff12d0ffd0a83823cba020214 | Add quick todo so i don't forget | spotify/napalm,napalm-automation/napalm-ios,napalm-automation/napalm,spotify/napalm | utils/__init__.py | utils/__init__.py | # TODO move utils folder inside napalm | apache-2.0 | Python | |
8e478aa4407389cb01752cc59a3e52882ae20261 | order by checked_at | kkamkou/gitmostwanted.com,kkamkou/gitmostwanted.com,kkamkou/gitmostwanted.com,kkamkou/gitmostwanted.com | gitmostwanted/tasks/repo_stars.py | gitmostwanted/tasks/repo_stars.py | from datetime import datetime, timedelta
from gitmostwanted.app import app, db, celery
from gitmostwanted.lib.bigquery.job import Job
from gitmostwanted.models.repo import Repo, RepoStars
from gitmostwanted.services import bigquery
from time import sleep
def results_of(j: Job): # @todo #0:15m copy-paste code in multiple tasks
while not j.complete:
app.logger.debug('The job is not complete, waiting...')
sleep(10)
return j.results
@celery.task()
def stars_mature(num_days):
service = bigquery.instance(app)
jobs = []
repos = Repo.query\
.filter(Repo.mature.is_(True))\
.filter(Repo.status == 'new')\
.order_by(Repo.checked_at.asc())\
.limit(40) # we are at the free plan
for repo in repos:
query = query_stars_by_repo(
repo_id=repo.id, date_from=datetime.now() + timedelta(days=num_days * -1),
date_to=datetime.now()
)
job = Job(service, query, batch=True)
job.execute()
jobs.append((job, repo))
for job in jobs:
for row in results_of(job[0]):
db.session.add(RepoStars(repo_id=job[1].id, stars=row[0], year=row[1], day=row[2]))
job[1].status = 'unknown'
db.session.commit()
# @todo #192:1h move BQ queries to a separate place
def query_stars_by_repo(repo_id: int, date_from: datetime, date_to: datetime):
query = """
SELECT
COUNT(1) AS stars, YEAR(created_at) AS y, DAYOFYEAR(created_at) AS doy,
MONTH(created_at) as mon
FROM
TABLE_DATE_RANGE([githubarchive:day.], TIMESTAMP('{date_from}'), TIMESTAMP('{date_to}'))
WHERE
repo.id = {id} AND type IN ('WatchEvent', 'ForkEvent')
GROUP BY y, mon, doy
"""
return query.format(
id=repo_id, date_from=date_from.strftime('%Y-%m-%d'), date_to=date_to.strftime('%Y-%m-%d')
)
| from datetime import datetime, timedelta
from gitmostwanted.app import app, db, celery
from gitmostwanted.lib.bigquery.job import Job
from gitmostwanted.models.repo import Repo, RepoStars
from gitmostwanted.services import bigquery
from time import sleep
def results_of(j: Job): # @todo #0:15m copy-paste code in multiple tasks
while not j.complete:
app.logger.debug('The job is not complete, waiting...')
sleep(10)
return j.results
@celery.task()
def stars_mature(num_days):
service = bigquery.instance(app)
jobs = []
repos = Repo.query\
.filter(Repo.mature.is_(True))\
.filter(Repo.status == 'new')\
.limit(40) # we are at the free plan
for repo in repos:
query = query_stars_by_repo(
repo_id=repo.id, date_from=datetime.now() + timedelta(days=num_days * -1),
date_to=datetime.now()
)
job = Job(service, query, batch=True)
job.execute()
jobs.append((job, repo))
for job in jobs:
for row in results_of(job[0]):
db.session.add(RepoStars(repo_id=job[1].id, stars=row[0], year=row[1], day=row[2]))
job[1].status = 'unknown'
db.session.commit()
# @todo #192:1h move BQ queries to a separate place
def query_stars_by_repo(repo_id: int, date_from: datetime, date_to: datetime):
query = """
SELECT
COUNT(1) AS stars, YEAR(created_at) AS y, DAYOFYEAR(created_at) AS doy,
MONTH(created_at) as mon
FROM
TABLE_DATE_RANGE([githubarchive:day.], TIMESTAMP('{date_from}'), TIMESTAMP('{date_to}'))
WHERE
repo.id = {id} AND type IN ('WatchEvent', 'ForkEvent')
GROUP BY y, mon, doy
"""
return query.format(
id=repo_id, date_from=date_from.strftime('%Y-%m-%d'), date_to=date_to.strftime('%Y-%m-%d')
)
| mit | Python |
25f1a03dd494075924133a9e86b6796c5ad7a026 | Use unicode constant. | tv42/fs,nailor/filesystem | fs/_localfs.py | fs/_localfs.py | import os
class InsecurePathError(Exception):
"""
The path operation is unsafe to perform.
An insecure operation was requested, for example:
* a join is performed with an absolute path as input parameter
* '..' is passed as a parameter to child method
* Symlinks not passing security validations
"""
pass
class path(object):
def __init__(self, pathname):
self._pathname = pathname
def __str__(self):
return str(self._pathname)
def __unicode__(self):
return unicode(self._pathname)
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self._pathname)
def join(self, relpath):
if relpath.startswith(u'/'):
raise InsecurePathError('path name to join must be relative')
return self.__class__(os.path.join(self._pathname, relpath))
def open(self, *args, **kwargs):
return file(self._pathname, *args, **kwargs)
def __iter__(self):
for i in os.listdir(self._pathname):
yield path(i)
def child(self, *segments):
p = self
for segment in segments:
if u'/' in segment:
raise InsecurePathError(
'child name contains directory separator')
# this may be too naive
if segment == u'..':
raise InsecurePathError(
'child trying to climb out of directory')
p = p.join(segment)
return p
def parent(self):
head, tail = os.path.split(self._pathname)
return self.__class__(head)
def __eq__(self, other):
if not isinstance(other, path):
return NotImplemented
return self._pathname == other._pathname
def __ne__(self, other):
if not isinstance(other, path):
return NotImplemented
return self._pathname != other._pathname
| import os
class InsecurePathError(Exception):
"""
The path operation is unsafe to perform.
An insecure operation was requested, for example:
* a join is performed with an absolute path as input parameter
* '..' is passed as a parameter to child method
* Symlinks not passing security validations
"""
pass
class path(object):
def __init__(self, pathname):
self._pathname = pathname
def __str__(self):
return str(self._pathname)
def __unicode__(self):
return unicode(self._pathname)
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self._pathname)
def join(self, relpath):
if relpath.startswith(u'/'):
raise InsecurePathError('path name to join must be relative')
return self.__class__(os.path.join(self._pathname, relpath))
def open(self, *args, **kwargs):
return file(self._pathname, *args, **kwargs)
def __iter__(self):
for i in os.listdir(self._pathname):
yield path(i)
def child(self, *segments):
p = self
for segment in segments:
if u'/' in segment:
raise InsecurePathError(
'child name contains directory separator')
# this may be too naive
if segment == '..':
raise InsecurePathError(
'child trying to climb out of directory')
p = p.join(segment)
return p
def parent(self):
head, tail = os.path.split(self._pathname)
return self.__class__(head)
def __eq__(self, other):
if not isinstance(other, path):
return NotImplemented
return self._pathname == other._pathname
def __ne__(self, other):
if not isinstance(other, path):
return NotImplemented
return self._pathname != other._pathname
| mit | Python |
8ce8b5f6d016188a8485b7daac30e842ff49ba25 | Bump version to 3.2.3 | JukeboxPipeline/jukeboxmaya,JukeboxPipeline/jukeboxmaya | src/jukeboxmaya/__init__.py | src/jukeboxmaya/__init__.py | __author__ = 'David Zuber'
__email__ = 'zuber.david@gmx.de'
__version__ = '3.2.3'
STANDALONE_INITIALIZED = None
"""After calling :func:`init` this is True, if maya standalone
has been initialized or False, if you are running
from within maya.
It is None, if initialized has not been called yet.
"""
| __author__ = 'David Zuber'
__email__ = 'zuber.david@gmx.de'
__version__ = '3.2.2'
STANDALONE_INITIALIZED = None
"""After calling :func:`init` this is True, if maya standalone
has been initialized or False, if you are running
from within maya.
It is None, if initialized has not been called yet.
"""
| bsd-3-clause | Python |
2cf3fe6e0e700f5e96b68a5acf6574a7f596eb79 | Disable caching discovery for the Sheets API | ppavlidis/rnaseq-pipeline,ppavlidis/rnaseq-pipeline,ppavlidis/rnaseq-pipeline | rnaseq_pipeline/gsheet.py | rnaseq_pipeline/gsheet.py | import argparse
import logging
import os
import os.path
import pickle
import sys
from os.path import dirname, expanduser, join
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import luigi
import pandas as pd
import xdg.BaseDirectory
SCOPES = ['https://www.googleapis.com/auth/spreadsheets.readonly']
logger = logging.getLogger('luigi-interface')
def _authenticate():
# authentication
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
token_path = join(xdg.BaseDirectory.save_data_path('pavlab-rnaseq-pipeline'), 'token.pickle')
if os.path.exists(token_path):
with open(token_path, 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
'credentials.json', SCOPES)
creds = flow.run_console()
# Save the credentials for the next run
with open(token_path, 'wb') as token:
pickle.dump(creds, token)
logger.info(f'Created Google Sheets API token under {token_path}.')
return creds
def retrieve_spreadsheet(spreadsheet_id, sheet_name):
service = build('sheets', 'v4', credentials=_authenticate(), cache_discovery=None)
# Retrieve the documents contents from the Docs service.
rnaseq_pipeline_queue = service.spreadsheets().values().get(spreadsheetId=spreadsheet_id, range=sheet_name).execute()
# this will fail if people add new columns
df = pd.DataFrame(rnaseq_pipeline_queue['values'][1:], columns=rnaseq_pipeline_queue['values'][0]+list(range(5)))
# type adjustment
df['priority'] = df.priority.fillna(0).replace('', '0').astype('int')
return df
| import argparse
import logging
import os
import os.path
import pickle
import sys
from os.path import dirname, expanduser, join
from googleapiclient.discovery import build
from google_auth_oauthlib.flow import InstalledAppFlow
from google.auth.transport.requests import Request
import luigi
import pandas as pd
import xdg.BaseDirectory
SCOPES = ['https://www.googleapis.com/auth/spreadsheets.readonly']
logger = logging.getLogger('luigi-interface')
def _authenticate():
# authentication
creds = None
# The file token.pickle stores the user's access and refresh tokens, and is
# created automatically when the authorization flow completes for the first
# time.
token_path = join(xdg.BaseDirectory.save_data_path('pavlab-rnaseq-pipeline'), 'token.pickle')
if os.path.exists(token_path):
with open(token_path, 'rb') as token:
creds = pickle.load(token)
# If there are no (valid) credentials available, let the user log in.
if not creds or not creds.valid:
if creds and creds.expired and creds.refresh_token:
creds.refresh(Request())
else:
flow = InstalledAppFlow.from_client_secrets_file(
'credentials.json', SCOPES)
creds = flow.run_console()
# Save the credentials for the next run
with open(token_path, 'wb') as token:
pickle.dump(creds, token)
logger.info(f'Created Google Sheets API token under {token_path}.')
return creds
def retrieve_spreadsheet(spreadsheet_id, sheet_name):
service = build('sheets', 'v4', credentials=_authenticate())
# Retrieve the documents contents from the Docs service.
rnaseq_pipeline_queue = service.spreadsheets().values().get(spreadsheetId=spreadsheet_id, range=sheet_name).execute()
# this will fail if people add new columns
df = pd.DataFrame(rnaseq_pipeline_queue['values'][1:], columns=rnaseq_pipeline_queue['values'][0]+list(range(5)))
# type adjustment
df['priority'] = df.priority.fillna(0).replace('', '0').astype('int')
return df
| unlicense | Python |
6e2124c2d7016618a52c5b858b6149f3f5770fa6 | Add support for SQLite | kennethreitz/dj-database-url,julianwachholz/dj-config-url,f0r4y312/django-connection-url,avorio/dj-database-url | dj_database_url.py | dj_database_url.py | # -*- coding: utf-8 -*-
import os
import urlparse
# Register database schemes in URLs.
urlparse.uses_netloc.append('postgres')
urlparse.uses_netloc.append('mysql')
urlparse.uses_netloc.append('sqlite')
DEFAULT_ENV = 'DATABASE_URL'
def config(env=DEFAULT_ENV):
"""Returns configured DATABASE dictionary from DATABASE_URL."""
config = {}
if env in os.environ:
config = parse(os.environ[env])
return config
def parse(url):
"""Parses a database URL."""
config = {}
url = urlparse.urlparse(url)
# Update with environment configuration.
config.update({
'NAME': url.path[1:],
'USER': url.username,
'PASSWORD': url.password,
'HOST': url.hostname,
'PORT': url.port,
})
if url.scheme == 'postgres':
config['ENGINE'] = 'django.db.backends.postgresql_psycopg2'
if url.scheme == 'mysql':
config['ENGINE'] = 'django.db.backends.mysql'
if url.scheme == 'sqlite':
config['ENGINE'] = 'django.db.backends.sqlite3'
return config
| # -*- coding: utf-8 -*-
import os
import urlparse
# Register database schemes in URLs.
urlparse.uses_netloc.append('postgres')
urlparse.uses_netloc.append('mysql')
DEFAULT_ENV = 'DATABASE_URL'
def config(env=DEFAULT_ENV):
"""Returns configured DATABASE dictionary from DATABASE_URL."""
config = {}
if env in os.environ:
config = parse(os.environ[env])
return config
def parse(url):
"""Parses a database URL."""
config = {}
url = urlparse.urlparse(url)
# Update with environment configuration.
config.update({
'NAME': url.path[1:],
'USER': url.username,
'PASSWORD': url.password,
'HOST': url.hostname,
'PORT': url.port,
})
if url.scheme == 'postgres':
config['ENGINE'] = 'django.db.backends.postgresql_psycopg2'
if url.scheme == 'mysql':
config['ENGINE'] = 'django.db.backends.mysql'
return config | bsd-2-clause | Python |
8ea423173cba143f68a94d65ad5f3b5ce650d434 | Check the container, not the leaf :p | tonioo/modoboa,bearstech/modoboa,RavenB/modoboa,RavenB/modoboa,carragom/modoboa,modoboa/modoboa,tonioo/modoboa,mehulsbhatt/modoboa,carragom/modoboa,modoboa/modoboa,mehulsbhatt/modoboa,modoboa/modoboa,modoboa/modoboa,mehulsbhatt/modoboa,bearstech/modoboa,RavenB/modoboa,bearstech/modoboa,tonioo/modoboa,carragom/modoboa,bearstech/modoboa | modoboa/admin/management/commands/handle_mailbox_operations.py | modoboa/admin/management/commands/handle_mailbox_operations.py | import os
import logging
from optparse import make_option
from django.core.management.base import BaseCommand
from modoboa.lib import parameters
from modoboa.lib.sysutils import exec_cmd
from modoboa.admin import AdminConsole
from modoboa.admin.exceptions import AdminError
from modoboa.admin.models import MailboxOperation
class OperationError(Exception):
pass
class Command(BaseCommand):
args = ''
help = 'Handles rename and delete operations on mailboxes'
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
self.logger = logging.getLogger('modoboa.admin')
def rename_mailbox(self, operation):
if not os.path.exists(operation.argument):
return
new_mail_home = operation.mailbox.mail_home
dirname = os.path.dirname(new_mail_home)
if not os.path.exists(dirname):
try:
os.makedirs(dirname)
except os.error as e:
raise OperationError(str(e))
code, output = exec_cmd(
"mv %s %s" % (ope.argument, new_mail_home)
)
if code:
raise OperationError(output)
def delete_mailbox(self, operation):
if not os.path.exists(operation.argument):
return
code, output = exec_cmd(
"rm -r %s" % operation.argument
)
if code:
raise OperationError(output)
def handle(self, *args, **options):
AdminConsole().load()
if parameters.get_admin("HANDLE_MAILBOXES") == 'no':
return
for ope in MailboxOperation.objects.all():
try:
f = getattr(self, '%s_mailbox' % ope.type)
except AttributeError:
continue
try:
f(ope)
except (OperationError, AdminError) as e:
self.logger.critical('%s failed (reason: %s)', ope, e)
else:
self.logger.info('%s succeed', ope)
ope.delete()
| import os
import logging
from optparse import make_option
from django.core.management.base import BaseCommand
from modoboa.lib import parameters
from modoboa.lib.sysutils import exec_cmd
from modoboa.admin import AdminConsole
from modoboa.admin.exceptions import AdminError
from modoboa.admin.models import MailboxOperation
class OperationError(Exception):
pass
class Command(BaseCommand):
args = ''
help = 'Handles rename and delete operations on mailboxes'
def __init__(self, *args, **kwargs):
super(Command, self).__init__(*args, **kwargs)
self.logger = logging.getLogger('modoboa.admin')
def rename_mailbox(self, operation):
if not os.path.exists(operation.argument):
return
new_mail_home = operation.mailbox.mail_home
if not os.path.exists(new_mail_home):
try:
os.makedirs(os.path.dirname(new_mail_home))
except os.error as e:
raise OperationError(str(e))
code, output = exec_cmd(
"mv %s %s" % (ope.argument, new_mail_home)
)
if code:
raise OperationError(output)
def delete_mailbox(self, operation):
if not os.path.exists(operation.argument):
return
code, output = exec_cmd(
"rm -r %s" % operation.argument
)
if code:
raise OperationError(output)
def handle(self, *args, **options):
AdminConsole().load()
if parameters.get_admin("HANDLE_MAILBOXES") == 'no':
return
for ope in MailboxOperation.objects.all():
try:
f = getattr(self, '%s_mailbox' % ope.type)
except AttributeError:
continue
try:
f(ope)
except (OperationError, AdminError) as e:
self.logger.critical('%s failed (reason: %s)', ope, e)
else:
self.logger.info('%s succeed', ope)
ope.delete()
| isc | Python |
532df9f29506fe949488b449721ee22a322dfb8a | fix flake8 violation | rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo,rackerlabs/django-DefectDojo | dojo/tools/__init__.py | dojo/tools/__init__.py | __author__ = 'jay7958'
SCAN_GENERIC_FINDING = 'Generic Findings Import'
SCAN_SONARQUBE_API = 'SonarQube API Import'
SCAN_QUALYS_REPORT = 'Qualys Scan'
def requires_file(scan_type):
return (
scan_type and scan_type != SCAN_SONARQUBE_API
)
def handles_active_verified_statuses(scan_type):
return scan_type in [
SCAN_GENERIC_FINDING, SCAN_SONARQUBE_API, SCAN_QUALYS_REPORT
]
| __author__ = 'jay7958'
SCAN_GENERIC_FINDING = 'Generic Findings Import'
SCAN_SONARQUBE_API = 'SonarQube API Import'
SCAN_QUALYS_REPORT = 'Qualys Scan'
def requires_file(scan_type):
return (
scan_type and scan_type != SCAN_SONARQUBE_API
)
def handles_active_verified_statuses(scan_type):
return scan_type in [
SCAN_GENERIC_FINDING, SCAN_SONARQUBE_API, SCAN_QUALYS_REPORT
]
| bsd-3-clause | Python |
4c6fb23dd40216604f914d4f869b40d23b13bf73 | Bump version to no longer claim to be 1.4.5 final. | riklaunim/django-custom-multisite,riklaunim/django-custom-multisite,riklaunim/django-custom-multisite | django/__init__.py | django/__init__.py | VERSION = (1, 4, 6, 'alpha', 0)
def get_version(version=None):
"""Derives a PEP386-compliant version number from VERSION."""
if version is None:
version = VERSION
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:parts])
sub = ''
if version[3] == 'alpha' and version[4] == 0:
# At the toplevel, this would cause an import loop.
from django.utils.version import get_svn_revision
svn_revision = get_svn_revision()[4:]
if svn_revision != 'unknown':
sub = '.dev%s' % svn_revision
elif version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version[3]] + str(version[4])
return main + sub
| VERSION = (1, 4, 5, 'final', 0)
def get_version(version=None):
"""Derives a PEP386-compliant version number from VERSION."""
if version is None:
version = VERSION
assert len(version) == 5
assert version[3] in ('alpha', 'beta', 'rc', 'final')
# Now build the two parts of the version number:
# main = X.Y[.Z]
# sub = .devN - for pre-alpha releases
# | {a|b|c}N - for alpha, beta and rc releases
parts = 2 if version[2] == 0 else 3
main = '.'.join(str(x) for x in version[:parts])
sub = ''
if version[3] == 'alpha' and version[4] == 0:
# At the toplevel, this would cause an import loop.
from django.utils.version import get_svn_revision
svn_revision = get_svn_revision()[4:]
if svn_revision != 'unknown':
sub = '.dev%s' % svn_revision
elif version[3] != 'final':
mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'}
sub = mapping[version[3]] + str(version[4])
return main + sub
| bsd-3-clause | Python |
bb0c8d2830b749b1f0255afab9c7a04b8a6e2256 | fix asyncio test | snower/TorMySQL,snower/TorMySQL | tests/test_asyncio.py | tests/test_asyncio.py | # -*- coding: utf-8 -*-
# 17/12/11
# create by: snower
import os
try:
import asyncio
from tormysql.platform import use_asyncio
use_asyncio()
except:
pass
from tormysql.cursor import SSCursor
from tormysql.helpers import ConnectionPool
from tornado.testing import AsyncTestCase
from tornado.testing import gen_test
from tormysql.util import py3
class TestAsyncioCase(AsyncTestCase):
PARAMS = dict(
host=os.getenv("MYSQL_HOST", "127.0.0.1"),
port=int(os.getenv("MYSQL_PORT", "3306")),
user=os.getenv("MYSQL_USER", "root"),
passwd=os.getenv("MYSQL_PASSWD", ""),
db=os.getenv("MYSQL_DB", "test"),
charset=os.getenv("MYSQL_CHARSET", "utf8"),
no_delay=True,
sql_mode="REAL_AS_FLOAT",
init_command="SET max_join_size=DEFAULT"
)
def setUp(self):
super(TestAsyncioCase, self).setUp()
self.pool = ConnectionPool(
max_connections=int(os.getenv("MYSQL_POOL", 5)),
idle_seconds=7200,
**self.PARAMS
)
def tearDown(self):
super(TestAsyncioCase, self).tearDown()
self.pool.close()
def get_new_ioloop(self):
try:
import asyncio
from tornado.platform.asyncio import AsyncIOMainLoop
AsyncIOMainLoop().install()
from tornado.ioloop import IOLoop
return IOLoop.current()
except:
return super(TestAsyncioCase, self).get_new_ioloop()
if py3:
exec("""
@gen_test
async def test_execute(self):
cursor = await self.pool.execute("select * from test limit 1")
datas = cursor.fetchall()
assert datas
async with await self.pool.Connection() as conn:
async with conn.cursor() as cursor:
await cursor.execute("SELECT * FROM test limit 10")
datas = cursor.fetchall()
assert datas
async with await self.pool.Connection() as conn:
async with conn.cursor(SSCursor) as cursor:
await cursor.execute("SELECT * FROM test limit 10000")
async for data in cursor:
assert data
""") | # -*- coding: utf-8 -*-
# 17/12/11
# create by: snower
import os
try:
import asyncio
from tormysql.platform import use_asyncio
use_asyncio()
except:
pass
from tormysql.helpers import ConnectionPool
from tornado.testing import AsyncTestCase
from tornado.testing import gen_test
from tormysql.util import py3
class TestAsyncioCase(AsyncTestCase):
PARAMS = dict(
host=os.getenv("MYSQL_HOST", "127.0.0.1"),
port=int(os.getenv("MYSQL_PORT", "3306")),
user=os.getenv("MYSQL_USER", "root"),
passwd=os.getenv("MYSQL_PASSWD", ""),
db=os.getenv("MYSQL_DB", "test"),
charset=os.getenv("MYSQL_CHARSET", "utf8"),
no_delay=True,
sql_mode="REAL_AS_FLOAT",
init_command="SET max_join_size=DEFAULT"
)
def setUp(self):
super(TestAsyncioCase, self).setUp()
self.pool = ConnectionPool(
max_connections=int(os.getenv("MYSQL_POOL", 5)),
idle_seconds=7200,
**self.PARAMS
)
def tearDown(self):
super(TestAsyncioCase, self).tearDown()
self.pool.close()
def get_new_ioloop(self):
try:
import asyncio
from tornado.platform.asyncio import AsyncIOMainLoop
AsyncIOMainLoop().install()
from tornado.ioloop import IOLoop
return IOLoop.current()
except:
return super(TestAsyncioCase, self).get_new_ioloop()
if py3:
exec("""
@gen_test
async def test_execute(self):
cursor = await pool.execute("select * from test limit 1")
datas = cursor.fetchall())
assert datas
async with await pool.Connection() as conn:
async with conn.cursor() as cursor:
await cursor.execute("SELECT * FROM test limit 10")
datas = cursor.fetchall()
assert datas
async with await pool.Connection() as conn:
async with conn.cursor(tormysql.SSCursor) as cursor:
await cursor.execute("SELECT * FROM test limit 10000")
async for data in cursor:
assert data
""") | mit | Python |
d81aa08030bb3430a9d69083cb40429f3f89a80d | Fix for unicode names | MilesCranmer/research_match,MilesCranmer/research_match | download.py | download.py | #!/usr/bin/python
import sys, os
import ads
reload(sys)
sys.setdefaultencoding('utf8')
names_file = open(sys.argv[1])
#Default abstract storage
abstract_directory = "abstracts"
if len(sys.argv) > 2:
abstract_directory = sys.argv[2]
if not os.path.exists(abstract_directory):
os.makedirs(abstract_directory)
number_abstracts = 4
if len(sys.argv) > 3:
number_abstracts = int(sys.argv[3])
author_num = 0
for line in names_file:
#Only names
if line[0]==',': continue
if len(line) < 4: continue
print "Author", author_num
cut_point = 0
#Find last space
for x in reversed(range(len(line))):
if line[x] == ' ':
cut_point = x
break
first_name = line[:x]
last_name = line[x+1:]
last_name = ''.join([char for char in last_name if char.isalpha()])
papers = ads.SearchQuery(
author=last_name+", "+first_name,
sort='date',
fl=['abstract'])
abstract_file = open(abstract_directory+"/"+\
first_name+" "+last_name+".txt",'w')
j = 0
for paper in papers:
abstract_file.write("Abstract "+str(j)+"\n")
try:
abstract_file.write(paper.abstract.encode('utf-8'))
except AttributeError:
pass
abstract_file.write("\n")
j += 1
if j > number_abstracts: break
author_num+=1
| #!/usr/bin/python
import sys, os
import ads as ads
names_file = open(sys.argv[1])
#Default abstract storage
abstract_directory = "abstracts"
if len(sys.argv) > 2:
abstract_directory = sys.argv[2]
if not os.path.exists(abstract_directory):
os.makedirs(abstract_directory)
number_abstracts = 4
if len(sys.argv) > 3:
number_abstracts = int(sys.argv[3])
author_num = 0
for line in names_file:
#Only names
if line[0]==',': continue
if len(line) < 4: continue
print "Author", author_num
cut_point = 0
#Find last space
for x in reversed(range(len(line))):
if line[x] == ' ':
cut_point = x
break
first_name = line[:x]
last_name = line[x+1:]
last_name = ''.join([char for char in last_name if char.isalpha()])
papers = ads.SearchQuery(
author=last_name+", "+first_name,
sort='date',
fl=['abstract'])
abstract_file = open(abstract_directory+"/"+\
last_name+"_"+first_name+".txt",'w')
j = 0
for paper in papers:
abstract_file.write("Abstract "+str(j)+"\n")
try:
abstract_file.write(paper.abstract.encode('utf-8'))
except AttributeError:
pass
abstract_file.write("\n")
j += 1
if j > number_abstracts: break
author_num+=1
| unlicense | Python |
9edef653ad07614a3cc553c837797eda8373fe18 | Add schema test | gotling/mopidy-auto,gotling/mopidy-auto,gotling/mopidy-auto | tests/test_extension.py | tests/test_extension.py | from __future__ import unicode_literals
import unittest
from mopidy import core
from mopidy_auto import Extension, frontend as frontend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[auto]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'base_path' in schema
assert 'max_tracks' in schema
for index in range(3):
assert "s{}_hour".format(index) in schema
assert "s{}_minute".format(index) in schema
assert "s{}_folder".format(index) in schema
assert "s{}_max_volume".format(index) in schema
# TODO Write more test
#def test_get_album(self):
# self.core = core.Core.start(
# config, backends=[get_backend(config)]).proxy()
| from __future__ import unicode_literals
from mopidy_auto import Extension, frontend as frontend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[auto]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
# TODO Test the content of your config schema
#assert 'username' in schema
#assert 'password' in schema
# TODO Write more tests
| mit | Python |
c3e04e47c54eed8058751dbc86f7b364fc57a05b | fix tests for Django 4.0 | ivelum/djangoql,ivelum/djangoql,ivelum/djangoql | test_project/test_project/urls.py | test_project/test_project/urls.py | """test_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include
from django.contrib import admin
try:
from django.urls import re_path # Django >= 4.0
except ImportError:
try:
from django.conf.urls import re_path # Django < 4.0
except ImportError: # Django < 2.0
from django.conf.urls import url as re_path
from core.views import completion_demo
urlpatterns = [
re_path(r'^admin/', admin.site.urls),
re_path(r'^$', completion_demo),
]
if settings.DEBUG and settings.DJDT:
import debug_toolbar
urlpatterns = [
re_path(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
| """test_project URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.10/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf import settings
from django.conf.urls import include, url
from django.contrib import admin
from core.views import completion_demo
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^$', completion_demo),
]
if settings.DEBUG and settings.DJDT:
import debug_toolbar
urlpatterns = [
url(r'^__debug__/', include(debug_toolbar.urls)),
] + urlpatterns
| mit | Python |
1e6a45f0b5c73ddedf0dfc1146ea7d48c6e10ea5 | print message to screen in debug mode | icve/liv-Ard,icve/liv-Ard,icve/liv-Ard,icve/liv-Ard,icve/liv-Ard | hostscripts/current_hostscript.py | hostscripts/current_hostscript.py | #!/mnt/usb/wk/jpt/py/bin/python
import time
from serial import Serial
from animations import Led_clock_pointer, Led_clock_flasher
from lib import lcdControl
from lib import SevSeg
from lib import Motion_sensor
from lib.get_data import get_temp, get_netstat
from animations import Seven_segment_clock, Rainfall
from sys import argv
from animations.stat_show import quick_slide
motionLogFile = "/mnt/usb/logs/motionLog.log"
device = "/dev/ttyUSB0"
baudrate = 9600
updateintv = .1
debug = "debug" in argv
print("update intv: {}".format(updateintv))
if __name__ == "__main__":
# Set up usb
usb = Serial(device, baudrate, timeout=2)
if debug:
from test.usb_relay import Usb_relay
usb = Usb_relay(usb)
time.sleep(3)
# init virtual devices
lcd = lcdControl.Lcd(usb)
sevdp = SevSeg(usb)
mtxdp = SevSeg(usb, dev_id=1)
moss = Motion_sensor(usb, motionLogFile)
# led_clock_pointer_sec = Led_clock_pointer(mtxdp, ring=1)
# led_clock_pointer_min = Led_clock_pointer(mtxdp, pointertype="min", ring=0)
# led_clock_flasher = Led_clock_flasher(mtxdp)
seven_segment_clock = Seven_segment_clock(sevdp)
rainfall = Rainfall(mtxdp, max_height=6, max_speed=2, min_speed=1)
lcd_show_tem_net = quick_slide(get_temp, "t", get_netstat, "net", lcd)
# turn on second display, > note: not sure why 0
mtxdp.setstate(0)
sevdp.setintensity(8)
mtxdp.setintensity(0)
sevdp.clear()
mtxdp.clear()
lcd.clear()
while True:
# seven_segment_clock update
seven_segment_clock.update()
# 8x8 LED matrix
# led_clock_pointer_sec.update()
# led_clock_pointer_min.update()
# led_clock_flasher.update()
rainfall.update()
# motion Sensor ck
moss.update()
# lcd
lcd_show_tem_net.show()
# on off cycle
hour = time.time()/(60*60) % 24
if(13 < hour < 21 and not debug):
lcd.backlight(0)
mtxdp.setstate(1)
sevdp.setstate(1)
else:
mtxdp.setstate(0)
sevdp.setstate(0)
lcd.backlight(1)
# clock
time.sleep(updateintv)
| #!/mnt/usb/wk/jpt/py/bin/python
import time
from serial import Serial
from animations import Led_clock_pointer, Led_clock_flasher
from lib import lcdControl
from lib import SevSeg
from lib import Motion_sensor
from lib.get_data import get_temp, get_netstat
from animations import Seven_segment_clock, Rainfall
from sys import argv
from animations.stat_show import quick_slide
motionLogFile = "/mnt/usb/logs/motionLog.log"
device = "/dev/ttyUSB0"
baudrate = 9600
updateintv = .1
debug = "debug" in argv
print("update intv: {}".format(updateintv))
if __name__ == "__main__":
# Set up usb
usb = Serial(device, baudrate, timeout=2)
time.sleep(3)
# init virtual devices
lcd = lcdControl.Lcd(usb)
sevdp = SevSeg(usb)
mtxdp = SevSeg(usb, dev_id=1)
moss = Motion_sensor(usb, motionLogFile)
# led_clock_pointer_sec = Led_clock_pointer(mtxdp, ring=1)
# led_clock_pointer_min = Led_clock_pointer(mtxdp, pointertype="min", ring=0)
# led_clock_flasher = Led_clock_flasher(mtxdp)
seven_segment_clock = Seven_segment_clock(sevdp)
rainfall = Rainfall(mtxdp, max_height=6, max_speed=5, min_speed=3)
lcd_show_tem_net = quick_slide(get_temp, "t", get_netstat, "net", lcd)
# turn on second display, > note: not sure why 0
mtxdp.setstate(0)
sevdp.setintensity(8)
mtxdp.setintensity(0)
sevdp.clear()
mtxdp.clear()
lcd.clear()
while True:
# seven_segment_clock update
seven_segment_clock.update()
# 8x8 LED matrix
# led_clock_pointer_sec.update()
# led_clock_pointer_min.update()
# led_clock_flasher.update()
rainfall.update()
# motion Sensor ck
moss.update()
# lcd
lcd_show_tem_net.show()
# on off cycle
hour = time.time()/(60*60) % 24
if(13 < hour < 21 and not debug):
lcd.backlight(0)
mtxdp.setstate(1)
sevdp.setstate(1)
else:
mtxdp.setstate(0)
sevdp.setstate(0)
lcd.backlight(1)
# clock
time.sleep(updateintv)
| mit | Python |
179d39cb47ba60714ac9498097cb93721978faf7 | add minor enhancement to the queue | total-impact/total-impact-core,total-impact/total-impact-core,Impactstory/total-impact-core,total-impact/total-impact-core,Impactstory/total-impact-core,Impactstory/total-impact-core,Impactstory/total-impact-core,total-impact/total-impact-core | totalimpact/queue.py | totalimpact/queue.py | from totalimpact.models import Item
import totalimpact.dao as dao
import datetime
# some data useful for testing
# d = {"DOI" : ["10.1371/journal.pcbi.1000361", "10.1016/j.meegid.2011.02.004"], "URL" : ["http://cottagelabs.com"]}
class Queue(dao.Dao):
__type__ = None
@property
def queue(self):
# change this for live
items = self.view('queues/'+self.__type__)
return [Item(**i['key']) for i in items.rows]
# TODO:
# return next item from this queue (e.g. whatever is on the top of the list
# does NOT remove item from tip of queue
def first(self):
# turn this into an instantiation of an item based on the query result
#return self.queue[0]
return Item(**{'_rev': '4-a3e3574c44c95b86bb2247fe49e171c8', '_id': 'test', '_deleted_conflicts': ['3-2b27cebd890ff56e616f3d7dadc69c74'], 'hello': 'world', 'aliases': {'url': ['http://cottagelabs.com'], 'doi': ['10.1371/journal.pcbi.1000361']}})
# implement this in inheriting classes if needs to be different
def save_and_unqueue(self,item):
# alter to use aliases method once exists
item.data[self.__type__]['last_updated'] = datetime.datetime.now()
item.save()
class AliasQueue(Queue):
__type__ = 'aliases'
class MetricsQueue(Queue):
__type__ = 'metrics'
@property
def provider(self):
try:
return self._provider
except:
self._provider = None
return self._provider
@provider.setter
def provider(self, _provider):
self._provider = _provider
def save_and_unqueue(self,item):
# alter to use aliases method once exists
if self.provider:
item.data[self.__type__][self.provider]['last_updated'] = datetime.datetime.now()
item.save()
else:
return 'No! you have not set a provider'
| from totalimpact.models import Item
import totalimpact.dao as dao
import datetime
# some data useful for testing
# d = {"DOI" : ["10.1371/journal.pcbi.1000361", "10.1016/j.meegid.2011.02.004"], "URL" : ["http://cottagelabs.com"]}
class Queue(dao.Dao):
__type__ = None
@property
def queue(self):
# change this for live
items = self.view('queues/'+self.__type__)
return [Item(**i['key']) for i in items.rows]
# TODO:
# return next item from this queue (e.g. whatever is on the top of the list
# does NOT remove item from tip of queue
def first(self):
# turn this into an instantiation of an item based on the query result
#return self.queue[0]
return Item(**{'_rev': '4-a3e3574c44c95b86bb2247fe49e171c8', '_id': 'test', '_deleted_conflicts': ['3-2b27cebd890ff56e616f3d7dadc69c74'], 'hello': 'world', 'aliases': {'url': ['http://cottagelabs.com'], 'doi': ['10.1371/journal.pcbi.1000361']}})
# implement this in inheriting classes if needs to be different
def save_and_unqueue(self,item):
# alter to use aliases method once exists
item.data[self.__type__]['last_updated'] = datetime.datetime.now()
item.save()
class AliasQueue(Queue):
__type__ = 'aliases'
class MetricsQueue(Queue):
__type__ = 'metrics'
@property
def provider(self):
return self._provider
@provider.setter
def provider(self, _provider):
self._provider = _provider
def save_and_unqueue(self,item):
# alter to use aliases method once exists
if self.provider:
item.data[self.__type__][self.provider]['last_updated'] = datetime.datetime.now()
item.save()
else:
return 'No! you have not set a provider'
| mit | Python |
fd456eb86bc5fa46f216d3d84aa19b8a1b6ae025 | Use the pythonish "... is not None" instead of "... != None". | CoryMcCartan/chapel,CoryMcCartan/chapel,CoryMcCartan/chapel,hildeth/chapel,chizarlicious/chapel,chizarlicious/chapel,CoryMcCartan/chapel,chizarlicious/chapel,chizarlicious/chapel,hildeth/chapel,CoryMcCartan/chapel,chizarlicious/chapel,chizarlicious/chapel,hildeth/chapel,hildeth/chapel,CoryMcCartan/chapel,hildeth/chapel,hildeth/chapel,chizarlicious/chapel,hildeth/chapel,CoryMcCartan/chapel | util/chplenv/utils.py | util/chplenv/utils.py | import os, re, subprocess
from distutils.spawn import find_executable
from collections import namedtuple
def memoize(func):
cache = func.cache = {}
def memoize_wrapper(*args, **kwargs):
if kwargs:
return func(*args, **kwargs)
if args not in cache:
cache[args] = func(*args)
return cache[args]
return memoize_wrapper
@memoize
def get_chpl_home():
chpl_home = os.environ.get('CHPL_HOME', '')
if not chpl_home:
dirname = os.path.dirname
chpl_home = dirname(dirname(dirname(os.path.realpath(__file__))))
return chpl_home
@memoize
def using_chapel_module():
chpl_home = os.environ.get('CHPL_HOME', '')
if chpl_home is not None:
return chpl_home == os.environ.get('CHPL_MODULE_HOME', '')
return False
@memoize
def get_compiler_version(compiler):
CompVersion = namedtuple('CompVersion', ['major', 'minor'])
if 'gnu' in compiler:
output = run_command(['gcc', '-dumpversion'])
match = re.search(r'(\d+)\.(\d+)', output)
if match:
return CompVersion(major=int(match.group(1)), minor=int(match.group(2)))
else:
raise ValueError("Could not find the GCC version")
else:
return CompVersion(major=0, minor=0)
class CommandError(Exception):
pass
# This could be replaced by subprocess.check_output, but that isn't available
# until python 2.7 and we only have 2.6 on most machines :(
def run_command(command, stdout=True, stderr=False):
process = subprocess.Popen(command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output = process.communicate()
if process.returncode != 0:
raise CommandError(
"command `{0}` failed - output was \n{1}".format(command,
output[1]))
else:
if stdout and stderr:
return output
elif stdout:
return output[0]
elif stderr:
return output[1]
else:
return ''
| import os, re, subprocess
from distutils.spawn import find_executable
from collections import namedtuple
def memoize(func):
cache = func.cache = {}
def memoize_wrapper(*args, **kwargs):
if kwargs:
return func(*args, **kwargs)
if args not in cache:
cache[args] = func(*args)
return cache[args]
return memoize_wrapper
@memoize
def get_chpl_home():
chpl_home = os.environ.get('CHPL_HOME', '')
if not chpl_home:
dirname = os.path.dirname
chpl_home = dirname(dirname(dirname(os.path.realpath(__file__))))
return chpl_home
@memoize
def using_chapel_module():
chpl_home = os.environ.get('CHPL_HOME', '')
if chpl_home != None:
return chpl_home == os.environ.get('CHPL_MODULE_HOME', '')
return False
@memoize
def get_compiler_version(compiler):
CompVersion = namedtuple('CompVersion', ['major', 'minor'])
if 'gnu' in compiler:
output = run_command(['gcc', '-dumpversion'])
match = re.search(r'(\d+)\.(\d+)', output)
if match:
return CompVersion(major=int(match.group(1)), minor=int(match.group(2)))
else:
raise ValueError("Could not find the GCC version")
else:
return CompVersion(major=0, minor=0)
class CommandError(Exception):
pass
# This could be replaced by subprocess.check_output, but that isn't available
# until python 2.7 and we only have 2.6 on most machines :(
def run_command(command, stdout=True, stderr=False):
process = subprocess.Popen(command,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
output = process.communicate()
if process.returncode != 0:
raise CommandError(
"command `{0}` failed - output was \n{1}".format(command,
output[1]))
else:
if stdout and stderr:
return output
elif stdout:
return output[0]
elif stderr:
return output[1]
else:
return ''
| apache-2.0 | Python |
cecd3fbd5a977f366ce5adad56c01b02a145b038 | Remove dud test | funkybob/knights-templater,funkybob/knights-templater | tests/test_comment.py | tests/test_comment.py | from .utils import TemplateTestCase, Mock
class CommentTagText(TemplateTestCase):
def test_comment(self):
self.assertRendered('{# test #}', '')
| from .utils import TemplateTestCase, Mock
from knights import Template
class LoadTagTest(TemplateTestCase):
def test_load_default(self):
t = Template('{! knights.defaultfilters !}')
self.assertIn('escape', t.parser.filters)
class CommentTagText(TemplateTestCase):
def test_comment(self):
self.assertRendered('{# test #}', '')
| mit | Python |
3118ab8122f03b95666d8980004583fe73ab7860 | Add new test | nhshd-slot/SLOT,nhshd-slot/SLOT,nhshd-slot/SLOT | tests/test_messaging.py | tests/test_messaging.py | from context import slot
class TestMessaging:
def test_converts_mobile_string_to_int(self):
result = slot.messaging.mobile_number_string_to_int("441234567890")
assert isinstance(result, int)
assert (result == 441234567890)
def test_converts_mobile_string_with_plus_prefix_to_int(self):
result = slot.messaging.mobile_number_string_to_int("+441234567890")
assert isinstance(result, int)
assert (result == 441234567890)
def test_converts_mobile_int_to_int(self):
result = slot.messaging.mobile_number_string_to_int(441234567890)
assert isinstance(result, int)
assert (result == 441234567890)
| from context import slot
class TestMessaging:
def test_converts_mobile_string_to_int(self):
result = slot.messaging.mobile_number_string_to_int("441234567890")
assert isinstance(result, int)
assert (result == 441234567890)
def test_converts_mobile_int_to_int(self):
result = slot.messaging.mobile_number_string_to_int(441234567890)
assert isinstance(result, int)
assert (result == 441234567890)
| mit | Python |
68a3d1486d14c5184ee3e70197cb0e0926b15e11 | Rename request fixture to comply with new pytest restrictions. | wichert/pyramid_jwt | tests/test_cookies.py | tests/test_cookies.py | import uuid
import pytest
from pyramid.interfaces import IAuthenticationPolicy
from webob import Request
from zope.interface.verify import verifyObject
from pyramid_jwt.policy import JWTTokenAuthenticationPolicy
@pytest.fixture(scope='module')
def principal():
return str(uuid.uuid4())
@pytest.fixture(scope='module')
def dummy_request():
return Request.blank('/')
def test_interface():
verifyObject(IAuthenticationPolicy, JWTTokenAuthenticationPolicy('secret'))
def test_cookie(dummy_request, principal):
policy = JWTTokenAuthenticationPolicy('secret')
cookie = policy.remember(dummy_request, principal).pop()
assert len(cookie) == 2
header, cookie = cookie
assert header == 'Set-Cookie'
assert len(cookie) > 0
def test_cookie_name(dummy_request, principal):
policy = JWTTokenAuthenticationPolicy('secret', cookie_name='auth')
_, cookie = policy.remember(dummy_request, principal).pop()
name, value = cookie.split('=', 1)
assert name == 'auth'
def test_secure_cookie():
policy = JWTTokenAuthenticationPolicy('secret', https_only=True)
dummy_request = Request.blank('/')
_, cookie = policy.remember(dummy_request, str(uuid.uuid4())).pop()
assert '; secure;' in cookie
assert '; HttpOnly' in cookie
def test_insecure_cookie(dummy_request, principal):
policy = JWTTokenAuthenticationPolicy('secret', https_only=False)
_, cookie = policy.remember(dummy_request, principal).pop()
assert '; secure;' not in cookie
assert '; HttpOnly' in cookie
def test_cookie_decode(dummy_request, principal):
policy = JWTTokenAuthenticationPolicy('secret', https_only=False)
header, cookie = policy.remember(dummy_request, principal).pop()
name, value = cookie.split('=', 1)
value, _ = value.split(';', 1)
dummy_request.cookies = {name: value}
claims = policy.get_claims(dummy_request)
assert claims['sub'] == principal
def test_cookie_max_age(dummy_request, principal):
policy = JWTTokenAuthenticationPolicy('secret', cookie_name='auth',
expiration=100)
_, cookie = policy.remember(dummy_request, principal).pop()
_, value = cookie.split('=', 1)
_, meta = value.split(';', 1)
assert 'Max-Age=100' in meta
assert "expires" in meta
@pytest.mark.freeze_time
def test_expired_token(dummy_request, principal, freezer):
policy = JWTTokenAuthenticationPolicy('secret', cookie_name='auth',
expiration=1)
_, cookie = policy.remember(dummy_request, principal).pop()
name, value = cookie.split('=', 1)
freezer.tick(delta=2)
value, _ = value.split(';', 1)
dummy_request.cookies = {name: value}
claims = policy.get_claims(dummy_request)
assert claims == {}
| import uuid
import pytest
from pyramid.interfaces import IAuthenticationPolicy
from webob import Request
from zope.interface.verify import verifyObject
from pyramid_jwt.policy import JWTTokenAuthenticationPolicy
@pytest.fixture(scope='module')
def principal():
return str(uuid.uuid4())
@pytest.fixture(scope='module')
def request():
return Request.blank('/')
def test_interface():
verifyObject(IAuthenticationPolicy, JWTTokenAuthenticationPolicy('secret'))
def test_cookie(request, principal):
policy = JWTTokenAuthenticationPolicy('secret')
cookie = policy.remember(request, principal).pop()
assert len(cookie) == 2
header, cookie = cookie
assert header == 'Set-Cookie'
assert len(cookie) > 0
def test_cookie_name(request, principal):
policy = JWTTokenAuthenticationPolicy('secret', cookie_name='auth')
_, cookie = policy.remember(request, principal).pop()
name, value = cookie.split('=', 1)
assert name == 'auth'
def test_secure_cookie():
policy = JWTTokenAuthenticationPolicy('secret', https_only=True)
request = Request.blank('/')
_, cookie = policy.remember(request, str(uuid.uuid4())).pop()
assert '; secure;' in cookie
assert '; HttpOnly' in cookie
def test_insecure_cookie(request, principal):
policy = JWTTokenAuthenticationPolicy('secret', https_only=False)
_, cookie = policy.remember(request, principal).pop()
assert '; secure;' not in cookie
assert '; HttpOnly' in cookie
def test_cookie_decode(request, principal):
policy = JWTTokenAuthenticationPolicy('secret', https_only=False)
header, cookie = policy.remember(request, principal).pop()
name, value = cookie.split('=', 1)
value, _ = value.split(';', 1)
request.cookies = {name: value}
claims = policy.get_claims(request)
assert claims['sub'] == principal
def test_cookie_max_age(request, principal):
policy = JWTTokenAuthenticationPolicy('secret', cookie_name='auth',
expiration=100)
_, cookie = policy.remember(request, principal).pop()
_, value = cookie.split('=', 1)
_, meta = value.split(';', 1)
assert 'Max-Age=100' in meta
assert "expires" in meta
@pytest.mark.freeze_time
def test_expired_token(request, principal, freezer):
policy = JWTTokenAuthenticationPolicy('secret', cookie_name='auth',
expiration=1)
_, cookie = policy.remember(request, principal).pop()
name, value = cookie.split('=', 1)
freezer.tick(delta=2)
value, _ = value.split(';', 1)
request.cookies = {name: value}
claims = policy.get_claims(request)
assert claims == {}
| bsd-2-clause | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.