commit
stringlengths
40
40
old_file
stringlengths
4
106
new_file
stringlengths
4
106
old_contents
stringlengths
10
2.94k
new_contents
stringlengths
21
2.95k
subject
stringlengths
16
444
message
stringlengths
17
2.63k
lang
stringclasses
1 value
license
stringclasses
13 values
repos
stringlengths
7
43k
ndiff
stringlengths
52
3.31k
instruction
stringlengths
16
444
content
stringlengths
133
4.32k
diff
stringlengths
49
3.61k
444a66b0b0da31ed4febea2dcd82fbf6d12ea107
examples/deploy_local_file_resource.py
examples/deploy_local_file_resource.py
from juju import jasyncio from juju.model import Model from pathlib import Path async def main(): model = Model() print('Connecting to model') # connect to current model with current user, per Juju CLI await model.connect() try: print('Deploying local-charm') base_dir = Path(__file__).absolute().parent.parent charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir) resources = {"file-res": "test.file"} application = await model.deploy( charm_path, resources=resources, ) print('Waiting for active') await model.wait_for_idle() print('Removing Charm') await application.remove() finally: print('Disconnecting from model') await model.disconnect() if __name__ == '__main__': jasyncio.run(main())
from juju import jasyncio from juju.model import Model from pathlib import Path async def main(): model = Model() print('Connecting to model') # connect to current model with current user, per Juju CLI await model.connect() application = None try: print('Deploying local-charm') base_dir = Path(__file__).absolute().parent.parent charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir) resources = {"file-res": "test.file"} application = await model.deploy( charm_path, resources=resources, ) print('Waiting for active') await model.wait_for_idle() print('Removing Charm') await application.remove() except Exception as e: print(e) if application: await application.remove() await model.disconnect() finally: print('Disconnecting from model') await model.disconnect() if __name__ == '__main__': jasyncio.run(main())
Make sure we cleanup even if fails in example
Make sure we cleanup even if fails in example
Python
apache-2.0
juju/python-libjuju,juju/python-libjuju
from juju import jasyncio from juju.model import Model from pathlib import Path async def main(): model = Model() print('Connecting to model') # connect to current model with current user, per Juju CLI await model.connect() + application = None try: print('Deploying local-charm') base_dir = Path(__file__).absolute().parent.parent charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir) resources = {"file-res": "test.file"} application = await model.deploy( charm_path, resources=resources, ) print('Waiting for active') await model.wait_for_idle() print('Removing Charm') await application.remove() + except Exception as e: + print(e) + if application: + await application.remove() + await model.disconnect() finally: print('Disconnecting from model') await model.disconnect() if __name__ == '__main__': jasyncio.run(main())
Make sure we cleanup even if fails in example
## Code Before: from juju import jasyncio from juju.model import Model from pathlib import Path async def main(): model = Model() print('Connecting to model') # connect to current model with current user, per Juju CLI await model.connect() try: print('Deploying local-charm') base_dir = Path(__file__).absolute().parent.parent charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir) resources = {"file-res": "test.file"} application = await model.deploy( charm_path, resources=resources, ) print('Waiting for active') await model.wait_for_idle() print('Removing Charm') await application.remove() finally: print('Disconnecting from model') await model.disconnect() if __name__ == '__main__': jasyncio.run(main()) ## Instruction: Make sure we cleanup even if fails in example ## Code After: from juju import jasyncio from juju.model import Model from pathlib import Path async def main(): model = Model() print('Connecting to model') # connect to current model with current user, per Juju CLI await model.connect() application = None try: print('Deploying local-charm') base_dir = Path(__file__).absolute().parent.parent charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir) resources = {"file-res": "test.file"} application = await model.deploy( charm_path, resources=resources, ) print('Waiting for active') await model.wait_for_idle() print('Removing Charm') await application.remove() except Exception as e: print(e) if application: await application.remove() await model.disconnect() finally: print('Disconnecting from model') await model.disconnect() if __name__ == '__main__': jasyncio.run(main())
from juju import jasyncio from juju.model import Model from pathlib import Path async def main(): model = Model() print('Connecting to model') # connect to current model with current user, per Juju CLI await model.connect() + application = None try: print('Deploying local-charm') base_dir = Path(__file__).absolute().parent.parent charm_path = '{}/tests/integration/file-resource-charm'.format(base_dir) resources = {"file-res": "test.file"} application = await model.deploy( charm_path, resources=resources, ) print('Waiting for active') await model.wait_for_idle() print('Removing Charm') await application.remove() + except Exception as e: + print(e) + if application: + await application.remove() + await model.disconnect() finally: print('Disconnecting from model') await model.disconnect() if __name__ == '__main__': jasyncio.run(main())
28bc35bc8ed2646faf0d6662b54a5324c0fd1e31
pspec/cli.py
pspec/cli.py
from attest.hook import AssertImportHook from docopt import docopt import os import sys from .collectors import PSpecTests def main(): arguments = docopt(__doc__) paths = arguments['<path>'] if not paths: paths = [name for name in os.listdir('.') if os.path.isfile('%s/__init__.py' % name)] with AssertImportHook(): tests = PSpecTests(paths) tests.run() if __name__ == '__main__': main()
from attest.hook import AssertImportHook from docopt import docopt import os import sys from .collectors import PSpecTests def main(): # When run as a console script (i.e. ``pspec``), the CWD isn't # ``sys.path[0]``, but it should be. cwd = os.getcwd() if sys.path[0] not in ('', cwd): sys.path.insert(0, cwd) arguments = docopt(__doc__) paths = arguments['<path>'] if not paths: paths = [name for name in os.listdir('.') if os.path.isfile('%s/__init__.py' % name)] with AssertImportHook(): tests = PSpecTests(paths) tests.run() if __name__ == '__main__': main()
Put CWD at start of sys.path
Put CWD at start of sys.path
Python
bsd-3-clause
bfirsh/pspec
from attest.hook import AssertImportHook from docopt import docopt import os import sys from .collectors import PSpecTests def main(): + # When run as a console script (i.e. ``pspec``), the CWD isn't + # ``sys.path[0]``, but it should be. + cwd = os.getcwd() + if sys.path[0] not in ('', cwd): + sys.path.insert(0, cwd) + arguments = docopt(__doc__) paths = arguments['<path>'] if not paths: paths = [name for name in os.listdir('.') if os.path.isfile('%s/__init__.py' % name)] with AssertImportHook(): tests = PSpecTests(paths) tests.run() if __name__ == '__main__': main()
Put CWD at start of sys.path
## Code Before: from attest.hook import AssertImportHook from docopt import docopt import os import sys from .collectors import PSpecTests def main(): arguments = docopt(__doc__) paths = arguments['<path>'] if not paths: paths = [name for name in os.listdir('.') if os.path.isfile('%s/__init__.py' % name)] with AssertImportHook(): tests = PSpecTests(paths) tests.run() if __name__ == '__main__': main() ## Instruction: Put CWD at start of sys.path ## Code After: from attest.hook import AssertImportHook from docopt import docopt import os import sys from .collectors import PSpecTests def main(): # When run as a console script (i.e. ``pspec``), the CWD isn't # ``sys.path[0]``, but it should be. cwd = os.getcwd() if sys.path[0] not in ('', cwd): sys.path.insert(0, cwd) arguments = docopt(__doc__) paths = arguments['<path>'] if not paths: paths = [name for name in os.listdir('.') if os.path.isfile('%s/__init__.py' % name)] with AssertImportHook(): tests = PSpecTests(paths) tests.run() if __name__ == '__main__': main()
from attest.hook import AssertImportHook from docopt import docopt import os import sys from .collectors import PSpecTests def main(): + # When run as a console script (i.e. ``pspec``), the CWD isn't + # ``sys.path[0]``, but it should be. + cwd = os.getcwd() + if sys.path[0] not in ('', cwd): + sys.path.insert(0, cwd) + arguments = docopt(__doc__) paths = arguments['<path>'] if not paths: paths = [name for name in os.listdir('.') if os.path.isfile('%s/__init__.py' % name)] with AssertImportHook(): tests = PSpecTests(paths) tests.run() if __name__ == '__main__': main()
c848a5a1d94da7919b3272e9e0ee9748091ba04a
md/data/__init__.py
md/data/__init__.py
DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/Maryland-Traffic-Stop-Data-2013.zip" # noqa DATASET_BASENAME = 'PIALog_16-0806' # DATASET_BASENAME = 'Small-0806'
DEFAULT_URL = 'https://s3-us-west-2.amazonaws.com/openpolicingdata/PIALog_16-0806.zip' # noqa DATASET_BASENAME = 'PIALog_16-0806' # DATASET_BASENAME = 'Small-0806'
Fix URL to current MD dataset on S3
Fix URL to current MD dataset on S3
Python
mit
OpenDataPolicingNC/Traffic-Stops,OpenDataPolicingNC/Traffic-Stops,OpenDataPolicingNC/Traffic-Stops,OpenDataPolicingNC/Traffic-Stops
- DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/Maryland-Traffic-Stop-Data-2013.zip" # noqa + DEFAULT_URL = 'https://s3-us-west-2.amazonaws.com/openpolicingdata/PIALog_16-0806.zip' # noqa DATASET_BASENAME = 'PIALog_16-0806' # DATASET_BASENAME = 'Small-0806'
Fix URL to current MD dataset on S3
## Code Before: DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/Maryland-Traffic-Stop-Data-2013.zip" # noqa DATASET_BASENAME = 'PIALog_16-0806' # DATASET_BASENAME = 'Small-0806' ## Instruction: Fix URL to current MD dataset on S3 ## Code After: DEFAULT_URL = 'https://s3-us-west-2.amazonaws.com/openpolicingdata/PIALog_16-0806.zip' # noqa DATASET_BASENAME = 'PIALog_16-0806' # DATASET_BASENAME = 'Small-0806'
- DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/Maryland-Traffic-Stop-Data-2013.zip" # noqa ? ^ ^^^^^^^^ ------------------- ^^ ^ + DEFAULT_URL = 'https://s3-us-west-2.amazonaws.com/openpolicingdata/PIALog_16-0806.zip' # noqa ? ^ ^^^^^^^^^ ^^^ ^ DATASET_BASENAME = 'PIALog_16-0806' # DATASET_BASENAME = 'Small-0806'
08de7924f56606225e1772320831a02f2ae4aabd
core/AppResources.py
core/AppResources.py
import os from AppVars import AppVars class AppResources(object): """ AppResources is a static class that can be used to find common resources easily. Just provide a name to the imageNamed() method and it will return the correct path. """ @staticmethod def imageNamed(imageName): """ Returns the full path to the given imageName. Note: Future versions might be more intelligent and can handle searching through the resource folder. For now it just combines the AppVars imagePath with the imageName. :type imageName: basestring :rtype: basestring """ return os.path.join(AppVars.imagePath(), imageName)
import os from AppVars import AppVars from core.elastix.Transformation import Transformation class AppResources(object): """ AppResources is a static class that can be used to find common resources easily. Just provide a name to the imageNamed() method and it will return the correct path. """ @staticmethod def imageNamed(imageName): """ Returns the full path to the given imageName. Note: Future versions might be more intelligent and can handle searching through the resource folder. For now it just combines the AppVars imagePath with the imageName. :type imageName: basestring :rtype: basestring """ return os.path.join(AppVars.imagePath(), imageName) @staticmethod def elastixTemplates(): """ Returns a list of all the available elastix templates in the resource folder. """ transformations = [] fileNames = os.listdir(AppVars.transformationsPath()) for fileName in fileNames: fullFileName = os.path.join(AppVars.transformationsPath(), fileName) transformation = Transformation() if transformation.loadFromFile(fullFileName): transformations.append(transformation) return transformations
Add method for getting the default transformations.
Add method for getting the default transformations.
Python
mit
berendkleinhaneveld/Registrationshop,berendkleinhaneveld/Registrationshop
import os from AppVars import AppVars + from core.elastix.Transformation import Transformation class AppResources(object): """ AppResources is a static class that can be used to find common resources easily. Just provide a name to the imageNamed() method and it will return the correct path. """ @staticmethod def imageNamed(imageName): """ Returns the full path to the given imageName. Note: Future versions might be more intelligent and can handle searching through the resource folder. For now it just combines the AppVars imagePath with the imageName. :type imageName: basestring :rtype: basestring """ return os.path.join(AppVars.imagePath(), imageName) + @staticmethod + def elastixTemplates(): + """ + Returns a list of all the available elastix templates in the + resource folder. + """ + transformations = [] + fileNames = os.listdir(AppVars.transformationsPath()) + for fileName in fileNames: + fullFileName = os.path.join(AppVars.transformationsPath(), fileName) + transformation = Transformation() + if transformation.loadFromFile(fullFileName): + transformations.append(transformation) + return transformations +
Add method for getting the default transformations.
## Code Before: import os from AppVars import AppVars class AppResources(object): """ AppResources is a static class that can be used to find common resources easily. Just provide a name to the imageNamed() method and it will return the correct path. """ @staticmethod def imageNamed(imageName): """ Returns the full path to the given imageName. Note: Future versions might be more intelligent and can handle searching through the resource folder. For now it just combines the AppVars imagePath with the imageName. :type imageName: basestring :rtype: basestring """ return os.path.join(AppVars.imagePath(), imageName) ## Instruction: Add method for getting the default transformations. ## Code After: import os from AppVars import AppVars from core.elastix.Transformation import Transformation class AppResources(object): """ AppResources is a static class that can be used to find common resources easily. Just provide a name to the imageNamed() method and it will return the correct path. """ @staticmethod def imageNamed(imageName): """ Returns the full path to the given imageName. Note: Future versions might be more intelligent and can handle searching through the resource folder. For now it just combines the AppVars imagePath with the imageName. :type imageName: basestring :rtype: basestring """ return os.path.join(AppVars.imagePath(), imageName) @staticmethod def elastixTemplates(): """ Returns a list of all the available elastix templates in the resource folder. """ transformations = [] fileNames = os.listdir(AppVars.transformationsPath()) for fileName in fileNames: fullFileName = os.path.join(AppVars.transformationsPath(), fileName) transformation = Transformation() if transformation.loadFromFile(fullFileName): transformations.append(transformation) return transformations
import os from AppVars import AppVars + from core.elastix.Transformation import Transformation class AppResources(object): """ AppResources is a static class that can be used to find common resources easily. Just provide a name to the imageNamed() method and it will return the correct path. """ @staticmethod def imageNamed(imageName): """ Returns the full path to the given imageName. Note: Future versions might be more intelligent and can handle searching through the resource folder. For now it just combines the AppVars imagePath with the imageName. :type imageName: basestring :rtype: basestring """ return os.path.join(AppVars.imagePath(), imageName) + + @staticmethod + def elastixTemplates(): + """ + Returns a list of all the available elastix templates in the + resource folder. + """ + transformations = [] + fileNames = os.listdir(AppVars.transformationsPath()) + for fileName in fileNames: + fullFileName = os.path.join(AppVars.transformationsPath(), fileName) + transformation = Transformation() + if transformation.loadFromFile(fullFileName): + transformations.append(transformation) + return transformations
f898d1cc96fe66a097def29552f3774f3509be83
insultgenerator/words.py
insultgenerator/words.py
import pkg_resources import random _insulting_adjectives = [] def _load_wordlists(): global _insulting_adjectives insulting_adjective_list = pkg_resources.resource_string(__name__, "wordlists/insulting_adjectives.txt") _insulting_adjectives = insulting_adjective_list.decode().split('\n') def get_insulting_adjective(): return _insulting_adjectives[0] return random.choice(_insulting_adjectives) _load_wordlists()
import pkg_resources import random _insulting_adjectives = [] def _load_wordlists(): global _insulting_adjectives insulting_adjective_list = pkg_resources.resource_string(__name__, "wordlists/insulting_adjectives.txt") _insulting_adjectives = insulting_adjective_list.decode().split('\n') def get_insulting_adjective(): return random.choice(_insulting_adjectives) _load_wordlists()
Revert "Adding test failure to ensure that CI is functioning correctly"
Revert "Adding test failure to ensure that CI is functioning correctly" This reverts commit 754be81c1ccc385d8e7b418460271966d7db2361.
Python
mit
tr00st/insult_generator
import pkg_resources import random _insulting_adjectives = [] def _load_wordlists(): global _insulting_adjectives insulting_adjective_list = pkg_resources.resource_string(__name__, "wordlists/insulting_adjectives.txt") _insulting_adjectives = insulting_adjective_list.decode().split('\n') def get_insulting_adjective(): - return _insulting_adjectives[0] return random.choice(_insulting_adjectives) _load_wordlists()
Revert "Adding test failure to ensure that CI is functioning correctly"
## Code Before: import pkg_resources import random _insulting_adjectives = [] def _load_wordlists(): global _insulting_adjectives insulting_adjective_list = pkg_resources.resource_string(__name__, "wordlists/insulting_adjectives.txt") _insulting_adjectives = insulting_adjective_list.decode().split('\n') def get_insulting_adjective(): return _insulting_adjectives[0] return random.choice(_insulting_adjectives) _load_wordlists() ## Instruction: Revert "Adding test failure to ensure that CI is functioning correctly" ## Code After: import pkg_resources import random _insulting_adjectives = [] def _load_wordlists(): global _insulting_adjectives insulting_adjective_list = pkg_resources.resource_string(__name__, "wordlists/insulting_adjectives.txt") _insulting_adjectives = insulting_adjective_list.decode().split('\n') def get_insulting_adjective(): return random.choice(_insulting_adjectives) _load_wordlists()
import pkg_resources import random _insulting_adjectives = [] def _load_wordlists(): global _insulting_adjectives insulting_adjective_list = pkg_resources.resource_string(__name__, "wordlists/insulting_adjectives.txt") _insulting_adjectives = insulting_adjective_list.decode().split('\n') def get_insulting_adjective(): - return _insulting_adjectives[0] return random.choice(_insulting_adjectives) _load_wordlists()
4227b5fb52c58304f993d2def11aeb1ed4d5a157
src/urldecorators/urlresolvers.py
src/urldecorators/urlresolvers.py
import types from django.core import urlresolvers as django_urlresolvers from django.utils.functional import curry class DecoratorMixin(object): """ Mixin class to return decorated views from RegexURLPattern/RegexURLResolver """ def __init__(self, *args, **kwargs): super(DecoratorMixin, self).__init__(*args, **kwargs) self.decorators = [] def resolve(self, path): match = super(DecoratorMixin, self).resolve(path) if not match: return match callback, args, kwargs = match callback = self.apply_decorators(callback) return callback, args, kwargs def apply_decorators(self, callback): if not isinstance(callback, types.FunctionType): callback = curry(callback) # Some decorators do not work with class views for decorator in self.decorators: callback = decorator(callback) return callback class RegexURLPattern(DecoratorMixin, django_urlresolvers.RegexURLPattern): pass class RegexURLResolver(DecoratorMixin, django_urlresolvers.RegexURLResolver): pass
import types from django.core import urlresolvers as django_urlresolvers from django.utils.functional import curry class DecoratorMixin(object): """ Mixin class to return decorated views from RegexURLPattern/RegexURLResolver """ def __init__(self, *args, **kwargs): super(DecoratorMixin, self).__init__(*args, **kwargs) self.decorators = [] def resolve(self, path): match = super(DecoratorMixin, self).resolve(path) if not match: return match try: # In Django 1.3 match is an instance of ResolverMatch class match.func = self.apply_decorators(match.func) except AttributeError: # Before Django 1.3 match is a tuple match = self.apply_decorators(match[0]), match[1], match[2] return match def apply_decorators(self, callback): if not isinstance(callback, types.FunctionType): callback = curry(callback) # Some decorators do not work with class views for decorator in self.decorators: callback = decorator(callback) return callback class RegexURLPattern(DecoratorMixin, django_urlresolvers.RegexURLPattern): pass class RegexURLResolver(DecoratorMixin, django_urlresolvers.RegexURLResolver): pass
Fix for the new ResolverMatch object in Django 1.3.
Fix for the new ResolverMatch object in Django 1.3.
Python
bsd-3-clause
mila/django-urldecorators,mila/django-urldecorators
import types from django.core import urlresolvers as django_urlresolvers from django.utils.functional import curry class DecoratorMixin(object): """ Mixin class to return decorated views from RegexURLPattern/RegexURLResolver """ def __init__(self, *args, **kwargs): super(DecoratorMixin, self).__init__(*args, **kwargs) self.decorators = [] def resolve(self, path): match = super(DecoratorMixin, self).resolve(path) if not match: return match - callback, args, kwargs = match - callback = self.apply_decorators(callback) - return callback, args, kwargs - + try: + # In Django 1.3 match is an instance of ResolverMatch class + match.func = self.apply_decorators(match.func) + except AttributeError: + # Before Django 1.3 match is a tuple + match = self.apply_decorators(match[0]), match[1], match[2] + return match + def apply_decorators(self, callback): - if not isinstance(callback, types.FunctionType): + if not isinstance(callback, types.FunctionType): callback = curry(callback) # Some decorators do not work with class views for decorator in self.decorators: callback = decorator(callback) return callback class RegexURLPattern(DecoratorMixin, django_urlresolvers.RegexURLPattern): pass class RegexURLResolver(DecoratorMixin, django_urlresolvers.RegexURLResolver): pass
Fix for the new ResolverMatch object in Django 1.3.
## Code Before: import types from django.core import urlresolvers as django_urlresolvers from django.utils.functional import curry class DecoratorMixin(object): """ Mixin class to return decorated views from RegexURLPattern/RegexURLResolver """ def __init__(self, *args, **kwargs): super(DecoratorMixin, self).__init__(*args, **kwargs) self.decorators = [] def resolve(self, path): match = super(DecoratorMixin, self).resolve(path) if not match: return match callback, args, kwargs = match callback = self.apply_decorators(callback) return callback, args, kwargs def apply_decorators(self, callback): if not isinstance(callback, types.FunctionType): callback = curry(callback) # Some decorators do not work with class views for decorator in self.decorators: callback = decorator(callback) return callback class RegexURLPattern(DecoratorMixin, django_urlresolvers.RegexURLPattern): pass class RegexURLResolver(DecoratorMixin, django_urlresolvers.RegexURLResolver): pass ## Instruction: Fix for the new ResolverMatch object in Django 1.3. 
## Code After: import types from django.core import urlresolvers as django_urlresolvers from django.utils.functional import curry class DecoratorMixin(object): """ Mixin class to return decorated views from RegexURLPattern/RegexURLResolver """ def __init__(self, *args, **kwargs): super(DecoratorMixin, self).__init__(*args, **kwargs) self.decorators = [] def resolve(self, path): match = super(DecoratorMixin, self).resolve(path) if not match: return match try: # In Django 1.3 match is an instance of ResolverMatch class match.func = self.apply_decorators(match.func) except AttributeError: # Before Django 1.3 match is a tuple match = self.apply_decorators(match[0]), match[1], match[2] return match def apply_decorators(self, callback): if not isinstance(callback, types.FunctionType): callback = curry(callback) # Some decorators do not work with class views for decorator in self.decorators: callback = decorator(callback) return callback class RegexURLPattern(DecoratorMixin, django_urlresolvers.RegexURLPattern): pass class RegexURLResolver(DecoratorMixin, django_urlresolvers.RegexURLResolver): pass
import types from django.core import urlresolvers as django_urlresolvers from django.utils.functional import curry class DecoratorMixin(object): """ Mixin class to return decorated views from RegexURLPattern/RegexURLResolver """ def __init__(self, *args, **kwargs): super(DecoratorMixin, self).__init__(*args, **kwargs) self.decorators = [] def resolve(self, path): match = super(DecoratorMixin, self).resolve(path) if not match: return match - callback, args, kwargs = match - callback = self.apply_decorators(callback) - return callback, args, kwargs - + try: ? ++++ + # In Django 1.3 match is an instance of ResolverMatch class + match.func = self.apply_decorators(match.func) + except AttributeError: + # Before Django 1.3 match is a tuple + match = self.apply_decorators(match[0]), match[1], match[2] + return match + def apply_decorators(self, callback): - if not isinstance(callback, types.FunctionType): ? -------- + if not isinstance(callback, types.FunctionType): callback = curry(callback) # Some decorators do not work with class views for decorator in self.decorators: callback = decorator(callback) return callback class RegexURLPattern(DecoratorMixin, django_urlresolvers.RegexURLPattern): pass class RegexURLResolver(DecoratorMixin, django_urlresolvers.RegexURLResolver): pass
a7622fc3d996407799cec166968c1e56baf07ea9
wqflask/wqflask/markdown_routes.py
wqflask/wqflask/markdown_routes.py
import requests import mistune from flask import Blueprint from flask import render_template glossary_blueprint = Blueprint('glossary_blueprint', __name__) @glossary_blueprint.route('/') def glossary(): markdown_url = ("https://raw.githubusercontent.com" "/genenetwork/genenetwork2/" "wqflask/wqflask/static" "/glossary.md") md_content = requests.get(markdown_url) if md_content.status_code == 200: return render_template( "glossary_html", rendered_markdown=mistune.html( md_content.content.decode("utf-8"))), 200 return render_template( "glossary.html", rendered_markdown=mistune.html("# Github Down!")), 200
import os import requests import mistune from flask import Blueprint from flask import render_template glossary_blueprint = Blueprint('glossary_blueprint', __name__) def render_markdown(file_name): """Try to fetch the file name from Github and if that fails, try to look for it inside the file system """ markdown_url = (f"https://raw.githubusercontent.com" f"/genenetwork/genenetwork2/" f"wqflask/wqflask/static/" f"{file_name}") md_content = requests.get(markdown_url) if md_content.status_code == 200: return mistune.html(md_content.content.decode("utf-8")) with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), f"static/markdown/{file_name}")) as md_file: markdown = md_file.read() return mistune.html(markdown) @glossary_blueprint.route('/') def glossary(): return render_template( "glossary.html", rendered_markdown=render_markdown("glossary.md")), 200
Move logic for fetching md files to it's own function
Move logic for fetching md files to it's own function * wqflask/wqflask/markdown_routes.py (render_markdown): New function. (glossary): use render_markdown function.
Python
agpl-3.0
genenetwork/genenetwork2,zsloan/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2
+ import os import requests import mistune from flask import Blueprint from flask import render_template glossary_blueprint = Blueprint('glossary_blueprint', __name__) + def render_markdown(file_name): + """Try to fetch the file name from Github and if that fails, try to + look for it inside the file system + + """ + markdown_url = (f"https://raw.githubusercontent.com" + f"/genenetwork/genenetwork2/" + f"wqflask/wqflask/static/" + f"{file_name}") + md_content = requests.get(markdown_url) + if md_content.status_code == 200: + return mistune.html(md_content.content.decode("utf-8")) + + with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), + f"static/markdown/{file_name}")) as md_file: + markdown = md_file.read() + return mistune.html(markdown) + + @glossary_blueprint.route('/') def glossary(): - markdown_url = ("https://raw.githubusercontent.com" - "/genenetwork/genenetwork2/" - "wqflask/wqflask/static" - "/glossary.md") - md_content = requests.get(markdown_url) - if md_content.status_code == 200: - return render_template( - "glossary_html", - rendered_markdown=mistune.html( - md_content.content.decode("utf-8"))), 200 - return render_template( "glossary.html", - rendered_markdown=mistune.html("# Github Down!")), 200 + rendered_markdown=render_markdown("glossary.md")), 200
Move logic for fetching md files to it's own function
## Code Before: import requests import mistune from flask import Blueprint from flask import render_template glossary_blueprint = Blueprint('glossary_blueprint', __name__) @glossary_blueprint.route('/') def glossary(): markdown_url = ("https://raw.githubusercontent.com" "/genenetwork/genenetwork2/" "wqflask/wqflask/static" "/glossary.md") md_content = requests.get(markdown_url) if md_content.status_code == 200: return render_template( "glossary_html", rendered_markdown=mistune.html( md_content.content.decode("utf-8"))), 200 return render_template( "glossary.html", rendered_markdown=mistune.html("# Github Down!")), 200 ## Instruction: Move logic for fetching md files to it's own function ## Code After: import os import requests import mistune from flask import Blueprint from flask import render_template glossary_blueprint = Blueprint('glossary_blueprint', __name__) def render_markdown(file_name): """Try to fetch the file name from Github and if that fails, try to look for it inside the file system """ markdown_url = (f"https://raw.githubusercontent.com" f"/genenetwork/genenetwork2/" f"wqflask/wqflask/static/" f"{file_name}") md_content = requests.get(markdown_url) if md_content.status_code == 200: return mistune.html(md_content.content.decode("utf-8")) with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), f"static/markdown/{file_name}")) as md_file: markdown = md_file.read() return mistune.html(markdown) @glossary_blueprint.route('/') def glossary(): return render_template( "glossary.html", rendered_markdown=render_markdown("glossary.md")), 200
+ import os import requests import mistune from flask import Blueprint from flask import render_template glossary_blueprint = Blueprint('glossary_blueprint', __name__) + def render_markdown(file_name): + """Try to fetch the file name from Github and if that fails, try to + look for it inside the file system + + """ + markdown_url = (f"https://raw.githubusercontent.com" + f"/genenetwork/genenetwork2/" + f"wqflask/wqflask/static/" + f"{file_name}") + md_content = requests.get(markdown_url) + if md_content.status_code == 200: + return mistune.html(md_content.content.decode("utf-8")) + + with open(os.path.join(os.path.abspath(os.path.dirname(__file__)), + f"static/markdown/{file_name}")) as md_file: + markdown = md_file.read() + return mistune.html(markdown) + + @glossary_blueprint.route('/') def glossary(): - markdown_url = ("https://raw.githubusercontent.com" - "/genenetwork/genenetwork2/" - "wqflask/wqflask/static" - "/glossary.md") - md_content = requests.get(markdown_url) - if md_content.status_code == 200: - return render_template( - "glossary_html", - rendered_markdown=mistune.html( - md_content.content.decode("utf-8"))), 200 - return render_template( "glossary.html", - rendered_markdown=mistune.html("# Github Down!")), 200 + rendered_markdown=render_markdown("glossary.md")), 200
4a597ff48f5fd22ab1c6317e8ab1e65a887da284
dosagelib/__pyinstaller/hook-dosagelib.py
dosagelib/__pyinstaller/hook-dosagelib.py
from PyInstaller.utils.hooks import collect_data_files, collect_submodules, copy_metadata hiddenimports = collect_submodules('dosagelib.plugins') datas = copy_metadata('dosage') + collect_data_files('dosagelib')
from PyInstaller.utils.hooks import collect_data_files, collect_submodules, copy_metadata hiddenimports = ['dosagelib.data'] + collect_submodules('dosagelib.plugins') datas = copy_metadata('dosage') + collect_data_files('dosagelib')
Make sure dosagelib.data is importable
PyInstaller: Make sure dosagelib.data is importable
Python
mit
webcomics/dosage,webcomics/dosage
from PyInstaller.utils.hooks import collect_data_files, collect_submodules, copy_metadata - hiddenimports = collect_submodules('dosagelib.plugins') + hiddenimports = ['dosagelib.data'] + collect_submodules('dosagelib.plugins') datas = copy_metadata('dosage') + collect_data_files('dosagelib')
Make sure dosagelib.data is importable
## Code Before: from PyInstaller.utils.hooks import collect_data_files, collect_submodules, copy_metadata hiddenimports = collect_submodules('dosagelib.plugins') datas = copy_metadata('dosage') + collect_data_files('dosagelib') ## Instruction: Make sure dosagelib.data is importable ## Code After: from PyInstaller.utils.hooks import collect_data_files, collect_submodules, copy_metadata hiddenimports = ['dosagelib.data'] + collect_submodules('dosagelib.plugins') datas = copy_metadata('dosage') + collect_data_files('dosagelib')
from PyInstaller.utils.hooks import collect_data_files, collect_submodules, copy_metadata - hiddenimports = collect_submodules('dosagelib.plugins') + hiddenimports = ['dosagelib.data'] + collect_submodules('dosagelib.plugins') ? +++++++++++++++++++++ datas = copy_metadata('dosage') + collect_data_files('dosagelib')
384fd7ba49ad0cfcb173656a5e31475e8c9b49b3
setup.py
setup.py
from distutils.core import setup import nagios setup(name='nagios-api', version=nagios.version, description='Control nagios using an API', author='Mark Smith', author_email='mark@qq.is', license='BSD New (3-clause) License', long_description=open('README.md').read(), url='https://github.com/xb95/nagios-api', packages=['nagios'], scripts=['nagios-cli', 'nagios-api'], requires=[ 'diesel(>=3.0)', 'greenlet(==0.3.4)', 'requests' ] )
from distutils.core import setup import nagios setup(name='nagios-api', version=nagios.version, description='Control nagios using an API', author='Mark Smith', author_email='mark@qq.is', license='BSD New (3-clause) License', long_description=open('README.md').read(), url='https://github.com/xb95/nagios-api', packages=['nagios'], scripts=['nagios-cli', 'nagios-api'], install_requires=[ 'diesel>=3.0', 'greenlet==0.3.4', 'requests' ] )
Use install_requires arg so dependencies are installed
Use install_requires arg so dependencies are installed
Python
bsd-3-clause
zorkian/nagios-api,zorkian/nagios-api
from distutils.core import setup import nagios setup(name='nagios-api', version=nagios.version, description='Control nagios using an API', author='Mark Smith', author_email='mark@qq.is', license='BSD New (3-clause) License', long_description=open('README.md').read(), url='https://github.com/xb95/nagios-api', packages=['nagios'], scripts=['nagios-cli', 'nagios-api'], - requires=[ + install_requires=[ - 'diesel(>=3.0)', + 'diesel>=3.0', - 'greenlet(==0.3.4)', + 'greenlet==0.3.4', 'requests' ] )
Use install_requires arg so dependencies are installed
## Code Before: from distutils.core import setup import nagios setup(name='nagios-api', version=nagios.version, description='Control nagios using an API', author='Mark Smith', author_email='mark@qq.is', license='BSD New (3-clause) License', long_description=open('README.md').read(), url='https://github.com/xb95/nagios-api', packages=['nagios'], scripts=['nagios-cli', 'nagios-api'], requires=[ 'diesel(>=3.0)', 'greenlet(==0.3.4)', 'requests' ] ) ## Instruction: Use install_requires arg so dependencies are installed ## Code After: from distutils.core import setup import nagios setup(name='nagios-api', version=nagios.version, description='Control nagios using an API', author='Mark Smith', author_email='mark@qq.is', license='BSD New (3-clause) License', long_description=open('README.md').read(), url='https://github.com/xb95/nagios-api', packages=['nagios'], scripts=['nagios-cli', 'nagios-api'], install_requires=[ 'diesel>=3.0', 'greenlet==0.3.4', 'requests' ] )
from distutils.core import setup import nagios setup(name='nagios-api', version=nagios.version, description='Control nagios using an API', author='Mark Smith', author_email='mark@qq.is', license='BSD New (3-clause) License', long_description=open('README.md').read(), url='https://github.com/xb95/nagios-api', packages=['nagios'], scripts=['nagios-cli', 'nagios-api'], - requires=[ + install_requires=[ ? ++++++++ - 'diesel(>=3.0)', ? - - + 'diesel>=3.0', - 'greenlet(==0.3.4)', ? - - + 'greenlet==0.3.4', 'requests' ] )
350bd08bdea2df07928d8203680a8bc33d1a7eb1
keops/settings.py
keops/settings.py
from katrid.conf.app_settings import * DATABASES = { 'default': { 'ENGINE': 'katrid.db.backends.sqlite3', 'NAME': 'db.sqlite3', } } AUTH_USER_MODEL = 'base.user' INSTALLED_APPS.append('keops') SERIALIZATION_MODULES = { 'python': 'keops.core.serializers.python', 'json': 'keops.core.serializers.json', 'xml': 'keops.core.serializers.xml_serializer', 'yaml': 'keops.core.serializers.pyyaml', 'csv': 'keops.core.serializers.csv_serializer', 'txt': 'keops.core.serializers.txt_serializer', 'mako': 'keops.core.serializers.mako_serializer', }
from katrid.conf.app_settings import * DATABASES = { 'default': { 'ENGINE': 'katrid.db.backends.sqlite3', 'NAME': 'db.sqlite3', } } AUTH_USER_MODEL = 'base.user' INSTALLED_APPS.append('keops') SERIALIZATION_MODULES = { 'python': 'keops.core.serializers.python', 'json': 'keops.core.serializers.json', 'xml': 'keops.core.serializers.xml_serializer', 'yaml': 'keops.core.serializers.pyyaml', 'csv': 'keops.core.serializers.csv_serializer', 'txt': 'keops.core.serializers.txt_serializer', }
Add exclude fields to model options
Add exclude fields to model options
Python
bsd-3-clause
katrid/keops,katrid/keops,katrid/keops
from katrid.conf.app_settings import * DATABASES = { 'default': { 'ENGINE': 'katrid.db.backends.sqlite3', 'NAME': 'db.sqlite3', } } AUTH_USER_MODEL = 'base.user' INSTALLED_APPS.append('keops') SERIALIZATION_MODULES = { 'python': 'keops.core.serializers.python', 'json': 'keops.core.serializers.json', 'xml': 'keops.core.serializers.xml_serializer', 'yaml': 'keops.core.serializers.pyyaml', 'csv': 'keops.core.serializers.csv_serializer', 'txt': 'keops.core.serializers.txt_serializer', - 'mako': 'keops.core.serializers.mako_serializer', }
Add exclude fields to model options
## Code Before: from katrid.conf.app_settings import * DATABASES = { 'default': { 'ENGINE': 'katrid.db.backends.sqlite3', 'NAME': 'db.sqlite3', } } AUTH_USER_MODEL = 'base.user' INSTALLED_APPS.append('keops') SERIALIZATION_MODULES = { 'python': 'keops.core.serializers.python', 'json': 'keops.core.serializers.json', 'xml': 'keops.core.serializers.xml_serializer', 'yaml': 'keops.core.serializers.pyyaml', 'csv': 'keops.core.serializers.csv_serializer', 'txt': 'keops.core.serializers.txt_serializer', 'mako': 'keops.core.serializers.mako_serializer', } ## Instruction: Add exclude fields to model options ## Code After: from katrid.conf.app_settings import * DATABASES = { 'default': { 'ENGINE': 'katrid.db.backends.sqlite3', 'NAME': 'db.sqlite3', } } AUTH_USER_MODEL = 'base.user' INSTALLED_APPS.append('keops') SERIALIZATION_MODULES = { 'python': 'keops.core.serializers.python', 'json': 'keops.core.serializers.json', 'xml': 'keops.core.serializers.xml_serializer', 'yaml': 'keops.core.serializers.pyyaml', 'csv': 'keops.core.serializers.csv_serializer', 'txt': 'keops.core.serializers.txt_serializer', }
from katrid.conf.app_settings import * DATABASES = { 'default': { 'ENGINE': 'katrid.db.backends.sqlite3', 'NAME': 'db.sqlite3', } } AUTH_USER_MODEL = 'base.user' INSTALLED_APPS.append('keops') SERIALIZATION_MODULES = { 'python': 'keops.core.serializers.python', 'json': 'keops.core.serializers.json', 'xml': 'keops.core.serializers.xml_serializer', 'yaml': 'keops.core.serializers.pyyaml', 'csv': 'keops.core.serializers.csv_serializer', 'txt': 'keops.core.serializers.txt_serializer', - 'mako': 'keops.core.serializers.mako_serializer', }
584b707fe83a49264c95b7cfa6fd84cfcce96a52
csunplugged/utils/group_lessons_by_age.py
csunplugged/utils/group_lessons_by_age.py
"""Return ordered groups of lessons.""" from collections import OrderedDict def group_lessons_by_age(lessons): """Return ordered groups of lessons. Lessons are grouped by the lesson minimum age and maximum ages, and then order by number. Returns: A ordered dictionary of grouped lessons. The key is a tuple of the minimum age and maximum ages for the lessons. The value for a key is a sorted list of lessons. The dictionary is ordered by minimum age, then maximum age. """ grouped_lessons = OrderedDict() lessons = lessons.order_by("min_age", "max_age", "number") for lesson in lessons: if (lesson.min_age, lesson.max_age) in grouped_lessons: grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson) else: grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson] return grouped_lessons
"""Return ordered groups of lessons.""" from collections import OrderedDict def group_lessons_by_age(lessons): """Return ordered groups of lessons. Lessons are grouped by the lesson minimum age and maximum ages, and then order by number. Args: lessons: QuerySet of Lesson objects (QuerySet). Returns: A ordered dictionary of grouped lessons. The key is a tuple of the minimum age and maximum ages for the lessons. The value for a key is a sorted list of lessons. The dictionary is ordered by minimum age, then maximum age. """ grouped_lessons = OrderedDict() lessons = lessons.order_by("min_age", "max_age", "number") for lesson in lessons: if (lesson.min_age, lesson.max_age) in grouped_lessons: grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson) else: grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson] return grouped_lessons
Add missing args docstring details
Add missing args docstring details
Python
mit
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
"""Return ordered groups of lessons.""" from collections import OrderedDict def group_lessons_by_age(lessons): """Return ordered groups of lessons. Lessons are grouped by the lesson minimum age and maximum ages, and then order by number. + + Args: + lessons: QuerySet of Lesson objects (QuerySet). Returns: A ordered dictionary of grouped lessons. The key is a tuple of the minimum age and maximum ages for the lessons. The value for a key is a sorted list of lessons. The dictionary is ordered by minimum age, then maximum age. """ grouped_lessons = OrderedDict() lessons = lessons.order_by("min_age", "max_age", "number") for lesson in lessons: if (lesson.min_age, lesson.max_age) in grouped_lessons: grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson) else: grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson] return grouped_lessons
Add missing args docstring details
## Code Before: """Return ordered groups of lessons.""" from collections import OrderedDict def group_lessons_by_age(lessons): """Return ordered groups of lessons. Lessons are grouped by the lesson minimum age and maximum ages, and then order by number. Returns: A ordered dictionary of grouped lessons. The key is a tuple of the minimum age and maximum ages for the lessons. The value for a key is a sorted list of lessons. The dictionary is ordered by minimum age, then maximum age. """ grouped_lessons = OrderedDict() lessons = lessons.order_by("min_age", "max_age", "number") for lesson in lessons: if (lesson.min_age, lesson.max_age) in grouped_lessons: grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson) else: grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson] return grouped_lessons ## Instruction: Add missing args docstring details ## Code After: """Return ordered groups of lessons.""" from collections import OrderedDict def group_lessons_by_age(lessons): """Return ordered groups of lessons. Lessons are grouped by the lesson minimum age and maximum ages, and then order by number. Args: lessons: QuerySet of Lesson objects (QuerySet). Returns: A ordered dictionary of grouped lessons. The key is a tuple of the minimum age and maximum ages for the lessons. The value for a key is a sorted list of lessons. The dictionary is ordered by minimum age, then maximum age. """ grouped_lessons = OrderedDict() lessons = lessons.order_by("min_age", "max_age", "number") for lesson in lessons: if (lesson.min_age, lesson.max_age) in grouped_lessons: grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson) else: grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson] return grouped_lessons
"""Return ordered groups of lessons.""" from collections import OrderedDict def group_lessons_by_age(lessons): """Return ordered groups of lessons. Lessons are grouped by the lesson minimum age and maximum ages, and then order by number. + + Args: + lessons: QuerySet of Lesson objects (QuerySet). Returns: A ordered dictionary of grouped lessons. The key is a tuple of the minimum age and maximum ages for the lessons. The value for a key is a sorted list of lessons. The dictionary is ordered by minimum age, then maximum age. """ grouped_lessons = OrderedDict() lessons = lessons.order_by("min_age", "max_age", "number") for lesson in lessons: if (lesson.min_age, lesson.max_age) in grouped_lessons: grouped_lessons[(lesson.min_age, lesson.max_age)].append(lesson) else: grouped_lessons[(lesson.min_age, lesson.max_age)] = [lesson] return grouped_lessons
53f7acf5fc04ca6f86456fda95504ba41046d860
openedx/features/specializations/templatetags/sso_meta_tag.py
openedx/features/specializations/templatetags/sso_meta_tag.py
from django import template from django.template import Template register = template.Library() @register.simple_tag(takes_context=True) def sso_meta(context): return Template('<meta name="title" content="${ title }">' + ' ' + '<meta name="description" content="${ subtitle }">' + ' ' + ## OG (Open Graph) title and description added below to give social media info to display ## (https://developers.facebook.com/docs/opengraph/howtos/maximizing-distribution-media-content#tags) '<meta property="og:title" content="${ title }">' + ' ' + '<meta property="og:description" content="${ subtitle }">' + ' ' + '<meta prefix="og: http://ogp.me/ns#" name="image" property="og:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + '<meta property="og:image:width" content="512">' + ' ' + '<meta property="og:image:height" content="512">' + ' ' + '<meta name="twitter:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + '<meta name="twitter:card" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + '<meta name="twitter:site" content="@PhilanthropyUni">' + ' ' + '<meta name="twitter:title" content="${ title }">' + ' ' + '<meta name="twitter:description" content="${ subtitle }">').render(context);
from django import template from django.template.loader import get_template register = template.Library() @register.simple_tag(takes_context=True) def sso_meta(context): return get_template('features/specializations/sso_meta_template.html').render(context.flatten())
Add Django Custom Tag SSO
Add Django Custom Tag SSO
Python
agpl-3.0
philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform
from django import template - from django.template import Template + from django.template.loader import get_template register = template.Library() @register.simple_tag(takes_context=True) def sso_meta(context): + return get_template('features/specializations/sso_meta_template.html').render(context.flatten()) - return Template('<meta name="title" content="${ title }">' + ' ' + - '<meta name="description" content="${ subtitle }">' + ' ' + - ## OG (Open Graph) title and description added below to give social media info to display - ## (https://developers.facebook.com/docs/opengraph/howtos/maximizing-distribution-media-content#tags) - '<meta property="og:title" content="${ title }">' + ' ' + - '<meta property="og:description" content="${ subtitle }">' + ' ' + - '<meta prefix="og: http://ogp.me/ns#" name="image" property="og:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + - '<meta property="og:image:width" content="512">' + ' ' + - '<meta property="og:image:height" content="512">' + ' ' + - '<meta name="twitter:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + - - '<meta name="twitter:card" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + - '<meta name="twitter:site" content="@PhilanthropyUni">' + ' ' + - '<meta name="twitter:title" content="${ title }">' + ' ' + - '<meta name="twitter:description" content="${ subtitle }">').render(context); - -
Add Django Custom Tag SSO
## Code Before: from django import template from django.template import Template register = template.Library() @register.simple_tag(takes_context=True) def sso_meta(context): return Template('<meta name="title" content="${ title }">' + ' ' + '<meta name="description" content="${ subtitle }">' + ' ' + ## OG (Open Graph) title and description added below to give social media info to display ## (https://developers.facebook.com/docs/opengraph/howtos/maximizing-distribution-media-content#tags) '<meta property="og:title" content="${ title }">' + ' ' + '<meta property="og:description" content="${ subtitle }">' + ' ' + '<meta prefix="og: http://ogp.me/ns#" name="image" property="og:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + '<meta property="og:image:width" content="512">' + ' ' + '<meta property="og:image:height" content="512">' + ' ' + '<meta name="twitter:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + '<meta name="twitter:card" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + '<meta name="twitter:site" content="@PhilanthropyUni">' + ' ' + '<meta name="twitter:title" content="${ title }">' + ' ' + '<meta name="twitter:description" content="${ subtitle }">').render(context); ## Instruction: Add Django Custom Tag SSO ## Code After: from django import template from django.template.loader import get_template register = template.Library() @register.simple_tag(takes_context=True) def sso_meta(context): return get_template('features/specializations/sso_meta_template.html').render(context.flatten())
from django import template - from django.template import Template ? ^ + from django.template.loader import get_template ? +++++++ ^^^^^ register = template.Library() @register.simple_tag(takes_context=True) def sso_meta(context): + return get_template('features/specializations/sso_meta_template.html').render(context.flatten()) - return Template('<meta name="title" content="${ title }">' + ' ' + - '<meta name="description" content="${ subtitle }">' + ' ' + - ## OG (Open Graph) title and description added below to give social media info to display - ## (https://developers.facebook.com/docs/opengraph/howtos/maximizing-distribution-media-content#tags) - - '<meta property="og:title" content="${ title }">' + ' ' + - '<meta property="og:description" content="${ subtitle }">' + ' ' + - '<meta prefix="og: http://ogp.me/ns#" name="image" property="og:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + - '<meta property="og:image:width" content="512">' + ' ' + - '<meta property="og:image:height" content="512">' + ' ' + - '<meta name="twitter:image" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + - - '<meta name="twitter:card" content="${ banner_image[\'large\'][\'url\'] }">' + ' ' + - '<meta name="twitter:site" content="@PhilanthropyUni">' + ' ' + - '<meta name="twitter:title" content="${ title }">' + ' ' + - '<meta name="twitter:description" content="${ subtitle }">').render(context); -
ab8b6ed75f27820ce2711d597838584fe68e62ef
setup.py
setup.py
import os from distutils.core import setup filepath = os.path.dirname(__file__) readme_file = os.path.join(filepath, 'README.md') try: import pypandoc long_description = pypandoc.convert(readme_file, 'rst') except(IOError, ImportError): long_description = open(readme_file).read() def extract_version(filename): import re pattern = re.compile(r'''__version__\s*=\s*"(?P<ver>[0-9\.]+)".*''') with file(filename, 'r') as fd: for line in fd: match = pattern.match(line) if match: ver = match.groupdict()['ver'] break else: raise Exception('ERROR: cannot find version string.') return ver version = extract_version('cmdlet/__init__.py') stage = '' setup( name = 'cmdlet', packages = ['cmdlet'], version = version, description = 'Cmdlet provides pipe-like mechanism to cascade functions and generators.', long_description=long_description, author = 'Gary Lee', author_email = 'garywlee@gmail.com', url = 'https://github.com/GaryLee/cmdlet', download_url = 'https://github.com/GaryLee/cmdlet/tarball/v%s%s' % (version, stage), keywords = ['pipe', 'generator', 'iterator'], classifiers = [], )
import os from distutils.core import setup description = 'Cmdlet provides pipe-like mechanism to cascade functions and generators.' filepath = os.path.dirname(__file__) readme_file = os.path.join(filepath, 'README.md') if not os.path.exist(readme_file): long_description = description else: try: import pypandoc long_description = pypandoc.convert(readme_file, 'rst') except(IOError, ImportError): long_description = open(readme_file).read() def extract_version(filename): import re pattern = re.compile(r'''__version__\s*=\s*"(?P<ver>[0-9\.]+)".*''') with file(filename, 'r') as fd: for line in fd: match = pattern.match(line) if match: ver = match.groupdict()['ver'] break else: raise Exception('ERROR: cannot find version string.') return ver version = extract_version('cmdlet/__init__.py') stage = '' setup( name = 'cmdlet', packages = ['cmdlet'], version = version, description = description, long_description=long_description, author = 'Gary Lee', author_email = 'garywlee@gmail.com', url = 'https://github.com/GaryLee/cmdlet', download_url = 'https://github.com/GaryLee/cmdlet/tarball/v%s%s' % (version, stage), keywords = ['pipe', 'generator', 'iterator'], classifiers = [], )
Use short description if README.md not found.
Use short description if README.md not found.
Python
mit
GaryLee/cmdlet
import os from distutils.core import setup + description = 'Cmdlet provides pipe-like mechanism to cascade functions and generators.' filepath = os.path.dirname(__file__) readme_file = os.path.join(filepath, 'README.md') - try: + + if not os.path.exist(readme_file): + long_description = description + else: + try: - import pypandoc + import pypandoc - long_description = pypandoc.convert(readme_file, 'rst') + long_description = pypandoc.convert(readme_file, 'rst') - except(IOError, ImportError): + except(IOError, ImportError): - long_description = open(readme_file).read() + long_description = open(readme_file).read() def extract_version(filename): import re pattern = re.compile(r'''__version__\s*=\s*"(?P<ver>[0-9\.]+)".*''') with file(filename, 'r') as fd: for line in fd: match = pattern.match(line) if match: ver = match.groupdict()['ver'] break else: raise Exception('ERROR: cannot find version string.') return ver version = extract_version('cmdlet/__init__.py') stage = '' setup( name = 'cmdlet', packages = ['cmdlet'], version = version, - description = 'Cmdlet provides pipe-like mechanism to cascade functions and generators.', + description = description, long_description=long_description, author = 'Gary Lee', author_email = 'garywlee@gmail.com', url = 'https://github.com/GaryLee/cmdlet', download_url = 'https://github.com/GaryLee/cmdlet/tarball/v%s%s' % (version, stage), keywords = ['pipe', 'generator', 'iterator'], classifiers = [], )
Use short description if README.md not found.
## Code Before: import os from distutils.core import setup filepath = os.path.dirname(__file__) readme_file = os.path.join(filepath, 'README.md') try: import pypandoc long_description = pypandoc.convert(readme_file, 'rst') except(IOError, ImportError): long_description = open(readme_file).read() def extract_version(filename): import re pattern = re.compile(r'''__version__\s*=\s*"(?P<ver>[0-9\.]+)".*''') with file(filename, 'r') as fd: for line in fd: match = pattern.match(line) if match: ver = match.groupdict()['ver'] break else: raise Exception('ERROR: cannot find version string.') return ver version = extract_version('cmdlet/__init__.py') stage = '' setup( name = 'cmdlet', packages = ['cmdlet'], version = version, description = 'Cmdlet provides pipe-like mechanism to cascade functions and generators.', long_description=long_description, author = 'Gary Lee', author_email = 'garywlee@gmail.com', url = 'https://github.com/GaryLee/cmdlet', download_url = 'https://github.com/GaryLee/cmdlet/tarball/v%s%s' % (version, stage), keywords = ['pipe', 'generator', 'iterator'], classifiers = [], ) ## Instruction: Use short description if README.md not found. ## Code After: import os from distutils.core import setup description = 'Cmdlet provides pipe-like mechanism to cascade functions and generators.' 
filepath = os.path.dirname(__file__) readme_file = os.path.join(filepath, 'README.md') if not os.path.exist(readme_file): long_description = description else: try: import pypandoc long_description = pypandoc.convert(readme_file, 'rst') except(IOError, ImportError): long_description = open(readme_file).read() def extract_version(filename): import re pattern = re.compile(r'''__version__\s*=\s*"(?P<ver>[0-9\.]+)".*''') with file(filename, 'r') as fd: for line in fd: match = pattern.match(line) if match: ver = match.groupdict()['ver'] break else: raise Exception('ERROR: cannot find version string.') return ver version = extract_version('cmdlet/__init__.py') stage = '' setup( name = 'cmdlet', packages = ['cmdlet'], version = version, description = description, long_description=long_description, author = 'Gary Lee', author_email = 'garywlee@gmail.com', url = 'https://github.com/GaryLee/cmdlet', download_url = 'https://github.com/GaryLee/cmdlet/tarball/v%s%s' % (version, stage), keywords = ['pipe', 'generator', 'iterator'], classifiers = [], )
import os from distutils.core import setup + description = 'Cmdlet provides pipe-like mechanism to cascade functions and generators.' filepath = os.path.dirname(__file__) readme_file = os.path.join(filepath, 'README.md') - try: + + if not os.path.exist(readme_file): + long_description = description + else: + try: - import pypandoc + import pypandoc ? ++++ - long_description = pypandoc.convert(readme_file, 'rst') + long_description = pypandoc.convert(readme_file, 'rst') ? ++++ - except(IOError, ImportError): + except(IOError, ImportError): ? ++++ - long_description = open(readme_file).read() + long_description = open(readme_file).read() ? ++++ def extract_version(filename): import re pattern = re.compile(r'''__version__\s*=\s*"(?P<ver>[0-9\.]+)".*''') with file(filename, 'r') as fd: for line in fd: match = pattern.match(line) if match: ver = match.groupdict()['ver'] break else: raise Exception('ERROR: cannot find version string.') return ver version = extract_version('cmdlet/__init__.py') stage = '' setup( name = 'cmdlet', packages = ['cmdlet'], version = version, - description = 'Cmdlet provides pipe-like mechanism to cascade functions and generators.', + description = description, long_description=long_description, author = 'Gary Lee', author_email = 'garywlee@gmail.com', url = 'https://github.com/GaryLee/cmdlet', download_url = 'https://github.com/GaryLee/cmdlet/tarball/v%s%s' % (version, stage), keywords = ['pipe', 'generator', 'iterator'], classifiers = [], )
1d866c7a66d0efde1b6a9beb5ecf89b9c6360b1e
spotpy/unittests/test_objectivefunctions.py
spotpy/unittests/test_objectivefunctions.py
import unittest from spotpy import objectivefunctions as of import numpy as np #https://docs.python.org/3/library/unittest.html class TestObjectiveFunctions(unittest.TestCase): # How many digits to match in case of floating point answers tolerance = 10 def setUp(self): np.random.seed(42) self.simulation = np.random.randn(10) self.evaluation = np.random.randn(10) print(self.simulation) print(self.evaluation) def test_bias(self): res = of.bias(self.evaluation, self.simulation) self.assertAlmostEqual(res, 1.2387193462811703, self.tolerance) def test_length_mismatch_return_nan(self): all_funcs = of._all_functions for func in all_funcs: res = func([0], [0, 1]) self.assertIs(res, np.nan, "Expected np.nan in length mismatch, Got {}".format(res)) if __name__ == '__main__': unittest.main()
import unittest from spotpy import objectivefunctions as of import numpy as np #https://docs.python.org/3/library/unittest.html class TestObjectiveFunctions(unittest.TestCase): # How many digits to match in case of floating point answers tolerance = 10 def setUp(self): np.random.seed(42) self.simulation = np.random.randn(10) self.evaluation = np.random.randn(10) def test_bias(self): res = of.bias(self.evaluation, self.simulation) self.assertAlmostEqual(res, 1.2387193462811703, self.tolerance) def test_pbias(self): res = of.pbias(self.evaluation, self.simulation) self.assertAlmostEqual(res, -156.66937901878677, self.tolerance) def test_nashsutcliffe(self): res = of.nashsutcliffe(self.evaluation, self.simulation) self.assertAlmostEqual(res, -4.1162070769985508, self.tolerance) def test_length_mismatch_return_nan(self): all_funcs = of._all_functions for func in all_funcs: res = func([0], [0, 1]) self.assertTrue(np.isnan(res), "Expected np.nan in length mismatch, Got {}".format(res)) if __name__ == '__main__': unittest.main()
Add tests for pbias and nashsutcliffe
Add tests for pbias and nashsutcliffe
Python
mit
bees4ever/spotpy,thouska/spotpy,thouska/spotpy,bees4ever/spotpy,bees4ever/spotpy,thouska/spotpy
import unittest from spotpy import objectivefunctions as of import numpy as np #https://docs.python.org/3/library/unittest.html class TestObjectiveFunctions(unittest.TestCase): # How many digits to match in case of floating point answers tolerance = 10 def setUp(self): np.random.seed(42) self.simulation = np.random.randn(10) self.evaluation = np.random.randn(10) - print(self.simulation) - print(self.evaluation) - def test_bias(self): res = of.bias(self.evaluation, self.simulation) self.assertAlmostEqual(res, 1.2387193462811703, self.tolerance) + + def test_pbias(self): + res = of.pbias(self.evaluation, self.simulation) + self.assertAlmostEqual(res, -156.66937901878677, self.tolerance) + + def test_nashsutcliffe(self): + res = of.nashsutcliffe(self.evaluation, self.simulation) + self.assertAlmostEqual(res, -4.1162070769985508, self.tolerance) def test_length_mismatch_return_nan(self): all_funcs = of._all_functions for func in all_funcs: res = func([0], [0, 1]) - self.assertIs(res, np.nan, "Expected np.nan in length mismatch, Got {}".format(res)) + self.assertTrue(np.isnan(res), "Expected np.nan in length mismatch, Got {}".format(res)) if __name__ == '__main__': unittest.main()
Add tests for pbias and nashsutcliffe
## Code Before: import unittest from spotpy import objectivefunctions as of import numpy as np #https://docs.python.org/3/library/unittest.html class TestObjectiveFunctions(unittest.TestCase): # How many digits to match in case of floating point answers tolerance = 10 def setUp(self): np.random.seed(42) self.simulation = np.random.randn(10) self.evaluation = np.random.randn(10) print(self.simulation) print(self.evaluation) def test_bias(self): res = of.bias(self.evaluation, self.simulation) self.assertAlmostEqual(res, 1.2387193462811703, self.tolerance) def test_length_mismatch_return_nan(self): all_funcs = of._all_functions for func in all_funcs: res = func([0], [0, 1]) self.assertIs(res, np.nan, "Expected np.nan in length mismatch, Got {}".format(res)) if __name__ == '__main__': unittest.main() ## Instruction: Add tests for pbias and nashsutcliffe ## Code After: import unittest from spotpy import objectivefunctions as of import numpy as np #https://docs.python.org/3/library/unittest.html class TestObjectiveFunctions(unittest.TestCase): # How many digits to match in case of floating point answers tolerance = 10 def setUp(self): np.random.seed(42) self.simulation = np.random.randn(10) self.evaluation = np.random.randn(10) def test_bias(self): res = of.bias(self.evaluation, self.simulation) self.assertAlmostEqual(res, 1.2387193462811703, self.tolerance) def test_pbias(self): res = of.pbias(self.evaluation, self.simulation) self.assertAlmostEqual(res, -156.66937901878677, self.tolerance) def test_nashsutcliffe(self): res = of.nashsutcliffe(self.evaluation, self.simulation) self.assertAlmostEqual(res, -4.1162070769985508, self.tolerance) def test_length_mismatch_return_nan(self): all_funcs = of._all_functions for func in all_funcs: res = func([0], [0, 1]) self.assertTrue(np.isnan(res), "Expected np.nan in length mismatch, Got {}".format(res)) if __name__ == '__main__': unittest.main()
import unittest from spotpy import objectivefunctions as of import numpy as np #https://docs.python.org/3/library/unittest.html class TestObjectiveFunctions(unittest.TestCase): # How many digits to match in case of floating point answers tolerance = 10 def setUp(self): np.random.seed(42) self.simulation = np.random.randn(10) self.evaluation = np.random.randn(10) - print(self.simulation) - print(self.evaluation) - def test_bias(self): res = of.bias(self.evaluation, self.simulation) self.assertAlmostEqual(res, 1.2387193462811703, self.tolerance) + + def test_pbias(self): + res = of.pbias(self.evaluation, self.simulation) + self.assertAlmostEqual(res, -156.66937901878677, self.tolerance) + + def test_nashsutcliffe(self): + res = of.nashsutcliffe(self.evaluation, self.simulation) + self.assertAlmostEqual(res, -4.1162070769985508, self.tolerance) def test_length_mismatch_return_nan(self): all_funcs = of._all_functions for func in all_funcs: res = func([0], [0, 1]) - self.assertIs(res, np.nan, "Expected np.nan in length mismatch, Got {}".format(res)) ? ^ ^^^^^^^^ + self.assertTrue(np.isnan(res), "Expected np.nan in length mismatch, Got {}".format(res)) ? ^^^^^^^^^ +++ ^ if __name__ == '__main__': unittest.main()
d6b2c3fcae81ca30d406778f66c6f8b12cfb04d8
tests/window/WINDOW_CAPTION.py
tests/window/WINDOW_CAPTION.py
'''Test that the window caption can be set. Expected behaviour: Two windows will be opened, one with the caption "Window caption 1" counting up every second; the other with a Unicode string including some non-ASCII characters. Press escape or close either window to finished the test. ''' __docformat__ = 'restructuredtext' __version__ = '$Id: $' import time import unittest from pyglet import window class WINDOW_CAPTION(unittest.TestCase): def test_caption(self): w1 = window.Window(200, 200) w2 = window.Window(200, 200) count = 1 w1.set_caption('Window caption %d' % count) w2.set_caption(u'\u00bfHabla espa\u00f1ol?') last_time = time.time() while not (w1.has_exit or w2.has_exit): if time.time() - last_time > 1: count += 1 w1.set_caption('Window caption %d' % count) last_time = time.time() w1.dispatch_events() w2.dispatch_events() w1.close() w2.close() if __name__ == '__main__': unittest.main()
'''Test that the window caption can be set. Expected behaviour: Two windows will be opened, one with the caption "Window caption 1" counting up every second; the other with a Unicode string including some non-ASCII characters. Press escape or close either window to finished the test. ''' __docformat__ = 'restructuredtext' __version__ = '$Id: $' import time import unittest from pyglet import window class WINDOW_CAPTION(unittest.TestCase): def test_caption(self): w1 = window.Window(400, 200, resizable=True) w2 = window.Window(400, 200, resizable=True) count = 1 w1.set_caption('Window caption %d' % count) w2.set_caption(u'\u00bfHabla espa\u00f1ol?') last_time = time.time() while not (w1.has_exit or w2.has_exit): if time.time() - last_time > 1: count += 1 w1.set_caption('Window caption %d' % count) last_time = time.time() w1.dispatch_events() w2.dispatch_events() w1.close() w2.close() if __name__ == '__main__': unittest.main()
Make windows bigger in this test so the captions can be read.
Make windows bigger in this test so the captions can be read. Index: tests/window/WINDOW_CAPTION.py =================================================================== --- tests/window/WINDOW_CAPTION.py (revision 777) +++ tests/window/WINDOW_CAPTION.py (working copy) @@ -19,8 +19,8 @@ class WINDOW_CAPTION(unittest.TestCase): def test_caption(self): - w1 = window.Window(200, 200) - w2 = window.Window(200, 200) + w1 = window.Window(400, 200, resizable=True) + w2 = window.Window(400, 200, resizable=True) count = 1 w1.set_caption('Window caption %d' % count) w2.set_caption(u'\u00bfHabla espa\u00f1ol?')
Python
bsd-3-clause
adamlwgriffiths/Pyglet,niklaskorz/pyglet,niklaskorz/pyglet,adamlwgriffiths/Pyglet,adamlwgriffiths/Pyglet,adamlwgriffiths/Pyglet,seeminglee/pyglet64,niklaskorz/pyglet,seeminglee/pyglet64,niklaskorz/pyglet,seeminglee/pyglet64
'''Test that the window caption can be set. Expected behaviour: Two windows will be opened, one with the caption "Window caption 1" counting up every second; the other with a Unicode string including some non-ASCII characters. Press escape or close either window to finished the test. ''' __docformat__ = 'restructuredtext' __version__ = '$Id: $' import time import unittest from pyglet import window class WINDOW_CAPTION(unittest.TestCase): def test_caption(self): - w1 = window.Window(200, 200) + w1 = window.Window(400, 200, resizable=True) - w2 = window.Window(200, 200) + w2 = window.Window(400, 200, resizable=True) count = 1 w1.set_caption('Window caption %d' % count) w2.set_caption(u'\u00bfHabla espa\u00f1ol?') last_time = time.time() while not (w1.has_exit or w2.has_exit): if time.time() - last_time > 1: count += 1 w1.set_caption('Window caption %d' % count) last_time = time.time() w1.dispatch_events() w2.dispatch_events() w1.close() w2.close() if __name__ == '__main__': unittest.main()
Make windows bigger in this test so the captions can be read.
## Code Before: '''Test that the window caption can be set. Expected behaviour: Two windows will be opened, one with the caption "Window caption 1" counting up every second; the other with a Unicode string including some non-ASCII characters. Press escape or close either window to finished the test. ''' __docformat__ = 'restructuredtext' __version__ = '$Id: $' import time import unittest from pyglet import window class WINDOW_CAPTION(unittest.TestCase): def test_caption(self): w1 = window.Window(200, 200) w2 = window.Window(200, 200) count = 1 w1.set_caption('Window caption %d' % count) w2.set_caption(u'\u00bfHabla espa\u00f1ol?') last_time = time.time() while not (w1.has_exit or w2.has_exit): if time.time() - last_time > 1: count += 1 w1.set_caption('Window caption %d' % count) last_time = time.time() w1.dispatch_events() w2.dispatch_events() w1.close() w2.close() if __name__ == '__main__': unittest.main() ## Instruction: Make windows bigger in this test so the captions can be read. ## Code After: '''Test that the window caption can be set. Expected behaviour: Two windows will be opened, one with the caption "Window caption 1" counting up every second; the other with a Unicode string including some non-ASCII characters. Press escape or close either window to finished the test. ''' __docformat__ = 'restructuredtext' __version__ = '$Id: $' import time import unittest from pyglet import window class WINDOW_CAPTION(unittest.TestCase): def test_caption(self): w1 = window.Window(400, 200, resizable=True) w2 = window.Window(400, 200, resizable=True) count = 1 w1.set_caption('Window caption %d' % count) w2.set_caption(u'\u00bfHabla espa\u00f1ol?') last_time = time.time() while not (w1.has_exit or w2.has_exit): if time.time() - last_time > 1: count += 1 w1.set_caption('Window caption %d' % count) last_time = time.time() w1.dispatch_events() w2.dispatch_events() w1.close() w2.close() if __name__ == '__main__': unittest.main()
'''Test that the window caption can be set. Expected behaviour: Two windows will be opened, one with the caption "Window caption 1" counting up every second; the other with a Unicode string including some non-ASCII characters. Press escape or close either window to finished the test. ''' __docformat__ = 'restructuredtext' __version__ = '$Id: $' import time import unittest from pyglet import window class WINDOW_CAPTION(unittest.TestCase): def test_caption(self): - w1 = window.Window(200, 200) ? ^ + w1 = window.Window(400, 200, resizable=True) ? ^ ++++++++++++++++ - w2 = window.Window(200, 200) ? ^ + w2 = window.Window(400, 200, resizable=True) ? ^ ++++++++++++++++ count = 1 w1.set_caption('Window caption %d' % count) w2.set_caption(u'\u00bfHabla espa\u00f1ol?') last_time = time.time() while not (w1.has_exit or w2.has_exit): if time.time() - last_time > 1: count += 1 w1.set_caption('Window caption %d' % count) last_time = time.time() w1.dispatch_events() w2.dispatch_events() w1.close() w2.close() if __name__ == '__main__': unittest.main()
99cb654ec5730f6be33bb091aa3ac9e70963470c
hc/front/tests/test_add_channel.py
hc/front/tests/test_add_channel.py
from django.contrib.auth.models import User from django.test import TestCase from hc.api.models import Channel class AddChannelTestCase(TestCase): def setUp(self): self.alice = User(username="alice") self.alice.set_password("password") self.alice.save() def test_it_works(self): url = "/integrations/add/" form = {"kind": "email", "value": "alice@example.org"} self.client.login(username="alice", password="password") r = self.client.post(url, form) assert r.status_code == 302 assert Channel.objects.count() == 1 def test_it_rejects_bad_kind(self): url = "/integrations/add/" form = {"kind": "dog", "value": "Lassie"} self.client.login(username="alice", password="password") r = self.client.post(url, form) assert r.status_code == 400, r.status_code def test_instructions_work(self): self.client.login(username="alice", password="password") for frag in ("email", "webhook", "pd", "pushover", "slack", "hipchat"): url = "/integrations/add_%s/" % frag r = self.client.get(url) self.assertContains(r, "Integration Settings", status_code=200)
from django.conf import settings from django.contrib.auth.models import User from django.test import TestCase from hc.api.models import Channel class AddChannelTestCase(TestCase): def setUp(self): self.alice = User(username="alice") self.alice.set_password("password") self.alice.save() settings.PUSHOVER_API_TOKEN = "bogus_token" settings.PUSHOVER_SUBSCRIPTION_URL = "bogus_url" def test_it_works(self): url = "/integrations/add/" form = {"kind": "email", "value": "alice@example.org"} self.client.login(username="alice", password="password") r = self.client.post(url, form) assert r.status_code == 302 assert Channel.objects.count() == 1 def test_it_rejects_bad_kind(self): url = "/integrations/add/" form = {"kind": "dog", "value": "Lassie"} self.client.login(username="alice", password="password") r = self.client.post(url, form) assert r.status_code == 400, r.status_code def test_instructions_work(self): self.client.login(username="alice", password="password") for frag in ("email", "webhook", "pd", "pushover", "slack", "hipchat"): url = "/integrations/add_%s/" % frag r = self.client.get(url) self.assertContains(r, "Integration Settings", status_code=200)
Fix tests when Pushover is not configured
Fix tests when Pushover is not configured
Python
bsd-3-clause
healthchecks/healthchecks,iphoting/healthchecks,BetterWorks/healthchecks,BetterWorks/healthchecks,healthchecks/healthchecks,healthchecks/healthchecks,healthchecks/healthchecks,iphoting/healthchecks,iphoting/healthchecks,BetterWorks/healthchecks,BetterWorks/healthchecks,iphoting/healthchecks
+ from django.conf import settings from django.contrib.auth.models import User from django.test import TestCase from hc.api.models import Channel class AddChannelTestCase(TestCase): def setUp(self): self.alice = User(username="alice") self.alice.set_password("password") self.alice.save() + + settings.PUSHOVER_API_TOKEN = "bogus_token" + settings.PUSHOVER_SUBSCRIPTION_URL = "bogus_url" def test_it_works(self): url = "/integrations/add/" form = {"kind": "email", "value": "alice@example.org"} self.client.login(username="alice", password="password") r = self.client.post(url, form) assert r.status_code == 302 assert Channel.objects.count() == 1 def test_it_rejects_bad_kind(self): url = "/integrations/add/" form = {"kind": "dog", "value": "Lassie"} self.client.login(username="alice", password="password") r = self.client.post(url, form) assert r.status_code == 400, r.status_code def test_instructions_work(self): self.client.login(username="alice", password="password") for frag in ("email", "webhook", "pd", "pushover", "slack", "hipchat"): url = "/integrations/add_%s/" % frag r = self.client.get(url) self.assertContains(r, "Integration Settings", status_code=200)
Fix tests when Pushover is not configured
## Code Before: from django.contrib.auth.models import User from django.test import TestCase from hc.api.models import Channel class AddChannelTestCase(TestCase): def setUp(self): self.alice = User(username="alice") self.alice.set_password("password") self.alice.save() def test_it_works(self): url = "/integrations/add/" form = {"kind": "email", "value": "alice@example.org"} self.client.login(username="alice", password="password") r = self.client.post(url, form) assert r.status_code == 302 assert Channel.objects.count() == 1 def test_it_rejects_bad_kind(self): url = "/integrations/add/" form = {"kind": "dog", "value": "Lassie"} self.client.login(username="alice", password="password") r = self.client.post(url, form) assert r.status_code == 400, r.status_code def test_instructions_work(self): self.client.login(username="alice", password="password") for frag in ("email", "webhook", "pd", "pushover", "slack", "hipchat"): url = "/integrations/add_%s/" % frag r = self.client.get(url) self.assertContains(r, "Integration Settings", status_code=200) ## Instruction: Fix tests when Pushover is not configured ## Code After: from django.conf import settings from django.contrib.auth.models import User from django.test import TestCase from hc.api.models import Channel class AddChannelTestCase(TestCase): def setUp(self): self.alice = User(username="alice") self.alice.set_password("password") self.alice.save() settings.PUSHOVER_API_TOKEN = "bogus_token" settings.PUSHOVER_SUBSCRIPTION_URL = "bogus_url" def test_it_works(self): url = "/integrations/add/" form = {"kind": "email", "value": "alice@example.org"} self.client.login(username="alice", password="password") r = self.client.post(url, form) assert r.status_code == 302 assert Channel.objects.count() == 1 def test_it_rejects_bad_kind(self): url = "/integrations/add/" form = {"kind": "dog", "value": "Lassie"} self.client.login(username="alice", password="password") r = self.client.post(url, form) assert r.status_code == 400, 
r.status_code def test_instructions_work(self): self.client.login(username="alice", password="password") for frag in ("email", "webhook", "pd", "pushover", "slack", "hipchat"): url = "/integrations/add_%s/" % frag r = self.client.get(url) self.assertContains(r, "Integration Settings", status_code=200)
+ from django.conf import settings from django.contrib.auth.models import User from django.test import TestCase from hc.api.models import Channel class AddChannelTestCase(TestCase): def setUp(self): self.alice = User(username="alice") self.alice.set_password("password") self.alice.save() + + settings.PUSHOVER_API_TOKEN = "bogus_token" + settings.PUSHOVER_SUBSCRIPTION_URL = "bogus_url" def test_it_works(self): url = "/integrations/add/" form = {"kind": "email", "value": "alice@example.org"} self.client.login(username="alice", password="password") r = self.client.post(url, form) assert r.status_code == 302 assert Channel.objects.count() == 1 def test_it_rejects_bad_kind(self): url = "/integrations/add/" form = {"kind": "dog", "value": "Lassie"} self.client.login(username="alice", password="password") r = self.client.post(url, form) assert r.status_code == 400, r.status_code def test_instructions_work(self): self.client.login(username="alice", password="password") for frag in ("email", "webhook", "pd", "pushover", "slack", "hipchat"): url = "/integrations/add_%s/" % frag r = self.client.get(url) self.assertContains(r, "Integration Settings", status_code=200)
fda9d6fd0a8f437b06fa4e34396ca52f4874d32c
modules/pipeurlbuilder.py
modules/pipeurlbuilder.py
import urllib from pipe2py import util def pipe_urlbuilder(context, _INPUT, conf, **kwargs): """This source builds a url and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: BASE -- base PATH -- path elements PARAM -- query parameters Yields (_OUTPUT): url """ for item in _INPUT: #note: we could cache get_value results if item==True url = util.get_value(conf['BASE'], item, **kwargs) if not url.endswith('/'): url += '/' path = util.get_value(conf['PATH'], item, **kwargs) if not isinstance(path, list): path = [path] url += "/".join(path) url = url.rstrip("/") params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']]) if params: url += "?" + urllib.urlencode(params) yield url
import urllib from pipe2py import util def pipe_urlbuilder(context, _INPUT, conf, **kwargs): """This source builds a url and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: BASE -- base PATH -- path elements PARAM -- query parameters Yields (_OUTPUT): url """ for item in _INPUT: #note: we could cache get_value results if item==True url = util.get_value(conf['BASE'], item, **kwargs) if not url.endswith('/'): url += '/' path = conf['PATH'] if not isinstance(path, list): path = [path] path = [util.get_value(p, item, **kwargs) for p in path] url += "/".join(path) url = url.rstrip("/") params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']]) if params: url += "?" + urllib.urlencode(params) yield url
Fix to handle multiple path segments
Fix to handle multiple path segments
Python
mit
nerevu/riko,nerevu/riko
import urllib from pipe2py import util def pipe_urlbuilder(context, _INPUT, conf, **kwargs): """This source builds a url and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: BASE -- base PATH -- path elements PARAM -- query parameters Yields (_OUTPUT): url """ for item in _INPUT: #note: we could cache get_value results if item==True url = util.get_value(conf['BASE'], item, **kwargs) if not url.endswith('/'): url += '/' - path = util.get_value(conf['PATH'], item, **kwargs) + path = conf['PATH'] if not isinstance(path, list): path = [path] + path = [util.get_value(p, item, **kwargs) for p in path] url += "/".join(path) url = url.rstrip("/") params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']]) if params: url += "?" + urllib.urlencode(params) yield url
Fix to handle multiple path segments
## Code Before: import urllib from pipe2py import util def pipe_urlbuilder(context, _INPUT, conf, **kwargs): """This source builds a url and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: BASE -- base PATH -- path elements PARAM -- query parameters Yields (_OUTPUT): url """ for item in _INPUT: #note: we could cache get_value results if item==True url = util.get_value(conf['BASE'], item, **kwargs) if not url.endswith('/'): url += '/' path = util.get_value(conf['PATH'], item, **kwargs) if not isinstance(path, list): path = [path] url += "/".join(path) url = url.rstrip("/") params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']]) if params: url += "?" + urllib.urlencode(params) yield url ## Instruction: Fix to handle multiple path segments ## Code After: import urllib from pipe2py import util def pipe_urlbuilder(context, _INPUT, conf, **kwargs): """This source builds a url and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: BASE -- base PATH -- path elements PARAM -- query parameters Yields (_OUTPUT): url """ for item in _INPUT: #note: we could cache get_value results if item==True url = util.get_value(conf['BASE'], item, **kwargs) if not url.endswith('/'): url += '/' path = conf['PATH'] if not isinstance(path, list): path = [path] path = [util.get_value(p, item, **kwargs) for p in path] url += "/".join(path) url = url.rstrip("/") params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']]) if params: url += "?" + urllib.urlencode(params) yield url
import urllib from pipe2py import util def pipe_urlbuilder(context, _INPUT, conf, **kwargs): """This source builds a url and yields it forever. Keyword arguments: context -- pipeline context _INPUT -- not used conf: BASE -- base PATH -- path elements PARAM -- query parameters Yields (_OUTPUT): url """ for item in _INPUT: #note: we could cache get_value results if item==True url = util.get_value(conf['BASE'], item, **kwargs) if not url.endswith('/'): url += '/' - path = util.get_value(conf['PATH'], item, **kwargs) + path = conf['PATH'] if not isinstance(path, list): path = [path] + path = [util.get_value(p, item, **kwargs) for p in path] url += "/".join(path) url = url.rstrip("/") params = dict([(util.get_value(p['key'], item, **kwargs), util.get_value(p['value'], item, **kwargs)) for p in conf['PARAM']]) if params: url += "?" + urllib.urlencode(params) yield url
e16960eaaf38513e80fb18580c3e4320978407e4
chainer/training/triggers/__init__.py
chainer/training/triggers/__init__.py
from chainer.training.triggers import interval_trigger # NOQA from chainer.training.triggers import minmax_value_trigger # NOQA # import class and function from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
from chainer.training.triggers import interval_trigger # NOQA from chainer.training.triggers import minmax_value_trigger # NOQA # import class and function from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
Fix the order of importing
Fix the order of importing
Python
mit
wkentaro/chainer,ktnyt/chainer,chainer/chainer,ktnyt/chainer,jnishi/chainer,jnishi/chainer,wkentaro/chainer,chainer/chainer,ktnyt/chainer,ktnyt/chainer,niboshi/chainer,jnishi/chainer,hvy/chainer,chainer/chainer,pfnet/chainer,niboshi/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,hvy/chainer,keisuke-umezawa/chainer,hvy/chainer,rezoo/chainer,okuta/chainer,okuta/chainer,okuta/chainer,aonotas/chainer,niboshi/chainer,hvy/chainer,wkentaro/chainer,chainer/chainer,ronekko/chainer,jnishi/chainer,okuta/chainer,anaruse/chainer,tkerola/chainer,niboshi/chainer,keisuke-umezawa/chainer,wkentaro/chainer
from chainer.training.triggers import interval_trigger # NOQA from chainer.training.triggers import minmax_value_trigger # NOQA # import class and function from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA - from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
Fix the order of importing
## Code Before: from chainer.training.triggers import interval_trigger # NOQA from chainer.training.triggers import minmax_value_trigger # NOQA # import class and function from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA ## Instruction: Fix the order of importing ## Code After: from chainer.training.triggers import interval_trigger # NOQA from chainer.training.triggers import minmax_value_trigger # NOQA # import class and function from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA
from chainer.training.triggers import interval_trigger # NOQA from chainer.training.triggers import minmax_value_trigger # NOQA # import class and function from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA from chainer.training.triggers.interval_trigger import IntervalTrigger # NOQA from chainer.training.triggers.manual_schedule_trigger import ManualScheduleTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import BestValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MaxValueTrigger # NOQA from chainer.training.triggers.minmax_value_trigger import MinValueTrigger # NOQA - from chainer.training.triggers.early_stopping_trigger import EarlyStoppingTrigger # NOQA
454e107abfdc9e3038a18500568e9a1357364bd0
pygraphc/similarity/JaroWinkler.py
pygraphc/similarity/JaroWinkler.py
import jellyfish import multiprocessing from itertools import combinations class JaroWinkler(object): def __init__(self, event_attributes, event_length): self.event_attributes = event_attributes self.event_length = event_length def __jarowinkler(self, unique_event_id): string1 = unicode(self.event_attributes[unique_event_id[0]]['preprocessed_event'], 'utf-8') string2 = unicode(self.event_attributes[unique_event_id[1]]['preprocessed_event'], 'utf-8') return jellyfish.jaro_winkler(string1, string2) def __call__(self, unique_event_id): distance = self.__jarowinkler(unique_event_id) if distance > 0: distance_with_id = (unique_event_id[0], unique_event_id[1], distance) return distance_with_id def get_jarowinkler(self): # get unique event id combination event_id_combination = list(combinations(xrange(self.event_length), 2)) # get distance with multiprocessing pool = multiprocessing.Pool(processes=4) distances = pool.map(self, event_id_combination) pool.close() pool.join() return distances
import jellyfish import multiprocessing from itertools import combinations class JaroWinkler(object): def __init__(self, event_attributes, event_length): self.event_attributes = event_attributes self.event_length = event_length def __jarowinkler(self, unique_event_id): string1 = unicode(self.event_attributes[unique_event_id[0]]['preprocessed_event'], 'utf-8') string2 = unicode(self.event_attributes[unique_event_id[1]]['preprocessed_event'], 'utf-8') distance = jellyfish.jaro_winkler(string1, string2) if distance > 0.: return round(distance, 3) def __call__(self, unique_event_id): distance = self.__jarowinkler(unique_event_id) distance_with_id = (unique_event_id[0], unique_event_id[1], distance) return distance_with_id def get_jarowinkler(self): # get unique event id combination event_id_combination = list(combinations(xrange(self.event_length), 2)) # get distance with multiprocessing pool = multiprocessing.Pool(processes=4) distances = pool.map(self, event_id_combination) pool.close() pool.join() return distances
Add checking for zero distance
Add checking for zero distance
Python
mit
studiawan/pygraphc
import jellyfish import multiprocessing from itertools import combinations class JaroWinkler(object): def __init__(self, event_attributes, event_length): self.event_attributes = event_attributes self.event_length = event_length def __jarowinkler(self, unique_event_id): string1 = unicode(self.event_attributes[unique_event_id[0]]['preprocessed_event'], 'utf-8') string2 = unicode(self.event_attributes[unique_event_id[1]]['preprocessed_event'], 'utf-8') - return jellyfish.jaro_winkler(string1, string2) + distance = jellyfish.jaro_winkler(string1, string2) + if distance > 0.: + return round(distance, 3) def __call__(self, unique_event_id): distance = self.__jarowinkler(unique_event_id) - if distance > 0: - distance_with_id = (unique_event_id[0], unique_event_id[1], distance) + distance_with_id = (unique_event_id[0], unique_event_id[1], distance) - return distance_with_id + return distance_with_id def get_jarowinkler(self): # get unique event id combination event_id_combination = list(combinations(xrange(self.event_length), 2)) # get distance with multiprocessing pool = multiprocessing.Pool(processes=4) distances = pool.map(self, event_id_combination) pool.close() pool.join() return distances
Add checking for zero distance
## Code Before: import jellyfish import multiprocessing from itertools import combinations class JaroWinkler(object): def __init__(self, event_attributes, event_length): self.event_attributes = event_attributes self.event_length = event_length def __jarowinkler(self, unique_event_id): string1 = unicode(self.event_attributes[unique_event_id[0]]['preprocessed_event'], 'utf-8') string2 = unicode(self.event_attributes[unique_event_id[1]]['preprocessed_event'], 'utf-8') return jellyfish.jaro_winkler(string1, string2) def __call__(self, unique_event_id): distance = self.__jarowinkler(unique_event_id) if distance > 0: distance_with_id = (unique_event_id[0], unique_event_id[1], distance) return distance_with_id def get_jarowinkler(self): # get unique event id combination event_id_combination = list(combinations(xrange(self.event_length), 2)) # get distance with multiprocessing pool = multiprocessing.Pool(processes=4) distances = pool.map(self, event_id_combination) pool.close() pool.join() return distances ## Instruction: Add checking for zero distance ## Code After: import jellyfish import multiprocessing from itertools import combinations class JaroWinkler(object): def __init__(self, event_attributes, event_length): self.event_attributes = event_attributes self.event_length = event_length def __jarowinkler(self, unique_event_id): string1 = unicode(self.event_attributes[unique_event_id[0]]['preprocessed_event'], 'utf-8') string2 = unicode(self.event_attributes[unique_event_id[1]]['preprocessed_event'], 'utf-8') distance = jellyfish.jaro_winkler(string1, string2) if distance > 0.: return round(distance, 3) def __call__(self, unique_event_id): distance = self.__jarowinkler(unique_event_id) distance_with_id = (unique_event_id[0], unique_event_id[1], distance) return distance_with_id def get_jarowinkler(self): # get unique event id combination event_id_combination = list(combinations(xrange(self.event_length), 2)) # get distance with multiprocessing pool = 
multiprocessing.Pool(processes=4) distances = pool.map(self, event_id_combination) pool.close() pool.join() return distances
import jellyfish import multiprocessing from itertools import combinations class JaroWinkler(object): def __init__(self, event_attributes, event_length): self.event_attributes = event_attributes self.event_length = event_length def __jarowinkler(self, unique_event_id): string1 = unicode(self.event_attributes[unique_event_id[0]]['preprocessed_event'], 'utf-8') string2 = unicode(self.event_attributes[unique_event_id[1]]['preprocessed_event'], 'utf-8') - return jellyfish.jaro_winkler(string1, string2) ? ^ ^^^^ + distance = jellyfish.jaro_winkler(string1, string2) ? ^^^^^^^ ^^ + if distance > 0.: + return round(distance, 3) def __call__(self, unique_event_id): distance = self.__jarowinkler(unique_event_id) - if distance > 0: - distance_with_id = (unique_event_id[0], unique_event_id[1], distance) ? ---- + distance_with_id = (unique_event_id[0], unique_event_id[1], distance) - return distance_with_id ? ---- + return distance_with_id def get_jarowinkler(self): # get unique event id combination event_id_combination = list(combinations(xrange(self.event_length), 2)) # get distance with multiprocessing pool = multiprocessing.Pool(processes=4) distances = pool.map(self, event_id_combination) pool.close() pool.join() return distances
480e51fc6b09cc47105b4615c0ff9047b39a9067
eva_cttv_pipeline/trait_mapping/utils.py
eva_cttv_pipeline/trait_mapping/utils.py
import logging logger = logging.getLogger(__package__) def request_retry_helper(function, retry_count: int, url: str): """ Given a function make a number of attempts to call function for it to successfully return a non-None value, subsequently returning this value. Makes the number of tries specified in retry_count parameter. :param function: Function that could need multiple attempts to return a non-None value :param retry_count: Number of attempts to make :param url: String specifying the url to make a request. :return: Returned value of the function. """ for retry_num in range(retry_count): return_value = function(url) if return_value is not None: return return_value logger.warning("attempt {}: failed running function {} with url {}".format( retry_num, function, url)) logger.warning("error on last attempt, skipping") return None
import logging logger = logging.getLogger(__package__) def request_retry_helper(function, retry_count: int, url: str): """ Given a function make a number of attempts to call function for it to successfully return a non-None value, subsequently returning this value. Makes the number of tries specified in retry_count parameter. :param function: Function that could need multiple attempts to return a non-None value :param retry_count: Number of attempts to make :param url: String specifying the url to make a request. :return: Returned value of the function. """ for retry_num in range(retry_count): try: return function(url) except Exception as e: logger.warning("attempt {}: failed running function {} with url {}".format(retry_num, function, url)) logger.warning(e) logger.warning("error on last attempt, skipping") return None
Modify the URL helper to not rely on None values
Modify the URL helper to not rely on None values
Python
apache-2.0
EBIvariation/eva-cttv-pipeline
import logging logger = logging.getLogger(__package__) def request_retry_helper(function, retry_count: int, url: str): """ Given a function make a number of attempts to call function for it to successfully return a non-None value, subsequently returning this value. Makes the number of tries specified in retry_count parameter. :param function: Function that could need multiple attempts to return a non-None value :param retry_count: Number of attempts to make :param url: String specifying the url to make a request. :return: Returned value of the function. """ for retry_num in range(retry_count): + try: - return_value = function(url) - if return_value is not None: - return return_value - logger.warning("attempt {}: failed running function {} with url {}".format( - retry_num, function, url)) + return function(url) + except Exception as e: + logger.warning("attempt {}: failed running function {} with url {}".format(retry_num, function, url)) + logger.warning(e) logger.warning("error on last attempt, skipping") return None
Modify the URL helper to not rely on None values
## Code Before: import logging logger = logging.getLogger(__package__) def request_retry_helper(function, retry_count: int, url: str): """ Given a function make a number of attempts to call function for it to successfully return a non-None value, subsequently returning this value. Makes the number of tries specified in retry_count parameter. :param function: Function that could need multiple attempts to return a non-None value :param retry_count: Number of attempts to make :param url: String specifying the url to make a request. :return: Returned value of the function. """ for retry_num in range(retry_count): return_value = function(url) if return_value is not None: return return_value logger.warning("attempt {}: failed running function {} with url {}".format( retry_num, function, url)) logger.warning("error on last attempt, skipping") return None ## Instruction: Modify the URL helper to not rely on None values ## Code After: import logging logger = logging.getLogger(__package__) def request_retry_helper(function, retry_count: int, url: str): """ Given a function make a number of attempts to call function for it to successfully return a non-None value, subsequently returning this value. Makes the number of tries specified in retry_count parameter. :param function: Function that could need multiple attempts to return a non-None value :param retry_count: Number of attempts to make :param url: String specifying the url to make a request. :return: Returned value of the function. """ for retry_num in range(retry_count): try: return function(url) except Exception as e: logger.warning("attempt {}: failed running function {} with url {}".format(retry_num, function, url)) logger.warning(e) logger.warning("error on last attempt, skipping") return None
import logging logger = logging.getLogger(__package__) def request_retry_helper(function, retry_count: int, url: str): """ Given a function make a number of attempts to call function for it to successfully return a non-None value, subsequently returning this value. Makes the number of tries specified in retry_count parameter. :param function: Function that could need multiple attempts to return a non-None value :param retry_count: Number of attempts to make :param url: String specifying the url to make a request. :return: Returned value of the function. """ for retry_num in range(retry_count): + try: - return_value = function(url) - if return_value is not None: - return return_value - logger.warning("attempt {}: failed running function {} with url {}".format( - retry_num, function, url)) ? -- --- ^^ - + return function(url) ? + ^ + except Exception as e: + logger.warning("attempt {}: failed running function {} with url {}".format(retry_num, function, url)) + logger.warning(e) logger.warning("error on last attempt, skipping") return None
132f91c5f3f193ca3b1a246b9ef5b20b4e03609f
core/validators.py
core/validators.py
from datetime import datetime, timedelta from django.core.exceptions import ValidationError def validate_approximatedate(date): if date.month == 0: raise ValidationError( 'Event date can\'t be a year only. ' 'Please, provide at least a month and a year.' ) def validate_event_date(date): today = datetime.today() event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(date.year, date.month, date.day), '%Y-%m-%d')) if event_date - datetime.date(today) < timedelta(days=90): raise ValidationError('Your event date is too close. ' 'Workshop date should be at least 3 months (90 days) from now.')
from datetime import date, datetime, timedelta from django.core.exceptions import ValidationError def validate_approximatedate(date): if date.month == 0: raise ValidationError( 'Event date can\'t be a year only. ' 'Please, provide at least a month and a year.' ) def validate_event_date(e_date): today = date.today() event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(e_date.year, e_date.month, e_date.day), '%Y-%m-%d')) if event_date - today < timedelta(days=90): raise ValidationError('Your event date is too close. ' 'Workshop date should be at least 3 months (90 days) from now.')
Apply suggested changes on date
Apply suggested changes on date
Python
bsd-3-clause
DjangoGirls/djangogirls,DjangoGirls/djangogirls,DjangoGirls/djangogirls
- from datetime import datetime, timedelta + from datetime import date, datetime, timedelta from django.core.exceptions import ValidationError def validate_approximatedate(date): if date.month == 0: raise ValidationError( 'Event date can\'t be a year only. ' 'Please, provide at least a month and a year.' ) - def validate_event_date(date): + def validate_event_date(e_date): - today = datetime.today() + today = date.today() - event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(date.year, date.month, date.day), '%Y-%m-%d')) + event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(e_date.year, e_date.month, e_date.day), + '%Y-%m-%d')) - if event_date - datetime.date(today) < timedelta(days=90): + if event_date - today < timedelta(days=90): raise ValidationError('Your event date is too close. ' 'Workshop date should be at least 3 months (90 days) from now.')
Apply suggested changes on date
## Code Before: from datetime import datetime, timedelta from django.core.exceptions import ValidationError def validate_approximatedate(date): if date.month == 0: raise ValidationError( 'Event date can\'t be a year only. ' 'Please, provide at least a month and a year.' ) def validate_event_date(date): today = datetime.today() event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(date.year, date.month, date.day), '%Y-%m-%d')) if event_date - datetime.date(today) < timedelta(days=90): raise ValidationError('Your event date is too close. ' 'Workshop date should be at least 3 months (90 days) from now.') ## Instruction: Apply suggested changes on date ## Code After: from datetime import date, datetime, timedelta from django.core.exceptions import ValidationError def validate_approximatedate(date): if date.month == 0: raise ValidationError( 'Event date can\'t be a year only. ' 'Please, provide at least a month and a year.' ) def validate_event_date(e_date): today = date.today() event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(e_date.year, e_date.month, e_date.day), '%Y-%m-%d')) if event_date - today < timedelta(days=90): raise ValidationError('Your event date is too close. ' 'Workshop date should be at least 3 months (90 days) from now.')
- from datetime import datetime, timedelta + from datetime import date, datetime, timedelta ? ++++++ from django.core.exceptions import ValidationError def validate_approximatedate(date): if date.month == 0: raise ValidationError( 'Event date can\'t be a year only. ' 'Please, provide at least a month and a year.' ) - def validate_event_date(date): + def validate_event_date(e_date): ? ++ - today = datetime.today() ? ---- + today = date.today() - event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(date.year, date.month, date.day), '%Y-%m-%d')) ? ------------- + event_date = datetime.date(datetime.strptime('{0}-{1}-{2}'.format(e_date.year, e_date.month, e_date.day), ? ++ ++ ++ + '%Y-%m-%d')) - if event_date - datetime.date(today) < timedelta(days=90): ? -------------- - + if event_date - today < timedelta(days=90): raise ValidationError('Your event date is too close. ' 'Workshop date should be at least 3 months (90 days) from now.')
0a628ed81ca11fc4175b480aad9a136b8a4fe1c2
constantsgen/pythonwriter.py
constantsgen/pythonwriter.py
class PythonWriter: def __init__(self, constants): self.constants = constants def write(self, out): out.write("# This file was generated by generate_constants.\n\n") out.write("from enum import Enum, unique\n\n") for name, enum in self.constants.enum_values.items(): out.write(""" @unique class {}(Enum):\n""".format(name)) for base_name, value in enum.items(): # For the enum value names remove everything up through the # first underscore and convert the remainder to lowercase. For # example the value NV_BOOL is assigned to bool. If there is no # underscore, find() returns -1 and the entire string is used. first_underscore = base_name.find("_") name = base_name[first_underscore + 1:].lower() out.write(" {} = {}\n".format(name, value)) for name, value in self.constants.constant_values.items(): out.write("{} = {}\n".format(name, value))
class PythonWriter: def __init__(self, constants): self.constants = constants def write(self, out): out.write("# This file was generated by generate_constants.\n\n") out.write("from enum import Enum, unique\n\n") for name, enum in self.constants.enum_values.items(): out.write(""" @unique class {}(Enum):\n""".format(name)) for base_name, value in enum.items(): # For the enum value names remove everything up through the # first underscore and convert the remainder to lowercase. For # example the value NV_BOOL is assigned to bool. If there is no # underscore, find() returns -1 and the entire string is used. first_underscore = base_name.find("_") name = base_name[first_underscore + 1:].lower() out.write(" {} = {}\n".format(name, value)) out.write("\n\n") for name, value in self.constants.constant_values.items(): out.write("{} = {}\n".format(name, value))
Add PEP8 whitespace around Enums
Add PEP8 whitespace around Enums
Python
bsd-3-clause
barracudanetworks/constantsgen,barracudanetworks/constantsgen,barracudanetworks/constantsgen
class PythonWriter: def __init__(self, constants): self.constants = constants def write(self, out): out.write("# This file was generated by generate_constants.\n\n") out.write("from enum import Enum, unique\n\n") for name, enum in self.constants.enum_values.items(): out.write(""" - @unique class {}(Enum):\n""".format(name)) for base_name, value in enum.items(): # For the enum value names remove everything up through the # first underscore and convert the remainder to lowercase. For # example the value NV_BOOL is assigned to bool. If there is no # underscore, find() returns -1 and the entire string is used. first_underscore = base_name.find("_") name = base_name[first_underscore + 1:].lower() out.write(" {} = {}\n".format(name, value)) + out.write("\n\n") + for name, value in self.constants.constant_values.items(): out.write("{} = {}\n".format(name, value))
Add PEP8 whitespace around Enums
## Code Before: class PythonWriter: def __init__(self, constants): self.constants = constants def write(self, out): out.write("# This file was generated by generate_constants.\n\n") out.write("from enum import Enum, unique\n\n") for name, enum in self.constants.enum_values.items(): out.write(""" @unique class {}(Enum):\n""".format(name)) for base_name, value in enum.items(): # For the enum value names remove everything up through the # first underscore and convert the remainder to lowercase. For # example the value NV_BOOL is assigned to bool. If there is no # underscore, find() returns -1 and the entire string is used. first_underscore = base_name.find("_") name = base_name[first_underscore + 1:].lower() out.write(" {} = {}\n".format(name, value)) for name, value in self.constants.constant_values.items(): out.write("{} = {}\n".format(name, value)) ## Instruction: Add PEP8 whitespace around Enums ## Code After: class PythonWriter: def __init__(self, constants): self.constants = constants def write(self, out): out.write("# This file was generated by generate_constants.\n\n") out.write("from enum import Enum, unique\n\n") for name, enum in self.constants.enum_values.items(): out.write(""" @unique class {}(Enum):\n""".format(name)) for base_name, value in enum.items(): # For the enum value names remove everything up through the # first underscore and convert the remainder to lowercase. For # example the value NV_BOOL is assigned to bool. If there is no # underscore, find() returns -1 and the entire string is used. first_underscore = base_name.find("_") name = base_name[first_underscore + 1:].lower() out.write(" {} = {}\n".format(name, value)) out.write("\n\n") for name, value in self.constants.constant_values.items(): out.write("{} = {}\n".format(name, value))
class PythonWriter: def __init__(self, constants): self.constants = constants def write(self, out): out.write("# This file was generated by generate_constants.\n\n") out.write("from enum import Enum, unique\n\n") for name, enum in self.constants.enum_values.items(): out.write(""" - @unique class {}(Enum):\n""".format(name)) for base_name, value in enum.items(): # For the enum value names remove everything up through the # first underscore and convert the remainder to lowercase. For # example the value NV_BOOL is assigned to bool. If there is no # underscore, find() returns -1 and the entire string is used. first_underscore = base_name.find("_") name = base_name[first_underscore + 1:].lower() out.write(" {} = {}\n".format(name, value)) + out.write("\n\n") + for name, value in self.constants.constant_values.items(): out.write("{} = {}\n".format(name, value))
76ca06c26d74aaad1f0773321fdd382b12addcdc
src/django_easyfilters/utils.py
src/django_easyfilters/utils.py
try: from django.db.models.constants import LOOKUP_SEP except ImportError: # Django < 1.5 fallback from django.db.models.sql.constants import LOOKUP_SEP from django.db.models.related import RelatedObject import six def python_2_unicode_compatible(klass): # Copied from Django 1.5 """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if not six.PY3: klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass def get_model_field(model, f): parts = f.split(LOOKUP_SEP) opts = model._meta for name in parts[:-1]: try: rel = opts.get_field_by_name(name)[0] except FieldDoesNotExist: return None if isinstance(rel, RelatedObject): model = rel.model opts = rel.opts else: model = rel.rel.to opts = model._meta rel, model, direct, m2m = opts.get_field_by_name(parts[-1]) return rel, m2m
try: from django.db.models.constants import LOOKUP_SEP except ImportError: # Django < 1.5 fallback from django.db.models.sql.constants import LOOKUP_SEP from django.db.models.related import RelatedObject from six import PY3 def python_2_unicode_compatible(klass): # Copied from Django 1.5 """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if not PY3: klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass def get_model_field(model, f): parts = f.split(LOOKUP_SEP) opts = model._meta for name in parts[:-1]: rel = opts.get_field_by_name(name)[0] if isinstance(rel, RelatedObject): model = rel.model opts = rel.opts else: model = rel.rel.to opts = model._meta rel, model, direct, m2m = opts.get_field_by_name(parts[-1]) return rel, m2m
Fix error handling in get_model_field (passthrough).
Fix error handling in get_model_field (passthrough).
Python
mit
ionelmc/django-easyfilters,ionelmc/django-easyfilters
try: from django.db.models.constants import LOOKUP_SEP except ImportError: # Django < 1.5 fallback from django.db.models.sql.constants import LOOKUP_SEP from django.db.models.related import RelatedObject - import six + from six import PY3 + def python_2_unicode_compatible(klass): # Copied from Django 1.5 """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ - if not six.PY3: + if not PY3: klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass def get_model_field(model, f): parts = f.split(LOOKUP_SEP) opts = model._meta for name in parts[:-1]: - try: - rel = opts.get_field_by_name(name)[0] + rel = opts.get_field_by_name(name)[0] - except FieldDoesNotExist: - return None if isinstance(rel, RelatedObject): model = rel.model opts = rel.opts else: model = rel.rel.to opts = model._meta rel, model, direct, m2m = opts.get_field_by_name(parts[-1]) return rel, m2m
Fix error handling in get_model_field (passthrough).
## Code Before: try: from django.db.models.constants import LOOKUP_SEP except ImportError: # Django < 1.5 fallback from django.db.models.sql.constants import LOOKUP_SEP from django.db.models.related import RelatedObject import six def python_2_unicode_compatible(klass): # Copied from Django 1.5 """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ if not six.PY3: klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass def get_model_field(model, f): parts = f.split(LOOKUP_SEP) opts = model._meta for name in parts[:-1]: try: rel = opts.get_field_by_name(name)[0] except FieldDoesNotExist: return None if isinstance(rel, RelatedObject): model = rel.model opts = rel.opts else: model = rel.rel.to opts = model._meta rel, model, direct, m2m = opts.get_field_by_name(parts[-1]) return rel, m2m ## Instruction: Fix error handling in get_model_field (passthrough). ## Code After: try: from django.db.models.constants import LOOKUP_SEP except ImportError: # Django < 1.5 fallback from django.db.models.sql.constants import LOOKUP_SEP from django.db.models.related import RelatedObject from six import PY3 def python_2_unicode_compatible(klass): # Copied from Django 1.5 """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. 
""" if not PY3: klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass def get_model_field(model, f): parts = f.split(LOOKUP_SEP) opts = model._meta for name in parts[:-1]: rel = opts.get_field_by_name(name)[0] if isinstance(rel, RelatedObject): model = rel.model opts = rel.opts else: model = rel.rel.to opts = model._meta rel, model, direct, m2m = opts.get_field_by_name(parts[-1]) return rel, m2m
try: from django.db.models.constants import LOOKUP_SEP except ImportError: # Django < 1.5 fallback from django.db.models.sql.constants import LOOKUP_SEP from django.db.models.related import RelatedObject - import six + from six import PY3 + def python_2_unicode_compatible(klass): # Copied from Django 1.5 """ A decorator that defines __unicode__ and __str__ methods under Python 2. Under Python 3 it does nothing. To support Python 2 and 3 with a single code base, define a __str__ method returning text and apply this decorator to the class. """ - if not six.PY3: ? ---- + if not PY3: klass.__unicode__ = klass.__str__ klass.__str__ = lambda self: self.__unicode__().encode('utf-8') return klass def get_model_field(model, f): parts = f.split(LOOKUP_SEP) opts = model._meta for name in parts[:-1]: - try: - rel = opts.get_field_by_name(name)[0] ? ---- + rel = opts.get_field_by_name(name)[0] - except FieldDoesNotExist: - return None if isinstance(rel, RelatedObject): model = rel.model opts = rel.opts else: model = rel.rel.to opts = model._meta rel, model, direct, m2m = opts.get_field_by_name(parts[-1]) return rel, m2m
0f9f4f1ee325d72d09625850ba6a153ae5616ab0
nose2/tests/functional/test_collect_plugin.py
nose2/tests/functional/test_collect_plugin.py
import re from nose2.tests._common import FunctionalTestCase class CollectOnlyFunctionalTest(FunctionalTestCase): def test_collect_tests_in_package(self): self.assertTestRunOutputMatches( self.runIn('scenario/tests_in_package', '-v', '--collect-only'), stderr=EXPECT_LAYOUT1) # expectations EXPECT_LAYOUT1 = re.compile("""\ Ran 25 tests in \d.\d+s OK""")
import re from nose2.tests._common import FunctionalTestCase class CollectOnlyFunctionalTest(FunctionalTestCase): def test_collect_tests_in_package(self): self.assertTestRunOutputMatches( self.runIn('scenario/tests_in_package', '-v', '--collect-only', '--plugin=nose2.plugins.collect'), stderr=EXPECT_LAYOUT1) # expectations EXPECT_LAYOUT1 = re.compile("""\ Ran 25 tests in \d.\d+s OK""")
Update test to load plugin
Update test to load plugin collectonly no longer loaded by default
Python
bsd-2-clause
ptthiem/nose2,little-dude/nose2,little-dude/nose2,leth/nose2,ojengwa/nose2,ezigman/nose2,ojengwa/nose2,ezigman/nose2,leth/nose2,ptthiem/nose2
import re from nose2.tests._common import FunctionalTestCase class CollectOnlyFunctionalTest(FunctionalTestCase): def test_collect_tests_in_package(self): self.assertTestRunOutputMatches( - self.runIn('scenario/tests_in_package', '-v', '--collect-only'), + self.runIn('scenario/tests_in_package', '-v', '--collect-only', + '--plugin=nose2.plugins.collect'), stderr=EXPECT_LAYOUT1) # expectations EXPECT_LAYOUT1 = re.compile("""\ Ran 25 tests in \d.\d+s OK""")
Update test to load plugin
## Code Before: import re from nose2.tests._common import FunctionalTestCase class CollectOnlyFunctionalTest(FunctionalTestCase): def test_collect_tests_in_package(self): self.assertTestRunOutputMatches( self.runIn('scenario/tests_in_package', '-v', '--collect-only'), stderr=EXPECT_LAYOUT1) # expectations EXPECT_LAYOUT1 = re.compile("""\ Ran 25 tests in \d.\d+s OK""") ## Instruction: Update test to load plugin ## Code After: import re from nose2.tests._common import FunctionalTestCase class CollectOnlyFunctionalTest(FunctionalTestCase): def test_collect_tests_in_package(self): self.assertTestRunOutputMatches( self.runIn('scenario/tests_in_package', '-v', '--collect-only', '--plugin=nose2.plugins.collect'), stderr=EXPECT_LAYOUT1) # expectations EXPECT_LAYOUT1 = re.compile("""\ Ran 25 tests in \d.\d+s OK""")
import re from nose2.tests._common import FunctionalTestCase class CollectOnlyFunctionalTest(FunctionalTestCase): def test_collect_tests_in_package(self): self.assertTestRunOutputMatches( - self.runIn('scenario/tests_in_package', '-v', '--collect-only'), ? - + self.runIn('scenario/tests_in_package', '-v', '--collect-only', + '--plugin=nose2.plugins.collect'), stderr=EXPECT_LAYOUT1) # expectations EXPECT_LAYOUT1 = re.compile("""\ Ran 25 tests in \d.\d+s OK""")
70d435e1176a1132db6a04c34c04567df354d1d9
cla_backend/apps/reports/management/commands/mi_cb1_report.py
cla_backend/apps/reports/management/commands/mi_cb1_report.py
import logging from django.core.management.base import BaseCommand logger = logging.getLogger(__name__) class Command(BaseCommand): help = "This runs the MCCB1sSLA report" def handle(self, *args, **options): self.create_report() def create_report(): print("stuff goes here") # '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "08/05/2021", "date_to": "10/05/2021"}' # report_data = json_stuff_goes_here # ExportTask().delay(user_person.pk, filename_of_report, mi_cb1_extract_agilisys, report_data)
import logging from django.core.management.base import BaseCommand from reports.tasks import ExportTask from core.models import get_web_user from django.views.decorators.csrf import csrf_exempt logger = logging.getLogger(__name__) class Command(BaseCommand): help = "This runs the MCCB1sSLA report" def handle(self, *args, **options): self.create_report() @csrf_exempt def create_report(self): report_data = '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "2021-05-08", "date_to": "2021-05-10"}' # report_data = json_stuff_goes_here web_user = get_web_user() filename_of_report = "WEEKLY-REPORT-TEST.csv" ExportTask().delay(web_user.pk, filename_of_report, "MICB1Extract", report_data)
Send weekly report to aws
Send weekly report to aws
Python
mit
ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend,ministryofjustice/cla_backend
import logging from django.core.management.base import BaseCommand - + from reports.tasks import ExportTask + from core.models import get_web_user + from django.views.decorators.csrf import csrf_exempt logger = logging.getLogger(__name__) class Command(BaseCommand): help = "This runs the MCCB1sSLA report" def handle(self, *args, **options): self.create_report() + @csrf_exempt - def create_report(): + def create_report(self): - print("stuff goes here") - - # '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "08/05/2021", "date_to": "10/05/2021"}' + report_data = '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "2021-05-08", "date_to": "2021-05-10"}' # report_data = json_stuff_goes_here + web_user = get_web_user() + filename_of_report = "WEEKLY-REPORT-TEST.csv" - # ExportTask().delay(user_person.pk, filename_of_report, mi_cb1_extract_agilisys, report_data) + ExportTask().delay(web_user.pk, filename_of_report, "MICB1Extract", report_data)
Send weekly report to aws
## Code Before: import logging from django.core.management.base import BaseCommand logger = logging.getLogger(__name__) class Command(BaseCommand): help = "This runs the MCCB1sSLA report" def handle(self, *args, **options): self.create_report() def create_report(): print("stuff goes here") # '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "08/05/2021", "date_to": "10/05/2021"}' # report_data = json_stuff_goes_here # ExportTask().delay(user_person.pk, filename_of_report, mi_cb1_extract_agilisys, report_data) ## Instruction: Send weekly report to aws ## Code After: import logging from django.core.management.base import BaseCommand from reports.tasks import ExportTask from core.models import get_web_user from django.views.decorators.csrf import csrf_exempt logger = logging.getLogger(__name__) class Command(BaseCommand): help = "This runs the MCCB1sSLA report" def handle(self, *args, **options): self.create_report() @csrf_exempt def create_report(self): report_data = '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "2021-05-08", "date_to": "2021-05-10"}' # report_data = json_stuff_goes_here web_user = get_web_user() filename_of_report = "WEEKLY-REPORT-TEST.csv" ExportTask().delay(web_user.pk, filename_of_report, "MICB1Extract", report_data)
import logging from django.core.management.base import BaseCommand - + from reports.tasks import ExportTask + from core.models import get_web_user + from django.views.decorators.csrf import csrf_exempt logger = logging.getLogger(__name__) class Command(BaseCommand): help = "This runs the MCCB1sSLA report" def handle(self, *args, **options): self.create_report() + @csrf_exempt - def create_report(): + def create_report(self): ? ++++ - print("stuff goes here") - - # '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "08/05/2021", "date_to": "10/05/2021"}' ? ^ ------ ------ + report_data = '{"action": "Export", "csrfmiddlewaretoken": "PQk4Pt55CL0NBapx9hSqZTJkSn6tL6TL", "date_from": "2021-05-08", "date_to": "2021-05-10"}' ? ^^^^^^^^^^^^^ ++++++ ++++++ # report_data = json_stuff_goes_here + web_user = get_web_user() + filename_of_report = "WEEKLY-REPORT-TEST.csv" - # ExportTask().delay(user_person.pk, filename_of_report, mi_cb1_extract_agilisys, report_data) ? ---------- ------- ^^^^^ ^^ ^^^^^^^^^ + ExportTask().delay(web_user.pk, filename_of_report, "MICB1Extract", report_data) ? ++++ ^^^^^ ^ ^
3706700e4725d23752269c2e833adfa736d0ce96
worker/jobs/session/__init__.py
worker/jobs/session/__init__.py
import os from typing import Optional, List from jobs.base.job import Job # If on a K8s cluster then use the K8s-based sessions # otherwise use the subsprocess-based session if "KUBERNETES_SERVICE_HOST" in os.environ: from .kubernetes_session import KubernetesSession Session = KubernetesSession # type: ignore else: from .subprocess_session import SubprocessSession Session = SubprocessSession # type: ignore Session.name = "session"
from typing import Type, Union from .kubernetes_session import api_instance, KubernetesSession from .subprocess_session import SubprocessSession # If on a K8s is available then use that # otherwise use the subsprocess-based session Session: Type[Union[KubernetesSession, SubprocessSession]] if api_instance is not None: Session = KubernetesSession else: Session = SubprocessSession Session.name = "session"
Improve switching between session types
fix(Worker): Improve switching between session types
Python
apache-2.0
stencila/hub,stencila/hub,stencila/hub,stencila/hub,stencila/hub
+ from typing import Type, Union - import os - from typing import Optional, List - from jobs.base.job import Job + from .kubernetes_session import api_instance, KubernetesSession + from .subprocess_session import SubprocessSession - # If on a K8s cluster then use the K8s-based sessions + # If on a K8s is available then use that # otherwise use the subsprocess-based session + Session: Type[Union[KubernetesSession, SubprocessSession]] + if api_instance is not None: - if "KUBERNETES_SERVICE_HOST" in os.environ: - from .kubernetes_session import KubernetesSession - - Session = KubernetesSession # type: ignore + Session = KubernetesSession else: - from .subprocess_session import SubprocessSession - - Session = SubprocessSession # type: ignore + Session = SubprocessSession Session.name = "session"
Improve switching between session types
## Code Before: import os from typing import Optional, List from jobs.base.job import Job # If on a K8s cluster then use the K8s-based sessions # otherwise use the subsprocess-based session if "KUBERNETES_SERVICE_HOST" in os.environ: from .kubernetes_session import KubernetesSession Session = KubernetesSession # type: ignore else: from .subprocess_session import SubprocessSession Session = SubprocessSession # type: ignore Session.name = "session" ## Instruction: Improve switching between session types ## Code After: from typing import Type, Union from .kubernetes_session import api_instance, KubernetesSession from .subprocess_session import SubprocessSession # If on a K8s is available then use that # otherwise use the subsprocess-based session Session: Type[Union[KubernetesSession, SubprocessSession]] if api_instance is not None: Session = KubernetesSession else: Session = SubprocessSession Session.name = "session"
+ from typing import Type, Union - import os - from typing import Optional, List - from jobs.base.job import Job + from .kubernetes_session import api_instance, KubernetesSession + from .subprocess_session import SubprocessSession - # If on a K8s cluster then use the K8s-based sessions + # If on a K8s is available then use that # otherwise use the subsprocess-based session + Session: Type[Union[KubernetesSession, SubprocessSession]] + if api_instance is not None: - if "KUBERNETES_SERVICE_HOST" in os.environ: - from .kubernetes_session import KubernetesSession - - Session = KubernetesSession # type: ignore ? ---------------- + Session = KubernetesSession else: - from .subprocess_session import SubprocessSession - - Session = SubprocessSession # type: ignore ? ---------------- + Session = SubprocessSession Session.name = "session"
0ac671d554f322524741a795f4a3250ef705f872
server/ec2spotmanager/migrations/0010_extend_instance_types.py
server/ec2spotmanager/migrations/0010_extend_instance_types.py
from __future__ import unicode_literals from django.db import migrations, models import ec2spotmanager.models class Migration(migrations.Migration): dependencies = [ ('ec2spotmanager', '0009_add_instance_size'), ] operations = [ migrations.AlterField( model_name='poolconfiguration', name='ec2_instance_types', field=models.CharField(blank=True, max_length=4095, null=True), ), ]
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('ec2spotmanager', '0009_add_instance_size'), ] operations = [ migrations.AlterField( model_name='poolconfiguration', name='ec2_instance_types', field=models.CharField(blank=True, max_length=4095, null=True), ), ]
Fix Flake8 error in migration.
Fix Flake8 error in migration.
Python
mpl-2.0
MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager
from __future__ import unicode_literals from django.db import migrations, models - import ec2spotmanager.models class Migration(migrations.Migration): dependencies = [ ('ec2spotmanager', '0009_add_instance_size'), ] operations = [ migrations.AlterField( model_name='poolconfiguration', name='ec2_instance_types', field=models.CharField(blank=True, max_length=4095, null=True), ), ]
Fix Flake8 error in migration.
## Code Before: from __future__ import unicode_literals from django.db import migrations, models import ec2spotmanager.models class Migration(migrations.Migration): dependencies = [ ('ec2spotmanager', '0009_add_instance_size'), ] operations = [ migrations.AlterField( model_name='poolconfiguration', name='ec2_instance_types', field=models.CharField(blank=True, max_length=4095, null=True), ), ] ## Instruction: Fix Flake8 error in migration. ## Code After: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('ec2spotmanager', '0009_add_instance_size'), ] operations = [ migrations.AlterField( model_name='poolconfiguration', name='ec2_instance_types', field=models.CharField(blank=True, max_length=4095, null=True), ), ]
from __future__ import unicode_literals from django.db import migrations, models - import ec2spotmanager.models class Migration(migrations.Migration): dependencies = [ ('ec2spotmanager', '0009_add_instance_size'), ] operations = [ migrations.AlterField( model_name='poolconfiguration', name='ec2_instance_types', field=models.CharField(blank=True, max_length=4095, null=True), ), ]
79404e483462fd71aed1150e91657becd8a5aaf8
clifford/test/test_multivector_inverse.py
clifford/test/test_multivector_inverse.py
import numpy as np import pytest import clifford as cf class TestClosedForm: @pytest.mark.parametrize('p, q', [ (p, total_dims - p) for total_dims in [1, 2, 3, 4, 5] for p in range(total_dims + 1) ]) def test_hitzer_inverse(self, p, q): Ntests = 100 layout, blades = cf.Cl(p, q) for i in range(Ntests): mv = layout.randomMV() mv_inv = mv.hitzer_inverse() assert np.all(np.abs(((mv * mv_inv) - 1.).value) < 1.e-11)
import numpy as np import pytest import clifford as cf class TestClosedForm: @pytest.mark.parametrize('p, q', [ (p, total_dims - p) for total_dims in [1, 2, 3, 4, 5] for p in range(total_dims + 1) ]) def test_hitzer_inverse(self, p, q): Ntests = 100 layout, blades = cf.Cl(p, q) for i in range(Ntests): mv = layout.randomMV() mv_inv = mv.hitzer_inverse() np.testing.assert_almost_equal((mv * mv_inv).value, (1.0 + 0*blades["e1"]).value)
Swap hitzer inverse test to use np.testing
Swap hitzer inverse test to use np.testing
Python
bsd-3-clause
arsenovic/clifford,arsenovic/clifford
import numpy as np import pytest import clifford as cf class TestClosedForm: @pytest.mark.parametrize('p, q', [ (p, total_dims - p) for total_dims in [1, 2, 3, 4, 5] for p in range(total_dims + 1) ]) def test_hitzer_inverse(self, p, q): Ntests = 100 layout, blades = cf.Cl(p, q) for i in range(Ntests): mv = layout.randomMV() mv_inv = mv.hitzer_inverse() - assert np.all(np.abs(((mv * mv_inv) - 1.).value) < 1.e-11) + np.testing.assert_almost_equal((mv * mv_inv).value, + (1.0 + 0*blades["e1"]).value)
Swap hitzer inverse test to use np.testing
## Code Before: import numpy as np import pytest import clifford as cf class TestClosedForm: @pytest.mark.parametrize('p, q', [ (p, total_dims - p) for total_dims in [1, 2, 3, 4, 5] for p in range(total_dims + 1) ]) def test_hitzer_inverse(self, p, q): Ntests = 100 layout, blades = cf.Cl(p, q) for i in range(Ntests): mv = layout.randomMV() mv_inv = mv.hitzer_inverse() assert np.all(np.abs(((mv * mv_inv) - 1.).value) < 1.e-11) ## Instruction: Swap hitzer inverse test to use np.testing ## Code After: import numpy as np import pytest import clifford as cf class TestClosedForm: @pytest.mark.parametrize('p, q', [ (p, total_dims - p) for total_dims in [1, 2, 3, 4, 5] for p in range(total_dims + 1) ]) def test_hitzer_inverse(self, p, q): Ntests = 100 layout, blades = cf.Cl(p, q) for i in range(Ntests): mv = layout.randomMV() mv_inv = mv.hitzer_inverse() np.testing.assert_almost_equal((mv * mv_inv).value, (1.0 + 0*blades["e1"]).value)
import numpy as np import pytest import clifford as cf class TestClosedForm: @pytest.mark.parametrize('p, q', [ (p, total_dims - p) for total_dims in [1, 2, 3, 4, 5] for p in range(total_dims + 1) ]) def test_hitzer_inverse(self, p, q): Ntests = 100 layout, blades = cf.Cl(p, q) for i in range(Ntests): mv = layout.randomMV() mv_inv = mv.hitzer_inverse() - assert np.all(np.abs(((mv * mv_inv) - 1.).value) < 1.e-11) + np.testing.assert_almost_equal((mv * mv_inv).value, + (1.0 + 0*blades["e1"]).value)
83598d24c46683b7d2eb3e99d39cbd5babba5073
tests/api/views/clubs/create_test.py
tests/api/views/clubs/create_test.py
from skylines.model import Club from tests.api import basic_auth def test_create(db_session, client, test_user): headers = basic_auth(test_user.email_address, test_user.original_password) res = client.put('/clubs', headers=headers, json={ 'name': 'LV Aachen', }) assert res.status_code == 200 assert Club.get(res.json['id'])
from skylines.model import Club from tests.api import basic_auth from tests.data import add_fixtures, clubs def test_create(db_session, client, test_user): headers = basic_auth(test_user.email_address, test_user.original_password) res = client.put('/clubs', headers=headers, json={ 'name': 'LV Aachen', }) assert res.status_code == 200 club = Club.get(res.json['id']) assert club assert club.owner_id == test_user.id def test_without_authentication(db_session, client): res = client.put('/clubs', json={ 'name': 'LV Aachen', }) assert res.status_code == 401 assert res.json['error'] == 'invalid_token' def test_non_json_data(db_session, client, test_user): headers = basic_auth(test_user.email_address, test_user.original_password) res = client.put('/clubs', headers=headers, data='foobar?') assert res.status_code == 400 assert res.json['error'] == 'invalid-request' def test_invalid_data(db_session, client, test_user): headers = basic_auth(test_user.email_address, test_user.original_password) res = client.put('/clubs', headers=headers, json={ 'name': '', }) assert res.status_code == 422 assert res.json['error'] == 'validation-failed' def test_existing_club(db_session, client, test_user): lva = clubs.lva() add_fixtures(db_session, lva) headers = basic_auth(test_user.email_address, test_user.original_password) res = client.put('/clubs', headers=headers, json={ 'name': 'LV Aachen', }) assert res.status_code == 422 assert res.json['error'] == 'duplicate-club-name'
Add more "PUT /clubs" tests
tests/api: Add more "PUT /clubs" tests
Python
agpl-3.0
RBE-Avionik/skylines,Harry-R/skylines,shadowoneau/skylines,skylines-project/skylines,Harry-R/skylines,shadowoneau/skylines,Turbo87/skylines,Turbo87/skylines,RBE-Avionik/skylines,Harry-R/skylines,Turbo87/skylines,skylines-project/skylines,skylines-project/skylines,Turbo87/skylines,shadowoneau/skylines,RBE-Avionik/skylines,skylines-project/skylines,Harry-R/skylines,shadowoneau/skylines,RBE-Avionik/skylines
from skylines.model import Club from tests.api import basic_auth + from tests.data import add_fixtures, clubs def test_create(db_session, client, test_user): headers = basic_auth(test_user.email_address, test_user.original_password) res = client.put('/clubs', headers=headers, json={ 'name': 'LV Aachen', }) assert res.status_code == 200 - assert Club.get(res.json['id']) + club = Club.get(res.json['id']) + assert club + assert club.owner_id == test_user.id + + + def test_without_authentication(db_session, client): + res = client.put('/clubs', json={ + 'name': 'LV Aachen', + }) + assert res.status_code == 401 + assert res.json['error'] == 'invalid_token' + + + def test_non_json_data(db_session, client, test_user): + headers = basic_auth(test_user.email_address, test_user.original_password) + + res = client.put('/clubs', headers=headers, data='foobar?') + assert res.status_code == 400 + assert res.json['error'] == 'invalid-request' + + + def test_invalid_data(db_session, client, test_user): + headers = basic_auth(test_user.email_address, test_user.original_password) + + res = client.put('/clubs', headers=headers, json={ + 'name': '', + }) + assert res.status_code == 422 + assert res.json['error'] == 'validation-failed' + + + def test_existing_club(db_session, client, test_user): + lva = clubs.lva() + add_fixtures(db_session, lva) + + headers = basic_auth(test_user.email_address, test_user.original_password) + + res = client.put('/clubs', headers=headers, json={ + 'name': 'LV Aachen', + }) + assert res.status_code == 422 + assert res.json['error'] == 'duplicate-club-name' +
Add more "PUT /clubs" tests
## Code Before: from skylines.model import Club from tests.api import basic_auth def test_create(db_session, client, test_user): headers = basic_auth(test_user.email_address, test_user.original_password) res = client.put('/clubs', headers=headers, json={ 'name': 'LV Aachen', }) assert res.status_code == 200 assert Club.get(res.json['id']) ## Instruction: Add more "PUT /clubs" tests ## Code After: from skylines.model import Club from tests.api import basic_auth from tests.data import add_fixtures, clubs def test_create(db_session, client, test_user): headers = basic_auth(test_user.email_address, test_user.original_password) res = client.put('/clubs', headers=headers, json={ 'name': 'LV Aachen', }) assert res.status_code == 200 club = Club.get(res.json['id']) assert club assert club.owner_id == test_user.id def test_without_authentication(db_session, client): res = client.put('/clubs', json={ 'name': 'LV Aachen', }) assert res.status_code == 401 assert res.json['error'] == 'invalid_token' def test_non_json_data(db_session, client, test_user): headers = basic_auth(test_user.email_address, test_user.original_password) res = client.put('/clubs', headers=headers, data='foobar?') assert res.status_code == 400 assert res.json['error'] == 'invalid-request' def test_invalid_data(db_session, client, test_user): headers = basic_auth(test_user.email_address, test_user.original_password) res = client.put('/clubs', headers=headers, json={ 'name': '', }) assert res.status_code == 422 assert res.json['error'] == 'validation-failed' def test_existing_club(db_session, client, test_user): lva = clubs.lva() add_fixtures(db_session, lva) headers = basic_auth(test_user.email_address, test_user.original_password) res = client.put('/clubs', headers=headers, json={ 'name': 'LV Aachen', }) assert res.status_code == 422 assert res.json['error'] == 'duplicate-club-name'
from skylines.model import Club from tests.api import basic_auth + from tests.data import add_fixtures, clubs def test_create(db_session, client, test_user): headers = basic_auth(test_user.email_address, test_user.original_password) res = client.put('/clubs', headers=headers, json={ 'name': 'LV Aachen', }) assert res.status_code == 200 + - assert Club.get(res.json['id']) ? ^^^^^^ + club = Club.get(res.json['id']) ? ^^^^^^ + assert club + assert club.owner_id == test_user.id + + + def test_without_authentication(db_session, client): + res = client.put('/clubs', json={ + 'name': 'LV Aachen', + }) + assert res.status_code == 401 + assert res.json['error'] == 'invalid_token' + + + def test_non_json_data(db_session, client, test_user): + headers = basic_auth(test_user.email_address, test_user.original_password) + + res = client.put('/clubs', headers=headers, data='foobar?') + assert res.status_code == 400 + assert res.json['error'] == 'invalid-request' + + + def test_invalid_data(db_session, client, test_user): + headers = basic_auth(test_user.email_address, test_user.original_password) + + res = client.put('/clubs', headers=headers, json={ + 'name': '', + }) + assert res.status_code == 422 + assert res.json['error'] == 'validation-failed' + + + def test_existing_club(db_session, client, test_user): + lva = clubs.lva() + add_fixtures(db_session, lva) + + headers = basic_auth(test_user.email_address, test_user.original_password) + + res = client.put('/clubs', headers=headers, json={ + 'name': 'LV Aachen', + }) + assert res.status_code == 422 + assert res.json['error'] == 'duplicate-club-name'
48ae2127fcd2e6b1ba1b0d2649d936991a30881b
juliet.py
juliet.py
import argparse, sys from src import Configurator, Builder, Loader def main(): """ Parse command line arguments and execute passed subcommands. """ # Parse subcommand parser = argparse.ArgumentParser(description='Pythonic static sites generator') subparsers = parser.add_subparsers(dest="sp", help="sub-command to be executed") parser_build = subparsers.add_parser('build', help="Build static site from local directory") args = parser.parse_args() # Execute passed sub-command or return error if(args.sp == "build"): build(args) def build(args): """ Build website to configured location. """ # Parse configuration and define Environment config = {} config["site"] = Configurator.getConfig() config["posts"] = Loader.getFromFolder("posts/", args) config["pages"] = Loader.getFromFolder("pages/", args) jinjaEnv = Configurator.configureJinja(config["site"]) print(config) # Build statics Builder.buildStatics(config) # Build posts and pages Builder.buildPosts(config, jinjaEnv) # Build page and pages #Builder.buildPages(config, jinjaEnv) if __name__ == "__main__": main()
import argparse, sys from src import Configurator, Builder, Loader def main(): """ Parse command line arguments and execute passed subcommands. """ # Parse subcommand parser = argparse.ArgumentParser(description='Pythonic static sites generator') subparsers = parser.add_subparsers(dest="sp", help="sub-command to be executed") parser_build = subparsers.add_parser('build', help="Build static site from local directory") args = parser.parse_args() # Execute passed sub-command or return error if(args.sp == "build"): build(args) def build(args): """ Build website to configured location. """ # Parse configuration config = {} config["site"] = Configurator.getConfig() # Load articles, pages and static elements from the files config["posts"] = Loader.getFromFolder("posts/", args) config["pages"] = Loader.getFromFolder("pages/", args) config["statics"] = Loader.getFromFolder("themes/" + config["site"]["theme"] + "/statics/", args) # Configure Jinja2 environment jinjaEnv = Configurator.configureJinja(config["site"]) print(config) # Build statics Builder.buildStatics(config) # Build posts and pages Builder.buildPosts(config, jinjaEnv) Builder.buildPages(config, jinjaEnv) if __name__ == "__main__": main()
Load statics like posts and pages. Documentation.
Load statics like posts and pages. Documentation.
Python
mit
hlef/juliet,hlef/juliet,hlef/juliet
import argparse, sys from src import Configurator, Builder, Loader def main(): """ Parse command line arguments and execute passed subcommands. """ # Parse subcommand parser = argparse.ArgumentParser(description='Pythonic static sites generator') subparsers = parser.add_subparsers(dest="sp", help="sub-command to be executed") parser_build = subparsers.add_parser('build', help="Build static site from local directory") args = parser.parse_args() # Execute passed sub-command or return error if(args.sp == "build"): build(args) def build(args): """ Build website to configured location. """ - # Parse configuration and define Environment + # Parse configuration config = {} config["site"] = Configurator.getConfig() + + # Load articles, pages and static elements from the files config["posts"] = Loader.getFromFolder("posts/", args) config["pages"] = Loader.getFromFolder("pages/", args) + config["statics"] = Loader.getFromFolder("themes/" + config["site"]["theme"] + "/statics/", args) + + # Configure Jinja2 environment jinjaEnv = Configurator.configureJinja(config["site"]) print(config) # Build statics Builder.buildStatics(config) # Build posts and pages Builder.buildPosts(config, jinjaEnv) - - # Build page and pages - #Builder.buildPages(config, jinjaEnv) + Builder.buildPages(config, jinjaEnv) if __name__ == "__main__": main()
Load statics like posts and pages. Documentation.
## Code Before: import argparse, sys from src import Configurator, Builder, Loader def main(): """ Parse command line arguments and execute passed subcommands. """ # Parse subcommand parser = argparse.ArgumentParser(description='Pythonic static sites generator') subparsers = parser.add_subparsers(dest="sp", help="sub-command to be executed") parser_build = subparsers.add_parser('build', help="Build static site from local directory") args = parser.parse_args() # Execute passed sub-command or return error if(args.sp == "build"): build(args) def build(args): """ Build website to configured location. """ # Parse configuration and define Environment config = {} config["site"] = Configurator.getConfig() config["posts"] = Loader.getFromFolder("posts/", args) config["pages"] = Loader.getFromFolder("pages/", args) jinjaEnv = Configurator.configureJinja(config["site"]) print(config) # Build statics Builder.buildStatics(config) # Build posts and pages Builder.buildPosts(config, jinjaEnv) # Build page and pages #Builder.buildPages(config, jinjaEnv) if __name__ == "__main__": main() ## Instruction: Load statics like posts and pages. Documentation. ## Code After: import argparse, sys from src import Configurator, Builder, Loader def main(): """ Parse command line arguments and execute passed subcommands. """ # Parse subcommand parser = argparse.ArgumentParser(description='Pythonic static sites generator') subparsers = parser.add_subparsers(dest="sp", help="sub-command to be executed") parser_build = subparsers.add_parser('build', help="Build static site from local directory") args = parser.parse_args() # Execute passed sub-command or return error if(args.sp == "build"): build(args) def build(args): """ Build website to configured location. 
""" # Parse configuration config = {} config["site"] = Configurator.getConfig() # Load articles, pages and static elements from the files config["posts"] = Loader.getFromFolder("posts/", args) config["pages"] = Loader.getFromFolder("pages/", args) config["statics"] = Loader.getFromFolder("themes/" + config["site"]["theme"] + "/statics/", args) # Configure Jinja2 environment jinjaEnv = Configurator.configureJinja(config["site"]) print(config) # Build statics Builder.buildStatics(config) # Build posts and pages Builder.buildPosts(config, jinjaEnv) Builder.buildPages(config, jinjaEnv) if __name__ == "__main__": main()
import argparse, sys from src import Configurator, Builder, Loader def main(): """ Parse command line arguments and execute passed subcommands. """ # Parse subcommand parser = argparse.ArgumentParser(description='Pythonic static sites generator') subparsers = parser.add_subparsers(dest="sp", help="sub-command to be executed") parser_build = subparsers.add_parser('build', help="Build static site from local directory") args = parser.parse_args() # Execute passed sub-command or return error if(args.sp == "build"): build(args) def build(args): """ Build website to configured location. """ - # Parse configuration and define Environment + # Parse configuration config = {} config["site"] = Configurator.getConfig() + + # Load articles, pages and static elements from the files config["posts"] = Loader.getFromFolder("posts/", args) config["pages"] = Loader.getFromFolder("pages/", args) + config["statics"] = Loader.getFromFolder("themes/" + config["site"]["theme"] + "/statics/", args) + + # Configure Jinja2 environment jinjaEnv = Configurator.configureJinja(config["site"]) print(config) # Build statics Builder.buildStatics(config) # Build posts and pages Builder.buildPosts(config, jinjaEnv) - - # Build page and pages - #Builder.buildPages(config, jinjaEnv) ? - + Builder.buildPages(config, jinjaEnv) if __name__ == "__main__": main()
3e30737c98a4a9e890d362ba4cbbb2315163bc29
cfgov/v1/__init__.py
cfgov/v1/__init__.py
from __future__ import absolute_import # Python 2 only from django.contrib.staticfiles.storage import staticfiles_storage from django.core.urlresolvers import reverse from django.template.defaultfilters import slugify from wagtail.wagtailcore.templatetags import wagtailcore_tags from wagtail.wagtailadmin.templatetags import wagtailuserbar from jinja2 import Environment from compressor.contrib.jinja2ext import CompressorExtension def environment(**options): options.setdefault('extensions', []).append(CompressorExtension) env = Environment(**options) env.globals.update({ 'static': staticfiles_storage.url, 'reverse': reverse, 'wagtailuserbar': wagtailuserbar.wagtailuserbar }) env.filters.update({ 'slugify': slugify, 'richtext': wagtailcore_tags.richtext }) return env
from django.contrib.staticfiles.storage import staticfiles_storage from django.core.urlresolvers import reverse from django.template.defaultfilters import slugify from wagtail.wagtailcore.templatetags import wagtailcore_tags from wagtail.wagtailadmin.templatetags import wagtailuserbar from jinja2 import Environment from compressor.contrib.jinja2ext import CompressorExtension def environment(**options): options.setdefault('extensions', []).append(CompressorExtension) env = Environment(**options) env.globals.update({ 'static': staticfiles_storage.url, 'reverse': reverse, 'wagtailuserbar': wagtailuserbar.wagtailuserbar }) env.filters.update({ 'slugify': slugify, 'richtext': wagtailcore_tags.richtext }) return env
Remove unused Python 2 support
Remove unused Python 2 support
Python
cc0-1.0
kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh,kave/cfgov-refresh
- from __future__ import absolute_import # Python 2 only - from django.contrib.staticfiles.storage import staticfiles_storage from django.core.urlresolvers import reverse from django.template.defaultfilters import slugify from wagtail.wagtailcore.templatetags import wagtailcore_tags from wagtail.wagtailadmin.templatetags import wagtailuserbar from jinja2 import Environment from compressor.contrib.jinja2ext import CompressorExtension def environment(**options): options.setdefault('extensions', []).append(CompressorExtension) env = Environment(**options) env.globals.update({ 'static': staticfiles_storage.url, 'reverse': reverse, 'wagtailuserbar': wagtailuserbar.wagtailuserbar }) env.filters.update({ 'slugify': slugify, 'richtext': wagtailcore_tags.richtext }) return env
Remove unused Python 2 support
## Code Before: from __future__ import absolute_import # Python 2 only from django.contrib.staticfiles.storage import staticfiles_storage from django.core.urlresolvers import reverse from django.template.defaultfilters import slugify from wagtail.wagtailcore.templatetags import wagtailcore_tags from wagtail.wagtailadmin.templatetags import wagtailuserbar from jinja2 import Environment from compressor.contrib.jinja2ext import CompressorExtension def environment(**options): options.setdefault('extensions', []).append(CompressorExtension) env = Environment(**options) env.globals.update({ 'static': staticfiles_storage.url, 'reverse': reverse, 'wagtailuserbar': wagtailuserbar.wagtailuserbar }) env.filters.update({ 'slugify': slugify, 'richtext': wagtailcore_tags.richtext }) return env ## Instruction: Remove unused Python 2 support ## Code After: from django.contrib.staticfiles.storage import staticfiles_storage from django.core.urlresolvers import reverse from django.template.defaultfilters import slugify from wagtail.wagtailcore.templatetags import wagtailcore_tags from wagtail.wagtailadmin.templatetags import wagtailuserbar from jinja2 import Environment from compressor.contrib.jinja2ext import CompressorExtension def environment(**options): options.setdefault('extensions', []).append(CompressorExtension) env = Environment(**options) env.globals.update({ 'static': staticfiles_storage.url, 'reverse': reverse, 'wagtailuserbar': wagtailuserbar.wagtailuserbar }) env.filters.update({ 'slugify': slugify, 'richtext': wagtailcore_tags.richtext }) return env
- from __future__ import absolute_import # Python 2 only - from django.contrib.staticfiles.storage import staticfiles_storage from django.core.urlresolvers import reverse from django.template.defaultfilters import slugify from wagtail.wagtailcore.templatetags import wagtailcore_tags from wagtail.wagtailadmin.templatetags import wagtailuserbar from jinja2 import Environment from compressor.contrib.jinja2ext import CompressorExtension def environment(**options): options.setdefault('extensions', []).append(CompressorExtension) env = Environment(**options) env.globals.update({ 'static': staticfiles_storage.url, 'reverse': reverse, 'wagtailuserbar': wagtailuserbar.wagtailuserbar }) env.filters.update({ 'slugify': slugify, 'richtext': wagtailcore_tags.richtext }) return env
ada858de787991c885030bb122e50df36b6fdc11
github3/__init__.py
github3/__init__.py
__title__ = 'github3' __author__ = 'Ian Cordasco' __license__ = 'Modified BSD' __copyright__ = 'Copyright 2012 Ian Cordasco' __version__ = '0.1a5' from .api import * from .github import GitHub from .models import GitHubError from .event import Event from .gist import Gist, GistComment, GistFile from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash from .issue import Issue, IssueComment, IssueEvent, Label, Milestone from .legacy import LegacyUser, LegacyRepo, LegacyIssue from .org import Organization, Team from .pulls import PullRequest from .repo import Repository, Branch from .user import User
__title__ = 'github3' __author__ = 'Ian Cordasco' __license__ = 'Modified BSD' __copyright__ = 'Copyright 2012 Ian Cordasco' __version__ = '0.1a5' from .api import * from .github import GitHub from .models import GitHubError
Clean up namespace as mentioned.
Clean up namespace as mentioned.
Python
bsd-3-clause
balloob/github3.py,krxsky/github3.py,icio/github3.py,sigmavirus24/github3.py,wbrefvem/github3.py,christophelec/github3.py,ueg1990/github3.py,itsmemattchung/github3.py,agamdua/github3.py,h4ck3rm1k3/github3.py,degustaf/github3.py,jim-minter/github3.py
__title__ = 'github3' __author__ = 'Ian Cordasco' __license__ = 'Modified BSD' __copyright__ = 'Copyright 2012 Ian Cordasco' __version__ = '0.1a5' from .api import * from .github import GitHub from .models import GitHubError - from .event import Event - from .gist import Gist, GistComment, GistFile - from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash - from .issue import Issue, IssueComment, IssueEvent, Label, Milestone - from .legacy import LegacyUser, LegacyRepo, LegacyIssue - from .org import Organization, Team - from .pulls import PullRequest - from .repo import Repository, Branch - from .user import User
Clean up namespace as mentioned.
## Code Before: __title__ = 'github3' __author__ = 'Ian Cordasco' __license__ = 'Modified BSD' __copyright__ = 'Copyright 2012 Ian Cordasco' __version__ = '0.1a5' from .api import * from .github import GitHub from .models import GitHubError from .event import Event from .gist import Gist, GistComment, GistFile from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash from .issue import Issue, IssueComment, IssueEvent, Label, Milestone from .legacy import LegacyUser, LegacyRepo, LegacyIssue from .org import Organization, Team from .pulls import PullRequest from .repo import Repository, Branch from .user import User ## Instruction: Clean up namespace as mentioned. ## Code After: __title__ = 'github3' __author__ = 'Ian Cordasco' __license__ = 'Modified BSD' __copyright__ = 'Copyright 2012 Ian Cordasco' __version__ = '0.1a5' from .api import * from .github import GitHub from .models import GitHubError
__title__ = 'github3' __author__ = 'Ian Cordasco' __license__ = 'Modified BSD' __copyright__ = 'Copyright 2012 Ian Cordasco' __version__ = '0.1a5' from .api import * from .github import GitHub from .models import GitHubError - from .event import Event - from .gist import Gist, GistComment, GistFile - from .git import Blob, GitData, Commit, Reference, GitObject, Tag, Tree, Hash - from .issue import Issue, IssueComment, IssueEvent, Label, Milestone - from .legacy import LegacyUser, LegacyRepo, LegacyIssue - from .org import Organization, Team - from .pulls import PullRequest - from .repo import Repository, Branch - from .user import User
df8c19fe4679aa0d4fff90a15efcf4183a8ec8c1
api/v2/serializers/details/image_version.py
api/v2/serializers/details/image_version.py
from core.models import ApplicationVersion as ImageVersion from rest_framework import serializers from api.v2.serializers.summaries import LicenseSerializer from api.v2.serializers.summaries import ImageVersionSummarySerializer from api.v2.serializers.fields import ProviderMachineRelatedField class ImageVersionSerializer(serializers.HyperlinkedModelSerializer): """ Serializer for ApplicationVersion (aka 'image_version') """ # NOTE: Implicitly included via 'fields' # id, application parent = ImageVersionSummarySerializer() #name, change_log, allow_imaging licenses = LicenseSerializer(many=True, read_only=True) # NEW membership = serializers.SlugRelatedField( slug_field='name', read_only=True, many=True) # NEW machines = ProviderMachineRelatedField(many=True) start_date = serializers.DateTimeField() end_date = serializers.DateTimeField() class Meta: model = ImageVersion view_name = 'api:v2:providermachine-detail' fields = ('id', 'parent', 'name', 'change_log', 'machines', 'allow_imaging', 'licenses', 'membership', 'start_date', 'end_date')
from core.models import ApplicationVersion as ImageVersion from rest_framework import serializers from api.v2.serializers.summaries import ( LicenseSerializer, UserSummarySerializer, IdentitySummarySerializer, ImageVersionSummarySerializer) from api.v2.serializers.fields import ProviderMachineRelatedField class ImageVersionSerializer(serializers.HyperlinkedModelSerializer): """ Serializer for ApplicationVersion (aka 'image_version') """ # NOTE: Implicitly included via 'fields' # id, application parent = ImageVersionSummarySerializer() #name, change_log, allow_imaging licenses = LicenseSerializer(many=True, read_only=True) # NEW membership = serializers.SlugRelatedField( slug_field='name', read_only=True, many=True) # NEW user = UserSummarySerializer(source='created_by') identity = IdentitySummarySerializer(source='created_by_identity') machines = ProviderMachineRelatedField(many=True) start_date = serializers.DateTimeField() end_date = serializers.DateTimeField() class Meta: model = ImageVersion view_name = 'api:v2:providermachine-detail' fields = ('id', 'parent', 'name', 'change_log', 'machines', 'allow_imaging', 'licenses', 'membership', 'start_date', 'end_date')
Add 'user' and 'identity' attributes to the ImageVersion Details Serializer
Add 'user' and 'identity' attributes to the ImageVersion Details Serializer
Python
apache-2.0
CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend,CCI-MOC/GUI-Backend
from core.models import ApplicationVersion as ImageVersion from rest_framework import serializers - from api.v2.serializers.summaries import LicenseSerializer + from api.v2.serializers.summaries import ( - from api.v2.serializers.summaries import ImageVersionSummarySerializer + LicenseSerializer, + UserSummarySerializer, + IdentitySummarySerializer, + ImageVersionSummarySerializer) from api.v2.serializers.fields import ProviderMachineRelatedField class ImageVersionSerializer(serializers.HyperlinkedModelSerializer): """ Serializer for ApplicationVersion (aka 'image_version') """ # NOTE: Implicitly included via 'fields' # id, application parent = ImageVersionSummarySerializer() #name, change_log, allow_imaging licenses = LicenseSerializer(many=True, read_only=True) # NEW membership = serializers.SlugRelatedField( slug_field='name', read_only=True, many=True) # NEW + user = UserSummarySerializer(source='created_by') + identity = IdentitySummarySerializer(source='created_by_identity') machines = ProviderMachineRelatedField(many=True) start_date = serializers.DateTimeField() end_date = serializers.DateTimeField() class Meta: model = ImageVersion view_name = 'api:v2:providermachine-detail' fields = ('id', 'parent', 'name', 'change_log', 'machines', 'allow_imaging', 'licenses', 'membership', 'start_date', 'end_date')
Add 'user' and 'identity' attributes to the ImageVersion Details Serializer
## Code Before: from core.models import ApplicationVersion as ImageVersion from rest_framework import serializers from api.v2.serializers.summaries import LicenseSerializer from api.v2.serializers.summaries import ImageVersionSummarySerializer from api.v2.serializers.fields import ProviderMachineRelatedField class ImageVersionSerializer(serializers.HyperlinkedModelSerializer): """ Serializer for ApplicationVersion (aka 'image_version') """ # NOTE: Implicitly included via 'fields' # id, application parent = ImageVersionSummarySerializer() #name, change_log, allow_imaging licenses = LicenseSerializer(many=True, read_only=True) # NEW membership = serializers.SlugRelatedField( slug_field='name', read_only=True, many=True) # NEW machines = ProviderMachineRelatedField(many=True) start_date = serializers.DateTimeField() end_date = serializers.DateTimeField() class Meta: model = ImageVersion view_name = 'api:v2:providermachine-detail' fields = ('id', 'parent', 'name', 'change_log', 'machines', 'allow_imaging', 'licenses', 'membership', 'start_date', 'end_date') ## Instruction: Add 'user' and 'identity' attributes to the ImageVersion Details Serializer ## Code After: from core.models import ApplicationVersion as ImageVersion from rest_framework import serializers from api.v2.serializers.summaries import ( LicenseSerializer, UserSummarySerializer, IdentitySummarySerializer, ImageVersionSummarySerializer) from api.v2.serializers.fields import ProviderMachineRelatedField class ImageVersionSerializer(serializers.HyperlinkedModelSerializer): """ Serializer for ApplicationVersion (aka 'image_version') """ # NOTE: Implicitly included via 'fields' # id, application parent = ImageVersionSummarySerializer() #name, change_log, allow_imaging licenses = LicenseSerializer(many=True, read_only=True) # NEW membership = serializers.SlugRelatedField( slug_field='name', read_only=True, many=True) # NEW user = UserSummarySerializer(source='created_by') identity = 
IdentitySummarySerializer(source='created_by_identity') machines = ProviderMachineRelatedField(many=True) start_date = serializers.DateTimeField() end_date = serializers.DateTimeField() class Meta: model = ImageVersion view_name = 'api:v2:providermachine-detail' fields = ('id', 'parent', 'name', 'change_log', 'machines', 'allow_imaging', 'licenses', 'membership', 'start_date', 'end_date')
from core.models import ApplicationVersion as ImageVersion from rest_framework import serializers - from api.v2.serializers.summaries import LicenseSerializer ? ^^^^^^^^^^^^^^^^^ + from api.v2.serializers.summaries import ( ? ^ - from api.v2.serializers.summaries import ImageVersionSummarySerializer + LicenseSerializer, + UserSummarySerializer, + IdentitySummarySerializer, + ImageVersionSummarySerializer) from api.v2.serializers.fields import ProviderMachineRelatedField class ImageVersionSerializer(serializers.HyperlinkedModelSerializer): """ Serializer for ApplicationVersion (aka 'image_version') """ # NOTE: Implicitly included via 'fields' # id, application parent = ImageVersionSummarySerializer() #name, change_log, allow_imaging licenses = LicenseSerializer(many=True, read_only=True) # NEW membership = serializers.SlugRelatedField( slug_field='name', read_only=True, many=True) # NEW + user = UserSummarySerializer(source='created_by') + identity = IdentitySummarySerializer(source='created_by_identity') machines = ProviderMachineRelatedField(many=True) start_date = serializers.DateTimeField() end_date = serializers.DateTimeField() class Meta: model = ImageVersion view_name = 'api:v2:providermachine-detail' fields = ('id', 'parent', 'name', 'change_log', 'machines', 'allow_imaging', 'licenses', 'membership', 'start_date', 'end_date')
ed1a14ef8f2038950b7e56c7ae5c21daa1d6618a
ordered_model/models.py
ordered_model/models.py
from django.contrib.contenttypes.models import ContentType from django.core.urlresolvers import reverse from django.db import models class OrderedModel(models.Model): """ An abstract model that allows objects to be ordered relative to each other. Provides an ``order`` field. """ order = models.PositiveIntegerField(editable=False, db_index=True) class Meta: abstract = True ordering = ('order',) def save(self, *args, **kwargs): if not self.id: qs = self.__class__.objects.order_by('-order') try: self.order = qs[0].order + 1 except IndexError: self.order = 0 super(OrderedModel, self).save(*args, **kwargs) def _move(self, up, qs=None): if qs is None: qs = self.__class__._default_manager if up: qs = qs.order_by('-order').filter(order__lt=self.order) else: qs = qs.filter(order__gt=self.order) try: replacement = qs[0] except IndexError: # already first/last return self.order, replacement.order = replacement.order, self.order self.save() replacement.save() def move(self, direction, qs=None): self._move(direction == 'up', qs) def move_down(self): """ Move this object down one position. """ return self._move(up=False) def move_up(self): """ Move this object up one position. """ return self._move(up=True)
from django.contrib.contenttypes.models import ContentType from django.core.urlresolvers import reverse from django.db import models from django.db.models import Max class OrderedModel(models.Model): """ An abstract model that allows objects to be ordered relative to each other. Provides an ``order`` field. """ order = models.PositiveIntegerField(editable=False, db_index=True) class Meta: abstract = True ordering = ('order',) def save(self, *args, **kwargs): if not self.id: c = self.__class__.objects.all().aggregate(Max('order')).get('order__max') self.order = c and c + 1 or 0 super(OrderedModel, self).save(*args, **kwargs) def _move(self, up, qs=None): if qs is None: qs = self.__class__._default_manager if up: qs = qs.order_by('-order').filter(order__lt=self.order) else: qs = qs.filter(order__gt=self.order) try: replacement = qs[0] except IndexError: # already first/last return self.order, replacement.order = replacement.order, self.order self.save() replacement.save() def move(self, direction, qs=None): self._move(direction == 'up', qs) def move_down(self): """ Move this object down one position. """ return self._move(up=False) def move_up(self): """ Move this object up one position. """ return self._move(up=True)
Use aggregate Max to fetch new order value.
Use aggregate Max to fetch new order value.
Python
bsd-3-clause
foozmeat/django-ordered-model,foozmeat/django-ordered-model,pombredanne/django-ordered-model,pombredanne/django-ordered-model,pombredanne/django-ordered-model,foozmeat/django-ordered-model
from django.contrib.contenttypes.models import ContentType from django.core.urlresolvers import reverse from django.db import models + from django.db.models import Max class OrderedModel(models.Model): """ An abstract model that allows objects to be ordered relative to each other. Provides an ``order`` field. """ order = models.PositiveIntegerField(editable=False, db_index=True) class Meta: abstract = True ordering = ('order',) def save(self, *args, **kwargs): if not self.id: + c = self.__class__.objects.all().aggregate(Max('order')).get('order__max') + self.order = c and c + 1 or 0 - qs = self.__class__.objects.order_by('-order') - try: - self.order = qs[0].order + 1 - except IndexError: - self.order = 0 super(OrderedModel, self).save(*args, **kwargs) def _move(self, up, qs=None): if qs is None: qs = self.__class__._default_manager if up: qs = qs.order_by('-order').filter(order__lt=self.order) else: qs = qs.filter(order__gt=self.order) try: replacement = qs[0] except IndexError: # already first/last return self.order, replacement.order = replacement.order, self.order self.save() replacement.save() def move(self, direction, qs=None): self._move(direction == 'up', qs) def move_down(self): """ Move this object down one position. """ return self._move(up=False) def move_up(self): """ Move this object up one position. """ return self._move(up=True)
Use aggregate Max to fetch new order value.
## Code Before: from django.contrib.contenttypes.models import ContentType from django.core.urlresolvers import reverse from django.db import models class OrderedModel(models.Model): """ An abstract model that allows objects to be ordered relative to each other. Provides an ``order`` field. """ order = models.PositiveIntegerField(editable=False, db_index=True) class Meta: abstract = True ordering = ('order',) def save(self, *args, **kwargs): if not self.id: qs = self.__class__.objects.order_by('-order') try: self.order = qs[0].order + 1 except IndexError: self.order = 0 super(OrderedModel, self).save(*args, **kwargs) def _move(self, up, qs=None): if qs is None: qs = self.__class__._default_manager if up: qs = qs.order_by('-order').filter(order__lt=self.order) else: qs = qs.filter(order__gt=self.order) try: replacement = qs[0] except IndexError: # already first/last return self.order, replacement.order = replacement.order, self.order self.save() replacement.save() def move(self, direction, qs=None): self._move(direction == 'up', qs) def move_down(self): """ Move this object down one position. """ return self._move(up=False) def move_up(self): """ Move this object up one position. """ return self._move(up=True) ## Instruction: Use aggregate Max to fetch new order value. ## Code After: from django.contrib.contenttypes.models import ContentType from django.core.urlresolvers import reverse from django.db import models from django.db.models import Max class OrderedModel(models.Model): """ An abstract model that allows objects to be ordered relative to each other. Provides an ``order`` field. 
""" order = models.PositiveIntegerField(editable=False, db_index=True) class Meta: abstract = True ordering = ('order',) def save(self, *args, **kwargs): if not self.id: c = self.__class__.objects.all().aggregate(Max('order')).get('order__max') self.order = c and c + 1 or 0 super(OrderedModel, self).save(*args, **kwargs) def _move(self, up, qs=None): if qs is None: qs = self.__class__._default_manager if up: qs = qs.order_by('-order').filter(order__lt=self.order) else: qs = qs.filter(order__gt=self.order) try: replacement = qs[0] except IndexError: # already first/last return self.order, replacement.order = replacement.order, self.order self.save() replacement.save() def move(self, direction, qs=None): self._move(direction == 'up', qs) def move_down(self): """ Move this object down one position. """ return self._move(up=False) def move_up(self): """ Move this object up one position. """ return self._move(up=True)
from django.contrib.contenttypes.models import ContentType from django.core.urlresolvers import reverse from django.db import models + from django.db.models import Max class OrderedModel(models.Model): """ An abstract model that allows objects to be ordered relative to each other. Provides an ``order`` field. """ order = models.PositiveIntegerField(editable=False, db_index=True) class Meta: abstract = True ordering = ('order',) def save(self, *args, **kwargs): if not self.id: + c = self.__class__.objects.all().aggregate(Max('order')).get('order__max') + self.order = c and c + 1 or 0 - qs = self.__class__.objects.order_by('-order') - try: - self.order = qs[0].order + 1 - except IndexError: - self.order = 0 super(OrderedModel, self).save(*args, **kwargs) def _move(self, up, qs=None): if qs is None: qs = self.__class__._default_manager if up: qs = qs.order_by('-order').filter(order__lt=self.order) else: qs = qs.filter(order__gt=self.order) try: replacement = qs[0] except IndexError: # already first/last return self.order, replacement.order = replacement.order, self.order self.save() replacement.save() def move(self, direction, qs=None): self._move(direction == 'up', qs) def move_down(self): """ Move this object down one position. """ return self._move(up=False) def move_up(self): """ Move this object up one position. """ return self._move(up=True)
d6b4024d502e189e67d9027a50e472b7c295a83f
misc/migrate_miro_vhs.py
misc/migrate_miro_vhs.py
import boto3 def get_existing_records(dynamodb_client): """ Generates existing Miro records from the SourceData table. """ paginator = dynamodb_client.get_paginator('scan') for page in paginator.paginate(TableName='SourceData'): for item in page['Items']: yield item if __name__ == '__main__': dynamodb_client = boto3.client('dynamodb') for item in get_existing_records(dynamodb_client): print(item) break
import boto3 OLD_TABLE = 'SourceData' OLD_BUCKET = 'wellcomecollection-vhs-sourcedata' NEW_TABLE = 'wellcomecollection-vhs-sourcedata-miro' NEW_BUCKET = 'wellcomecollection-vhs-sourcedata-miro' def get_existing_records(dynamodb_client): """ Generates existing Miro records from the SourceData table. """ paginator = dynamodb_client.get_paginator('scan') for page in paginator.paginate(TableName=OLD_TABLE): for item in page['Items']: if 'reindexShard' not in item: print(item) if item['sourceName'] != {'S': 'miro'}: continue yield item if __name__ == '__main__': dynamodb_client = boto3.client('dynamodb') s3_client = boto3.client('s3') for item in get_existing_records(dynamodb_client): del item['sourceName'] s3_client.copy_object( Bucket=NEW_BUCKET, Key=item['s3key']['S'].replace('miro/', ''), CopySource={ 'Bucket': OLD_BUCKET, 'Key': item['s3key']['S'] } ) print(item) break
Copy the S3 object into the new bucket
Copy the S3 object into the new bucket
Python
mit
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
import boto3 + + + OLD_TABLE = 'SourceData' + OLD_BUCKET = 'wellcomecollection-vhs-sourcedata' + + NEW_TABLE = 'wellcomecollection-vhs-sourcedata-miro' + NEW_BUCKET = 'wellcomecollection-vhs-sourcedata-miro' def get_existing_records(dynamodb_client): """ Generates existing Miro records from the SourceData table. """ paginator = dynamodb_client.get_paginator('scan') - for page in paginator.paginate(TableName='SourceData'): + for page in paginator.paginate(TableName=OLD_TABLE): for item in page['Items']: + if 'reindexShard' not in item: + print(item) + + if item['sourceName'] != {'S': 'miro'}: + continue yield item if __name__ == '__main__': dynamodb_client = boto3.client('dynamodb') + s3_client = boto3.client('s3') for item in get_existing_records(dynamodb_client): + del item['sourceName'] + + s3_client.copy_object( + Bucket=NEW_BUCKET, + Key=item['s3key']['S'].replace('miro/', ''), + CopySource={ + 'Bucket': OLD_BUCKET, + 'Key': item['s3key']['S'] + } + ) + print(item) break
Copy the S3 object into the new bucket
## Code Before: import boto3 def get_existing_records(dynamodb_client): """ Generates existing Miro records from the SourceData table. """ paginator = dynamodb_client.get_paginator('scan') for page in paginator.paginate(TableName='SourceData'): for item in page['Items']: yield item if __name__ == '__main__': dynamodb_client = boto3.client('dynamodb') for item in get_existing_records(dynamodb_client): print(item) break ## Instruction: Copy the S3 object into the new bucket ## Code After: import boto3 OLD_TABLE = 'SourceData' OLD_BUCKET = 'wellcomecollection-vhs-sourcedata' NEW_TABLE = 'wellcomecollection-vhs-sourcedata-miro' NEW_BUCKET = 'wellcomecollection-vhs-sourcedata-miro' def get_existing_records(dynamodb_client): """ Generates existing Miro records from the SourceData table. """ paginator = dynamodb_client.get_paginator('scan') for page in paginator.paginate(TableName=OLD_TABLE): for item in page['Items']: if 'reindexShard' not in item: print(item) if item['sourceName'] != {'S': 'miro'}: continue yield item if __name__ == '__main__': dynamodb_client = boto3.client('dynamodb') s3_client = boto3.client('s3') for item in get_existing_records(dynamodb_client): del item['sourceName'] s3_client.copy_object( Bucket=NEW_BUCKET, Key=item['s3key']['S'].replace('miro/', ''), CopySource={ 'Bucket': OLD_BUCKET, 'Key': item['s3key']['S'] } ) print(item) break
import boto3 + + + OLD_TABLE = 'SourceData' + OLD_BUCKET = 'wellcomecollection-vhs-sourcedata' + + NEW_TABLE = 'wellcomecollection-vhs-sourcedata-miro' + NEW_BUCKET = 'wellcomecollection-vhs-sourcedata-miro' def get_existing_records(dynamodb_client): """ Generates existing Miro records from the SourceData table. """ paginator = dynamodb_client.get_paginator('scan') - for page in paginator.paginate(TableName='SourceData'): ? ^^^^^^^ ^^^^ + for page in paginator.paginate(TableName=OLD_TABLE): ? ^^ ^^^^^^ for item in page['Items']: + if 'reindexShard' not in item: + print(item) + + if item['sourceName'] != {'S': 'miro'}: + continue yield item if __name__ == '__main__': dynamodb_client = boto3.client('dynamodb') + s3_client = boto3.client('s3') for item in get_existing_records(dynamodb_client): + del item['sourceName'] + + s3_client.copy_object( + Bucket=NEW_BUCKET, + Key=item['s3key']['S'].replace('miro/', ''), + CopySource={ + 'Bucket': OLD_BUCKET, + 'Key': item['s3key']['S'] + } + ) + print(item) break
c633112d6336c37e15577eb6d035488cc42bfd59
indra/explanation/model_checker/__init__.py
indra/explanation/model_checker/__init__.py
from .model_checker import ModelChecker, PathResult, PathMetric from .pysb import PysbModelChecker from .signed_graph import SignedGraphModelChecker from .unsigned_graph import UnsignedGraphModelChecker from .pybel import PybelModelChecker
from .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter from .pysb import PysbModelChecker from .signed_graph import SignedGraphModelChecker from .unsigned_graph import UnsignedGraphModelChecker from .pybel import PybelModelChecker
Add get_path_iter to model_checker importables
Add get_path_iter to model_checker importables
Python
bsd-2-clause
sorgerlab/belpy,johnbachman/indra,johnbachman/indra,bgyori/indra,sorgerlab/belpy,bgyori/indra,johnbachman/belpy,bgyori/indra,sorgerlab/indra,johnbachman/belpy,sorgerlab/indra,johnbachman/belpy,sorgerlab/indra,sorgerlab/belpy,johnbachman/indra
- from .model_checker import ModelChecker, PathResult, PathMetric + from .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter from .pysb import PysbModelChecker from .signed_graph import SignedGraphModelChecker from .unsigned_graph import UnsignedGraphModelChecker from .pybel import PybelModelChecker
Add get_path_iter to model_checker importables
## Code Before: from .model_checker import ModelChecker, PathResult, PathMetric from .pysb import PysbModelChecker from .signed_graph import SignedGraphModelChecker from .unsigned_graph import UnsignedGraphModelChecker from .pybel import PybelModelChecker ## Instruction: Add get_path_iter to model_checker importables ## Code After: from .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter from .pysb import PysbModelChecker from .signed_graph import SignedGraphModelChecker from .unsigned_graph import UnsignedGraphModelChecker from .pybel import PybelModelChecker
- from .model_checker import ModelChecker, PathResult, PathMetric + from .model_checker import ModelChecker, PathResult, PathMetric, get_path_iter ? +++++++++++++++ from .pysb import PysbModelChecker from .signed_graph import SignedGraphModelChecker from .unsigned_graph import UnsignedGraphModelChecker from .pybel import PybelModelChecker
b5fc8db375e7273fb3b7cbb2318f57f141e25045
src/commoner/profiles/models.py
src/commoner/profiles/models.py
import urlparse from django.db import models from django.db.models import permalink from django.core.urlresolvers import reverse from django.contrib.auth.models import User from commoner.util import getBaseURL class CommonerProfile(models.Model): user = models.ForeignKey(User, unique=True) nickname = models.CharField(max_length=255, blank=True) photo = models.ImageField(upload_to='p') homepage = models.URLField(max_length=255, blank=True) location = models.CharField(max_length=255, blank=True) story = models.TextField(blank=True) def __unicode__(self): if self.nickname: return u"%s (%s)" % (self.user.username, self.nickname) return self.user.username def display_name(self): return self.nickname or self.user.username def get_absolute_url(self, request=None): if request is None: return reverse('profile_view', args=(self.user.username, ) ) else: return urlparse.urljoin( getBaseURL(request), reverse('profile_view', args=(self.user.username, ) ) )
import urlparse from django.db import models from django.db.models import permalink from django.core.urlresolvers import reverse from django.contrib.auth.models import User from commoner.util import getBaseURL class CommonerProfile(models.Model): user = models.ForeignKey(User, unique=True) nickname = models.CharField(max_length=255, blank=True) photo = models.ImageField(upload_to='p', blank=True, null=True) homepage = models.URLField(max_length=255, blank=True) location = models.CharField(max_length=255, blank=True) story = models.TextField(blank=True) def __unicode__(self): if self.nickname: return u"%s (%s)" % (self.user.username, self.nickname) return self.user.username def display_name(self): return self.nickname or self.user.username def get_absolute_url(self, request=None): if request is None: return reverse('profile_view', args=(self.user.username, ) ) else: return urlparse.urljoin( getBaseURL(request), reverse('profile_view', args=(self.user.username, ) ) )
Allow the photo to be blank.
Allow the photo to be blank.
Python
agpl-3.0
cc-archive/commoner,cc-archive/commoner
import urlparse from django.db import models from django.db.models import permalink from django.core.urlresolvers import reverse from django.contrib.auth.models import User from commoner.util import getBaseURL class CommonerProfile(models.Model): user = models.ForeignKey(User, unique=True) nickname = models.CharField(max_length=255, blank=True) - photo = models.ImageField(upload_to='p') + photo = models.ImageField(upload_to='p', blank=True, null=True) homepage = models.URLField(max_length=255, blank=True) location = models.CharField(max_length=255, blank=True) story = models.TextField(blank=True) def __unicode__(self): if self.nickname: return u"%s (%s)" % (self.user.username, self.nickname) return self.user.username def display_name(self): return self.nickname or self.user.username def get_absolute_url(self, request=None): if request is None: return reverse('profile_view', args=(self.user.username, ) ) else: return urlparse.urljoin( getBaseURL(request), reverse('profile_view', args=(self.user.username, ) ) )
Allow the photo to be blank.
## Code Before: import urlparse from django.db import models from django.db.models import permalink from django.core.urlresolvers import reverse from django.contrib.auth.models import User from commoner.util import getBaseURL class CommonerProfile(models.Model): user = models.ForeignKey(User, unique=True) nickname = models.CharField(max_length=255, blank=True) photo = models.ImageField(upload_to='p') homepage = models.URLField(max_length=255, blank=True) location = models.CharField(max_length=255, blank=True) story = models.TextField(blank=True) def __unicode__(self): if self.nickname: return u"%s (%s)" % (self.user.username, self.nickname) return self.user.username def display_name(self): return self.nickname or self.user.username def get_absolute_url(self, request=None): if request is None: return reverse('profile_view', args=(self.user.username, ) ) else: return urlparse.urljoin( getBaseURL(request), reverse('profile_view', args=(self.user.username, ) ) ) ## Instruction: Allow the photo to be blank. 
## Code After: import urlparse from django.db import models from django.db.models import permalink from django.core.urlresolvers import reverse from django.contrib.auth.models import User from commoner.util import getBaseURL class CommonerProfile(models.Model): user = models.ForeignKey(User, unique=True) nickname = models.CharField(max_length=255, blank=True) photo = models.ImageField(upload_to='p', blank=True, null=True) homepage = models.URLField(max_length=255, blank=True) location = models.CharField(max_length=255, blank=True) story = models.TextField(blank=True) def __unicode__(self): if self.nickname: return u"%s (%s)" % (self.user.username, self.nickname) return self.user.username def display_name(self): return self.nickname or self.user.username def get_absolute_url(self, request=None): if request is None: return reverse('profile_view', args=(self.user.username, ) ) else: return urlparse.urljoin( getBaseURL(request), reverse('profile_view', args=(self.user.username, ) ) )
import urlparse from django.db import models from django.db.models import permalink from django.core.urlresolvers import reverse from django.contrib.auth.models import User from commoner.util import getBaseURL class CommonerProfile(models.Model): user = models.ForeignKey(User, unique=True) nickname = models.CharField(max_length=255, blank=True) - photo = models.ImageField(upload_to='p') + photo = models.ImageField(upload_to='p', blank=True, null=True) ? +++++++++++++++++++++++ homepage = models.URLField(max_length=255, blank=True) location = models.CharField(max_length=255, blank=True) story = models.TextField(blank=True) def __unicode__(self): if self.nickname: return u"%s (%s)" % (self.user.username, self.nickname) return self.user.username def display_name(self): return self.nickname or self.user.username def get_absolute_url(self, request=None): if request is None: return reverse('profile_view', args=(self.user.username, ) ) else: return urlparse.urljoin( getBaseURL(request), reverse('profile_view', args=(self.user.username, ) ) )
6f5e987b5a102b0c4b0bfcd88c17faab00655142
ctypeslib/test/test_toolchain.py
ctypeslib/test/test_toolchain.py
import unittest import sys from ctypeslib import h2xml, xml2py class ToolchainTest(unittest.TestCase): if sys.platform == "win32": def test(self): h2xml.main(["h2xml", "-q", "-D WIN32_LEAN_AND_MEAN", "-D _UNICODE", "-D UNICODE", "-c", "windows.h", "-o", "_windows_gen.xml"]) xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"]) import _winapi_gen if __name__ == "__main__": import unittest unittest.main()
import unittest import sys from ctypeslib import h2xml, xml2py class ToolchainTest(unittest.TestCase): if sys.platform == "win32": def test_windows(self): h2xml.main(["h2xml", "-q", "-D WIN32_LEAN_AND_MEAN", "-D _UNICODE", "-D UNICODE", "-c", "windows.h", "-o", "_windows_gen.xml"]) xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"]) import _winapi_gen def test(self): h2xml.main(["h2xml", "-q", "-D WIN32_LEAN_AND_MEAN", "-D _UNICODE", "-D UNICODE", "-c", "stdio.h", "-o", "_stdio_gen.xml"]) xml2py.main(["xml2py", "_stdio_gen.xml", "-o", "_stdio_gen.py"]) import _stdio_gen if __name__ == "__main__": import unittest unittest.main()
Add a test for stdio.h.
Add a test for stdio.h. git-svn-id: ac2c3632cb6543e7ab5fafd132c7fe15057a1882@60472 6015fed2-1504-0410-9fe1-9d1591cc4771
Python
mit
trolldbois/ctypeslib,luzfcb/ctypeslib,trolldbois/ctypeslib,luzfcb/ctypeslib,luzfcb/ctypeslib,trolldbois/ctypeslib
import unittest import sys from ctypeslib import h2xml, xml2py class ToolchainTest(unittest.TestCase): if sys.platform == "win32": - def test(self): + def test_windows(self): h2xml.main(["h2xml", "-q", "-D WIN32_LEAN_AND_MEAN", "-D _UNICODE", "-D UNICODE", "-c", "windows.h", "-o", "_windows_gen.xml"]) xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"]) import _winapi_gen + def test(self): + h2xml.main(["h2xml", "-q", + "-D WIN32_LEAN_AND_MEAN", + "-D _UNICODE", "-D UNICODE", + "-c", "stdio.h", + "-o", "_stdio_gen.xml"]) + xml2py.main(["xml2py", "_stdio_gen.xml", "-o", "_stdio_gen.py"]) + import _stdio_gen + + if __name__ == "__main__": import unittest unittest.main()
Add a test for stdio.h.
## Code Before: import unittest import sys from ctypeslib import h2xml, xml2py class ToolchainTest(unittest.TestCase): if sys.platform == "win32": def test(self): h2xml.main(["h2xml", "-q", "-D WIN32_LEAN_AND_MEAN", "-D _UNICODE", "-D UNICODE", "-c", "windows.h", "-o", "_windows_gen.xml"]) xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"]) import _winapi_gen if __name__ == "__main__": import unittest unittest.main() ## Instruction: Add a test for stdio.h. ## Code After: import unittest import sys from ctypeslib import h2xml, xml2py class ToolchainTest(unittest.TestCase): if sys.platform == "win32": def test_windows(self): h2xml.main(["h2xml", "-q", "-D WIN32_LEAN_AND_MEAN", "-D _UNICODE", "-D UNICODE", "-c", "windows.h", "-o", "_windows_gen.xml"]) xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"]) import _winapi_gen def test(self): h2xml.main(["h2xml", "-q", "-D WIN32_LEAN_AND_MEAN", "-D _UNICODE", "-D UNICODE", "-c", "stdio.h", "-o", "_stdio_gen.xml"]) xml2py.main(["xml2py", "_stdio_gen.xml", "-o", "_stdio_gen.py"]) import _stdio_gen if __name__ == "__main__": import unittest unittest.main()
import unittest import sys from ctypeslib import h2xml, xml2py class ToolchainTest(unittest.TestCase): if sys.platform == "win32": - def test(self): + def test_windows(self): ? ++++++++ h2xml.main(["h2xml", "-q", "-D WIN32_LEAN_AND_MEAN", "-D _UNICODE", "-D UNICODE", "-c", "windows.h", "-o", "_windows_gen.xml"]) xml2py.main(["xml2py", "_windows_gen.xml", "-w", "-o", "_winapi_gen.py"]) import _winapi_gen + def test(self): + h2xml.main(["h2xml", "-q", + "-D WIN32_LEAN_AND_MEAN", + "-D _UNICODE", "-D UNICODE", + "-c", "stdio.h", + "-o", "_stdio_gen.xml"]) + xml2py.main(["xml2py", "_stdio_gen.xml", "-o", "_stdio_gen.py"]) + import _stdio_gen + + if __name__ == "__main__": import unittest unittest.main()
beb3882b89b41ca104dbb9f2fb97f609f45ce106
corehq/apps/users/decorators.py
corehq/apps/users/decorators.py
from django.http import HttpResponseForbidden from corehq.apps.domain.decorators import login_and_domain_required def require_permission(permission, data=None, login_decorator=login_and_domain_required): try: permission = permission.name except AttributeError: try: permission = permission.__name__ except AttributeError: pass def decorator(view_func): def _inner(request, domain, *args, **kwargs): if hasattr(request, "couch_user") and (request.user.is_superuser or request.couch_user.has_permission(domain, permission, data=data)): if login_decorator: return login_decorator(view_func)(request, domain, *args, **kwargs) else: return view_func(request, domain, *args, **kwargs) else: return HttpResponseForbidden() return _inner return decorator
from django.http import HttpResponseForbidden from corehq.apps.domain.decorators import login_and_domain_required def require_permission(permission, data=None, login_decorator=login_and_domain_required): try: permission = permission.name except AttributeError: try: permission = permission.__name__ except AttributeError: pass def decorator(view_func): def _inner(request, domain, *args, **kwargs): if hasattr(request, "couch_user") and (request.user.is_superuser or request.couch_user.has_permission(domain, permission, data=data)): return view_func(request, domain, *args, **kwargs) else: return HttpResponseForbidden() if login_decorator: return login_decorator(_inner) else: return _inner return decorator
Apply login decorator before permissions check; less 403s, more 302s
Apply login decorator before permissions check; less 403s, more 302s
Python
bsd-3-clause
puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,gmimano/commcaretest,SEL-Columbia/commcare-hq,dimagi/commcare-hq,gmimano/commcaretest,SEL-Columbia/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq
from django.http import HttpResponseForbidden from corehq.apps.domain.decorators import login_and_domain_required def require_permission(permission, data=None, login_decorator=login_and_domain_required): try: permission = permission.name except AttributeError: try: permission = permission.__name__ except AttributeError: pass def decorator(view_func): def _inner(request, domain, *args, **kwargs): if hasattr(request, "couch_user") and (request.user.is_superuser or request.couch_user.has_permission(domain, permission, data=data)): - if login_decorator: - return login_decorator(view_func)(request, domain, *args, **kwargs) - else: - return view_func(request, domain, *args, **kwargs) + return view_func(request, domain, *args, **kwargs) else: return HttpResponseForbidden() + + if login_decorator: + return login_decorator(_inner) + else: - return _inner + return _inner + return decorator
Apply login decorator before permissions check; less 403s, more 302s
## Code Before: from django.http import HttpResponseForbidden from corehq.apps.domain.decorators import login_and_domain_required def require_permission(permission, data=None, login_decorator=login_and_domain_required): try: permission = permission.name except AttributeError: try: permission = permission.__name__ except AttributeError: pass def decorator(view_func): def _inner(request, domain, *args, **kwargs): if hasattr(request, "couch_user") and (request.user.is_superuser or request.couch_user.has_permission(domain, permission, data=data)): if login_decorator: return login_decorator(view_func)(request, domain, *args, **kwargs) else: return view_func(request, domain, *args, **kwargs) else: return HttpResponseForbidden() return _inner return decorator ## Instruction: Apply login decorator before permissions check; less 403s, more 302s ## Code After: from django.http import HttpResponseForbidden from corehq.apps.domain.decorators import login_and_domain_required def require_permission(permission, data=None, login_decorator=login_and_domain_required): try: permission = permission.name except AttributeError: try: permission = permission.__name__ except AttributeError: pass def decorator(view_func): def _inner(request, domain, *args, **kwargs): if hasattr(request, "couch_user") and (request.user.is_superuser or request.couch_user.has_permission(domain, permission, data=data)): return view_func(request, domain, *args, **kwargs) else: return HttpResponseForbidden() if login_decorator: return login_decorator(_inner) else: return _inner return decorator
from django.http import HttpResponseForbidden from corehq.apps.domain.decorators import login_and_domain_required def require_permission(permission, data=None, login_decorator=login_and_domain_required): try: permission = permission.name except AttributeError: try: permission = permission.__name__ except AttributeError: pass def decorator(view_func): def _inner(request, domain, *args, **kwargs): if hasattr(request, "couch_user") and (request.user.is_superuser or request.couch_user.has_permission(domain, permission, data=data)): - if login_decorator: - return login_decorator(view_func)(request, domain, *args, **kwargs) - else: - return view_func(request, domain, *args, **kwargs) ? ---- + return view_func(request, domain, *args, **kwargs) else: return HttpResponseForbidden() + + if login_decorator: + return login_decorator(_inner) + else: - return _inner + return _inner ? ++++ + return decorator
0dffa6879415ebd1750c264d49e84a4d1d9a1bb0
sequere/models.py
sequere/models.py
from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.db.models.query import QuerySet class FollowQuerySet(QuerySet): pass class FollowManager(models.Manager): def get_query_set(self): return FollowQuerySet(self.model) @python_2_unicode_compatible class Follow(models.Model): created_at = models.DateTimeField(auto_now_add=True) from_object_id = models.PositiveIntegerField() from_identifier = models.CharField(max_length=50, db_index=True) to_object_id = models.PositiveIntegerField() to_identifier = models.CharField(max_length=50, db_index=True) objects = FollowManager() def __str__(self): return '<%s: %d>' % (self.identifier, self.object_id) def follow(from_instance, to_instance): pass def is_following(from_instance, to_instance): pass def unfollow(from_instance, to_instance): pass def get_followings(instance): pass def get_followers(instance): pass
from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.db.models.query import QuerySet from .backends import get_backend class FollowQuerySet(QuerySet): pass class FollowManager(models.Manager): def get_query_set(self): return FollowQuerySet(self.model) @python_2_unicode_compatible class Follow(models.Model): created_at = models.DateTimeField(auto_now_add=True) from_object_id = models.PositiveIntegerField() from_identifier = models.CharField(max_length=50, db_index=True) to_object_id = models.PositiveIntegerField() to_identifier = models.CharField(max_length=50, db_index=True) objects = FollowManager() def __str__(self): return '<%s: %d>' % (self.identifier, self.object_id) def follow(from_instance, to_instance): return get_backend().follow(from_instance, to_instance) def is_following(from_instance, to_instance): return get_backend().is_following(from_instance, to_instance) def unfollow(from_instance, to_instance): return get_backend().unfollow(from_instance, to_instance) def get_followings(instance): return get_backend().get_followings(instance) def get_followers(instance): return get_backend().get_followers(instance)
Use get_backend in proxy methods
Use get_backend in proxy methods
Python
mit
thoas/django-sequere
from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.db.models.query import QuerySet + + from .backends import get_backend class FollowQuerySet(QuerySet): pass class FollowManager(models.Manager): def get_query_set(self): return FollowQuerySet(self.model) @python_2_unicode_compatible class Follow(models.Model): created_at = models.DateTimeField(auto_now_add=True) from_object_id = models.PositiveIntegerField() from_identifier = models.CharField(max_length=50, db_index=True) to_object_id = models.PositiveIntegerField() to_identifier = models.CharField(max_length=50, db_index=True) objects = FollowManager() def __str__(self): return '<%s: %d>' % (self.identifier, self.object_id) def follow(from_instance, to_instance): - pass + return get_backend().follow(from_instance, to_instance) def is_following(from_instance, to_instance): - pass + return get_backend().is_following(from_instance, to_instance) def unfollow(from_instance, to_instance): - pass + return get_backend().unfollow(from_instance, to_instance) def get_followings(instance): - pass + return get_backend().get_followings(instance) def get_followers(instance): - pass + return get_backend().get_followers(instance)
Use get_backend in proxy methods
## Code Before: from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.db.models.query import QuerySet class FollowQuerySet(QuerySet): pass class FollowManager(models.Manager): def get_query_set(self): return FollowQuerySet(self.model) @python_2_unicode_compatible class Follow(models.Model): created_at = models.DateTimeField(auto_now_add=True) from_object_id = models.PositiveIntegerField() from_identifier = models.CharField(max_length=50, db_index=True) to_object_id = models.PositiveIntegerField() to_identifier = models.CharField(max_length=50, db_index=True) objects = FollowManager() def __str__(self): return '<%s: %d>' % (self.identifier, self.object_id) def follow(from_instance, to_instance): pass def is_following(from_instance, to_instance): pass def unfollow(from_instance, to_instance): pass def get_followings(instance): pass def get_followers(instance): pass ## Instruction: Use get_backend in proxy methods ## Code After: from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.db.models.query import QuerySet from .backends import get_backend class FollowQuerySet(QuerySet): pass class FollowManager(models.Manager): def get_query_set(self): return FollowQuerySet(self.model) @python_2_unicode_compatible class Follow(models.Model): created_at = models.DateTimeField(auto_now_add=True) from_object_id = models.PositiveIntegerField() from_identifier = models.CharField(max_length=50, db_index=True) to_object_id = models.PositiveIntegerField() to_identifier = models.CharField(max_length=50, db_index=True) objects = FollowManager() def __str__(self): return '<%s: %d>' % (self.identifier, self.object_id) def follow(from_instance, to_instance): return get_backend().follow(from_instance, to_instance) def is_following(from_instance, to_instance): return get_backend().is_following(from_instance, to_instance) def unfollow(from_instance, to_instance): return 
get_backend().unfollow(from_instance, to_instance) def get_followings(instance): return get_backend().get_followings(instance) def get_followers(instance): return get_backend().get_followers(instance)
from django.db import models from django.utils.encoding import python_2_unicode_compatible from django.db.models.query import QuerySet + + from .backends import get_backend class FollowQuerySet(QuerySet): pass class FollowManager(models.Manager): def get_query_set(self): return FollowQuerySet(self.model) @python_2_unicode_compatible class Follow(models.Model): created_at = models.DateTimeField(auto_now_add=True) from_object_id = models.PositiveIntegerField() from_identifier = models.CharField(max_length=50, db_index=True) to_object_id = models.PositiveIntegerField() to_identifier = models.CharField(max_length=50, db_index=True) objects = FollowManager() def __str__(self): return '<%s: %d>' % (self.identifier, self.object_id) def follow(from_instance, to_instance): - pass + return get_backend().follow(from_instance, to_instance) def is_following(from_instance, to_instance): - pass + return get_backend().is_following(from_instance, to_instance) def unfollow(from_instance, to_instance): - pass + return get_backend().unfollow(from_instance, to_instance) def get_followings(instance): - pass + return get_backend().get_followings(instance) def get_followers(instance): - pass + return get_backend().get_followers(instance)
dc05182f04dcebf61d368fe9f834b37d75b59bfd
Lib/fontmake/errors.py
Lib/fontmake/errors.py
class FontmakeError(Exception): """Base class for all fontmake exceptions.""" pass class TTFAError(FontmakeError): def __init__(self, exitcode): self.exitcode = exitcode def __str__(self): return "ttfautohint command failed: error " + str(self.exitcode)
import os class FontmakeError(Exception): """Base class for all fontmake exceptions. This exception is intended to be chained to the original exception. The main purpose is to provide a source file trail that points to where the explosion came from. """ def __init__(self, msg, source_file): self.msg = msg self.source_trail = [source_file] def __str__(self): trail = " -> ".join( f"'{str(os.path.relpath(s))}'" for s in reversed(self.source_trail) if s is not None ) cause = str(self.__cause__) if self.__cause__ is not None else None message = "" if trail: message = f"In {trail}: " message += f"{self.msg}" if cause: message += f": {cause}" return message class TTFAError(FontmakeError): def __init__(self, exitcode, source_file): self.exitcode = exitcode self.source_trail = source_file def __str__(self): return ( f"ttfautohint failed for '{str(os.path.relpath(self.source_trail))}': " f"error code {str(self.exitcode)}." )
Add source trail logic to FontmakeError and partly TTFAError
Add source trail logic to FontmakeError and partly TTFAError
Python
apache-2.0
googlei18n/fontmake,googlei18n/fontmake,googlefonts/fontmake,googlefonts/fontmake
+ import os + + class FontmakeError(Exception): - """Base class for all fontmake exceptions.""" + """Base class for all fontmake exceptions. - pass + This exception is intended to be chained to the original exception. The + main purpose is to provide a source file trail that points to where the + explosion came from. + """ + + def __init__(self, msg, source_file): + self.msg = msg + self.source_trail = [source_file] + + def __str__(self): + trail = " -> ".join( + f"'{str(os.path.relpath(s))}'" + for s in reversed(self.source_trail) + if s is not None + ) + cause = str(self.__cause__) if self.__cause__ is not None else None + + message = "" + if trail: + message = f"In {trail}: " + message += f"{self.msg}" + if cause: + message += f": {cause}" + + return message class TTFAError(FontmakeError): - def __init__(self, exitcode): + def __init__(self, exitcode, source_file): self.exitcode = exitcode + self.source_trail = source_file def __str__(self): - return "ttfautohint command failed: error " + str(self.exitcode) + return ( + f"ttfautohint failed for '{str(os.path.relpath(self.source_trail))}': " + f"error code {str(self.exitcode)}." + )
Add source trail logic to FontmakeError and partly TTFAError
## Code Before: class FontmakeError(Exception): """Base class for all fontmake exceptions.""" pass class TTFAError(FontmakeError): def __init__(self, exitcode): self.exitcode = exitcode def __str__(self): return "ttfautohint command failed: error " + str(self.exitcode) ## Instruction: Add source trail logic to FontmakeError and partly TTFAError ## Code After: import os class FontmakeError(Exception): """Base class for all fontmake exceptions. This exception is intended to be chained to the original exception. The main purpose is to provide a source file trail that points to where the explosion came from. """ def __init__(self, msg, source_file): self.msg = msg self.source_trail = [source_file] def __str__(self): trail = " -> ".join( f"'{str(os.path.relpath(s))}'" for s in reversed(self.source_trail) if s is not None ) cause = str(self.__cause__) if self.__cause__ is not None else None message = "" if trail: message = f"In {trail}: " message += f"{self.msg}" if cause: message += f": {cause}" return message class TTFAError(FontmakeError): def __init__(self, exitcode, source_file): self.exitcode = exitcode self.source_trail = source_file def __str__(self): return ( f"ttfautohint failed for '{str(os.path.relpath(self.source_trail))}': " f"error code {str(self.exitcode)}." )
+ import os + + class FontmakeError(Exception): - """Base class for all fontmake exceptions.""" ? --- + """Base class for all fontmake exceptions. - pass + This exception is intended to be chained to the original exception. The + main purpose is to provide a source file trail that points to where the + explosion came from. + """ + + def __init__(self, msg, source_file): + self.msg = msg + self.source_trail = [source_file] + + def __str__(self): + trail = " -> ".join( + f"'{str(os.path.relpath(s))}'" + for s in reversed(self.source_trail) + if s is not None + ) + cause = str(self.__cause__) if self.__cause__ is not None else None + + message = "" + if trail: + message = f"In {trail}: " + message += f"{self.msg}" + if cause: + message += f": {cause}" + + return message class TTFAError(FontmakeError): - def __init__(self, exitcode): + def __init__(self, exitcode, source_file): ? +++++++++++++ self.exitcode = exitcode + self.source_trail = source_file def __str__(self): - return "ttfautohint command failed: error " + str(self.exitcode) + return ( + f"ttfautohint failed for '{str(os.path.relpath(self.source_trail))}': " + f"error code {str(self.exitcode)}." + )
32bf828445ed897609b908dff435191287f922f4
bookie/views/stats.py
bookie/views/stats.py
"""Basic views with no home""" import logging from pyramid.view import view_config from bookie.bcelery import tasks from bookie.models import BmarkMgr from bookie.models.auth import ActivationMgr from bookie.models.auth import UserMgr LOG = logging.getLogger(__name__) @view_config( route_name="dashboard", renderer="/stats/dashboard.mako") def dashboard(request): """A public dashboard of the system """ res = tasks.count_total.delay() # Generate some user data and stats user_count = UserMgr.count() pending_activations = ActivationMgr.count() # Generate some bookmark data. bookmark_count = BmarkMgr.count() unique_url_count = BmarkMgr.count(distinct=True) users_with_bookmarks = BmarkMgr.count(distinct_users=True) return { 'bookmark_data': { 'count': bookmark_count, 'unique_count': unique_url_count, }, 'user_data': { 'count': user_count, 'activations': pending_activations, 'with_bookmarks': users_with_bookmarks, } }
"""Basic views with no home""" import logging from pyramid.view import view_config from bookie.models import BmarkMgr from bookie.models.auth import ActivationMgr from bookie.models.auth import UserMgr LOG = logging.getLogger(__name__) @view_config( route_name="dashboard", renderer="/stats/dashboard.mako") def dashboard(request): """A public dashboard of the system """ # Generate some user data and stats user_count = UserMgr.count() pending_activations = ActivationMgr.count() # Generate some bookmark data. bookmark_count = BmarkMgr.count() unique_url_count = BmarkMgr.count(distinct=True) users_with_bookmarks = BmarkMgr.count(distinct_users=True) return { 'bookmark_data': { 'count': bookmark_count, 'unique_count': unique_url_count, }, 'user_data': { 'count': user_count, 'activations': pending_activations, 'with_bookmarks': users_with_bookmarks, } }
Clean up old code no longer used
Clean up old code no longer used
Python
agpl-3.0
adamlincoln/Bookie,charany1/Bookie,GreenLunar/Bookie,charany1/Bookie,skmezanul/Bookie,charany1/Bookie,adamlincoln/Bookie,GreenLunar/Bookie,pombredanne/Bookie,wangjun/Bookie,bookieio/Bookie,skmezanul/Bookie,pombredanne/Bookie,bookieio/Bookie,GreenLunar/Bookie,pombredanne/Bookie,teodesson/Bookie,skmezanul/Bookie,bookieio/Bookie,teodesson/Bookie,wangjun/Bookie,adamlincoln/Bookie,teodesson/Bookie,skmezanul/Bookie,wangjun/Bookie,GreenLunar/Bookie,teodesson/Bookie,adamlincoln/Bookie,bookieio/Bookie,wangjun/Bookie
"""Basic views with no home""" import logging from pyramid.view import view_config - from bookie.bcelery import tasks from bookie.models import BmarkMgr from bookie.models.auth import ActivationMgr from bookie.models.auth import UserMgr LOG = logging.getLogger(__name__) @view_config( route_name="dashboard", renderer="/stats/dashboard.mako") def dashboard(request): """A public dashboard of the system """ - res = tasks.count_total.delay() - # Generate some user data and stats user_count = UserMgr.count() pending_activations = ActivationMgr.count() # Generate some bookmark data. bookmark_count = BmarkMgr.count() unique_url_count = BmarkMgr.count(distinct=True) users_with_bookmarks = BmarkMgr.count(distinct_users=True) return { 'bookmark_data': { 'count': bookmark_count, 'unique_count': unique_url_count, }, 'user_data': { 'count': user_count, 'activations': pending_activations, 'with_bookmarks': users_with_bookmarks, } }
Clean up old code no longer used
## Code Before: """Basic views with no home""" import logging from pyramid.view import view_config from bookie.bcelery import tasks from bookie.models import BmarkMgr from bookie.models.auth import ActivationMgr from bookie.models.auth import UserMgr LOG = logging.getLogger(__name__) @view_config( route_name="dashboard", renderer="/stats/dashboard.mako") def dashboard(request): """A public dashboard of the system """ res = tasks.count_total.delay() # Generate some user data and stats user_count = UserMgr.count() pending_activations = ActivationMgr.count() # Generate some bookmark data. bookmark_count = BmarkMgr.count() unique_url_count = BmarkMgr.count(distinct=True) users_with_bookmarks = BmarkMgr.count(distinct_users=True) return { 'bookmark_data': { 'count': bookmark_count, 'unique_count': unique_url_count, }, 'user_data': { 'count': user_count, 'activations': pending_activations, 'with_bookmarks': users_with_bookmarks, } } ## Instruction: Clean up old code no longer used ## Code After: """Basic views with no home""" import logging from pyramid.view import view_config from bookie.models import BmarkMgr from bookie.models.auth import ActivationMgr from bookie.models.auth import UserMgr LOG = logging.getLogger(__name__) @view_config( route_name="dashboard", renderer="/stats/dashboard.mako") def dashboard(request): """A public dashboard of the system """ # Generate some user data and stats user_count = UserMgr.count() pending_activations = ActivationMgr.count() # Generate some bookmark data. bookmark_count = BmarkMgr.count() unique_url_count = BmarkMgr.count(distinct=True) users_with_bookmarks = BmarkMgr.count(distinct_users=True) return { 'bookmark_data': { 'count': bookmark_count, 'unique_count': unique_url_count, }, 'user_data': { 'count': user_count, 'activations': pending_activations, 'with_bookmarks': users_with_bookmarks, } }
"""Basic views with no home""" import logging from pyramid.view import view_config - from bookie.bcelery import tasks from bookie.models import BmarkMgr from bookie.models.auth import ActivationMgr from bookie.models.auth import UserMgr LOG = logging.getLogger(__name__) @view_config( route_name="dashboard", renderer="/stats/dashboard.mako") def dashboard(request): """A public dashboard of the system """ - res = tasks.count_total.delay() - # Generate some user data and stats user_count = UserMgr.count() pending_activations = ActivationMgr.count() # Generate some bookmark data. bookmark_count = BmarkMgr.count() unique_url_count = BmarkMgr.count(distinct=True) users_with_bookmarks = BmarkMgr.count(distinct_users=True) return { 'bookmark_data': { 'count': bookmark_count, 'unique_count': unique_url_count, }, 'user_data': { 'count': user_count, 'activations': pending_activations, 'with_bookmarks': users_with_bookmarks, } }
9d0ea4eaf8269350fabc3415545bebf4da4137a7
source/segue/backend/processor/background.py
source/segue/backend/processor/background.py
import multiprocessing from .base import Processor class BackgroundProcessor(Processor): '''Local background processor.''' def process(self, command, args=None, kw=None): '''Process *command* with *args* and *kw*.''' process = multiprocessing.Process(target=command, args=args, kwargs=kw) process.start() process.join()
import multiprocessing from .base import Processor class BackgroundProcessor(Processor): '''Local background processor.''' def process(self, command, args=None, kw=None): '''Process *command* with *args* and *kw*.''' if args is None: args = () if kw is None: kw = {} process = multiprocessing.Process(target=command, args=args, kwargs=kw) process.start() process.join()
Fix passing invalid None to multiprocessing Process class.
Fix passing invalid None to multiprocessing Process class.
Python
apache-2.0
4degrees/segue
import multiprocessing from .base import Processor class BackgroundProcessor(Processor): '''Local background processor.''' def process(self, command, args=None, kw=None): '''Process *command* with *args* and *kw*.''' + if args is None: + args = () + + if kw is None: + kw = {} + process = multiprocessing.Process(target=command, args=args, kwargs=kw) process.start() process.join()
Fix passing invalid None to multiprocessing Process class.
## Code Before: import multiprocessing from .base import Processor class BackgroundProcessor(Processor): '''Local background processor.''' def process(self, command, args=None, kw=None): '''Process *command* with *args* and *kw*.''' process = multiprocessing.Process(target=command, args=args, kwargs=kw) process.start() process.join() ## Instruction: Fix passing invalid None to multiprocessing Process class. ## Code After: import multiprocessing from .base import Processor class BackgroundProcessor(Processor): '''Local background processor.''' def process(self, command, args=None, kw=None): '''Process *command* with *args* and *kw*.''' if args is None: args = () if kw is None: kw = {} process = multiprocessing.Process(target=command, args=args, kwargs=kw) process.start() process.join()
import multiprocessing from .base import Processor class BackgroundProcessor(Processor): '''Local background processor.''' def process(self, command, args=None, kw=None): '''Process *command* with *args* and *kw*.''' + if args is None: + args = () + + if kw is None: + kw = {} + process = multiprocessing.Process(target=command, args=args, kwargs=kw) process.start() process.join()
6038bcd507c43eb86e04c6a32abf9b8249c8872e
tests/server/handlers/test_zip.py
tests/server/handlers/test_zip.py
import asyncio import io import zipfile from unittest import mock from tornado import testing from waterbutler.core import streams from tests import utils class TestZipHandler(utils.HandlerTestCase): def setUp(self): super().setUp() identity_future = asyncio.Future() identity_future.set_result({ 'auth': {}, 'credentials': {}, 'settings': {}, }) self.mock_identity = mock.Mock() self.mock_identity.return_value = identity_future self.identity_patcher = mock.patch('waterbutler.server.handlers.core.get_identity', self.mock_identity) self.identity_patcher.start() def tearDown(self): super().tearDown() self.identity_patcher.stop() @mock.patch('waterbutler.core.utils.make_provider') @testing.gen_test def test_download_stream(self, mock_make_provider): stream = asyncio.StreamReader() data = b'freddie brian john roger' stream.feed_data(data) stream.feed_eof() stream.size = len(data) stream.content_type = 'application/octet-stream' zipstream = streams.ZipStreamReader(('file.txt', stream)) mock_provider = utils.mock_provider_method(mock_make_provider, 'zip', zipstream) resp = yield self.http_client.fetch( self.get_url('/zip?provider=queenhub&path=freddie.png'), ) zip = zipfile.ZipFile(io.BytesIO(resp.body)) assert zip.testzip() is None assert zip.open('file.txt').read() == data
import asyncio import io import zipfile from unittest import mock from tornado import testing from waterbutler.core import streams from tests import utils class TestZipHandler(utils.HandlerTestCase): @testing.gen_test def test_download_stream(self): data = b'freddie brian john roger' stream = streams.StringStream(data) stream.content_type = 'application/octet-stream' zipstream = streams.ZipStreamReader(('file.txt', stream)) self.mock_provider.zip = utils.MockCoroutine(return_value=zipstream) resp = yield self.http_client.fetch( self.get_url('/zip?provider=queenhub&path=/freddie.png'), ) zip = zipfile.ZipFile(io.BytesIO(resp.body)) assert zip.testzip() is None assert zip.open('file.txt').read() == data
Remove deprecated test setup and teardown code
Remove deprecated test setup and teardown code
Python
apache-2.0
rdhyee/waterbutler,kwierman/waterbutler,hmoco/waterbutler,CenterForOpenScience/waterbutler,cosenal/waterbutler,Ghalko/waterbutler,rafaeldelucena/waterbutler,felliott/waterbutler,icereval/waterbutler,RCOSDP/waterbutler,TomBaxter/waterbutler,chrisseto/waterbutler,Johnetordoff/waterbutler
import asyncio import io import zipfile from unittest import mock from tornado import testing from waterbutler.core import streams from tests import utils class TestZipHandler(utils.HandlerTestCase): - def setUp(self): - super().setUp() - identity_future = asyncio.Future() - identity_future.set_result({ - 'auth': {}, - 'credentials': {}, - 'settings': {}, - }) - self.mock_identity = mock.Mock() - self.mock_identity.return_value = identity_future - self.identity_patcher = mock.patch('waterbutler.server.handlers.core.get_identity', self.mock_identity) - self.identity_patcher.start() - - def tearDown(self): - super().tearDown() - self.identity_patcher.stop() - - @mock.patch('waterbutler.core.utils.make_provider') @testing.gen_test - def test_download_stream(self, mock_make_provider): + def test_download_stream(self): - stream = asyncio.StreamReader() data = b'freddie brian john roger' + stream = streams.StringStream(data) - stream.feed_data(data) - stream.feed_eof() - stream.size = len(data) stream.content_type = 'application/octet-stream' zipstream = streams.ZipStreamReader(('file.txt', stream)) + self.mock_provider.zip = utils.MockCoroutine(return_value=zipstream) + - mock_provider = utils.mock_provider_method(mock_make_provider, - 'zip', - zipstream) resp = yield self.http_client.fetch( - self.get_url('/zip?provider=queenhub&path=freddie.png'), + self.get_url('/zip?provider=queenhub&path=/freddie.png'), ) zip = zipfile.ZipFile(io.BytesIO(resp.body)) assert zip.testzip() is None assert zip.open('file.txt').read() == data +
Remove deprecated test setup and teardown code
## Code Before: import asyncio import io import zipfile from unittest import mock from tornado import testing from waterbutler.core import streams from tests import utils class TestZipHandler(utils.HandlerTestCase): def setUp(self): super().setUp() identity_future = asyncio.Future() identity_future.set_result({ 'auth': {}, 'credentials': {}, 'settings': {}, }) self.mock_identity = mock.Mock() self.mock_identity.return_value = identity_future self.identity_patcher = mock.patch('waterbutler.server.handlers.core.get_identity', self.mock_identity) self.identity_patcher.start() def tearDown(self): super().tearDown() self.identity_patcher.stop() @mock.patch('waterbutler.core.utils.make_provider') @testing.gen_test def test_download_stream(self, mock_make_provider): stream = asyncio.StreamReader() data = b'freddie brian john roger' stream.feed_data(data) stream.feed_eof() stream.size = len(data) stream.content_type = 'application/octet-stream' zipstream = streams.ZipStreamReader(('file.txt', stream)) mock_provider = utils.mock_provider_method(mock_make_provider, 'zip', zipstream) resp = yield self.http_client.fetch( self.get_url('/zip?provider=queenhub&path=freddie.png'), ) zip = zipfile.ZipFile(io.BytesIO(resp.body)) assert zip.testzip() is None assert zip.open('file.txt').read() == data ## Instruction: Remove deprecated test setup and teardown code ## Code After: import asyncio import io import zipfile from unittest import mock from tornado import testing from waterbutler.core import streams from tests import utils class TestZipHandler(utils.HandlerTestCase): @testing.gen_test def test_download_stream(self): data = b'freddie brian john roger' stream = streams.StringStream(data) stream.content_type = 'application/octet-stream' zipstream = streams.ZipStreamReader(('file.txt', stream)) self.mock_provider.zip = utils.MockCoroutine(return_value=zipstream) resp = yield self.http_client.fetch( self.get_url('/zip?provider=queenhub&path=/freddie.png'), ) zip = 
zipfile.ZipFile(io.BytesIO(resp.body)) assert zip.testzip() is None assert zip.open('file.txt').read() == data
import asyncio import io import zipfile from unittest import mock from tornado import testing from waterbutler.core import streams from tests import utils class TestZipHandler(utils.HandlerTestCase): - def setUp(self): - super().setUp() - identity_future = asyncio.Future() - identity_future.set_result({ - 'auth': {}, - 'credentials': {}, - 'settings': {}, - }) - self.mock_identity = mock.Mock() - self.mock_identity.return_value = identity_future - self.identity_patcher = mock.patch('waterbutler.server.handlers.core.get_identity', self.mock_identity) - self.identity_patcher.start() - - def tearDown(self): - super().tearDown() - self.identity_patcher.stop() - - @mock.patch('waterbutler.core.utils.make_provider') @testing.gen_test - def test_download_stream(self, mock_make_provider): ? -------------------- + def test_download_stream(self): - stream = asyncio.StreamReader() data = b'freddie brian john roger' + stream = streams.StringStream(data) - stream.feed_data(data) - stream.feed_eof() - stream.size = len(data) stream.content_type = 'application/octet-stream' zipstream = streams.ZipStreamReader(('file.txt', stream)) + self.mock_provider.zip = utils.MockCoroutine(return_value=zipstream) + - mock_provider = utils.mock_provider_method(mock_make_provider, - 'zip', - zipstream) resp = yield self.http_client.fetch( - self.get_url('/zip?provider=queenhub&path=freddie.png'), + self.get_url('/zip?provider=queenhub&path=/freddie.png'), ? + ) zip = zipfile.ZipFile(io.BytesIO(resp.body)) assert zip.testzip() is None assert zip.open('file.txt').read() == data
bd0310663a4f646873119e6b01afe585d0ef40bb
lib/interpreters/ScssInterpreter.py
lib/interpreters/ScssInterpreter.py
import re from os import path from ..interpreter import * from ..SIMode import SIMode from ..utils import endswith class ScssInterpreter(Interpreter): def run(self): self.settings = { "extensions": [".scss"], "remove_extensions": [".scss"], "extra_extensions": [".jpg", ".png", ".gif", ".svg"], "ignore": [ "node_modules", ".git" ] } def parseModuleKey(self, value): if "/" in value: if value.startswith("./"): value = value[2:] if path.basename(value).startswith("_"): value = path.join(path.dirname(value), path.basename(value)[1:]) return super().parseModuleKey(value) def onSearchResultChosen(self, interpreted, option_key, value, mode=SIMode.REPLACE_MODE): if option_key == "extra_files": interpreted.handler_name = "file" super().onSearchResultChosen(interpreted, option_key, value, mode) def stringifyStatements(self, statements, handler_name=None, insert_type=Interpreted.IT_REPLACE): if handler_name == "file": return "url({0})".format(statements["module"]) return "@import \"{0}\";".format(statements["module"]) def getQueryObject(self, interpreted): return { "file": interpreted.statements["module"] }
import re from os import path from ..interpreter import * from ..SIMode import SIMode from ..utils import endswith class ScssInterpreter(Interpreter): def run(self): self.settings = { "extensions": [".scss"], "remove_extensions": [".scss"], "extra_extensions": [".jpg", ".png", ".gif", ".svg"], "ignore": [ "node_modules", ".git" ] } def parseModuleKey(self, value): if "/" in value: if value.startswith("./"): value = value[2:] if path.basename(value).startswith("_"): value = path.join(path.dirname(value), path.basename(value)[1:]) return super().parseModuleKey(value) def onSearchResultChosen(self, interpreted, option_key, value, mode=SIMode.REPLACE_MODE): if option_key == "extra_files": interpreted.handler_name = "file" super().onSearchResultChosen(interpreted, option_key, value, mode) def stringifyStatements(self, statements, handler_name=None, insert_type=Interpreted.IT_REPLACE): if handler_name == "file": return "url({0})".format(statements["module"]) if self.getSetting("single-quotes"): return "@import \'{0}\';".format(statements["module"]) return "@import \"{0}\";".format(statements["module"]) def getQueryObject(self, interpreted): return { "file": interpreted.statements["module"] }
Add single-quotes setting to scss
Add single-quotes setting to scss
Python
mit
vinpac/sublime-simple-import,vini175pa/sublime-simple-import,vini175pa/sublime-simple-import,vini175pa/simple-import-js,vini175pa/simple-import-js
import re from os import path from ..interpreter import * from ..SIMode import SIMode from ..utils import endswith class ScssInterpreter(Interpreter): def run(self): self.settings = { "extensions": [".scss"], "remove_extensions": [".scss"], "extra_extensions": [".jpg", ".png", ".gif", ".svg"], "ignore": [ "node_modules", ".git" ] } def parseModuleKey(self, value): if "/" in value: if value.startswith("./"): value = value[2:] if path.basename(value).startswith("_"): value = path.join(path.dirname(value), path.basename(value)[1:]) return super().parseModuleKey(value) def onSearchResultChosen(self, interpreted, option_key, value, mode=SIMode.REPLACE_MODE): if option_key == "extra_files": interpreted.handler_name = "file" super().onSearchResultChosen(interpreted, option_key, value, mode) def stringifyStatements(self, statements, handler_name=None, insert_type=Interpreted.IT_REPLACE): if handler_name == "file": return "url({0})".format(statements["module"]) + if self.getSetting("single-quotes"): + return "@import \'{0}\';".format(statements["module"]) + return "@import \"{0}\";".format(statements["module"]) def getQueryObject(self, interpreted): return { "file": interpreted.statements["module"] }
Add single-quotes setting to scss
## Code Before: import re from os import path from ..interpreter import * from ..SIMode import SIMode from ..utils import endswith class ScssInterpreter(Interpreter): def run(self): self.settings = { "extensions": [".scss"], "remove_extensions": [".scss"], "extra_extensions": [".jpg", ".png", ".gif", ".svg"], "ignore": [ "node_modules", ".git" ] } def parseModuleKey(self, value): if "/" in value: if value.startswith("./"): value = value[2:] if path.basename(value).startswith("_"): value = path.join(path.dirname(value), path.basename(value)[1:]) return super().parseModuleKey(value) def onSearchResultChosen(self, interpreted, option_key, value, mode=SIMode.REPLACE_MODE): if option_key == "extra_files": interpreted.handler_name = "file" super().onSearchResultChosen(interpreted, option_key, value, mode) def stringifyStatements(self, statements, handler_name=None, insert_type=Interpreted.IT_REPLACE): if handler_name == "file": return "url({0})".format(statements["module"]) return "@import \"{0}\";".format(statements["module"]) def getQueryObject(self, interpreted): return { "file": interpreted.statements["module"] } ## Instruction: Add single-quotes setting to scss ## Code After: import re from os import path from ..interpreter import * from ..SIMode import SIMode from ..utils import endswith class ScssInterpreter(Interpreter): def run(self): self.settings = { "extensions": [".scss"], "remove_extensions": [".scss"], "extra_extensions": [".jpg", ".png", ".gif", ".svg"], "ignore": [ "node_modules", ".git" ] } def parseModuleKey(self, value): if "/" in value: if value.startswith("./"): value = value[2:] if path.basename(value).startswith("_"): value = path.join(path.dirname(value), path.basename(value)[1:]) return super().parseModuleKey(value) def onSearchResultChosen(self, interpreted, option_key, value, mode=SIMode.REPLACE_MODE): if option_key == "extra_files": interpreted.handler_name = "file" super().onSearchResultChosen(interpreted, option_key, value, mode) def 
stringifyStatements(self, statements, handler_name=None, insert_type=Interpreted.IT_REPLACE): if handler_name == "file": return "url({0})".format(statements["module"]) if self.getSetting("single-quotes"): return "@import \'{0}\';".format(statements["module"]) return "@import \"{0}\";".format(statements["module"]) def getQueryObject(self, interpreted): return { "file": interpreted.statements["module"] }
import re from os import path from ..interpreter import * from ..SIMode import SIMode from ..utils import endswith class ScssInterpreter(Interpreter): def run(self): self.settings = { "extensions": [".scss"], "remove_extensions": [".scss"], "extra_extensions": [".jpg", ".png", ".gif", ".svg"], "ignore": [ "node_modules", ".git" ] } def parseModuleKey(self, value): if "/" in value: if value.startswith("./"): value = value[2:] if path.basename(value).startswith("_"): value = path.join(path.dirname(value), path.basename(value)[1:]) return super().parseModuleKey(value) def onSearchResultChosen(self, interpreted, option_key, value, mode=SIMode.REPLACE_MODE): if option_key == "extra_files": interpreted.handler_name = "file" super().onSearchResultChosen(interpreted, option_key, value, mode) def stringifyStatements(self, statements, handler_name=None, insert_type=Interpreted.IT_REPLACE): if handler_name == "file": return "url({0})".format(statements["module"]) + if self.getSetting("single-quotes"): + return "@import \'{0}\';".format(statements["module"]) + return "@import \"{0}\";".format(statements["module"]) def getQueryObject(self, interpreted): return { "file": interpreted.statements["module"] }
e23d5a64cfd5604f74cce583db3366f2cabb5e1f
tests/basics/builtin_minmax.py
tests/basics/builtin_minmax.py
print(min(0,1)) print(min(1,0)) print(min(0,-1)) print(min(-1,0)) print(max(0,1)) print(max(1,0)) print(max(0,-1)) print(max(-1,0)) print(min([1,2,4,0,-1,2])) print(max([1,2,4,0,-1,2])) # test with key function lst = [2, 1, 3, 4] print(min(lst, key=lambda x:x)) print(min(lst, key=lambda x:-x)) print(min(1, 2, 3, 4, key=lambda x:-x)) print(min(4, 3, 2, 1, key=lambda x:-x)) print(max(lst, key=lambda x:x)) print(max(lst, key=lambda x:-x)) print(max(1, 2, 3, 4, key=lambda x:-x)) print(max(4, 3, 2, 1, key=lambda x:-x)) # need at least 1 item in the iterable try: min([]) except ValueError: print("ValueError")
print(min(0,1)) print(min(1,0)) print(min(0,-1)) print(min(-1,0)) print(max(0,1)) print(max(1,0)) print(max(0,-1)) print(max(-1,0)) print(min([1,2,4,0,-1,2])) print(max([1,2,4,0,-1,2])) # test with key function lst = [2, 1, 3, 4] print(min(lst, key=lambda x:x)) print(min(lst, key=lambda x:-x)) print(min(1, 2, 3, 4, key=lambda x:-x)) print(min(4, 3, 2, 1, key=lambda x:-x)) print(max(lst, key=lambda x:x)) print(max(lst, key=lambda x:-x)) print(max(1, 2, 3, 4, key=lambda x:-x)) print(max(4, 3, 2, 1, key=lambda x:-x)) # need at least 1 item in the iterable try: min([]) except ValueError: print("ValueError") # 'default' tests print(min([1, 2, 3, 4, 5], default=-1)) print(min([], default=-1)) print(max([1, 2, 3, 4, 5], default=-1)) print(max([], default=-1))
Add min/max "default" argument test
tests: Add min/max "default" argument test
Python
mit
mpalomer/micropython,dinau/micropython,henriknelson/micropython,deshipu/micropython,blazewicz/micropython,supergis/micropython,MrSurly/micropython-esp32,henriknelson/micropython,torwag/micropython,matthewelse/micropython,swegener/micropython,dmazzella/micropython,turbinenreiter/micropython,Timmenem/micropython,tralamazza/micropython,ryannathans/micropython,oopy/micropython,infinnovation/micropython,AriZuu/micropython,martinribelotta/micropython,micropython/micropython-esp32,ernesto-g/micropython,bvernoux/micropython,misterdanb/micropython,galenhz/micropython,toolmacher/micropython,micropython/micropython-esp32,mpalomer/micropython,micropython/micropython-esp32,misterdanb/micropython,hiway/micropython,ganshun666/micropython,alex-march/micropython,pramasoul/micropython,cwyark/micropython,pozetroninc/micropython,Timmenem/micropython,adamkh/micropython,pozetroninc/micropython,adafruit/micropython,pozetroninc/micropython,infinnovation/micropython,adafruit/circuitpython,dxxb/micropython,dxxb/micropython,dinau/micropython,tobbad/micropython,EcmaXp/micropython,martinribelotta/micropython,misterdanb/micropython,tralamazza/micropython,TDAbboud/micropython,selste/micropython,hosaka/micropython,martinribelotta/micropython,adafruit/micropython,torwag/micropython,MrSurly/micropython,emfcamp/micropython,Peetz0r/micropython-esp32,emfcamp/micropython,xhat/micropython,selste/micropython,pramasoul/micropython,henriknelson/micropython,kerneltask/micropython,adafruit/circuitpython,tuc-osg/micropython,alex-robbins/micropython,SHA2017-badge/micropython-esp32,selste/micropython,trezor/micropython,Timmenem/micropython,adafruit/micropython,dinau/micropython,selste/micropython,alex-march/micropython,pfalcon/micropython,alex-robbins/micropython,HenrikSolver/micropython,ryannathans/micropython,puuu/micropython,alex-robbins/micropython,supergis/micropython,adafruit/circuitpython,praemdonck/micropython,pfalcon/micropython,tobbad/micropython,infinnovation/micropython,ernesto-g/micropython,matthewe
lse/micropython,PappaPeppar/micropython,MrSurly/micropython,mhoffma/micropython,ganshun666/micropython,mianos/micropython,praemdonck/micropython,galenhz/micropython,dmazzella/micropython,misterdanb/micropython,neilh10/micropython,henriknelson/micropython,oopy/micropython,galenhz/micropython,micropython/micropython-esp32,pfalcon/micropython,danicampora/micropython,matthewelse/micropython,TDAbboud/micropython,toolmacher/micropython,mpalomer/micropython,swegener/micropython,hosaka/micropython,tuc-osg/micropython,turbinenreiter/micropython,galenhz/micropython,blazewicz/micropython,lowRISC/micropython,jmarcelino/pycom-micropython,matthewelse/micropython,drrk/micropython,turbinenreiter/micropython,jmarcelino/pycom-micropython,oopy/micropython,chrisdearman/micropython,adafruit/circuitpython,mhoffma/micropython,lowRISC/micropython,redbear/micropython,ganshun666/micropython,mpalomer/micropython,xhat/micropython,alex-march/micropython,praemdonck/micropython,pfalcon/micropython,EcmaXp/micropython,matthewelse/micropython,xhat/micropython,redbear/micropython,mianos/micropython,TDAbboud/micropython,TDAbboud/micropython,redbear/micropython,torwag/micropython,chrisdearman/micropython,MrSurly/micropython-esp32,neilh10/micropython,EcmaXp/micropython,xhat/micropython,blazewicz/micropython,alex-robbins/micropython,dxxb/micropython,oopy/micropython,trezor/micropython,martinribelotta/micropython,ganshun666/micropython,Peetz0r/micropython-esp32,SHA2017-badge/micropython-esp32,puuu/micropython,bvernoux/micropython,tobbad/micropython,mhoffma/micropython,emfcamp/micropython,infinnovation/micropython,danicampora/micropython,pramasoul/micropython,Peetz0r/micropython-esp32,kerneltask/micropython,mhoffma/micropython,redbear/micropython,pozetroninc/micropython,dxxb/micropython,ernesto-g/micropython,lowRISC/micropython,swegener/micropython,EcmaXp/micropython,hosaka/micropython,alex-march/micropython,emfcamp/micropython,dinau/micropython,danicampora/micropython,chrisdearman/micropython,ryannathans/
micropython,cwyark/micropython,trezor/micropython,MrSurly/micropython,adamkh/micropython,drrk/micropython,SHA2017-badge/micropython-esp32,AriZuu/micropython,neilh10/micropython,trezor/micropython,HenrikSolver/micropython,mhoffma/micropython,toolmacher/micropython,deshipu/micropython,drrk/micropython,dmazzella/micropython,hosaka/micropython,tralamazza/micropython,alex-robbins/micropython,SHA2017-badge/micropython-esp32,tuc-osg/micropython,tobbad/micropython,kerneltask/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,lowRISC/micropython,praemdonck/micropython,pozetroninc/micropython,MrSurly/micropython-esp32,trezor/micropython,MrSurly/micropython,cwyark/micropython,selste/micropython,adafruit/circuitpython,kerneltask/micropython,turbinenreiter/micropython,swegener/micropython,HenrikSolver/micropython,praemdonck/micropython,alex-march/micropython,drrk/micropython,adafruit/micropython,HenrikSolver/micropython,pfalcon/micropython,cwyark/micropython,adamkh/micropython,torwag/micropython,galenhz/micropython,bvernoux/micropython,tobbad/micropython,ernesto-g/micropython,hosaka/micropython,kerneltask/micropython,tralamazza/micropython,TDAbboud/micropython,jmarcelino/pycom-micropython,neilh10/micropython,danicampora/micropython,HenrikSolver/micropython,EcmaXp/micropython,mpalomer/micropython,xhat/micropython,deshipu/micropython,pramasoul/micropython,puuu/micropython,adafruit/circuitpython,blazewicz/micropython,neilh10/micropython,AriZuu/micropython,adamkh/micropython,hiway/micropython,hiway/micropython,supergis/micropython,pramasoul/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,toolmacher/micropython,danicampora/micropython,mianos/micropython,Timmenem/micropython,drrk/micropython,puuu/micropython,torwag/micropython,dinau/micropython,adamkh/micropython,adafruit/micropython,hiway/micropython,blazewicz/micropython,turbinenreiter/micropython,martinribelotta/micropython,puuu/micropython,AriZuu/micropython,emfcamp/micropython,oopy/micropython,redbe
ar/micropython,PappaPeppar/micropython,infinnovation/micropython,chrisdearman/micropython,henriknelson/micropython,ganshun666/micropython,jmarcelino/pycom-micropython,Timmenem/micropython,bvernoux/micropython,SHA2017-badge/micropython-esp32,swegener/micropython,mianos/micropython,supergis/micropython,mianos/micropython,dxxb/micropython,cwyark/micropython,lowRISC/micropython,MrSurly/micropython-esp32,misterdanb/micropython,hiway/micropython,PappaPeppar/micropython,chrisdearman/micropython,ryannathans/micropython,MrSurly/micropython,AriZuu/micropython,deshipu/micropython,tuc-osg/micropython,dmazzella/micropython,jmarcelino/pycom-micropython,ernesto-g/micropython,toolmacher/micropython,matthewelse/micropython,PappaPeppar/micropython,deshipu/micropython,tuc-osg/micropython,Peetz0r/micropython-esp32,bvernoux/micropython,ryannathans/micropython,supergis/micropython
print(min(0,1)) print(min(1,0)) print(min(0,-1)) print(min(-1,0)) print(max(0,1)) print(max(1,0)) print(max(0,-1)) print(max(-1,0)) print(min([1,2,4,0,-1,2])) print(max([1,2,4,0,-1,2])) # test with key function lst = [2, 1, 3, 4] print(min(lst, key=lambda x:x)) print(min(lst, key=lambda x:-x)) print(min(1, 2, 3, 4, key=lambda x:-x)) print(min(4, 3, 2, 1, key=lambda x:-x)) print(max(lst, key=lambda x:x)) print(max(lst, key=lambda x:-x)) print(max(1, 2, 3, 4, key=lambda x:-x)) print(max(4, 3, 2, 1, key=lambda x:-x)) # need at least 1 item in the iterable try: min([]) except ValueError: print("ValueError") + # 'default' tests + print(min([1, 2, 3, 4, 5], default=-1)) + print(min([], default=-1)) + print(max([1, 2, 3, 4, 5], default=-1)) + print(max([], default=-1)) +
Add min/max "default" argument test
## Code Before: print(min(0,1)) print(min(1,0)) print(min(0,-1)) print(min(-1,0)) print(max(0,1)) print(max(1,0)) print(max(0,-1)) print(max(-1,0)) print(min([1,2,4,0,-1,2])) print(max([1,2,4,0,-1,2])) # test with key function lst = [2, 1, 3, 4] print(min(lst, key=lambda x:x)) print(min(lst, key=lambda x:-x)) print(min(1, 2, 3, 4, key=lambda x:-x)) print(min(4, 3, 2, 1, key=lambda x:-x)) print(max(lst, key=lambda x:x)) print(max(lst, key=lambda x:-x)) print(max(1, 2, 3, 4, key=lambda x:-x)) print(max(4, 3, 2, 1, key=lambda x:-x)) # need at least 1 item in the iterable try: min([]) except ValueError: print("ValueError") ## Instruction: Add min/max "default" agrument test ## Code After: print(min(0,1)) print(min(1,0)) print(min(0,-1)) print(min(-1,0)) print(max(0,1)) print(max(1,0)) print(max(0,-1)) print(max(-1,0)) print(min([1,2,4,0,-1,2])) print(max([1,2,4,0,-1,2])) # test with key function lst = [2, 1, 3, 4] print(min(lst, key=lambda x:x)) print(min(lst, key=lambda x:-x)) print(min(1, 2, 3, 4, key=lambda x:-x)) print(min(4, 3, 2, 1, key=lambda x:-x)) print(max(lst, key=lambda x:x)) print(max(lst, key=lambda x:-x)) print(max(1, 2, 3, 4, key=lambda x:-x)) print(max(4, 3, 2, 1, key=lambda x:-x)) # need at least 1 item in the iterable try: min([]) except ValueError: print("ValueError") # 'default' tests print(min([1, 2, 3, 4, 5], default=-1)) print(min([], default=-1)) print(max([1, 2, 3, 4, 5], default=-1)) print(max([], default=-1))
print(min(0,1)) print(min(1,0)) print(min(0,-1)) print(min(-1,0)) print(max(0,1)) print(max(1,0)) print(max(0,-1)) print(max(-1,0)) print(min([1,2,4,0,-1,2])) print(max([1,2,4,0,-1,2])) # test with key function lst = [2, 1, 3, 4] print(min(lst, key=lambda x:x)) print(min(lst, key=lambda x:-x)) print(min(1, 2, 3, 4, key=lambda x:-x)) print(min(4, 3, 2, 1, key=lambda x:-x)) print(max(lst, key=lambda x:x)) print(max(lst, key=lambda x:-x)) print(max(1, 2, 3, 4, key=lambda x:-x)) print(max(4, 3, 2, 1, key=lambda x:-x)) # need at least 1 item in the iterable try: min([]) except ValueError: print("ValueError") + + # 'default' tests + print(min([1, 2, 3, 4, 5], default=-1)) + print(min([], default=-1)) + print(max([1, 2, 3, 4, 5], default=-1)) + print(max([], default=-1))
f325b02c66810cff9e3ace8b31e7f3a7b410342f
awx/wsgi.py
awx/wsgi.py
import logging from django.core.wsgi import get_wsgi_application from awx import prepare_env from awx import __version__ as tower_version """ WSGI config for AWX project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/ """ # Prepare the AWX environment. prepare_env() logger = logging.getLogger('awx.main.models.jobs') try: fd = open("/var/lib/awx/.tower_version", "r") if fd.read().strip() != tower_version: raise Exception() except Exception: logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") # Return the default Django WSGI application. application = get_wsgi_application()
import logging from awx import __version__ as tower_version # Prepare the AWX environment. from awx import prepare_env prepare_env() from django.core.wsgi import get_wsgi_application """ WSGI config for AWX project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/ """ logger = logging.getLogger('awx.main.models.jobs') try: fd = open("/var/lib/awx/.tower_version", "r") if fd.read().strip() != tower_version: raise Exception() except Exception: logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") # Return the default Django WSGI application. application = get_wsgi_application()
Fix import error by calling prepare_env first
Fix import error by calling prepare_env first
Python
apache-2.0
wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx,snahelou/awx,wwitzel3/awx,snahelou/awx
import logging + from awx import __version__ as tower_version + + # Prepare the AWX environment. + from awx import prepare_env + prepare_env() + from django.core.wsgi import get_wsgi_application - from awx import prepare_env - from awx import __version__ as tower_version """ WSGI config for AWX project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/ """ - - # Prepare the AWX environment. - prepare_env() logger = logging.getLogger('awx.main.models.jobs') try: fd = open("/var/lib/awx/.tower_version", "r") if fd.read().strip() != tower_version: raise Exception() except Exception: logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") # Return the default Django WSGI application. application = get_wsgi_application()
Fix import error by calling prepare_env first
## Code Before: import logging from django.core.wsgi import get_wsgi_application from awx import prepare_env from awx import __version__ as tower_version """ WSGI config for AWX project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/ """ # Prepare the AWX environment. prepare_env() logger = logging.getLogger('awx.main.models.jobs') try: fd = open("/var/lib/awx/.tower_version", "r") if fd.read().strip() != tower_version: raise Exception() except Exception: logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") # Return the default Django WSGI application. application = get_wsgi_application() ## Instruction: Fix import error by calling prepare_env first ## Code After: import logging from awx import __version__ as tower_version # Prepare the AWX environment. from awx import prepare_env prepare_env() from django.core.wsgi import get_wsgi_application """ WSGI config for AWX project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/ """ logger = logging.getLogger('awx.main.models.jobs') try: fd = open("/var/lib/awx/.tower_version", "r") if fd.read().strip() != tower_version: raise Exception() except Exception: logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") # Return the default Django WSGI application. application = get_wsgi_application()
import logging + from awx import __version__ as tower_version + + # Prepare the AWX environment. + from awx import prepare_env + prepare_env() + from django.core.wsgi import get_wsgi_application - from awx import prepare_env - from awx import __version__ as tower_version """ WSGI config for AWX project. It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/dev/howto/deployment/wsgi/ """ - - # Prepare the AWX environment. - prepare_env() logger = logging.getLogger('awx.main.models.jobs') try: fd = open("/var/lib/awx/.tower_version", "r") if fd.read().strip() != tower_version: raise Exception() except Exception: logger.error("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") raise Exception("Missing or incorrect metadata for Tower version. Ensure Tower was installed using the setup playbook.") # Return the default Django WSGI application. application = get_wsgi_application()
1a9c9b60d8e0b69b5d196ff8323befd6d9e330aa
make_mozilla/events/models.py
make_mozilla/events/models.py
from django.contrib.gis.db import models from django.contrib.gis import geos from datetime import datetime class Venue(models.Model): name = models.CharField(max_length=255) street_address = models.TextField() country = models.CharField(max_length=255) location = models.PointField(blank=True) objects = models.GeoManager() def __init__(self, *args, **kwargs): super(Venue, self).__init__(*args, **kwargs) if self.location is None: self.location = geos.Point(0, 0) @property def latitude(self): return self.location.x @latitude.setter def latitude(self, value): self.location.x = value @property def longitude(self): return self.location.y @longitude.setter def longitude(self, value): self.location.y = value class Event(models.Model): name = models.CharField(max_length=255) event_url = models.CharField(max_length=255, blank = True) venue = models.ForeignKey(Venue) start = models.DateTimeField(null = True, blank = True) end = models.DateTimeField(null = True, blank = True) source_id = models.CharField(max_length=255) organiser_email = models.CharField(max_length=255) objects = models.GeoManager() @classmethod def upcoming(self): return self.objects.filter(start__gte = datetime.now())
from django.contrib.gis.db import models from django.contrib.gis import geos from datetime import datetime class Venue(models.Model): name = models.CharField(max_length=255) street_address = models.TextField() country = models.CharField(max_length=255) location = models.PointField(blank=True) objects = models.GeoManager() def __init__(self, *args, **kwargs): super(Venue, self).__init__(*args, **kwargs) if self.location is None: self.location = geos.Point(0, 0) @property def latitude(self): return self.location.x @latitude.setter def latitude(self, value): self.location.x = value @property def longitude(self): return self.location.y @longitude.setter def longitude(self, value): self.location.y = value class Event(models.Model): name = models.CharField(max_length = 255) event_url = models.CharField(max_length = 255, blank = True) venue = models.ForeignKey(Venue) start = models.DateTimeField(null = True, blank = True) end = models.DateTimeField(null = True, blank = True) source_id = models.CharField(max_length = 255, blank = True) organiser_email = models.CharField(max_length = 255) objects = models.GeoManager() @classmethod def upcoming(self): return self.objects.filter(start__gte = datetime.now())
Allow blank source_id in event, tweak formatting of the code
Allow blank source_id in event, tweak formatting of the code
Python
bsd-3-clause
mozilla/make.mozilla.org,mozilla/make.mozilla.org,mozilla/make.mozilla.org,mozilla/make.mozilla.org
from django.contrib.gis.db import models from django.contrib.gis import geos from datetime import datetime class Venue(models.Model): name = models.CharField(max_length=255) street_address = models.TextField() country = models.CharField(max_length=255) location = models.PointField(blank=True) objects = models.GeoManager() def __init__(self, *args, **kwargs): super(Venue, self).__init__(*args, **kwargs) if self.location is None: self.location = geos.Point(0, 0) @property def latitude(self): return self.location.x @latitude.setter def latitude(self, value): self.location.x = value @property def longitude(self): return self.location.y @longitude.setter def longitude(self, value): self.location.y = value class Event(models.Model): - name = models.CharField(max_length=255) + name = models.CharField(max_length = 255) - event_url = models.CharField(max_length=255, blank = True) + event_url = models.CharField(max_length = 255, blank = True) venue = models.ForeignKey(Venue) start = models.DateTimeField(null = True, blank = True) end = models.DateTimeField(null = True, blank = True) - source_id = models.CharField(max_length=255) + source_id = models.CharField(max_length = 255, blank = True) - organiser_email = models.CharField(max_length=255) + organiser_email = models.CharField(max_length = 255) objects = models.GeoManager() @classmethod def upcoming(self): return self.objects.filter(start__gte = datetime.now())
Allow blank source_id in event, tweak formatting of the code
## Code Before: from django.contrib.gis.db import models from django.contrib.gis import geos from datetime import datetime class Venue(models.Model): name = models.CharField(max_length=255) street_address = models.TextField() country = models.CharField(max_length=255) location = models.PointField(blank=True) objects = models.GeoManager() def __init__(self, *args, **kwargs): super(Venue, self).__init__(*args, **kwargs) if self.location is None: self.location = geos.Point(0, 0) @property def latitude(self): return self.location.x @latitude.setter def latitude(self, value): self.location.x = value @property def longitude(self): return self.location.y @longitude.setter def longitude(self, value): self.location.y = value class Event(models.Model): name = models.CharField(max_length=255) event_url = models.CharField(max_length=255, blank = True) venue = models.ForeignKey(Venue) start = models.DateTimeField(null = True, blank = True) end = models.DateTimeField(null = True, blank = True) source_id = models.CharField(max_length=255) organiser_email = models.CharField(max_length=255) objects = models.GeoManager() @classmethod def upcoming(self): return self.objects.filter(start__gte = datetime.now()) ## Instruction: Allow blank source_id in event, tweak formatting of the code ## Code After: from django.contrib.gis.db import models from django.contrib.gis import geos from datetime import datetime class Venue(models.Model): name = models.CharField(max_length=255) street_address = models.TextField() country = models.CharField(max_length=255) location = models.PointField(blank=True) objects = models.GeoManager() def __init__(self, *args, **kwargs): super(Venue, self).__init__(*args, **kwargs) if self.location is None: self.location = geos.Point(0, 0) @property def latitude(self): return self.location.x @latitude.setter def latitude(self, value): self.location.x = value @property def longitude(self): return self.location.y @longitude.setter def longitude(self, value): 
self.location.y = value class Event(models.Model): name = models.CharField(max_length = 255) event_url = models.CharField(max_length = 255, blank = True) venue = models.ForeignKey(Venue) start = models.DateTimeField(null = True, blank = True) end = models.DateTimeField(null = True, blank = True) source_id = models.CharField(max_length = 255, blank = True) organiser_email = models.CharField(max_length = 255) objects = models.GeoManager() @classmethod def upcoming(self): return self.objects.filter(start__gte = datetime.now())
from django.contrib.gis.db import models from django.contrib.gis import geos from datetime import datetime class Venue(models.Model): name = models.CharField(max_length=255) street_address = models.TextField() country = models.CharField(max_length=255) location = models.PointField(blank=True) objects = models.GeoManager() def __init__(self, *args, **kwargs): super(Venue, self).__init__(*args, **kwargs) if self.location is None: self.location = geos.Point(0, 0) @property def latitude(self): return self.location.x @latitude.setter def latitude(self, value): self.location.x = value @property def longitude(self): return self.location.y @longitude.setter def longitude(self, value): self.location.y = value class Event(models.Model): - name = models.CharField(max_length=255) + name = models.CharField(max_length = 255) ? + + - event_url = models.CharField(max_length=255, blank = True) + event_url = models.CharField(max_length = 255, blank = True) ? + + venue = models.ForeignKey(Venue) start = models.DateTimeField(null = True, blank = True) end = models.DateTimeField(null = True, blank = True) - source_id = models.CharField(max_length=255) + source_id = models.CharField(max_length = 255, blank = True) ? + + ++++++++++++++ - organiser_email = models.CharField(max_length=255) + organiser_email = models.CharField(max_length = 255) ? + + objects = models.GeoManager() @classmethod def upcoming(self): return self.objects.filter(start__gte = datetime.now())
5b282d9322a676b4185fcd253f338a342ec5e5ce
.config/i3/py3status/playerctlbar.py
.config/i3/py3status/playerctlbar.py
import subprocess def run(*cmdlist): return subprocess.run(cmdlist, stdout=subprocess.PIPE).stdout.decode() def player_args(players): if not players: return 'playerctl', else: return 'playerctl', '-p', players def get_status(players): status = run(*player_args(players), 'status')[:-1] if status in ('Playing', 'Paused'): return status return '' def get_info(players, fmt): args = 'metadata', '--format', f'{fmt}' return run(*player_args(players), *args).strip() class Py3status: players = '' format = '{{ artist }} / {{ title }}' def spotbar(self): text_format = "[[ {info} ]]|[ {status} ]" params = {'status': get_status(self.players)} if params['status'] == 'Playing': params['info'] = get_info(self.players, self.format) if params['info'] == '/ -': params['info'] = None return { 'full_text': self.py3.safe_format(text_format, params), 'cached_until': self.py3.time_in(seconds=1) } def on_click(self, event): if event['button'] == 1: run('playerctl', 'play-pause') if __name__ == '__main__': from py3status.module_test import module_test module_test(Py3status)
import subprocess def run(*cmdlist): return subprocess.run( cmdlist, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL).stdout.decode() def player_args(players): if not players: return 'playerctl', else: return 'playerctl', '-p', players def get_status(players): status = run(*player_args(players), 'status')[:-1] if status in ('Playing', 'Paused'): return status return '' def get_info(players, fmt): args = 'metadata', '--format', f'{fmt}' return run(*player_args(players), *args).strip() class Py3status: players = '' format = '{{ artist }} / {{ title }}' def spotbar(self): text_format = "[[ {info} ]]|[ {status} ]" params = {'status': get_status(self.players)} if params['status'] == 'Playing': params['info'] = get_info(self.players, self.format) if params['info'] == '/ -': params['info'] = None return { 'full_text': self.py3.safe_format(text_format, params), 'cached_until': self.py3.time_in(seconds=1) } def on_click(self, event): if event['button'] == 1: run('playerctl', 'play-pause') if __name__ == '__main__': from py3status.module_test import module_test module_test(Py3status)
Fix stderr from playerctl bar
Fix stderr from playerctl bar
Python
unlicense
louisswarren/dotfiles,louisswarren/dotfiles
import subprocess def run(*cmdlist): - return subprocess.run(cmdlist, stdout=subprocess.PIPE).stdout.decode() + return subprocess.run( + cmdlist, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL).stdout.decode() def player_args(players): if not players: return 'playerctl', else: return 'playerctl', '-p', players def get_status(players): status = run(*player_args(players), 'status')[:-1] if status in ('Playing', 'Paused'): return status return '' def get_info(players, fmt): args = 'metadata', '--format', f'{fmt}' return run(*player_args(players), *args).strip() class Py3status: players = '' format = '{{ artist }} / {{ title }}' def spotbar(self): text_format = "[[ {info} ]]|[ {status} ]" params = {'status': get_status(self.players)} if params['status'] == 'Playing': params['info'] = get_info(self.players, self.format) if params['info'] == '/ -': params['info'] = None return { 'full_text': self.py3.safe_format(text_format, params), 'cached_until': self.py3.time_in(seconds=1) } def on_click(self, event): if event['button'] == 1: run('playerctl', 'play-pause') if __name__ == '__main__': from py3status.module_test import module_test module_test(Py3status)
Fix stderr from playerctl bar
## Code Before: import subprocess def run(*cmdlist): return subprocess.run(cmdlist, stdout=subprocess.PIPE).stdout.decode() def player_args(players): if not players: return 'playerctl', else: return 'playerctl', '-p', players def get_status(players): status = run(*player_args(players), 'status')[:-1] if status in ('Playing', 'Paused'): return status return '' def get_info(players, fmt): args = 'metadata', '--format', f'{fmt}' return run(*player_args(players), *args).strip() class Py3status: players = '' format = '{{ artist }} / {{ title }}' def spotbar(self): text_format = "[[ {info} ]]|[ {status} ]" params = {'status': get_status(self.players)} if params['status'] == 'Playing': params['info'] = get_info(self.players, self.format) if params['info'] == '/ -': params['info'] = None return { 'full_text': self.py3.safe_format(text_format, params), 'cached_until': self.py3.time_in(seconds=1) } def on_click(self, event): if event['button'] == 1: run('playerctl', 'play-pause') if __name__ == '__main__': from py3status.module_test import module_test module_test(Py3status) ## Instruction: Fix stderr from playerctl bar ## Code After: import subprocess def run(*cmdlist): return subprocess.run( cmdlist, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL).stdout.decode() def player_args(players): if not players: return 'playerctl', else: return 'playerctl', '-p', players def get_status(players): status = run(*player_args(players), 'status')[:-1] if status in ('Playing', 'Paused'): return status return '' def get_info(players, fmt): args = 'metadata', '--format', f'{fmt}' return run(*player_args(players), *args).strip() class Py3status: players = '' format = '{{ artist }} / {{ title }}' def spotbar(self): text_format = "[[ {info} ]]|[ {status} ]" params = {'status': get_status(self.players)} if params['status'] == 'Playing': params['info'] = get_info(self.players, self.format) if params['info'] == '/ -': params['info'] = None return { 'full_text': 
self.py3.safe_format(text_format, params), 'cached_until': self.py3.time_in(seconds=1) } def on_click(self, event): if event['button'] == 1: run('playerctl', 'play-pause') if __name__ == '__main__': from py3status.module_test import module_test module_test(Py3status)
import subprocess def run(*cmdlist): - return subprocess.run(cmdlist, stdout=subprocess.PIPE).stdout.decode() + return subprocess.run( + cmdlist, + stdout=subprocess.PIPE, + stderr=subprocess.DEVNULL).stdout.decode() def player_args(players): if not players: return 'playerctl', else: return 'playerctl', '-p', players def get_status(players): status = run(*player_args(players), 'status')[:-1] if status in ('Playing', 'Paused'): return status return '' def get_info(players, fmt): args = 'metadata', '--format', f'{fmt}' return run(*player_args(players), *args).strip() class Py3status: players = '' format = '{{ artist }} / {{ title }}' def spotbar(self): text_format = "[[ {info} ]]|[ {status} ]" params = {'status': get_status(self.players)} if params['status'] == 'Playing': params['info'] = get_info(self.players, self.format) if params['info'] == '/ -': params['info'] = None return { 'full_text': self.py3.safe_format(text_format, params), 'cached_until': self.py3.time_in(seconds=1) } def on_click(self, event): if event['button'] == 1: run('playerctl', 'play-pause') if __name__ == '__main__': from py3status.module_test import module_test module_test(Py3status)
b2de891e75dc84e809b9c35222e6bc8fe44c3d37
test_arg.py
test_arg.py
from __future__ import print_function, division import unittest as ut import numpy as np import ARG.arg as arg class ARGTestCase(ut.TestCase): def test_truisms(self): """Test parameter class""" param = arg.ARGparams() self.assertTrue(isinstance(param.scale, float)) self.assertTrue(isinstance(param.rho, float)) self.assertTrue(isinstance(param.delta, float)) self.assertTrue(isinstance(param.beta(), float)) if __name__ == '__main__': ut.main()
from __future__ import print_function, division import unittest as ut import numpy as np import ARG.arg as arg class ARGTestCase(ut.TestCase): def test_param_class(self): """Test parameter class.""" param = arg.ARGparams() self.assertIsInstance(param.scale, float) self.assertIsInstance(param.rho, float) self.assertIsInstance(param.delta, float) self.assertIsInstance(param.beta(), float) def test_abc_functions(self): """Test functions a, b, c of ARG model.""" argmodel = arg.ARG() uarg = np.linspace(-50, 100, 100) self.assertIsInstance(argmodel.afun(uarg), np.ndarray) self.assertIsInstance(argmodel.bfun(uarg), np.ndarray) self.assertIsInstance(argmodel.cfun(uarg), np.ndarray) if __name__ == '__main__': ut.main()
Test functions a, b, c
Test functions a, b, c
Python
mit
khrapovs/argamma
from __future__ import print_function, division import unittest as ut import numpy as np import ARG.arg as arg class ARGTestCase(ut.TestCase): - def test_truisms(self): + def test_param_class(self): - """Test parameter class""" + """Test parameter class.""" + param = arg.ARGparams() - self.assertTrue(isinstance(param.scale, float)) + self.assertIsInstance(param.scale, float) - self.assertTrue(isinstance(param.rho, float)) + self.assertIsInstance(param.rho, float) - self.assertTrue(isinstance(param.delta, float)) + self.assertIsInstance(param.delta, float) - self.assertTrue(isinstance(param.beta(), float)) + self.assertIsInstance(param.beta(), float) + + def test_abc_functions(self): + """Test functions a, b, c of ARG model.""" + + argmodel = arg.ARG() + uarg = np.linspace(-50, 100, 100) + self.assertIsInstance(argmodel.afun(uarg), np.ndarray) + self.assertIsInstance(argmodel.bfun(uarg), np.ndarray) + self.assertIsInstance(argmodel.cfun(uarg), np.ndarray) if __name__ == '__main__': ut.main()
Test functions a, b, c
## Code Before: from __future__ import print_function, division import unittest as ut import numpy as np import ARG.arg as arg class ARGTestCase(ut.TestCase): def test_truisms(self): """Test parameter class""" param = arg.ARGparams() self.assertTrue(isinstance(param.scale, float)) self.assertTrue(isinstance(param.rho, float)) self.assertTrue(isinstance(param.delta, float)) self.assertTrue(isinstance(param.beta(), float)) if __name__ == '__main__': ut.main() ## Instruction: Test functions a, b, c ## Code After: from __future__ import print_function, division import unittest as ut import numpy as np import ARG.arg as arg class ARGTestCase(ut.TestCase): def test_param_class(self): """Test parameter class.""" param = arg.ARGparams() self.assertIsInstance(param.scale, float) self.assertIsInstance(param.rho, float) self.assertIsInstance(param.delta, float) self.assertIsInstance(param.beta(), float) def test_abc_functions(self): """Test functions a, b, c of ARG model.""" argmodel = arg.ARG() uarg = np.linspace(-50, 100, 100) self.assertIsInstance(argmodel.afun(uarg), np.ndarray) self.assertIsInstance(argmodel.bfun(uarg), np.ndarray) self.assertIsInstance(argmodel.cfun(uarg), np.ndarray) if __name__ == '__main__': ut.main()
from __future__ import print_function, division import unittest as ut import numpy as np import ARG.arg as arg class ARGTestCase(ut.TestCase): - def test_truisms(self): ? ^ ^^ - + def test_param_class(self): ? ^^ ^^^^^^ - """Test parameter class""" + """Test parameter class.""" ? + + param = arg.ARGparams() - self.assertTrue(isinstance(param.scale, float)) ? ^^^^^^ ^ - + self.assertIsInstance(param.scale, float) ? ^ ^ - self.assertTrue(isinstance(param.rho, float)) ? ^^^^^^ ^ - + self.assertIsInstance(param.rho, float) ? ^ ^ - self.assertTrue(isinstance(param.delta, float)) ? ^^^^^^ ^ - + self.assertIsInstance(param.delta, float) ? ^ ^ - self.assertTrue(isinstance(param.beta(), float)) ? ^^^^^^ ^ - + self.assertIsInstance(param.beta(), float) ? ^ ^ + + def test_abc_functions(self): + """Test functions a, b, c of ARG model.""" + + argmodel = arg.ARG() + uarg = np.linspace(-50, 100, 100) + self.assertIsInstance(argmodel.afun(uarg), np.ndarray) + self.assertIsInstance(argmodel.bfun(uarg), np.ndarray) + self.assertIsInstance(argmodel.cfun(uarg), np.ndarray) if __name__ == '__main__': ut.main()
8dc7035d10f648489bbdfd3087a65f0355e1a72c
tests/test_mapping.py
tests/test_mapping.py
from unittest import TestCase from prudent.mapping import Mapping class MappingTest(TestCase): def setUp(self): self.mapping = Mapping([(1, 2), (2, 3), (3, 4)]) def test_iter(self): keys = [1, 2, 3] for _ in range(2): assert list(self.mapping) == keys def test_contains(self): assert 1 in self.mapping assert 1 in self.mapping assert 3 in self.mapping def test_getitem(self): assert self.mapping[1] == 2 assert self.mapping[3] == 4 assert self.mapping[2] == 3 def test_len(self): assert len(self.mapping) == 0 self.mapping[3] assert len(self.mapping) == 3
from unittest import TestCase from prudent.mapping import Mapping class MappingTest(TestCase): def setUp(self): self.mapping = Mapping([(1, 2), (2, 3), (3, 4)]) def test_iter_preserves_keys(self): keys = [1, 2, 3] for _ in range(2): assert list(self.mapping) == keys def test_contains(self): assert 1 in self.mapping assert 1 in self.mapping assert 3 in self.mapping def test_getitem(self): assert self.mapping[1] == 2 assert self.mapping[3] == 4 assert self.mapping[2] == 3 def test_len(self): assert len(self.mapping) == 0 self.mapping[3] assert len(self.mapping) == 3
Use a more descriptive test case name
Use a more descriptive test case name
Python
mit
eugene-eeo/prudent
from unittest import TestCase from prudent.mapping import Mapping class MappingTest(TestCase): def setUp(self): self.mapping = Mapping([(1, 2), (2, 3), (3, 4)]) - def test_iter(self): + def test_iter_preserves_keys(self): keys = [1, 2, 3] for _ in range(2): assert list(self.mapping) == keys def test_contains(self): assert 1 in self.mapping assert 1 in self.mapping assert 3 in self.mapping def test_getitem(self): assert self.mapping[1] == 2 assert self.mapping[3] == 4 assert self.mapping[2] == 3 def test_len(self): assert len(self.mapping) == 0 self.mapping[3] assert len(self.mapping) == 3
Use a more descriptive test case name
## Code Before: from unittest import TestCase from prudent.mapping import Mapping class MappingTest(TestCase): def setUp(self): self.mapping = Mapping([(1, 2), (2, 3), (3, 4)]) def test_iter(self): keys = [1, 2, 3] for _ in range(2): assert list(self.mapping) == keys def test_contains(self): assert 1 in self.mapping assert 1 in self.mapping assert 3 in self.mapping def test_getitem(self): assert self.mapping[1] == 2 assert self.mapping[3] == 4 assert self.mapping[2] == 3 def test_len(self): assert len(self.mapping) == 0 self.mapping[3] assert len(self.mapping) == 3 ## Instruction: Use a more descriptive test case name ## Code After: from unittest import TestCase from prudent.mapping import Mapping class MappingTest(TestCase): def setUp(self): self.mapping = Mapping([(1, 2), (2, 3), (3, 4)]) def test_iter_preserves_keys(self): keys = [1, 2, 3] for _ in range(2): assert list(self.mapping) == keys def test_contains(self): assert 1 in self.mapping assert 1 in self.mapping assert 3 in self.mapping def test_getitem(self): assert self.mapping[1] == 2 assert self.mapping[3] == 4 assert self.mapping[2] == 3 def test_len(self): assert len(self.mapping) == 0 self.mapping[3] assert len(self.mapping) == 3
from unittest import TestCase from prudent.mapping import Mapping class MappingTest(TestCase): def setUp(self): self.mapping = Mapping([(1, 2), (2, 3), (3, 4)]) - def test_iter(self): + def test_iter_preserves_keys(self): ? +++++++++++++++ keys = [1, 2, 3] for _ in range(2): assert list(self.mapping) == keys def test_contains(self): assert 1 in self.mapping assert 1 in self.mapping assert 3 in self.mapping def test_getitem(self): assert self.mapping[1] == 2 assert self.mapping[3] == 4 assert self.mapping[2] == 3 def test_len(self): assert len(self.mapping) == 0 self.mapping[3] assert len(self.mapping) == 3
377fa94c2963a9c2522164ff374431dbe836217e
indra/sources/rlimsp/api.py
indra/sources/rlimsp/api.py
__all__ = ['process_pmc'] import logging import requests from .processor import RlimspProcessor logger = logging.getLogger(__name__) RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/pmc' class RLIMSP_Error(Exception): pass def process_pmc(pmcid, with_grounding=True): """Get an output from RLIMS-p for the given pmic id. Parameters ---------- pmcid : str A PMCID, with the prefix PMC, of the paper to be "read". with_grounding : bool The RLIMS-P web service provides two endpoints, one pre-grounded, the other not so much. The grounded endpoint returns far less content, and may perform some grounding that can be handled by the grounding mapper. """ if with_grounding: resp = requests.get(RLIMSP_URL + '.normed/pmcid/%s' % pmcid) else: resp = requests.get(RLIMSP_URL + '/pmcid/%s' % pmcid) if resp.status_code != 200: raise RLIMSP_Error("Bad status code: %d - %s" % (resp.status_code, resp.reason)) rp = RlimspProcessor(resp.json()) return rp
__all__ = ['process_from_webservice'] import logging import requests from .processor import RlimspProcessor logger = logging.getLogger(__name__) RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/' class RLIMSP_Error(Exception): pass def process_from_webservice(id_val, id_type='pmcid', source='pmc', with_grounding=True): """Get an output from RLIMS-p for the given pmic id. Parameters ---------- id_val : str A PMCID, with the prefix PMC, or pmid, with no prefix, of the paper to be "read". id_type : str Either 'pmid' or 'pmcid'. The default is 'pmcid'. source : str Either 'pmc' or 'medline', whether you want pmc fulltext or medline abstracts. with_grounding : bool The RLIMS-P web service provides two endpoints, one pre-grounded, the other not so much. The grounded endpoint returns far less content, and may perform some grounding that can be handled by the grounding mapper. """ if with_grounding: fmt = '%s.normed/%s/%s' else: fmt = '%s/%s/%s' resp = requests.get(RLIMSP_URL + fmt % (source, id_type, id_val)) if resp.status_code != 200: raise RLIMSP_Error("Bad status code: %d - %s" % (resp.status_code, resp.reason)) rp = RlimspProcessor(resp.json()) return rp
Add capability to read pmids and get medline.
Add capability to read pmids and get medline.
Python
bsd-2-clause
sorgerlab/belpy,bgyori/indra,johnbachman/belpy,johnbachman/indra,johnbachman/belpy,pvtodorov/indra,sorgerlab/belpy,sorgerlab/belpy,pvtodorov/indra,pvtodorov/indra,pvtodorov/indra,johnbachman/indra,johnbachman/belpy,sorgerlab/indra,sorgerlab/indra,sorgerlab/indra,bgyori/indra,bgyori/indra,johnbachman/indra
- __all__ = ['process_pmc'] + __all__ = ['process_from_webservice'] import logging import requests from .processor import RlimspProcessor logger = logging.getLogger(__name__) - RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/pmc' + RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/' class RLIMSP_Error(Exception): pass - def process_pmc(pmcid, with_grounding=True): + def process_from_webservice(id_val, id_type='pmcid', source='pmc', + with_grounding=True): """Get an output from RLIMS-p for the given pmic id. Parameters ---------- - pmcid : str + id_val : str - A PMCID, with the prefix PMC, of the paper to be "read". + A PMCID, with the prefix PMC, or pmid, with no prefix, of the paper to + be "read". + id_type : str + Either 'pmid' or 'pmcid'. The default is 'pmcid'. + source : str + Either 'pmc' or 'medline', whether you want pmc fulltext or medline + abstracts. with_grounding : bool The RLIMS-P web service provides two endpoints, one pre-grounded, the other not so much. The grounded endpoint returns far less content, and may perform some grounding that can be handled by the grounding mapper. """ if with_grounding: - resp = requests.get(RLIMSP_URL + '.normed/pmcid/%s' % pmcid) + fmt = '%s.normed/%s/%s' else: - resp = requests.get(RLIMSP_URL + '/pmcid/%s' % pmcid) + fmt = '%s/%s/%s' + + resp = requests.get(RLIMSP_URL + fmt % (source, id_type, id_val)) if resp.status_code != 200: raise RLIMSP_Error("Bad status code: %d - %s" % (resp.status_code, resp.reason)) rp = RlimspProcessor(resp.json()) return rp
Add capability to read pmids and get medline.
## Code Before: __all__ = ['process_pmc'] import logging import requests from .processor import RlimspProcessor logger = logging.getLogger(__name__) RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/pmc' class RLIMSP_Error(Exception): pass def process_pmc(pmcid, with_grounding=True): """Get an output from RLIMS-p for the given pmic id. Parameters ---------- pmcid : str A PMCID, with the prefix PMC, of the paper to be "read". with_grounding : bool The RLIMS-P web service provides two endpoints, one pre-grounded, the other not so much. The grounded endpoint returns far less content, and may perform some grounding that can be handled by the grounding mapper. """ if with_grounding: resp = requests.get(RLIMSP_URL + '.normed/pmcid/%s' % pmcid) else: resp = requests.get(RLIMSP_URL + '/pmcid/%s' % pmcid) if resp.status_code != 200: raise RLIMSP_Error("Bad status code: %d - %s" % (resp.status_code, resp.reason)) rp = RlimspProcessor(resp.json()) return rp ## Instruction: Add capability to read pmids and get medline. ## Code After: __all__ = ['process_from_webservice'] import logging import requests from .processor import RlimspProcessor logger = logging.getLogger(__name__) RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/' class RLIMSP_Error(Exception): pass def process_from_webservice(id_val, id_type='pmcid', source='pmc', with_grounding=True): """Get an output from RLIMS-p for the given pmic id. Parameters ---------- id_val : str A PMCID, with the prefix PMC, or pmid, with no prefix, of the paper to be "read". id_type : str Either 'pmid' or 'pmcid'. The default is 'pmcid'. source : str Either 'pmc' or 'medline', whether you want pmc fulltext or medline abstracts. with_grounding : bool The RLIMS-P web service provides two endpoints, one pre-grounded, the other not so much. The grounded endpoint returns far less content, and may perform some grounding that can be handled by the grounding mapper. 
""" if with_grounding: fmt = '%s.normed/%s/%s' else: fmt = '%s/%s/%s' resp = requests.get(RLIMSP_URL + fmt % (source, id_type, id_val)) if resp.status_code != 200: raise RLIMSP_Error("Bad status code: %d - %s" % (resp.status_code, resp.reason)) rp = RlimspProcessor(resp.json()) return rp
- __all__ = ['process_pmc'] ? ^ + __all__ = ['process_from_webservice'] ? ^^^ +++++++++ + import logging import requests from .processor import RlimspProcessor logger = logging.getLogger(__name__) - RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/pmc' ? --- + RLIMSP_URL = 'https://research.bioinformatics.udel.edu/itextmine/api/data/rlims/' class RLIMSP_Error(Exception): pass - def process_pmc(pmcid, with_grounding=True): + def process_from_webservice(id_val, id_type='pmcid', source='pmc', + with_grounding=True): """Get an output from RLIMS-p for the given pmic id. Parameters ---------- - pmcid : str ? --- + id_val : str ? ++++ - A PMCID, with the prefix PMC, of the paper to be "read". + A PMCID, with the prefix PMC, or pmid, with no prefix, of the paper to + be "read". + id_type : str + Either 'pmid' or 'pmcid'. The default is 'pmcid'. + source : str + Either 'pmc' or 'medline', whether you want pmc fulltext or medline + abstracts. with_grounding : bool The RLIMS-P web service provides two endpoints, one pre-grounded, the other not so much. The grounded endpoint returns far less content, and may perform some grounding that can be handled by the grounding mapper. """ if with_grounding: - resp = requests.get(RLIMSP_URL + '.normed/pmcid/%s' % pmcid) + fmt = '%s.normed/%s/%s' else: - resp = requests.get(RLIMSP_URL + '/pmcid/%s' % pmcid) + fmt = '%s/%s/%s' + + resp = requests.get(RLIMSP_URL + fmt % (source, id_type, id_val)) if resp.status_code != 200: raise RLIMSP_Error("Bad status code: %d - %s" % (resp.status_code, resp.reason)) rp = RlimspProcessor(resp.json()) return rp
f7da89f1a2a24414778b9b53df77cdac3285a4a7
API/chat/migrations/0001_squashed_0002_auto_20150707_1647.py
API/chat/migrations/0001_squashed_0002_auto_20150707_1647.py
from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): replaces = [(b'chat', '0001_squashed_0008_auto_20150702_1437'), (b'chat', '0002_auto_20150707_1647')] dependencies = [ ] operations = [ migrations.CreateModel( name='Channel', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=20)), ], ), migrations.CreateModel( name='Message', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('text', models.TextField(max_length=2000)), ('datetime', models.DateTimeField()), ('channel', models.ForeignKey(to='chat.Channel')), ('username', models.CharField(max_length=20)), ], ), ]
from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ] operations = [ migrations.CreateModel( name='Channel', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=20)), ], ), migrations.CreateModel( name='Message', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('text', models.TextField(max_length=2000)), ('datetime', models.DateTimeField()), ('channel', models.ForeignKey(to='chat.Channel')), ('username', models.CharField(max_length=20)), ], ), ]
Revert "Revert "[HOTFIX] Remove replaces line on 0001_squashed""
Revert "Revert "[HOTFIX] Remove replaces line on 0001_squashed""
Python
mit
VitSalis/ting,dionyziz/ting,mbalamat/ting,odyvarv/ting-1,VitSalis/ting,sirodoht/ting,odyvarv/ting-1,sirodoht/ting,VitSalis/ting,sirodoht/ting,gtklocker/ting,dionyziz/ting,odyvarv/ting-1,odyvarv/ting-1,mbalamat/ting,mbalamat/ting,gtklocker/ting,gtklocker/ting,gtklocker/ting,sirodoht/ting,dionyziz/ting,dionyziz/ting,mbalamat/ting,VitSalis/ting
from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): - - replaces = [(b'chat', '0001_squashed_0008_auto_20150702_1437'), (b'chat', '0002_auto_20150707_1647')] dependencies = [ ] operations = [ migrations.CreateModel( name='Channel', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=20)), ], ), migrations.CreateModel( name='Message', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('text', models.TextField(max_length=2000)), ('datetime', models.DateTimeField()), ('channel', models.ForeignKey(to='chat.Channel')), ('username', models.CharField(max_length=20)), ], ), ]
Revert "Revert "[HOTFIX] Remove replaces line on 0001_squashed""
## Code Before: from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): replaces = [(b'chat', '0001_squashed_0008_auto_20150702_1437'), (b'chat', '0002_auto_20150707_1647')] dependencies = [ ] operations = [ migrations.CreateModel( name='Channel', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=20)), ], ), migrations.CreateModel( name='Message', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('text', models.TextField(max_length=2000)), ('datetime', models.DateTimeField()), ('channel', models.ForeignKey(to='chat.Channel')), ('username', models.CharField(max_length=20)), ], ), ] ## Instruction: Revert "Revert "[HOTFIX] Remove replaces line on 0001_squashed"" ## Code After: from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ] operations = [ migrations.CreateModel( name='Channel', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=20)), ], ), migrations.CreateModel( name='Message', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('text', models.TextField(max_length=2000)), ('datetime', models.DateTimeField()), ('channel', models.ForeignKey(to='chat.Channel')), ('username', models.CharField(max_length=20)), ], ), ]
from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): - - replaces = [(b'chat', '0001_squashed_0008_auto_20150702_1437'), (b'chat', '0002_auto_20150707_1647')] dependencies = [ ] operations = [ migrations.CreateModel( name='Channel', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('name', models.CharField(max_length=20)), ], ), migrations.CreateModel( name='Message', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('text', models.TextField(max_length=2000)), ('datetime', models.DateTimeField()), ('channel', models.ForeignKey(to='chat.Channel')), ('username', models.CharField(max_length=20)), ], ), ]
865651b0d23274d0dcbd9e3123ea9497a06172cf
docker_scripts/lib/common.py
docker_scripts/lib/common.py
import docker import os import sys import requests DEFAULT_TIMEOUT_SECONDS = 600 def docker_client(): # Default timeout 10 minutes try: timeout = int(os.getenv('DOCKER_TIMEOUT', 600)) except ValueError as e: print("Provided timeout value: %s cannot be parsed as integer, exiting." % os.getenv('DOCKER_TIMEOUT')) sys.exit(1) if not timeout > 0: print( "Provided timeout value needs to be greater than zero, currently: %s, exiting." % timeout) sys.exit(1) # Default base url for the connection base_url = os.getenv('DOCKER_CONNECTION', 'unix://var/run/docker.sock') try: client = docker.Client(base_url=base_url, timeout=timeout) except docker.errors.DockerException as e: print("Error while creating the Docker client: %s" % e) print( "Please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.") sys.exit(1) if client and valid_docker_connection(client): return client else: print( "Could not connect to the Docker daemon, please make sure the Docker daemon is running.") if os.environ.get('DOCKER_CONNECTION'): print( "If Docker daemon is running, please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.") sys.exit(1) def valid_docker_connection(client): try: return client.ping() except requests.exceptions.ConnectionError: return False
import docker import os import sys import requests DEFAULT_TIMEOUT_SECONDS = 600 def docker_client(): # Default timeout 10 minutes try: timeout = int(os.getenv('DOCKER_TIMEOUT', 600)) except ValueError as e: print("Provided timeout value: %s cannot be parsed as integer, exiting." % os.getenv('DOCKER_TIMEOUT')) sys.exit(1) if not timeout > 0: print( "Provided timeout value needs to be greater than zero, currently: %s, exiting." % timeout) sys.exit(1) # Default base url for the connection base_url = os.getenv('DOCKER_CONNECTION', 'unix://var/run/docker.sock') try: client = docker.AutoVersionClient(base_url=base_url, timeout=timeout) except docker.errors.DockerException as e: print("Error while creating the Docker client: %s" % e) print( "Please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.") sys.exit(1) if client and valid_docker_connection(client): return client else: print( "Could not connect to the Docker daemon, please make sure the Docker daemon is running.") if os.environ.get('DOCKER_CONNECTION'): print( "If Docker daemon is running, please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.") sys.exit(1) def valid_docker_connection(client): try: return client.ping() except requests.exceptions.ConnectionError: return False
Use AutoVersionClient to fix client incompatibity issues
Use AutoVersionClient to fix client incompatibity issues Fixes #35
Python
mit
lichia/docker-scripts,jpopelka/docker-scripts,goldmann/docker-scripts,goldmann/docker-squash,TomasTomecek/docker-scripts
import docker import os import sys import requests DEFAULT_TIMEOUT_SECONDS = 600 def docker_client(): # Default timeout 10 minutes try: timeout = int(os.getenv('DOCKER_TIMEOUT', 600)) except ValueError as e: print("Provided timeout value: %s cannot be parsed as integer, exiting." % os.getenv('DOCKER_TIMEOUT')) sys.exit(1) if not timeout > 0: print( "Provided timeout value needs to be greater than zero, currently: %s, exiting." % timeout) sys.exit(1) # Default base url for the connection base_url = os.getenv('DOCKER_CONNECTION', 'unix://var/run/docker.sock') try: - client = docker.Client(base_url=base_url, timeout=timeout) + client = docker.AutoVersionClient(base_url=base_url, timeout=timeout) except docker.errors.DockerException as e: print("Error while creating the Docker client: %s" % e) print( "Please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.") sys.exit(1) if client and valid_docker_connection(client): return client else: print( "Could not connect to the Docker daemon, please make sure the Docker daemon is running.") if os.environ.get('DOCKER_CONNECTION'): print( "If Docker daemon is running, please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.") sys.exit(1) def valid_docker_connection(client): try: return client.ping() except requests.exceptions.ConnectionError: return False
Use AutoVersionClient to fix client incompatibity issues
## Code Before: import docker import os import sys import requests DEFAULT_TIMEOUT_SECONDS = 600 def docker_client(): # Default timeout 10 minutes try: timeout = int(os.getenv('DOCKER_TIMEOUT', 600)) except ValueError as e: print("Provided timeout value: %s cannot be parsed as integer, exiting." % os.getenv('DOCKER_TIMEOUT')) sys.exit(1) if not timeout > 0: print( "Provided timeout value needs to be greater than zero, currently: %s, exiting." % timeout) sys.exit(1) # Default base url for the connection base_url = os.getenv('DOCKER_CONNECTION', 'unix://var/run/docker.sock') try: client = docker.Client(base_url=base_url, timeout=timeout) except docker.errors.DockerException as e: print("Error while creating the Docker client: %s" % e) print( "Please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.") sys.exit(1) if client and valid_docker_connection(client): return client else: print( "Could not connect to the Docker daemon, please make sure the Docker daemon is running.") if os.environ.get('DOCKER_CONNECTION'): print( "If Docker daemon is running, please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.") sys.exit(1) def valid_docker_connection(client): try: return client.ping() except requests.exceptions.ConnectionError: return False ## Instruction: Use AutoVersionClient to fix client incompatibity issues ## Code After: import docker import os import sys import requests DEFAULT_TIMEOUT_SECONDS = 600 def docker_client(): # Default timeout 10 minutes try: timeout = int(os.getenv('DOCKER_TIMEOUT', 600)) except ValueError as e: print("Provided timeout value: %s cannot be parsed as integer, exiting." % os.getenv('DOCKER_TIMEOUT')) sys.exit(1) if not timeout > 0: print( "Provided timeout value needs to be greater than zero, currently: %s, exiting." 
% timeout) sys.exit(1) # Default base url for the connection base_url = os.getenv('DOCKER_CONNECTION', 'unix://var/run/docker.sock') try: client = docker.AutoVersionClient(base_url=base_url, timeout=timeout) except docker.errors.DockerException as e: print("Error while creating the Docker client: %s" % e) print( "Please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.") sys.exit(1) if client and valid_docker_connection(client): return client else: print( "Could not connect to the Docker daemon, please make sure the Docker daemon is running.") if os.environ.get('DOCKER_CONNECTION'): print( "If Docker daemon is running, please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.") sys.exit(1) def valid_docker_connection(client): try: return client.ping() except requests.exceptions.ConnectionError: return False
import docker import os import sys import requests DEFAULT_TIMEOUT_SECONDS = 600 def docker_client(): # Default timeout 10 minutes try: timeout = int(os.getenv('DOCKER_TIMEOUT', 600)) except ValueError as e: print("Provided timeout value: %s cannot be parsed as integer, exiting." % os.getenv('DOCKER_TIMEOUT')) sys.exit(1) if not timeout > 0: print( "Provided timeout value needs to be greater than zero, currently: %s, exiting." % timeout) sys.exit(1) # Default base url for the connection base_url = os.getenv('DOCKER_CONNECTION', 'unix://var/run/docker.sock') try: - client = docker.Client(base_url=base_url, timeout=timeout) + client = docker.AutoVersionClient(base_url=base_url, timeout=timeout) ? +++++++++++ except docker.errors.DockerException as e: print("Error while creating the Docker client: %s" % e) print( "Please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.") sys.exit(1) if client and valid_docker_connection(client): return client else: print( "Could not connect to the Docker daemon, please make sure the Docker daemon is running.") if os.environ.get('DOCKER_CONNECTION'): print( "If Docker daemon is running, please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.") sys.exit(1) def valid_docker_connection(client): try: return client.ping() except requests.exceptions.ConnectionError: return False
b1a28600e6b97ab020c69ff410aebd962b4e1e93
testproject/tablib_test/tests.py
testproject/tablib_test/tests.py
from django.test import TestCase from django_tablib import ModelDataset, Field from .models import TestModel class DjangoTablibTestCase(TestCase): def setUp(self): TestModel.objects.create(field1='value') def test_declarative_fields(self): class TestModelDataset(ModelDataset): field1 = Field(header='Field 1') field2 = Field(attribute='field1') class Meta: model = TestModel data = TestModelDataset() self.assertEqual(len(data.headers), 3) self.assertTrue('id' in data.headers) self.assertFalse('field1' in data.headers) self.assertTrue('field2' in data.headers) self.assertTrue('Field 1' in data.headers) self.assertEqual(data[0][0], data[0][1])
from django.test import TestCase from django_tablib import ModelDataset, Field from .models import TestModel class DjangoTablibTestCase(TestCase): def setUp(self): TestModel.objects.create(field1='value') def test_declarative_fields(self): class TestModelDataset(ModelDataset): field1 = Field(header='Field 1') field2 = Field(attribute='field1') class Meta: model = TestModel data = TestModelDataset() self.assertEqual(len(data.headers), 3) self.assertTrue('id' in data.headers) self.assertFalse('field1' in data.headers) self.assertTrue('field2' in data.headers) self.assertTrue('Field 1' in data.headers) self.assertEqual(data[0][0], data[0][1]) def test_meta_fields(self): class TestModelDataset(ModelDataset): class Meta: model = TestModel fields = ['field1'] data = TestModelDataset() self.assertEqual(len(data.headers), 1) self.assertFalse('id' in data.headers) self.assertTrue('field1' in data.headers) def test_meta_exclude(self): class TestModelDataset(ModelDataset): class Meta: model = TestModel exclude = ['id'] data = TestModelDataset() self.assertEqual(len(data.headers), 1) self.assertFalse('id' in data.headers) self.assertTrue('field1' in data.headers) def test_meta_both(self): class TestModelDataset(ModelDataset): class Meta: model = TestModel fields = ['id', 'field1'] exclude = ['id'] data = TestModelDataset() self.assertEqual(len(data.headers), 1) self.assertFalse('id' in data.headers) self.assertTrue('field1' in data.headers)
Test that specifying fields and exclude in ModelDataset.Meta works.
Test that specifying fields and exclude in ModelDataset.Meta works.
Python
mit
joshourisman/django-tablib,ebrelsford/django-tablib,joshourisman/django-tablib,ebrelsford/django-tablib
from django.test import TestCase from django_tablib import ModelDataset, Field from .models import TestModel class DjangoTablibTestCase(TestCase): def setUp(self): TestModel.objects.create(field1='value') def test_declarative_fields(self): class TestModelDataset(ModelDataset): field1 = Field(header='Field 1') field2 = Field(attribute='field1') class Meta: model = TestModel data = TestModelDataset() self.assertEqual(len(data.headers), 3) self.assertTrue('id' in data.headers) self.assertFalse('field1' in data.headers) self.assertTrue('field2' in data.headers) self.assertTrue('Field 1' in data.headers) self.assertEqual(data[0][0], data[0][1]) + def test_meta_fields(self): + class TestModelDataset(ModelDataset): + class Meta: + model = TestModel + fields = ['field1'] + + data = TestModelDataset() + + self.assertEqual(len(data.headers), 1) + self.assertFalse('id' in data.headers) + self.assertTrue('field1' in data.headers) + + def test_meta_exclude(self): + class TestModelDataset(ModelDataset): + class Meta: + model = TestModel + exclude = ['id'] + + data = TestModelDataset() + + self.assertEqual(len(data.headers), 1) + self.assertFalse('id' in data.headers) + self.assertTrue('field1' in data.headers) + + def test_meta_both(self): + class TestModelDataset(ModelDataset): + class Meta: + model = TestModel + fields = ['id', 'field1'] + exclude = ['id'] + + data = TestModelDataset() + + self.assertEqual(len(data.headers), 1) + self.assertFalse('id' in data.headers) + self.assertTrue('field1' in data.headers) +
Test that specifying fields and exclude in ModelDataset.Meta works.
## Code Before: from django.test import TestCase from django_tablib import ModelDataset, Field from .models import TestModel class DjangoTablibTestCase(TestCase): def setUp(self): TestModel.objects.create(field1='value') def test_declarative_fields(self): class TestModelDataset(ModelDataset): field1 = Field(header='Field 1') field2 = Field(attribute='field1') class Meta: model = TestModel data = TestModelDataset() self.assertEqual(len(data.headers), 3) self.assertTrue('id' in data.headers) self.assertFalse('field1' in data.headers) self.assertTrue('field2' in data.headers) self.assertTrue('Field 1' in data.headers) self.assertEqual(data[0][0], data[0][1]) ## Instruction: Test that specifying fields and exclude in ModelDataset.Meta works. ## Code After: from django.test import TestCase from django_tablib import ModelDataset, Field from .models import TestModel class DjangoTablibTestCase(TestCase): def setUp(self): TestModel.objects.create(field1='value') def test_declarative_fields(self): class TestModelDataset(ModelDataset): field1 = Field(header='Field 1') field2 = Field(attribute='field1') class Meta: model = TestModel data = TestModelDataset() self.assertEqual(len(data.headers), 3) self.assertTrue('id' in data.headers) self.assertFalse('field1' in data.headers) self.assertTrue('field2' in data.headers) self.assertTrue('Field 1' in data.headers) self.assertEqual(data[0][0], data[0][1]) def test_meta_fields(self): class TestModelDataset(ModelDataset): class Meta: model = TestModel fields = ['field1'] data = TestModelDataset() self.assertEqual(len(data.headers), 1) self.assertFalse('id' in data.headers) self.assertTrue('field1' in data.headers) def test_meta_exclude(self): class TestModelDataset(ModelDataset): class Meta: model = TestModel exclude = ['id'] data = TestModelDataset() self.assertEqual(len(data.headers), 1) self.assertFalse('id' in data.headers) self.assertTrue('field1' in data.headers) def test_meta_both(self): class TestModelDataset(ModelDataset): 
class Meta: model = TestModel fields = ['id', 'field1'] exclude = ['id'] data = TestModelDataset() self.assertEqual(len(data.headers), 1) self.assertFalse('id' in data.headers) self.assertTrue('field1' in data.headers)
from django.test import TestCase from django_tablib import ModelDataset, Field from .models import TestModel class DjangoTablibTestCase(TestCase): def setUp(self): TestModel.objects.create(field1='value') def test_declarative_fields(self): class TestModelDataset(ModelDataset): field1 = Field(header='Field 1') field2 = Field(attribute='field1') class Meta: model = TestModel data = TestModelDataset() self.assertEqual(len(data.headers), 3) self.assertTrue('id' in data.headers) self.assertFalse('field1' in data.headers) self.assertTrue('field2' in data.headers) self.assertTrue('Field 1' in data.headers) self.assertEqual(data[0][0], data[0][1]) + + def test_meta_fields(self): + class TestModelDataset(ModelDataset): + class Meta: + model = TestModel + fields = ['field1'] + + data = TestModelDataset() + + self.assertEqual(len(data.headers), 1) + self.assertFalse('id' in data.headers) + self.assertTrue('field1' in data.headers) + + def test_meta_exclude(self): + class TestModelDataset(ModelDataset): + class Meta: + model = TestModel + exclude = ['id'] + + data = TestModelDataset() + + self.assertEqual(len(data.headers), 1) + self.assertFalse('id' in data.headers) + self.assertTrue('field1' in data.headers) + + def test_meta_both(self): + class TestModelDataset(ModelDataset): + class Meta: + model = TestModel + fields = ['id', 'field1'] + exclude = ['id'] + + data = TestModelDataset() + + self.assertEqual(len(data.headers), 1) + self.assertFalse('id' in data.headers) + self.assertTrue('field1' in data.headers)
40c97fa33c8739bd27b03891782b542217534904
ognskylines/commands/database.py
ognskylines/commands/database.py
from ognskylines.dbutils import engine from ognskylines.model import Base from manager import Manager manager = Manager() @manager.command def init(): """Initialize the database.""" Base.metadata.create_all(engine) print('Done.') @manager.command def drop(sure=0): """Drop all tables.""" if sure: Base.metadata.drop_all(engine) print('Dropped all tables.') else:
from ognskylines.dbutils import engine from ognskylines.model import Base from manager import Manager manager = Manager() @manager.command def init(): """Initialize the database.""" Base.metadata.create_all(engine) print('Done.') @manager.command def drop(sure='n'): """Drop all tables.""" if sure == 'y': Base.metadata.drop_all(engine) print('Dropped all tables.') else: print("Add argument '--sure y' to drop all tables.")
Change confirmation flag to '--sure y'
CLI: Change confirmation flag to '--sure y'
Python
agpl-3.0
kerel-fs/ogn-skylines-gateway,kerel-fs/ogn-skylines-gateway
from ognskylines.dbutils import engine from ognskylines.model import Base from manager import Manager manager = Manager() @manager.command def init(): """Initialize the database.""" Base.metadata.create_all(engine) print('Done.') @manager.command - def drop(sure=0): + def drop(sure='n'): """Drop all tables.""" - if sure: + if sure == 'y': Base.metadata.drop_all(engine) print('Dropped all tables.') else: + print("Add argument '--sure y' to drop all tables.")
Change confirmation flag to '--sure y'
## Code Before: from ognskylines.dbutils import engine from ognskylines.model import Base from manager import Manager manager = Manager() @manager.command def init(): """Initialize the database.""" Base.metadata.create_all(engine) print('Done.') @manager.command def drop(sure=0): """Drop all tables.""" if sure: Base.metadata.drop_all(engine) print('Dropped all tables.') else: ## Instruction: Change confirmation flag to '--sure y' ## Code After: from ognskylines.dbutils import engine from ognskylines.model import Base from manager import Manager manager = Manager() @manager.command def init(): """Initialize the database.""" Base.metadata.create_all(engine) print('Done.') @manager.command def drop(sure='n'): """Drop all tables.""" if sure == 'y': Base.metadata.drop_all(engine) print('Dropped all tables.') else: print("Add argument '--sure y' to drop all tables.")
from ognskylines.dbutils import engine from ognskylines.model import Base from manager import Manager manager = Manager() @manager.command def init(): """Initialize the database.""" Base.metadata.create_all(engine) print('Done.') @manager.command - def drop(sure=0): ? ^ + def drop(sure='n'): ? ^^^ """Drop all tables.""" - if sure: + if sure == 'y': ? +++++++ Base.metadata.drop_all(engine) print('Dropped all tables.') else: + print("Add argument '--sure y' to drop all tables.")
cd5c56583c84b2b0fd05d743578193b7b681151c
nn/embedding/embeddings.py
nn/embedding/embeddings.py
import tensorflow as tf from ..flags import FLAGS from ..variable import variable def embeddings(*, id_space_size, embedding_size, name=None): return variable([id_space_size, embedding_size], name=name) def word_embeddings(name="word_embeddings"): if FLAGS.word_embeddings is None: return embeddings(id_space_size=FLAGS.word_space_size, embedding_size=FLAGS.word_embedding_size, name=name) return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type), name=name)
import tensorflow as tf from ..flags import FLAGS from ..variable import variable from ..util import func_scope @func_scope() def embeddings(*, id_space_size, embedding_size, name=None): return variable([id_space_size, embedding_size], name=name) @func_scope() def word_embeddings(name="word_embeddings"): if FLAGS.word_embeddings is None: return embeddings(id_space_size=FLAGS.word_space_size, embedding_size=FLAGS.word_embedding_size, name=name) return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type), name=name)
Add func_scope to embedding functions
Add func_scope to embedding functions
Python
unlicense
raviqqe/tensorflow-extenteten,raviqqe/tensorflow-extenteten
import tensorflow as tf from ..flags import FLAGS from ..variable import variable + from ..util import func_scope + @func_scope() def embeddings(*, id_space_size, embedding_size, name=None): return variable([id_space_size, embedding_size], name=name) + @func_scope() def word_embeddings(name="word_embeddings"): if FLAGS.word_embeddings is None: return embeddings(id_space_size=FLAGS.word_space_size, embedding_size=FLAGS.word_embedding_size, name=name) return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type), name=name)
Add func_scope to embedding functions
## Code Before: import tensorflow as tf from ..flags import FLAGS from ..variable import variable def embeddings(*, id_space_size, embedding_size, name=None): return variable([id_space_size, embedding_size], name=name) def word_embeddings(name="word_embeddings"): if FLAGS.word_embeddings is None: return embeddings(id_space_size=FLAGS.word_space_size, embedding_size=FLAGS.word_embedding_size, name=name) return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type), name=name) ## Instruction: Add func_scope to embedding functions ## Code After: import tensorflow as tf from ..flags import FLAGS from ..variable import variable from ..util import func_scope @func_scope() def embeddings(*, id_space_size, embedding_size, name=None): return variable([id_space_size, embedding_size], name=name) @func_scope() def word_embeddings(name="word_embeddings"): if FLAGS.word_embeddings is None: return embeddings(id_space_size=FLAGS.word_space_size, embedding_size=FLAGS.word_embedding_size, name=name) return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type), name=name)
import tensorflow as tf from ..flags import FLAGS from ..variable import variable + from ..util import func_scope + @func_scope() def embeddings(*, id_space_size, embedding_size, name=None): return variable([id_space_size, embedding_size], name=name) + @func_scope() def word_embeddings(name="word_embeddings"): if FLAGS.word_embeddings is None: return embeddings(id_space_size=FLAGS.word_space_size, embedding_size=FLAGS.word_embedding_size, name=name) return tf.Variable(tf.cast(FLAGS.word_embeddings, FLAGS.float_type), name=name)
d47ba3167b60710efe07e40113150b53c88e7d85
tests/test_highlighter.py
tests/test_highlighter.py
import pytest from rich.highlighter import NullHighlighter def test_wrong_type(): highlighter = NullHighlighter() with pytest.raises(TypeError): highlighter([])
"""Tests for the higlighter classes.""" import pytest from rich.highlighter import NullHighlighter, ReprHighlighter from rich.text import Span, Text def test_wrong_type(): highlighter = NullHighlighter() with pytest.raises(TypeError): highlighter([]) @pytest.mark.parametrize( "style_name, test_str", [ ("repr.eui48", "01-23-45-67-89-AB"), # 6x2 hyphen ("repr.eui64", "01-23-45-FF-FE-67-89-AB"), # 8x2 hyphen ("repr.eui48", "01:23:45:67:89:AB"), # 6x2 colon ("repr.eui64", "01:23:45:FF:FE:67:89:AB"), # 8x2 colon ("repr.eui48", "0123.4567.89AB"), # 3x4 dot ("repr.eui64", "0123.45FF.FE67.89AB"), # 4x4 dot ("repr.eui48", "ed-ed-ed-ed-ed-ed"), # lowercase ("repr.eui48", "ED-ED-ED-ED-ED-ED"), # uppercase ("repr.eui48", "Ed-Ed-Ed-Ed-Ed-Ed"), # mixed case ("repr.eui48", "0-00-1-01-2-02"), # dropped zero ], ) def test_highlight_regex(style_name: str, test_str: str): """Tests for the regular expressions used in ReprHighlighter.""" text = Text(test_str) highlighter = ReprHighlighter() highlighter.highlight(text) assert text._spans[-1] == Span(0, len(test_str), style_name)
Add tests for EUI-48 and EUI-64 in ReprHighlighter
Add tests for EUI-48 and EUI-64 in ReprHighlighter
Python
mit
willmcgugan/rich
+ """Tests for the higlighter classes.""" import pytest + - from rich.highlighter import NullHighlighter + from rich.highlighter import NullHighlighter, ReprHighlighter + from rich.text import Span, Text def test_wrong_type(): highlighter = NullHighlighter() with pytest.raises(TypeError): highlighter([]) + + @pytest.mark.parametrize( + "style_name, test_str", + [ + ("repr.eui48", "01-23-45-67-89-AB"), # 6x2 hyphen + ("repr.eui64", "01-23-45-FF-FE-67-89-AB"), # 8x2 hyphen + ("repr.eui48", "01:23:45:67:89:AB"), # 6x2 colon + ("repr.eui64", "01:23:45:FF:FE:67:89:AB"), # 8x2 colon + ("repr.eui48", "0123.4567.89AB"), # 3x4 dot + ("repr.eui64", "0123.45FF.FE67.89AB"), # 4x4 dot + ("repr.eui48", "ed-ed-ed-ed-ed-ed"), # lowercase + ("repr.eui48", "ED-ED-ED-ED-ED-ED"), # uppercase + ("repr.eui48", "Ed-Ed-Ed-Ed-Ed-Ed"), # mixed case + ("repr.eui48", "0-00-1-01-2-02"), # dropped zero + ], + ) + def test_highlight_regex(style_name: str, test_str: str): + """Tests for the regular expressions used in ReprHighlighter.""" + text = Text(test_str) + highlighter = ReprHighlighter() + highlighter.highlight(text) + assert text._spans[-1] == Span(0, len(test_str), style_name) +
Add tests for EUI-48 and EUI-64 in ReprHighlighter
## Code Before: import pytest from rich.highlighter import NullHighlighter def test_wrong_type(): highlighter = NullHighlighter() with pytest.raises(TypeError): highlighter([]) ## Instruction: Add tests for EUI-48 and EUI-64 in ReprHighlighter ## Code After: """Tests for the higlighter classes.""" import pytest from rich.highlighter import NullHighlighter, ReprHighlighter from rich.text import Span, Text def test_wrong_type(): highlighter = NullHighlighter() with pytest.raises(TypeError): highlighter([]) @pytest.mark.parametrize( "style_name, test_str", [ ("repr.eui48", "01-23-45-67-89-AB"), # 6x2 hyphen ("repr.eui64", "01-23-45-FF-FE-67-89-AB"), # 8x2 hyphen ("repr.eui48", "01:23:45:67:89:AB"), # 6x2 colon ("repr.eui64", "01:23:45:FF:FE:67:89:AB"), # 8x2 colon ("repr.eui48", "0123.4567.89AB"), # 3x4 dot ("repr.eui64", "0123.45FF.FE67.89AB"), # 4x4 dot ("repr.eui48", "ed-ed-ed-ed-ed-ed"), # lowercase ("repr.eui48", "ED-ED-ED-ED-ED-ED"), # uppercase ("repr.eui48", "Ed-Ed-Ed-Ed-Ed-Ed"), # mixed case ("repr.eui48", "0-00-1-01-2-02"), # dropped zero ], ) def test_highlight_regex(style_name: str, test_str: str): """Tests for the regular expressions used in ReprHighlighter.""" text = Text(test_str) highlighter = ReprHighlighter() highlighter.highlight(text) assert text._spans[-1] == Span(0, len(test_str), style_name)
+ """Tests for the higlighter classes.""" import pytest + - from rich.highlighter import NullHighlighter + from rich.highlighter import NullHighlighter, ReprHighlighter ? +++++++++++++++++ + from rich.text import Span, Text def test_wrong_type(): highlighter = NullHighlighter() with pytest.raises(TypeError): highlighter([]) + + + @pytest.mark.parametrize( + "style_name, test_str", + [ + ("repr.eui48", "01-23-45-67-89-AB"), # 6x2 hyphen + ("repr.eui64", "01-23-45-FF-FE-67-89-AB"), # 8x2 hyphen + ("repr.eui48", "01:23:45:67:89:AB"), # 6x2 colon + ("repr.eui64", "01:23:45:FF:FE:67:89:AB"), # 8x2 colon + ("repr.eui48", "0123.4567.89AB"), # 3x4 dot + ("repr.eui64", "0123.45FF.FE67.89AB"), # 4x4 dot + ("repr.eui48", "ed-ed-ed-ed-ed-ed"), # lowercase + ("repr.eui48", "ED-ED-ED-ED-ED-ED"), # uppercase + ("repr.eui48", "Ed-Ed-Ed-Ed-Ed-Ed"), # mixed case + ("repr.eui48", "0-00-1-01-2-02"), # dropped zero + ], + ) + def test_highlight_regex(style_name: str, test_str: str): + """Tests for the regular expressions used in ReprHighlighter.""" + text = Text(test_str) + highlighter = ReprHighlighter() + highlighter.highlight(text) + assert text._spans[-1] == Span(0, len(test_str), style_name)
d95f2059a753855d373332df0b748d52bba0210d
main.py
main.py
import menus import auth import click def main(): """Main function""" credentials = auth.authenticate_user() if credentials: menus.main_menu(credentials) else: click.echo("Bye!") if __name__ == '__main__': main()
import menus import auth import click def main(): """Main function""" credentials = auth.authenticate_user() if credentials: menus.main_menu(credentials) click.echo("Bye!") if __name__ == '__main__': main()
Fix to ensure program echos "bye" whenever program is quit
Fix to ensure program echos "bye" whenever program is quit
Python
mit
amrishparmar/mal_cl_interface
import menus import auth import click def main(): """Main function""" credentials = auth.authenticate_user() if credentials: menus.main_menu(credentials) - else: + - click.echo("Bye!") + click.echo("Bye!") if __name__ == '__main__': main()
Fix to ensure program echos "bye" whenever program is quit
## Code Before: import menus import auth import click def main(): """Main function""" credentials = auth.authenticate_user() if credentials: menus.main_menu(credentials) else: click.echo("Bye!") if __name__ == '__main__': main() ## Instruction: Fix to ensure program echos "bye" whenever program is quit ## Code After: import menus import auth import click def main(): """Main function""" credentials = auth.authenticate_user() if credentials: menus.main_menu(credentials) click.echo("Bye!") if __name__ == '__main__': main()
import menus import auth import click def main(): """Main function""" credentials = auth.authenticate_user() if credentials: menus.main_menu(credentials) - else: + - click.echo("Bye!") ? ---- + click.echo("Bye!") if __name__ == '__main__': main()
4c2b4d10beac508747364680d9e9a5d7c3488f97
confab/api.py
confab/api.py
from confab.diff import diff from confab.generate import generate from confab.pull import pull from confab.push import push
# core from confab.conffiles import ConfFiles # jinja2 environment loading from confab.loaders import load_environment_from_dir, load_environment_from_package # data loading from confab.data import load_data_from_dir # fabric tasks from confab.diff import diff from confab.generate import generate from confab.pull import pull from confab.push import push
Add loaders and ConfFiles model to public API.
Add loaders and ConfFiles model to public API.
Python
apache-2.0
locationlabs/confab
+ # core + from confab.conffiles import ConfFiles + + # jinja2 environment loading + from confab.loaders import load_environment_from_dir, load_environment_from_package + + # data loading + from confab.data import load_data_from_dir + + # fabric tasks from confab.diff import diff from confab.generate import generate from confab.pull import pull from confab.push import push
Add loaders and ConfFiles model to public API.
## Code Before: from confab.diff import diff from confab.generate import generate from confab.pull import pull from confab.push import push ## Instruction: Add loaders and ConfFiles model to public API. ## Code After: # core from confab.conffiles import ConfFiles # jinja2 environment loading from confab.loaders import load_environment_from_dir, load_environment_from_package # data loading from confab.data import load_data_from_dir # fabric tasks from confab.diff import diff from confab.generate import generate from confab.pull import pull from confab.push import push
+ # core + from confab.conffiles import ConfFiles + + # jinja2 environment loading + from confab.loaders import load_environment_from_dir, load_environment_from_package + + # data loading + from confab.data import load_data_from_dir + + # fabric tasks from confab.diff import diff from confab.generate import generate from confab.pull import pull from confab.push import push
73e15928a8427eb5a6e4a886660b9493e50cd699
currencies/models.py
currencies/models.py
from django.db import models from django.utils.translation import gettext_lazy as _ class Currency(models.Model): code = models.CharField(_('code'), max_length=3) name = models.CharField(_('name'), max_length=35) symbol = models.CharField(_('symbol'), max_length=1, blank=True) factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4, help_text=_('Specifies the difference of the currency to default one.')) is_active = models.BooleanField(_('active'), default=True, help_text=_('The currency will be available.')) is_default = models.BooleanField(_('default'), default=False, help_text=_('Make this the default currency.')) class Meta: verbose_name = _('currency') verbose_name_plural = _('currencies') def __unicode__(self): return self.code def save(self, **kwargs): # Make sure the default currency is unique if self.is_default: Currency.objects.filter(is_default=True).update(is_default=False) super(Currency, self).save(**kwargs)
from django.db import models from django.utils.translation import gettext_lazy as _ class Currency(models.Model): code = models.CharField(_('code'), max_length=3) name = models.CharField(_('name'), max_length=35) symbol = models.CharField(_('symbol'), max_length=1, blank=True) factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4, help_text=_('Specifies the difference of the currency to default one.')) is_active = models.BooleanField(_('active'), default=True, help_text=_('The currency will be available.')) is_base = models.BooleanField(_('base'), default=False, help_text=_('Make this the base currency against which rates are calculated.')) is_default = models.BooleanField(_('default'), default=False, help_text=_('Make this the default user currency.')) class Meta: verbose_name = _('currency') verbose_name_plural = _('currencies') def __unicode__(self): return self.code def save(self, **kwargs): # Make sure the base and default currencies are unique if self.is_base: Currency.objects.filter(is_base=True).update(is_base=False) if self.is_default: Currency.objects.filter(is_default=True).update(is_default=False) super(Currency, self).save(**kwargs)
Add a Currency.is_base field (currently unused)
Add a Currency.is_base field (currently unused)
Python
bsd-3-clause
pathakamit88/django-currencies,panosl/django-currencies,ydaniv/django-currencies,mysociety/django-currencies,panosl/django-currencies,barseghyanartur/django-currencies,bashu/django-simple-currencies,pathakamit88/django-currencies,ydaniv/django-currencies,marcosalcazar/django-currencies,jmp0xf/django-currencies,racitup/django-currencies,mysociety/django-currencies,marcosalcazar/django-currencies,bashu/django-simple-currencies,racitup/django-currencies
from django.db import models from django.utils.translation import gettext_lazy as _ class Currency(models.Model): code = models.CharField(_('code'), max_length=3) name = models.CharField(_('name'), max_length=35) symbol = models.CharField(_('symbol'), max_length=1, blank=True) factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4, help_text=_('Specifies the difference of the currency to default one.')) is_active = models.BooleanField(_('active'), default=True, help_text=_('The currency will be available.')) + is_base = models.BooleanField(_('base'), default=False, + help_text=_('Make this the base currency against which rates are calculated.')) is_default = models.BooleanField(_('default'), default=False, - help_text=_('Make this the default currency.')) + help_text=_('Make this the default user currency.')) class Meta: verbose_name = _('currency') verbose_name_plural = _('currencies') def __unicode__(self): return self.code def save(self, **kwargs): - # Make sure the default currency is unique + # Make sure the base and default currencies are unique + if self.is_base: + Currency.objects.filter(is_base=True).update(is_base=False) if self.is_default: Currency.objects.filter(is_default=True).update(is_default=False) super(Currency, self).save(**kwargs)
Add a Currency.is_base field (currently unused)
## Code Before: from django.db import models from django.utils.translation import gettext_lazy as _ class Currency(models.Model): code = models.CharField(_('code'), max_length=3) name = models.CharField(_('name'), max_length=35) symbol = models.CharField(_('symbol'), max_length=1, blank=True) factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4, help_text=_('Specifies the difference of the currency to default one.')) is_active = models.BooleanField(_('active'), default=True, help_text=_('The currency will be available.')) is_default = models.BooleanField(_('default'), default=False, help_text=_('Make this the default currency.')) class Meta: verbose_name = _('currency') verbose_name_plural = _('currencies') def __unicode__(self): return self.code def save(self, **kwargs): # Make sure the default currency is unique if self.is_default: Currency.objects.filter(is_default=True).update(is_default=False) super(Currency, self).save(**kwargs) ## Instruction: Add a Currency.is_base field (currently unused) ## Code After: from django.db import models from django.utils.translation import gettext_lazy as _ class Currency(models.Model): code = models.CharField(_('code'), max_length=3) name = models.CharField(_('name'), max_length=35) symbol = models.CharField(_('symbol'), max_length=1, blank=True) factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4, help_text=_('Specifies the difference of the currency to default one.')) is_active = models.BooleanField(_('active'), default=True, help_text=_('The currency will be available.')) is_base = models.BooleanField(_('base'), default=False, help_text=_('Make this the base currency against which rates are calculated.')) is_default = models.BooleanField(_('default'), default=False, help_text=_('Make this the default user currency.')) class Meta: verbose_name = _('currency') verbose_name_plural = _('currencies') def __unicode__(self): return self.code def save(self, **kwargs): # Make sure the base 
and default currencies are unique if self.is_base: Currency.objects.filter(is_base=True).update(is_base=False) if self.is_default: Currency.objects.filter(is_default=True).update(is_default=False) super(Currency, self).save(**kwargs)
from django.db import models from django.utils.translation import gettext_lazy as _ class Currency(models.Model): code = models.CharField(_('code'), max_length=3) name = models.CharField(_('name'), max_length=35) symbol = models.CharField(_('symbol'), max_length=1, blank=True) factor = models.DecimalField(_('factor'), max_digits=10, decimal_places=4, help_text=_('Specifies the difference of the currency to default one.')) is_active = models.BooleanField(_('active'), default=True, help_text=_('The currency will be available.')) + is_base = models.BooleanField(_('base'), default=False, + help_text=_('Make this the base currency against which rates are calculated.')) is_default = models.BooleanField(_('default'), default=False, - help_text=_('Make this the default currency.')) + help_text=_('Make this the default user currency.')) ? +++++ class Meta: verbose_name = _('currency') verbose_name_plural = _('currencies') def __unicode__(self): return self.code def save(self, **kwargs): - # Make sure the default currency is unique ? ^ ^^ + # Make sure the base and default currencies are unique ? +++++++++ ^^^ ^^^ + if self.is_base: + Currency.objects.filter(is_base=True).update(is_base=False) if self.is_default: Currency.objects.filter(is_default=True).update(is_default=False) super(Currency, self).save(**kwargs)
5a92773a1d9c40e745026ca318ae21bfce2d4fb6
flaskext/cache/backends.py
flaskext/cache/backends.py
from werkzeug.contrib.cache import (NullCache, SimpleCache, MemcachedCache, GAEMemcachedCache, FileSystemCache) def null(app, args, kwargs): return NullCache() def simple(app, args, kwargs): kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD'])) return SimpleCache(*args, **kwargs) def memcached(app, args, kwargs): args.append(app.config['CACHE_MEMCACHED_SERVERS']) kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX'])) return MemcachedCache(*args, **kwargs) def gaememcached(app, args, kwargs): kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX'])) return GAEMemcachedCache(*args, **kwargs) def filesystem(app, args, kwargs): args.append(app.config['CACHE_DIR']) kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD'])) return FileSystemCache(*args, **kwargs) # RedisCache is supported since Werkzeug 0.7. try: from werkzeug.contrib.cache import RedisCache except ImportError: pass else: def redis(app, args, kwargs): kwargs.update(dict( host=app.config.get('CACHE_REDIS_HOST', 'localhost'), port=app.config.get('CACHE_REDIS_PORT', 6379), password=app.config.get('CACHE_REDIS_PASSWORD', None) )) return RedisCache(*args, **kwargs)
from werkzeug.contrib.cache import (NullCache, SimpleCache, MemcachedCache, GAEMemcachedCache, FileSystemCache) def null(app, args, kwargs): return NullCache() def simple(app, args, kwargs): kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD'])) return SimpleCache(*args, **kwargs) def memcached(app, args, kwargs): args.append(app.config['CACHE_MEMCACHED_SERVERS']) kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX'])) return MemcachedCache(*args, **kwargs) def gaememcached(app, args, kwargs): kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX'])) return GAEMemcachedCache(*args, **kwargs) def filesystem(app, args, kwargs): args.append(app.config['CACHE_DIR']) kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD'])) return FileSystemCache(*args, **kwargs) # RedisCache is supported since Werkzeug 0.7. try: from werkzeug.contrib.cache import RedisCache except ImportError: pass else: def redis(app, args, kwargs): kwargs.update(dict( host=app.config.get('CACHE_REDIS_HOST', 'localhost'), port=app.config.get('CACHE_REDIS_PORT', 6379), )) password = app.config.get('CACHE_REDIS_PASSWORD') if password: kwargs['password'] = password return RedisCache(*args, **kwargs)
Make CACHE_REDIS_PASSWORD really optional, because it does not work with older Werkzeug.
Make CACHE_REDIS_PASSWORD really optional, because it does not work with older Werkzeug.
Python
bsd-3-clause
kazeeki/mezmorize,kazeeki/mezmorize,j-fuentes/flask-cache,ordbogen/flask-cache,j-fuentes/flask-cache,thadeusb/flask-cache,alexey-sveshnikov/flask-cache,ordbogen/flask-cache,alexey-sveshnikov/flask-cache,thadeusb/flask-cache,gerasim13/flask-cache,gerasim13/flask-cache
from werkzeug.contrib.cache import (NullCache, SimpleCache, MemcachedCache, GAEMemcachedCache, FileSystemCache) def null(app, args, kwargs): return NullCache() def simple(app, args, kwargs): kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD'])) return SimpleCache(*args, **kwargs) def memcached(app, args, kwargs): args.append(app.config['CACHE_MEMCACHED_SERVERS']) kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX'])) return MemcachedCache(*args, **kwargs) def gaememcached(app, args, kwargs): kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX'])) return GAEMemcachedCache(*args, **kwargs) def filesystem(app, args, kwargs): args.append(app.config['CACHE_DIR']) kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD'])) return FileSystemCache(*args, **kwargs) # RedisCache is supported since Werkzeug 0.7. try: from werkzeug.contrib.cache import RedisCache except ImportError: pass else: def redis(app, args, kwargs): kwargs.update(dict( host=app.config.get('CACHE_REDIS_HOST', 'localhost'), port=app.config.get('CACHE_REDIS_PORT', 6379), - password=app.config.get('CACHE_REDIS_PASSWORD', None) )) + password = app.config.get('CACHE_REDIS_PASSWORD') + if password: + kwargs['password'] = password + return RedisCache(*args, **kwargs)
Make CACHE_REDIS_PASSWORD really optional, because it does not work with older Werkzeug.
## Code Before: from werkzeug.contrib.cache import (NullCache, SimpleCache, MemcachedCache, GAEMemcachedCache, FileSystemCache) def null(app, args, kwargs): return NullCache() def simple(app, args, kwargs): kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD'])) return SimpleCache(*args, **kwargs) def memcached(app, args, kwargs): args.append(app.config['CACHE_MEMCACHED_SERVERS']) kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX'])) return MemcachedCache(*args, **kwargs) def gaememcached(app, args, kwargs): kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX'])) return GAEMemcachedCache(*args, **kwargs) def filesystem(app, args, kwargs): args.append(app.config['CACHE_DIR']) kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD'])) return FileSystemCache(*args, **kwargs) # RedisCache is supported since Werkzeug 0.7. try: from werkzeug.contrib.cache import RedisCache except ImportError: pass else: def redis(app, args, kwargs): kwargs.update(dict( host=app.config.get('CACHE_REDIS_HOST', 'localhost'), port=app.config.get('CACHE_REDIS_PORT', 6379), password=app.config.get('CACHE_REDIS_PASSWORD', None) )) return RedisCache(*args, **kwargs) ## Instruction: Make CACHE_REDIS_PASSWORD really optional, because it does not work with older Werkzeug. 
## Code After: from werkzeug.contrib.cache import (NullCache, SimpleCache, MemcachedCache, GAEMemcachedCache, FileSystemCache) def null(app, args, kwargs): return NullCache() def simple(app, args, kwargs): kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD'])) return SimpleCache(*args, **kwargs) def memcached(app, args, kwargs): args.append(app.config['CACHE_MEMCACHED_SERVERS']) kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX'])) return MemcachedCache(*args, **kwargs) def gaememcached(app, args, kwargs): kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX'])) return GAEMemcachedCache(*args, **kwargs) def filesystem(app, args, kwargs): args.append(app.config['CACHE_DIR']) kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD'])) return FileSystemCache(*args, **kwargs) # RedisCache is supported since Werkzeug 0.7. try: from werkzeug.contrib.cache import RedisCache except ImportError: pass else: def redis(app, args, kwargs): kwargs.update(dict( host=app.config.get('CACHE_REDIS_HOST', 'localhost'), port=app.config.get('CACHE_REDIS_PORT', 6379), )) password = app.config.get('CACHE_REDIS_PASSWORD') if password: kwargs['password'] = password return RedisCache(*args, **kwargs)
from werkzeug.contrib.cache import (NullCache, SimpleCache, MemcachedCache, GAEMemcachedCache, FileSystemCache) def null(app, args, kwargs): return NullCache() def simple(app, args, kwargs): kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD'])) return SimpleCache(*args, **kwargs) def memcached(app, args, kwargs): args.append(app.config['CACHE_MEMCACHED_SERVERS']) kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX'])) return MemcachedCache(*args, **kwargs) def gaememcached(app, args, kwargs): kwargs.update(dict(key_prefix=app.config['CACHE_KEY_PREFIX'])) return GAEMemcachedCache(*args, **kwargs) def filesystem(app, args, kwargs): args.append(app.config['CACHE_DIR']) kwargs.update(dict(threshold=app.config['CACHE_THRESHOLD'])) return FileSystemCache(*args, **kwargs) # RedisCache is supported since Werkzeug 0.7. try: from werkzeug.contrib.cache import RedisCache except ImportError: pass else: def redis(app, args, kwargs): kwargs.update(dict( host=app.config.get('CACHE_REDIS_HOST', 'localhost'), port=app.config.get('CACHE_REDIS_PORT', 6379), - password=app.config.get('CACHE_REDIS_PASSWORD', None) )) + password = app.config.get('CACHE_REDIS_PASSWORD') + if password: + kwargs['password'] = password + return RedisCache(*args, **kwargs)
a92118d7ee6acde57ab9853186c43a5c6748e8a6
tracpro/__init__.py
tracpro/__init__.py
from __future__ import absolute_import # This will make sure the app is always imported when # Django starts so that shared_task will use this app. from .celery import app as celery_app # noqa __version__ = "1.0.0"
from __future__ import absolute_import # This will make sure the app is always imported when # Django starts so that shared_task will use this app. from .celery import app as celery_app # noqa VERSION = (1, 0, 0, "dev") def get_version(version): assert len(version) == 4, "Version must be formatted as (major, minor, micro, state)" major, minor, micro, state = version assert isinstance(major, int), "Major version must be an integer." assert isinstance(minor, int), "Minor version must be an integer." assert isinstance(micro, int), "Micro version must be an integer." assert state in ('final', 'dev'), "State must be either final or dev." if state == 'final': return "{}.{}.{}".format(major, minor, micro) else: return "{}.{}.{}.{}".format(major, minor, micro, state) __version__ = get_version(VERSION)
Use tuple to represent version
Use tuple to represent version
Python
bsd-3-clause
rapidpro/tracpro,xkmato/tracpro,xkmato/tracpro,xkmato/tracpro,xkmato/tracpro,rapidpro/tracpro,rapidpro/tracpro
from __future__ import absolute_import # This will make sure the app is always imported when # Django starts so that shared_task will use this app. from .celery import app as celery_app # noqa - __version__ = "1.0.0" + VERSION = (1, 0, 0, "dev") + + def get_version(version): + assert len(version) == 4, "Version must be formatted as (major, minor, micro, state)" + + major, minor, micro, state = version + + assert isinstance(major, int), "Major version must be an integer." + assert isinstance(minor, int), "Minor version must be an integer." + assert isinstance(micro, int), "Micro version must be an integer." + assert state in ('final', 'dev'), "State must be either final or dev." + + if state == 'final': + return "{}.{}.{}".format(major, minor, micro) + else: + return "{}.{}.{}.{}".format(major, minor, micro, state) + + + __version__ = get_version(VERSION) +
Use tuple to represent version
## Code Before: from __future__ import absolute_import # This will make sure the app is always imported when # Django starts so that shared_task will use this app. from .celery import app as celery_app # noqa __version__ = "1.0.0" ## Instruction: Use tuple to represent version ## Code After: from __future__ import absolute_import # This will make sure the app is always imported when # Django starts so that shared_task will use this app. from .celery import app as celery_app # noqa VERSION = (1, 0, 0, "dev") def get_version(version): assert len(version) == 4, "Version must be formatted as (major, minor, micro, state)" major, minor, micro, state = version assert isinstance(major, int), "Major version must be an integer." assert isinstance(minor, int), "Minor version must be an integer." assert isinstance(micro, int), "Micro version must be an integer." assert state in ('final', 'dev'), "State must be either final or dev." if state == 'final': return "{}.{}.{}".format(major, minor, micro) else: return "{}.{}.{}.{}".format(major, minor, micro, state) __version__ = get_version(VERSION)
from __future__ import absolute_import # This will make sure the app is always imported when # Django starts so that shared_task will use this app. from .celery import app as celery_app # noqa - __version__ = "1.0.0" + VERSION = (1, 0, 0, "dev") + + + def get_version(version): + assert len(version) == 4, "Version must be formatted as (major, minor, micro, state)" + + major, minor, micro, state = version + + assert isinstance(major, int), "Major version must be an integer." + assert isinstance(minor, int), "Minor version must be an integer." + assert isinstance(micro, int), "Micro version must be an integer." + assert state in ('final', 'dev'), "State must be either final or dev." + + if state == 'final': + return "{}.{}.{}".format(major, minor, micro) + else: + return "{}.{}.{}.{}".format(major, minor, micro, state) + + + __version__ = get_version(VERSION)
2033c71a84f03e7e8d40c567e632afd2e013aad3
url/__init__.py
url/__init__.py
'''This is a module for dealing with urls. In particular, sanitizing them.''' import sys from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding)
'''This is a module for dealing with urls. In particular, sanitizing them.''' from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding)
Drop unused import of sys.
Drop unused import of sys.
Python
mit
seomoz/url-py,seomoz/url-py
'''This is a module for dealing with urls. In particular, sanitizing them.''' - - import sys from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding)
Drop unused import of sys.
## Code Before: '''This is a module for dealing with urls. In particular, sanitizing them.''' import sys from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding) ## Instruction: Drop unused import of sys. ## Code After: '''This is a module for dealing with urls. In particular, sanitizing them.''' from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding)
'''This is a module for dealing with urls. In particular, sanitizing them.''' - - import sys from six import text_type if text_type == str: from .url import UnicodeURL as URL else: from .url import StringURL as URL from .url import set_psl def parse(url, encoding='utf-8'): '''Parse the provided url string and return an URL object''' return URL.parse(url, encoding)
d5049edc8567cebf936bb07847906c5400f9a6d9
ceph_deploy/tests/unit/hosts/test_suse.py
ceph_deploy/tests/unit/hosts/test_suse.py
from ceph_deploy.hosts import suse class TestSuseInit(object): def setup(self): self.host = suse def test_choose_init_default(self): self.host.release = None init_type = self.host.choose_init() assert init_type == "sysvinit" def test_choose_init_SLE_11(self): self.host.release = '11' init_type = self.host.choose_init() assert init_type == "sysvinit" def test_choose_init_SLE_12(self): self.host.release = '12' init_type = self.host.choose_init() assert init_type == "systemd" def test_choose_init_openSUSE_13_1(self): self.host.release = '13.1' init_type = self.host.choose_init() assert init_type == "systemd"
from ceph_deploy.hosts import suse from ceph_deploy.hosts.suse.install import map_components class TestSuseInit(object): def setup(self): self.host = suse def test_choose_init_default(self): self.host.release = None init_type = self.host.choose_init() assert init_type == "sysvinit" def test_choose_init_SLE_11(self): self.host.release = '11' init_type = self.host.choose_init() assert init_type == "sysvinit" def test_choose_init_SLE_12(self): self.host.release = '12' init_type = self.host.choose_init() assert init_type == "systemd" def test_choose_init_openSUSE_13_1(self): self.host.release = '13.1' init_type = self.host.choose_init() assert init_type == "systemd" class TestSuseMapComponents(object): def test_valid(self): pkgs = map_components(['ceph-osd', 'ceph-common', 'ceph-radosgw']) assert 'ceph' in pkgs assert 'ceph-common' in pkgs assert 'ceph-radosgw' in pkgs assert 'ceph-osd' not in pkgs def test_invalid(self): pkgs = map_components(['not-provided', 'ceph-mon']) assert 'not-provided' not in pkgs assert 'ceph' in pkgs
Add tests for component to SUSE package mapping
Add tests for component to SUSE package mapping Signed-off-by: David Disseldorp <589a549dc9f982d9f46aeeb82a09ab6d87ccf1d8@suse.de>
Python
mit
zhouyuan/ceph-deploy,shenhequnying/ceph-deploy,ceph/ceph-deploy,ghxandsky/ceph-deploy,zhouyuan/ceph-deploy,imzhulei/ceph-deploy,SUSE/ceph-deploy,Vicente-Cheng/ceph-deploy,ceph/ceph-deploy,branto1/ceph-deploy,trhoden/ceph-deploy,trhoden/ceph-deploy,osynge/ceph-deploy,ghxandsky/ceph-deploy,SUSE/ceph-deploy,branto1/ceph-deploy,codenrhoden/ceph-deploy,isyippee/ceph-deploy,isyippee/ceph-deploy,Vicente-Cheng/ceph-deploy,shenhequnying/ceph-deploy,osynge/ceph-deploy,imzhulei/ceph-deploy,codenrhoden/ceph-deploy
from ceph_deploy.hosts import suse + from ceph_deploy.hosts.suse.install import map_components class TestSuseInit(object): def setup(self): self.host = suse def test_choose_init_default(self): self.host.release = None init_type = self.host.choose_init() assert init_type == "sysvinit" def test_choose_init_SLE_11(self): self.host.release = '11' init_type = self.host.choose_init() assert init_type == "sysvinit" def test_choose_init_SLE_12(self): self.host.release = '12' init_type = self.host.choose_init() assert init_type == "systemd" def test_choose_init_openSUSE_13_1(self): self.host.release = '13.1' init_type = self.host.choose_init() assert init_type == "systemd" + class TestSuseMapComponents(object): + def test_valid(self): + pkgs = map_components(['ceph-osd', 'ceph-common', 'ceph-radosgw']) + assert 'ceph' in pkgs + assert 'ceph-common' in pkgs + assert 'ceph-radosgw' in pkgs + assert 'ceph-osd' not in pkgs + + def test_invalid(self): + pkgs = map_components(['not-provided', 'ceph-mon']) + assert 'not-provided' not in pkgs + assert 'ceph' in pkgs +
Add tests for component to SUSE package mapping
## Code Before: from ceph_deploy.hosts import suse class TestSuseInit(object): def setup(self): self.host = suse def test_choose_init_default(self): self.host.release = None init_type = self.host.choose_init() assert init_type == "sysvinit" def test_choose_init_SLE_11(self): self.host.release = '11' init_type = self.host.choose_init() assert init_type == "sysvinit" def test_choose_init_SLE_12(self): self.host.release = '12' init_type = self.host.choose_init() assert init_type == "systemd" def test_choose_init_openSUSE_13_1(self): self.host.release = '13.1' init_type = self.host.choose_init() assert init_type == "systemd" ## Instruction: Add tests for component to SUSE package mapping ## Code After: from ceph_deploy.hosts import suse from ceph_deploy.hosts.suse.install import map_components class TestSuseInit(object): def setup(self): self.host = suse def test_choose_init_default(self): self.host.release = None init_type = self.host.choose_init() assert init_type == "sysvinit" def test_choose_init_SLE_11(self): self.host.release = '11' init_type = self.host.choose_init() assert init_type == "sysvinit" def test_choose_init_SLE_12(self): self.host.release = '12' init_type = self.host.choose_init() assert init_type == "systemd" def test_choose_init_openSUSE_13_1(self): self.host.release = '13.1' init_type = self.host.choose_init() assert init_type == "systemd" class TestSuseMapComponents(object): def test_valid(self): pkgs = map_components(['ceph-osd', 'ceph-common', 'ceph-radosgw']) assert 'ceph' in pkgs assert 'ceph-common' in pkgs assert 'ceph-radosgw' in pkgs assert 'ceph-osd' not in pkgs def test_invalid(self): pkgs = map_components(['not-provided', 'ceph-mon']) assert 'not-provided' not in pkgs assert 'ceph' in pkgs
from ceph_deploy.hosts import suse + from ceph_deploy.hosts.suse.install import map_components class TestSuseInit(object): def setup(self): self.host = suse def test_choose_init_default(self): self.host.release = None init_type = self.host.choose_init() assert init_type == "sysvinit" def test_choose_init_SLE_11(self): self.host.release = '11' init_type = self.host.choose_init() assert init_type == "sysvinit" def test_choose_init_SLE_12(self): self.host.release = '12' init_type = self.host.choose_init() assert init_type == "systemd" def test_choose_init_openSUSE_13_1(self): self.host.release = '13.1' init_type = self.host.choose_init() assert init_type == "systemd" + + class TestSuseMapComponents(object): + def test_valid(self): + pkgs = map_components(['ceph-osd', 'ceph-common', 'ceph-radosgw']) + assert 'ceph' in pkgs + assert 'ceph-common' in pkgs + assert 'ceph-radosgw' in pkgs + assert 'ceph-osd' not in pkgs + + def test_invalid(self): + pkgs = map_components(['not-provided', 'ceph-mon']) + assert 'not-provided' not in pkgs + assert 'ceph' in pkgs
00922099d6abb03a0dbcca19781eb586d367eab0
skimage/measure/__init__.py
skimage/measure/__init__.py
from .find_contours import find_contours from ._regionprops import regionprops from .find_contours import find_contours from ._structural_similarity import ssim
from .find_contours import find_contours from ._regionprops import regionprops from ._structural_similarity import ssim
Remove double import of find contours.
BUG: Remove double import of find contours.
Python
bsd-3-clause
robintw/scikit-image,WarrenWeckesser/scikits-image,ofgulban/scikit-image,ajaybhat/scikit-image,rjeli/scikit-image,SamHames/scikit-image,chintak/scikit-image,ofgulban/scikit-image,SamHames/scikit-image,dpshelio/scikit-image,chintak/scikit-image,rjeli/scikit-image,oew1v07/scikit-image,almarklein/scikit-image,pratapvardhan/scikit-image,bsipocz/scikit-image,ClinicalGraphics/scikit-image,vighneshbirodkar/scikit-image,michaelaye/scikit-image,michaelaye/scikit-image,jwiggins/scikit-image,pratapvardhan/scikit-image,keflavich/scikit-image,chriscrosscutler/scikit-image,Britefury/scikit-image,dpshelio/scikit-image,bennlich/scikit-image,bsipocz/scikit-image,blink1073/scikit-image,GaZ3ll3/scikit-image,paalge/scikit-image,almarklein/scikit-image,Hiyorimi/scikit-image,bennlich/scikit-image,Hiyorimi/scikit-image,emon10005/scikit-image,emmanuelle/scikits.image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,almarklein/scikit-image,warmspringwinds/scikit-image,Midafi/scikit-image,youprofit/scikit-image,chintak/scikit-image,newville/scikit-image,Britefury/scikit-image,almarklein/scikit-image,juliusbierk/scikit-image,jwiggins/scikit-image,chriscrosscutler/scikit-image,michaelpacer/scikit-image,emmanuelle/scikits.image,juliusbierk/scikit-image,SamHames/scikit-image,robintw/scikit-image,chintak/scikit-image,WarrenWeckesser/scikits-image,Midafi/scikit-image,emmanuelle/scikits.image,vighneshbirodkar/scikit-image,newville/scikit-image,blink1073/scikit-image,michaelpacer/scikit-image,emmanuelle/scikits.image,oew1v07/scikit-image,emon10005/scikit-image,youprofit/scikit-image,ajaybhat/scikit-image,paalge/scikit-image,rjeli/scikit-image,warmspringwinds/scikit-image,paalge/scikit-image,keflavich/scikit-image,ClinicalGraphics/scikit-image,GaZ3ll3/scikit-image,SamHames/scikit-image
from .find_contours import find_contours from ._regionprops import regionprops - from .find_contours import find_contours from ._structural_similarity import ssim
Remove double import of find contours.
## Code Before: from .find_contours import find_contours from ._regionprops import regionprops from .find_contours import find_contours from ._structural_similarity import ssim ## Instruction: Remove double import of find contours. ## Code After: from .find_contours import find_contours from ._regionprops import regionprops from ._structural_similarity import ssim
from .find_contours import find_contours from ._regionprops import regionprops - from .find_contours import find_contours from ._structural_similarity import ssim
2b88f8f458781bd88f559f1a5a966fd5050414a0
tests/merchandise/music/test_models.py
tests/merchandise/music/test_models.py
import pytest from components.merchandise.music.models import Album, Single from components.merchandise.music.factories import (AlbumFactory, BaseFactory, SingleFactory) @pytest.mark.django_db class TestAlbums(object): def test_album_factory(self): album = AlbumFactory() assert isinstance(album, Album) assert 'album' in album.romanized_name assert album.identifier == 'album' @pytest.mark.django_db class TestSingles(object): def test_single_factory(self): single = SingleFactory() assert isinstance(single, Single) assert 'single' in single.romanized_name assert single.identifier == 'single'
import pytest from components.merchandise.music.models import Album, Single from components.merchandise.music.factories import (AlbumFactory, BaseFactory, SingleFactory) @pytest.mark.django_db def test_album_factory(): factory = AlbumFactory() assert isinstance(factory, Album) assert 'album' in factory.romanized_name assert factory.identifier == 'album' @pytest.mark.django_db def test_single_factory(): factory = SingleFactory() assert isinstance(factory, Single) assert 'single' in factory.romanized_name assert factory.identifier == 'single'
Remove the class surrounding the music tests. Staying strictly functional.
Remove the class surrounding the music tests. Staying strictly functional.
Python
apache-2.0
hello-base/web,hello-base/web,hello-base/web,hello-base/web
import pytest from components.merchandise.music.models import Album, Single from components.merchandise.music.factories import (AlbumFactory, BaseFactory, SingleFactory) @pytest.mark.django_db - class TestAlbums(object): - def test_album_factory(self): + def test_album_factory(): - album = AlbumFactory() + factory = AlbumFactory() - assert isinstance(album, Album) + assert isinstance(factory, Album) - assert 'album' in album.romanized_name + assert 'album' in factory.romanized_name - assert album.identifier == 'album' + assert factory.identifier == 'album' @pytest.mark.django_db - class TestSingles(object): - def test_single_factory(self): + def test_single_factory(): - single = SingleFactory() + factory = SingleFactory() - assert isinstance(single, Single) + assert isinstance(factory, Single) - assert 'single' in single.romanized_name + assert 'single' in factory.romanized_name - assert single.identifier == 'single' + assert factory.identifier == 'single'
Remove the class surrounding the music tests. Staying strictly functional.
## Code Before: import pytest from components.merchandise.music.models import Album, Single from components.merchandise.music.factories import (AlbumFactory, BaseFactory, SingleFactory) @pytest.mark.django_db class TestAlbums(object): def test_album_factory(self): album = AlbumFactory() assert isinstance(album, Album) assert 'album' in album.romanized_name assert album.identifier == 'album' @pytest.mark.django_db class TestSingles(object): def test_single_factory(self): single = SingleFactory() assert isinstance(single, Single) assert 'single' in single.romanized_name assert single.identifier == 'single' ## Instruction: Remove the class surrounding the music tests. Staying strictly functional. ## Code After: import pytest from components.merchandise.music.models import Album, Single from components.merchandise.music.factories import (AlbumFactory, BaseFactory, SingleFactory) @pytest.mark.django_db def test_album_factory(): factory = AlbumFactory() assert isinstance(factory, Album) assert 'album' in factory.romanized_name assert factory.identifier == 'album' @pytest.mark.django_db def test_single_factory(): factory = SingleFactory() assert isinstance(factory, Single) assert 'single' in factory.romanized_name assert factory.identifier == 'single'
import pytest from components.merchandise.music.models import Album, Single from components.merchandise.music.factories import (AlbumFactory, BaseFactory, SingleFactory) @pytest.mark.django_db - class TestAlbums(object): - def test_album_factory(self): ? ---- ---- + def test_album_factory(): - album = AlbumFactory() ? ^^^^ ^^^^ + factory = AlbumFactory() ? ^ ^^^^^ - assert isinstance(album, Album) ? ---- ^^^^ + assert isinstance(factory, Album) ? + ^^^^^ - assert 'album' in album.romanized_name ? ---- ^^^^ + assert 'album' in factory.romanized_name ? + ^^^^^ - assert album.identifier == 'album' ? ---- ^^^^ + assert factory.identifier == 'album' ? + ^^^^^ @pytest.mark.django_db - class TestSingles(object): - def test_single_factory(self): ? ---- ---- + def test_single_factory(): - single = SingleFactory() + factory = SingleFactory() - assert isinstance(single, Single) ? ---- ^^^^^^ + assert isinstance(factory, Single) ? ^^^^^^^ - assert 'single' in single.romanized_name ? ---- ^^^^^^ + assert 'single' in factory.romanized_name ? ^^^^^^^ - assert single.identifier == 'single' ? ---- ^^^^^^ + assert factory.identifier == 'single' ? ^^^^^^^
92da4abbcf1551d87192b627b3c5f44f2fe82e91
quickplots/textsize.py
quickplots/textsize.py
"""Functions for working out what font_size text needs to be""" def get_font_size(s, width, height): return 10
"""Functions for working out what font_size text needs to be""" def get_font_size(s, width, height): return int(height)
Make very basic font size calculator
Make very basic font size calculator
Python
mit
samirelanduk/quickplots
"""Functions for working out what font_size text needs to be""" def get_font_size(s, width, height): - return 10 + return int(height)
Make very basic font size calculator
## Code Before: """Functions for working out what font_size text needs to be""" def get_font_size(s, width, height): return 10 ## Instruction: Make very basic font size calculator ## Code After: """Functions for working out what font_size text needs to be""" def get_font_size(s, width, height): return int(height)
"""Functions for working out what font_size text needs to be""" def get_font_size(s, width, height): - return 10 + return int(height)
920c1cd03645bd04df59bdb1f52aab07c710746b
fabtools/__init__.py
fabtools/__init__.py
import fabtools.arch import fabtools.cron import fabtools.deb import fabtools.files import fabtools.git import fabtools.group import fabtools.mysql import fabtools.network import fabtools.nginx import fabtools.nodejs import fabtools.openvz import fabtools.pkg import fabtools.postgres import fabtools.python import fabtools.python_distribute import fabtools.rpm import fabtools.service import fabtools.shorewall import fabtools.ssh import fabtools.supervisor import fabtools.system import fabtools.user import fabtools.require icanhaz = require
import fabtools.arch import fabtools.cron import fabtools.deb import fabtools.disk import fabtools.files import fabtools.git import fabtools.group import fabtools.mysql import fabtools.network import fabtools.nginx import fabtools.nodejs import fabtools.openvz import fabtools.pkg import fabtools.postgres import fabtools.python import fabtools.python_distribute import fabtools.rpm import fabtools.service import fabtools.shorewall import fabtools.ssh import fabtools.supervisor import fabtools.system import fabtools.user import fabtools.require icanhaz = require
Add missing import for new disk module
Add missing import for new disk module
Python
bsd-2-clause
ahnjungho/fabtools,badele/fabtools,wagigi/fabtools-python,fabtools/fabtools,davidcaste/fabtools,AMOSoft/fabtools,prologic/fabtools,ronnix/fabtools,n0n0x/fabtools-python,pombredanne/fabtools,sociateru/fabtools,hagai26/fabtools,bitmonk/fabtools
import fabtools.arch import fabtools.cron import fabtools.deb + import fabtools.disk import fabtools.files import fabtools.git import fabtools.group import fabtools.mysql import fabtools.network import fabtools.nginx import fabtools.nodejs import fabtools.openvz import fabtools.pkg import fabtools.postgres import fabtools.python import fabtools.python_distribute import fabtools.rpm import fabtools.service import fabtools.shorewall import fabtools.ssh import fabtools.supervisor import fabtools.system import fabtools.user import fabtools.require icanhaz = require
Add missing import for new disk module
## Code Before: import fabtools.arch import fabtools.cron import fabtools.deb import fabtools.files import fabtools.git import fabtools.group import fabtools.mysql import fabtools.network import fabtools.nginx import fabtools.nodejs import fabtools.openvz import fabtools.pkg import fabtools.postgres import fabtools.python import fabtools.python_distribute import fabtools.rpm import fabtools.service import fabtools.shorewall import fabtools.ssh import fabtools.supervisor import fabtools.system import fabtools.user import fabtools.require icanhaz = require ## Instruction: Add missing import for new disk module ## Code After: import fabtools.arch import fabtools.cron import fabtools.deb import fabtools.disk import fabtools.files import fabtools.git import fabtools.group import fabtools.mysql import fabtools.network import fabtools.nginx import fabtools.nodejs import fabtools.openvz import fabtools.pkg import fabtools.postgres import fabtools.python import fabtools.python_distribute import fabtools.rpm import fabtools.service import fabtools.shorewall import fabtools.ssh import fabtools.supervisor import fabtools.system import fabtools.user import fabtools.require icanhaz = require
import fabtools.arch import fabtools.cron import fabtools.deb + import fabtools.disk import fabtools.files import fabtools.git import fabtools.group import fabtools.mysql import fabtools.network import fabtools.nginx import fabtools.nodejs import fabtools.openvz import fabtools.pkg import fabtools.postgres import fabtools.python import fabtools.python_distribute import fabtools.rpm import fabtools.service import fabtools.shorewall import fabtools.ssh import fabtools.supervisor import fabtools.system import fabtools.user import fabtools.require icanhaz = require
4b93e5aa8c0ce90189fb852e75ee213d3be0d01a
flicks/base/urls.py
flicks/base/urls.py
from django.conf.urls.defaults import patterns, url from flicks.base import views urlpatterns = patterns('', url(r'^/?$', views.home, name='flicks.base.home'), url(r'^strings/?$', views.strings, name='flicks.base.strings'), )
from django.conf.urls.defaults import patterns, url from flicks.base import views urlpatterns = patterns('', url(r'^/?$', views.home, name='flicks.base.home'), url(r'^faq/?$', views.faq, name='flicks.base.faq'), url(r'^strings/?$', views.strings, name='flicks.base.strings'), )
Add back in FAQ url that was removed accidentally.
Add back in FAQ url that was removed accidentally.
Python
bsd-3-clause
mozilla/firefox-flicks,mozilla/firefox-flicks,mozilla/firefox-flicks,mozilla/firefox-flicks
from django.conf.urls.defaults import patterns, url from flicks.base import views urlpatterns = patterns('', url(r'^/?$', views.home, name='flicks.base.home'), + url(r'^faq/?$', views.faq, name='flicks.base.faq'), url(r'^strings/?$', views.strings, name='flicks.base.strings'), )
Add back in FAQ url that was removed accidentally.
## Code Before: from django.conf.urls.defaults import patterns, url from flicks.base import views urlpatterns = patterns('', url(r'^/?$', views.home, name='flicks.base.home'), url(r'^strings/?$', views.strings, name='flicks.base.strings'), ) ## Instruction: Add back in FAQ url that was removed accidentally. ## Code After: from django.conf.urls.defaults import patterns, url from flicks.base import views urlpatterns = patterns('', url(r'^/?$', views.home, name='flicks.base.home'), url(r'^faq/?$', views.faq, name='flicks.base.faq'), url(r'^strings/?$', views.strings, name='flicks.base.strings'), )
from django.conf.urls.defaults import patterns, url from flicks.base import views urlpatterns = patterns('', url(r'^/?$', views.home, name='flicks.base.home'), + url(r'^faq/?$', views.faq, name='flicks.base.faq'), url(r'^strings/?$', views.strings, name='flicks.base.strings'), )
009a9f401fd0c1dba6702c1114a73b77f38b9ce3
bin/parse.py
bin/parse.py
import json import sys result = {} INDEX = { "AREA": 5 + 3 * 3 + 3 * 3, "CITY": 5 + 3 * 3, "CODE": 5 } for line in open("ORIGIN.txt"): code = line[:INDEX["CODE"]] city = line[INDEX["CODE"]: INDEX["CITY"]] if not city in result: result[city] = {} area = line[INDEX["CITY"]: INDEX["AREA"]] if not area in result[city]: result[city][area] = {} line = line[line.find(" "):].strip() road = line[:line.find(" ")] condition = line[line.find(" "):].replace(" ", "").strip() json.dump(result, open("zipcode.json", "w"), ensure_ascii=False, indent=4 ) sys.exit(0)
import json import sys result = {} INDEX = { "AREA": 5 + 3 * 3 + 3 * 3, "CITY": 5 + 3 * 3, "CODE": 5 } for line in open("ORIGIN.txt"): code = line[:INDEX["CODE"]] city = line[INDEX["CODE"]: INDEX["CITY"]] if not city in result: result[city] = {} area_index = INDEX["AREA"] if line[INDEX["AREA"]] != " ": area_index += 3 area = line[INDEX["CITY"]: area_index].strip() if not area in result[city]: result[city][area] = {} line = line[area_index:].strip() road = line.split(" ")[0] if len(line.split(" ")) == 1: road = line[:-3] if not road in result[city][area]: result[city][area][road] = {} condition = line[line.find(" "):].replace(" ", "").strip() json.dump(result, open("zipcode.json", "w"), ensure_ascii=False, indent=4 ) sys.exit(0)
Fix name parsing for roads
Fix name parsing for roads
Python
mit
lihengl/dizu-api,lihengl/dizu-api
import json import sys result = {} INDEX = { "AREA": 5 + 3 * 3 + 3 * 3, "CITY": 5 + 3 * 3, "CODE": 5 } for line in open("ORIGIN.txt"): code = line[:INDEX["CODE"]] city = line[INDEX["CODE"]: INDEX["CITY"]] if not city in result: result[city] = {} - area = line[INDEX["CITY"]: INDEX["AREA"]] + area_index = INDEX["AREA"] + if line[INDEX["AREA"]] != " ": area_index += 3 + area = line[INDEX["CITY"]: area_index].strip() if not area in result[city]: result[city][area] = {} - line = line[line.find(" "):].strip() + line = line[area_index:].strip() - road = line[:line.find(" ")] + road = line.split(" ")[0] + if len(line.split(" ")) == 1: road = line[:-3] + if not road in result[city][area]: result[city][area][road] = {} + condition = line[line.find(" "):].replace(" ", "").strip() json.dump(result, open("zipcode.json", "w"), ensure_ascii=False, indent=4 ) sys.exit(0)
Fix name parsing for roads
## Code Before: import json import sys result = {} INDEX = { "AREA": 5 + 3 * 3 + 3 * 3, "CITY": 5 + 3 * 3, "CODE": 5 } for line in open("ORIGIN.txt"): code = line[:INDEX["CODE"]] city = line[INDEX["CODE"]: INDEX["CITY"]] if not city in result: result[city] = {} area = line[INDEX["CITY"]: INDEX["AREA"]] if not area in result[city]: result[city][area] = {} line = line[line.find(" "):].strip() road = line[:line.find(" ")] condition = line[line.find(" "):].replace(" ", "").strip() json.dump(result, open("zipcode.json", "w"), ensure_ascii=False, indent=4 ) sys.exit(0) ## Instruction: Fix name parsing for roads ## Code After: import json import sys result = {} INDEX = { "AREA": 5 + 3 * 3 + 3 * 3, "CITY": 5 + 3 * 3, "CODE": 5 } for line in open("ORIGIN.txt"): code = line[:INDEX["CODE"]] city = line[INDEX["CODE"]: INDEX["CITY"]] if not city in result: result[city] = {} area_index = INDEX["AREA"] if line[INDEX["AREA"]] != " ": area_index += 3 area = line[INDEX["CITY"]: area_index].strip() if not area in result[city]: result[city][area] = {} line = line[area_index:].strip() road = line.split(" ")[0] if len(line.split(" ")) == 1: road = line[:-3] if not road in result[city][area]: result[city][area][road] = {} condition = line[line.find(" "):].replace(" ", "").strip() json.dump(result, open("zipcode.json", "w"), ensure_ascii=False, indent=4 ) sys.exit(0)
import json import sys result = {} INDEX = { "AREA": 5 + 3 * 3 + 3 * 3, "CITY": 5 + 3 * 3, "CODE": 5 } for line in open("ORIGIN.txt"): code = line[:INDEX["CODE"]] city = line[INDEX["CODE"]: INDEX["CITY"]] if not city in result: result[city] = {} - area = line[INDEX["CITY"]: INDEX["AREA"]] + area_index = INDEX["AREA"] + if line[INDEX["AREA"]] != " ": area_index += 3 + area = line[INDEX["CITY"]: area_index].strip() if not area in result[city]: result[city][area] = {} - line = line[line.find(" "):].strip() ? ^^^ ^^ ^^^^^ + line = line[area_index:].strip() ? ^^ ^^ ^^ - road = line[:line.find(" ")] ? ^^ ^^^^^^^ + road = line.split(" ")[0] ? ^^^ ^ ++ + if len(line.split(" ")) == 1: road = line[:-3] + if not road in result[city][area]: result[city][area][road] = {} + condition = line[line.find(" "):].replace(" ", "").strip() json.dump(result, open("zipcode.json", "w"), ensure_ascii=False, indent=4 ) sys.exit(0)
d604128015826444be4585c7204030840e9efc88
tests/test_java.py
tests/test_java.py
def test_java_exists(Command): version_result = Command("java -version") assert version_result.rc == 0
def test_java_exists(Command): version_result = Command("java -version") assert version_result.rc == 0 def test_java_certs_exist(File): assert File("/etc/ssl/certs/java/cacerts").exists
Add test to make sure SSL certs are installed.
Add test to make sure SSL certs are installed.
Python
apache-2.0
azavea/ansible-java,flibbertigibbet/ansible-java
def test_java_exists(Command): version_result = Command("java -version") assert version_result.rc == 0 + + def test_java_certs_exist(File): + assert File("/etc/ssl/certs/java/cacerts").exists +
Add test to make sure SSL certs are installed.
## Code Before: def test_java_exists(Command): version_result = Command("java -version") assert version_result.rc == 0 ## Instruction: Add test to make sure SSL certs are installed. ## Code After: def test_java_exists(Command): version_result = Command("java -version") assert version_result.rc == 0 def test_java_certs_exist(File): assert File("/etc/ssl/certs/java/cacerts").exists
def test_java_exists(Command): version_result = Command("java -version") assert version_result.rc == 0 + + + def test_java_certs_exist(File): + assert File("/etc/ssl/certs/java/cacerts").exists
eec72133d9245a4857c9a8954e235948a5fd9938
pokedex.py
pokedex.py
import json class NationalDex: def __init__(self, pathToNationalDex): dexfile = open(pathToNationalDex, 'r') self.dexdata = json.load(dexfile) self.numberOfPokemon = len(self.dexdata.keys()) self.pokemonNames = [] self.pokemonSlugs = [] for i in range (1, self.numberOfPokemon+1): dexKey = str(i).zfill(3) name = self.dexdata[dexKey]['name']['eng'] slug = self.dexdata[dexKey]['slug']['eng'] self.pokemonNames.append(name) self.pokemonSlugs.append(slug) def pokemonNameForNumber(self, number): return self.pokemon[number]
import json class NationalDex: def __init__(self, pathToNationalDex): dexfile = open(pathToNationalDex, 'r') self.dexdata = json.load(dexfile) self.numberOfPokemon = len(self.dexdata.keys()) self.pokemonNames = [] self.pokemonSlugs = [] for i in range (1, self.numberOfPokemon+1): dexKey = str(i).zfill(3) name = self.dexdata[dexKey]['name']['eng'] slug = self.dexdata[dexKey]['slug']['eng'] self.pokemonNames.append(name) self.pokemonSlugs.append(slug)
Remove unused method for getting Pokémon names
Remove unused method for getting Pokémon names
Python
bsd-2-clause
peterhajas/LivingDex,peterhajas/LivingDex,peterhajas/LivingDex,peterhajas/LivingDex
import json class NationalDex: def __init__(self, pathToNationalDex): dexfile = open(pathToNationalDex, 'r') self.dexdata = json.load(dexfile) self.numberOfPokemon = len(self.dexdata.keys()) self.pokemonNames = [] self.pokemonSlugs = [] for i in range (1, self.numberOfPokemon+1): dexKey = str(i).zfill(3) name = self.dexdata[dexKey]['name']['eng'] slug = self.dexdata[dexKey]['slug']['eng'] self.pokemonNames.append(name) self.pokemonSlugs.append(slug) - def pokemonNameForNumber(self, number): - return self.pokemon[number] -
Remove unused method for getting Pokémon names
## Code Before: import json class NationalDex: def __init__(self, pathToNationalDex): dexfile = open(pathToNationalDex, 'r') self.dexdata = json.load(dexfile) self.numberOfPokemon = len(self.dexdata.keys()) self.pokemonNames = [] self.pokemonSlugs = [] for i in range (1, self.numberOfPokemon+1): dexKey = str(i).zfill(3) name = self.dexdata[dexKey]['name']['eng'] slug = self.dexdata[dexKey]['slug']['eng'] self.pokemonNames.append(name) self.pokemonSlugs.append(slug) def pokemonNameForNumber(self, number): return self.pokemon[number] ## Instruction: Remove unused method for getting Pokémon names ## Code After: import json class NationalDex: def __init__(self, pathToNationalDex): dexfile = open(pathToNationalDex, 'r') self.dexdata = json.load(dexfile) self.numberOfPokemon = len(self.dexdata.keys()) self.pokemonNames = [] self.pokemonSlugs = [] for i in range (1, self.numberOfPokemon+1): dexKey = str(i).zfill(3) name = self.dexdata[dexKey]['name']['eng'] slug = self.dexdata[dexKey]['slug']['eng'] self.pokemonNames.append(name) self.pokemonSlugs.append(slug)
import json class NationalDex: def __init__(self, pathToNationalDex): dexfile = open(pathToNationalDex, 'r') self.dexdata = json.load(dexfile) self.numberOfPokemon = len(self.dexdata.keys()) self.pokemonNames = [] self.pokemonSlugs = [] for i in range (1, self.numberOfPokemon+1): dexKey = str(i).zfill(3) name = self.dexdata[dexKey]['name']['eng'] slug = self.dexdata[dexKey]['slug']['eng'] self.pokemonNames.append(name) self.pokemonSlugs.append(slug) - def pokemonNameForNumber(self, number): - return self.pokemon[number] -
4744f3b3e5193ad66a4bba64d8a8d8c4e328fdcc
pychat.py
pychat.py
from lib.login import Login def pychat(): login = Login login() if __name__ == '__main__': pychat()
from lib.login import Login from lib.client import Client from Tkinter import * class PyChat(object): def __init__(self): self.root = Tk() self.root.geometry("300x275+400+100") def login(self): self.login = Login(self.root, self.create_client) def run(self): self.root.mainloop() def create_client(self): credentials = self.login.login_credentials() credentials['root'] = self.root self.reset() self.client = Client(**credentials) def reset(self): for element in self.root.winfo_children(): element.destroy() if __name__ == '__main__': pychat = PyChat() pychat.login() pychat.run()
Rework to have a central root window controlled from a top class
Rework to have a central root window controlled from a top class
Python
mit
tijko/PyChat
from lib.login import Login + from lib.client import Client + from Tkinter import * - def pychat(): - login = Login - login() + class PyChat(object): + + def __init__(self): + self.root = Tk() + self.root.geometry("300x275+400+100") + + def login(self): + self.login = Login(self.root, self.create_client) + + def run(self): + self.root.mainloop() + + def create_client(self): + credentials = self.login.login_credentials() + credentials['root'] = self.root + self.reset() + self.client = Client(**credentials) + + def reset(self): + for element in self.root.winfo_children(): + element.destroy() if __name__ == '__main__': + pychat = PyChat() + pychat.login() - pychat() + pychat.run()
Rework to have a central root window controlled from a top class
## Code Before: from lib.login import Login def pychat(): login = Login login() if __name__ == '__main__': pychat() ## Instruction: Rework to have a central root window controlled from a top class ## Code After: from lib.login import Login from lib.client import Client from Tkinter import * class PyChat(object): def __init__(self): self.root = Tk() self.root.geometry("300x275+400+100") def login(self): self.login = Login(self.root, self.create_client) def run(self): self.root.mainloop() def create_client(self): credentials = self.login.login_credentials() credentials['root'] = self.root self.reset() self.client = Client(**credentials) def reset(self): for element in self.root.winfo_children(): element.destroy() if __name__ == '__main__': pychat = PyChat() pychat.login() pychat.run()
from lib.login import Login + from lib.client import Client + from Tkinter import * - def pychat(): - login = Login - login() + class PyChat(object): + + def __init__(self): + self.root = Tk() + self.root.geometry("300x275+400+100") + + def login(self): + self.login = Login(self.root, self.create_client) + + def run(self): + self.root.mainloop() + + def create_client(self): + credentials = self.login.login_credentials() + credentials['root'] = self.root + self.reset() + self.client = Client(**credentials) + + def reset(self): + for element in self.root.winfo_children(): + element.destroy() if __name__ == '__main__': + pychat = PyChat() + pychat.login() - pychat() + pychat.run() ? ++++
8d8863fe178b085c6ce7500996f9c2d2c8f159f6
umibukela/csv_export.py
umibukela/csv_export.py
from collections import OrderedDict def form_questions(form): d = OrderedDict() children = form['children'] for child in children: if 'pathstr' in child and 'control' not in child: d.update({child['pathstr']: ''}) elif 'children' in child: for minor in child['children']: if 'pathstr' in minor: d.update({minor['pathstr']: ''}) if 'Contact_number' in d: del d['Contact_number'] if 'Full_name' in d: del d['Full_name'] if 'Monitor_name' in d: del d['Monitor_name'] if 'phonenumber' in d: del d['phonenumber'] if 'capturer' in d: del d['capturer'] if 'surveyor' in d: del d['surveyor'] if 'Monitor_Name' in d: del d['Monitor_Name'] if 'phone_number' in d: del d['phone_number'] return d def export_row(answer, fields): obj = answer.answers for k in fields.keys(): try: fields[k] = obj[k] except KeyError: del fields[k] return fields
from collections import OrderedDict def form_questions(form): d = OrderedDict() children = form['children'] for child in children: if 'pathstr' in child and 'control' not in child and child['type'] != 'group': d.update({child['pathstr']: ''}) elif 'children' in child: for minor in child['children']: if 'pathstr' in minor: d.update({minor['pathstr']: ''}) if 'Contact_number' in d: del d['Contact_number'] if 'Full_name' in d: del d['Full_name'] if 'Monitor_name' in d: del d['Monitor_name'] if 'phonenumber' in d: del d['phonenumber'] if 'capturer' in d: del d['capturer'] if 'surveyor' in d: del d['surveyor'] if 'Monitor_Name' in d: del d['Monitor_Name'] if 'phone_number' in d: del d['phone_number'] return d def export_row(answer, fields): obj = answer.answers for k in fields.keys(): try: fields[k] = obj[k] except KeyError: del fields[k] return fields
Make sure correct type is excluded
Make sure correct type is excluded
Python
mit
Code4SA/umibukela,Code4SA/umibukela,Code4SA/umibukela,Code4SA/umibukela
from collections import OrderedDict def form_questions(form): d = OrderedDict() children = form['children'] for child in children: - if 'pathstr' in child and 'control' not in child: + if 'pathstr' in child and 'control' not in child and child['type'] != 'group': d.update({child['pathstr']: ''}) elif 'children' in child: for minor in child['children']: if 'pathstr' in minor: d.update({minor['pathstr']: ''}) if 'Contact_number' in d: del d['Contact_number'] if 'Full_name' in d: del d['Full_name'] if 'Monitor_name' in d: del d['Monitor_name'] if 'phonenumber' in d: del d['phonenumber'] if 'capturer' in d: del d['capturer'] if 'surveyor' in d: del d['surveyor'] if 'Monitor_Name' in d: del d['Monitor_Name'] if 'phone_number' in d: del d['phone_number'] - return d def export_row(answer, fields): obj = answer.answers for k in fields.keys(): try: fields[k] = obj[k] except KeyError: del fields[k] return fields
Make sure correct type is excluded
## Code Before: from collections import OrderedDict def form_questions(form): d = OrderedDict() children = form['children'] for child in children: if 'pathstr' in child and 'control' not in child: d.update({child['pathstr']: ''}) elif 'children' in child: for minor in child['children']: if 'pathstr' in minor: d.update({minor['pathstr']: ''}) if 'Contact_number' in d: del d['Contact_number'] if 'Full_name' in d: del d['Full_name'] if 'Monitor_name' in d: del d['Monitor_name'] if 'phonenumber' in d: del d['phonenumber'] if 'capturer' in d: del d['capturer'] if 'surveyor' in d: del d['surveyor'] if 'Monitor_Name' in d: del d['Monitor_Name'] if 'phone_number' in d: del d['phone_number'] return d def export_row(answer, fields): obj = answer.answers for k in fields.keys(): try: fields[k] = obj[k] except KeyError: del fields[k] return fields ## Instruction: Make sure correct type is excluded ## Code After: from collections import OrderedDict def form_questions(form): d = OrderedDict() children = form['children'] for child in children: if 'pathstr' in child and 'control' not in child and child['type'] != 'group': d.update({child['pathstr']: ''}) elif 'children' in child: for minor in child['children']: if 'pathstr' in minor: d.update({minor['pathstr']: ''}) if 'Contact_number' in d: del d['Contact_number'] if 'Full_name' in d: del d['Full_name'] if 'Monitor_name' in d: del d['Monitor_name'] if 'phonenumber' in d: del d['phonenumber'] if 'capturer' in d: del d['capturer'] if 'surveyor' in d: del d['surveyor'] if 'Monitor_Name' in d: del d['Monitor_Name'] if 'phone_number' in d: del d['phone_number'] return d def export_row(answer, fields): obj = answer.answers for k in fields.keys(): try: fields[k] = obj[k] except KeyError: del fields[k] return fields
from collections import OrderedDict def form_questions(form): d = OrderedDict() children = form['children'] for child in children: - if 'pathstr' in child and 'control' not in child: + if 'pathstr' in child and 'control' not in child and child['type'] != 'group': ? +++++++++++++++++++++++++++++ d.update({child['pathstr']: ''}) elif 'children' in child: for minor in child['children']: if 'pathstr' in minor: d.update({minor['pathstr']: ''}) if 'Contact_number' in d: del d['Contact_number'] if 'Full_name' in d: del d['Full_name'] if 'Monitor_name' in d: del d['Monitor_name'] if 'phonenumber' in d: del d['phonenumber'] if 'capturer' in d: del d['capturer'] if 'surveyor' in d: del d['surveyor'] if 'Monitor_Name' in d: del d['Monitor_Name'] if 'phone_number' in d: del d['phone_number'] - return d def export_row(answer, fields): obj = answer.answers for k in fields.keys(): try: fields[k] = obj[k] except KeyError: del fields[k] return fields
90ca340883077f57ba63127db058a8d244ec6f4c
molecule/ui/tests/conftest.py
molecule/ui/tests/conftest.py
import pytest from selenium import webdriver from selenium.webdriver.chrome.options import Options from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions import time from webdriver_manager.chrome import ChromeDriverManager from webdriver_manager.utils import ChromeType @pytest.fixture(scope="session") def chromedriver(): try: options = Options() options.headless = True options.add_argument('--no-sandbox') options.add_argument('--disable-dev-shm-usage') options.add_argument("--disable-gpu") driver = webdriver.Chrome(ChromeDriverManager(chrome_type=ChromeType.CHROMIUM).install(), options=options) url = 'http://localhost:9000' driver.get(url + "/gettingstarted") WebDriverWait(driver, 30).until(expected_conditions.title_contains('Sign in')) #Login to Graylog uid_field = driver.find_element_by_name("username") uid_field.clear() uid_field.send_keys("admin") password_field = driver.find_element_by_name("password") password_field.clear() password_field.send_keys("admin") password_field.send_keys(Keys.RETURN) WebDriverWait(driver, 30).until(expected_conditions.title_contains('Getting started')) #Run tests yield driver finally: driver.quit()
import pytest from selenium import webdriver from selenium.webdriver.chrome.options import Options from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions import time from webdriver_manager.chrome import ChromeDriverManager @pytest.fixture(scope="session") def chromedriver(): try: options = Options() options.headless = True options.add_argument('--no-sandbox') options.add_argument('--disable-dev-shm-usage') options.add_argument("--disable-gpu") driver = webdriver.Chrome(ChromeDriverManager().install(), options=options) url = 'http://localhost:9000' driver.get(url + "/gettingstarted") WebDriverWait(driver, 30).until(expected_conditions.title_contains('Sign in')) #Login to Graylog uid_field = driver.find_element_by_name("username") uid_field.clear() uid_field.send_keys("admin") password_field = driver.find_element_by_name("password") password_field.clear() password_field.send_keys("admin") password_field.send_keys(Keys.RETURN) WebDriverWait(driver, 30).until(expected_conditions.title_contains('Getting started')) #Run tests yield driver finally: driver.quit()
Switch UI tests back to google chrome.
Switch UI tests back to google chrome.
Python
apache-2.0
Graylog2/graylog-ansible-role
import pytest from selenium import webdriver from selenium.webdriver.chrome.options import Options from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions import time from webdriver_manager.chrome import ChromeDriverManager - from webdriver_manager.utils import ChromeType @pytest.fixture(scope="session") def chromedriver(): try: options = Options() options.headless = True options.add_argument('--no-sandbox') options.add_argument('--disable-dev-shm-usage') options.add_argument("--disable-gpu") - driver = webdriver.Chrome(ChromeDriverManager(chrome_type=ChromeType.CHROMIUM).install(), options=options) + driver = webdriver.Chrome(ChromeDriverManager().install(), options=options) url = 'http://localhost:9000' driver.get(url + "/gettingstarted") WebDriverWait(driver, 30).until(expected_conditions.title_contains('Sign in')) #Login to Graylog uid_field = driver.find_element_by_name("username") uid_field.clear() uid_field.send_keys("admin") password_field = driver.find_element_by_name("password") password_field.clear() password_field.send_keys("admin") password_field.send_keys(Keys.RETURN) WebDriverWait(driver, 30).until(expected_conditions.title_contains('Getting started')) #Run tests yield driver finally: driver.quit()
Switch UI tests back to google chrome.
## Code Before: import pytest from selenium import webdriver from selenium.webdriver.chrome.options import Options from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions import time from webdriver_manager.chrome import ChromeDriverManager from webdriver_manager.utils import ChromeType @pytest.fixture(scope="session") def chromedriver(): try: options = Options() options.headless = True options.add_argument('--no-sandbox') options.add_argument('--disable-dev-shm-usage') options.add_argument("--disable-gpu") driver = webdriver.Chrome(ChromeDriverManager(chrome_type=ChromeType.CHROMIUM).install(), options=options) url = 'http://localhost:9000' driver.get(url + "/gettingstarted") WebDriverWait(driver, 30).until(expected_conditions.title_contains('Sign in')) #Login to Graylog uid_field = driver.find_element_by_name("username") uid_field.clear() uid_field.send_keys("admin") password_field = driver.find_element_by_name("password") password_field.clear() password_field.send_keys("admin") password_field.send_keys(Keys.RETURN) WebDriverWait(driver, 30).until(expected_conditions.title_contains('Getting started')) #Run tests yield driver finally: driver.quit() ## Instruction: Switch UI tests back to google chrome. 
## Code After: import pytest from selenium import webdriver from selenium.webdriver.chrome.options import Options from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions import time from webdriver_manager.chrome import ChromeDriverManager @pytest.fixture(scope="session") def chromedriver(): try: options = Options() options.headless = True options.add_argument('--no-sandbox') options.add_argument('--disable-dev-shm-usage') options.add_argument("--disable-gpu") driver = webdriver.Chrome(ChromeDriverManager().install(), options=options) url = 'http://localhost:9000' driver.get(url + "/gettingstarted") WebDriverWait(driver, 30).until(expected_conditions.title_contains('Sign in')) #Login to Graylog uid_field = driver.find_element_by_name("username") uid_field.clear() uid_field.send_keys("admin") password_field = driver.find_element_by_name("password") password_field.clear() password_field.send_keys("admin") password_field.send_keys(Keys.RETURN) WebDriverWait(driver, 30).until(expected_conditions.title_contains('Getting started')) #Run tests yield driver finally: driver.quit()
import pytest from selenium import webdriver from selenium.webdriver.chrome.options import Options from selenium.webdriver.common.keys import Keys from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions import time from webdriver_manager.chrome import ChromeDriverManager - from webdriver_manager.utils import ChromeType @pytest.fixture(scope="session") def chromedriver(): try: options = Options() options.headless = True options.add_argument('--no-sandbox') options.add_argument('--disable-dev-shm-usage') options.add_argument("--disable-gpu") - driver = webdriver.Chrome(ChromeDriverManager(chrome_type=ChromeType.CHROMIUM).install(), options=options) ? ------------------------------- + driver = webdriver.Chrome(ChromeDriverManager().install(), options=options) url = 'http://localhost:9000' driver.get(url + "/gettingstarted") WebDriverWait(driver, 30).until(expected_conditions.title_contains('Sign in')) #Login to Graylog uid_field = driver.find_element_by_name("username") uid_field.clear() uid_field.send_keys("admin") password_field = driver.find_element_by_name("password") password_field.clear() password_field.send_keys("admin") password_field.send_keys(Keys.RETURN) WebDriverWait(driver, 30).until(expected_conditions.title_contains('Getting started')) #Run tests yield driver finally: driver.quit()
edd716204f1fc3337d46b74ed5708d5d0533f586
km3pipe/__init__.py
km3pipe/__init__.py
from __future__ import division, absolute_import, print_function try: __KM3PIPE_SETUP__ except NameError: __KM3PIPE_SETUP__ = False from km3pipe.__version__ import version, version_info # noqa if not __KM3PIPE_SETUP__: from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa Geometry, AanetGeometry) from km3pipe import io # noqa from km3pipe import utils # noqa from km3pipe import srv # noqa from km3pipe.srv import srv_event # noqa from km3pipe.io import GenericPump, read_hdf5 # noqa __author__ = "Tamas Gal and Moritz Lotze" __copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration." __credits__ = ["Thomas Heid"] __license__ = "MIT" __version__ = version __maintainer__ = "Tamas Gal and Moritz Lotze" __email__ = "tgal@km3net.de" __status__ = "Development"
from __future__ import division, absolute_import, print_function try: __KM3PIPE_SETUP__ except NameError: __KM3PIPE_SETUP__ = False from km3pipe.__version__ import version, version_info # noqa if not __KM3PIPE_SETUP__: from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa Geometry, AanetGeometry) from km3pipe import io # noqa from km3pipe import utils # noqa from km3pipe import srv # noqa from km3pipe.srv import srv_event # noqa from km3pipe.io import GenericPump, read_hdf5 # noqa import os mplstyle = os.path.dirname(kp.__file__) + '/kp-data/km3pipe.mplstyle' __author__ = "Tamas Gal and Moritz Lotze" __copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration." __credits__ = ["Thomas Heid"] __license__ = "MIT" __version__ = version __maintainer__ = "Tamas Gal and Moritz Lotze" __email__ = "tgal@km3net.de" __status__ = "Development"
Use better name for matplotlib style
Use better name for matplotlib style
Python
mit
tamasgal/km3pipe,tamasgal/km3pipe
from __future__ import division, absolute_import, print_function try: __KM3PIPE_SETUP__ except NameError: __KM3PIPE_SETUP__ = False from km3pipe.__version__ import version, version_info # noqa if not __KM3PIPE_SETUP__: from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa Geometry, AanetGeometry) from km3pipe import io # noqa from km3pipe import utils # noqa from km3pipe import srv # noqa from km3pipe.srv import srv_event # noqa from km3pipe.io import GenericPump, read_hdf5 # noqa + import os + + mplstyle = os.path.dirname(kp.__file__) + '/kp-data/km3pipe.mplstyle' + __author__ = "Tamas Gal and Moritz Lotze" __copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration." __credits__ = ["Thomas Heid"] __license__ = "MIT" __version__ = version __maintainer__ = "Tamas Gal and Moritz Lotze" __email__ = "tgal@km3net.de" __status__ = "Development"
Use better name for matplotlib style
## Code Before: from __future__ import division, absolute_import, print_function try: __KM3PIPE_SETUP__ except NameError: __KM3PIPE_SETUP__ = False from km3pipe.__version__ import version, version_info # noqa if not __KM3PIPE_SETUP__: from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa Geometry, AanetGeometry) from km3pipe import io # noqa from km3pipe import utils # noqa from km3pipe import srv # noqa from km3pipe.srv import srv_event # noqa from km3pipe.io import GenericPump, read_hdf5 # noqa __author__ = "Tamas Gal and Moritz Lotze" __copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration." __credits__ = ["Thomas Heid"] __license__ = "MIT" __version__ = version __maintainer__ = "Tamas Gal and Moritz Lotze" __email__ = "tgal@km3net.de" __status__ = "Development" ## Instruction: Use better name for matplotlib style ## Code After: from __future__ import division, absolute_import, print_function try: __KM3PIPE_SETUP__ except NameError: __KM3PIPE_SETUP__ = False from km3pipe.__version__ import version, version_info # noqa if not __KM3PIPE_SETUP__: from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa Geometry, AanetGeometry) from km3pipe import io # noqa from km3pipe import utils # noqa from km3pipe import srv # noqa from km3pipe.srv import srv_event # noqa from km3pipe.io import GenericPump, read_hdf5 # noqa import os mplstyle = os.path.dirname(kp.__file__) + '/kp-data/km3pipe.mplstyle' __author__ = "Tamas Gal and Moritz Lotze" __copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration." __credits__ = ["Thomas Heid"] __license__ = "MIT" __version__ = version __maintainer__ = "Tamas Gal and Moritz Lotze" __email__ = "tgal@km3net.de" __status__ = "Development"
from __future__ import division, absolute_import, print_function try: __KM3PIPE_SETUP__ except NameError: __KM3PIPE_SETUP__ = False from km3pipe.__version__ import version, version_info # noqa if not __KM3PIPE_SETUP__: from km3pipe.core import (Pipeline, Module, Pump, Blob, Run, # noqa Geometry, AanetGeometry) from km3pipe import io # noqa from km3pipe import utils # noqa from km3pipe import srv # noqa from km3pipe.srv import srv_event # noqa from km3pipe.io import GenericPump, read_hdf5 # noqa + import os + + mplstyle = os.path.dirname(kp.__file__) + '/kp-data/km3pipe.mplstyle' + __author__ = "Tamas Gal and Moritz Lotze" __copyright__ = "Copyright 2016, Tamas Gal and the KM3NeT collaboration." __credits__ = ["Thomas Heid"] __license__ = "MIT" __version__ = version __maintainer__ = "Tamas Gal and Moritz Lotze" __email__ = "tgal@km3net.de" __status__ = "Development"
dd646b7573c1e2bb41f60723e02aa6ddf58d59f6
kobo/apps/help/permissions.py
kobo/apps/help/permissions.py
from rest_framework import permissions class InAppMessagePermissions(permissions.BasePermission): def has_permission(self, request, view): if not request.user.is_authenticated: # Deny access to anonymous users return False if request.user.is_superuser: # Allow superusers to do anything return True if request.method in permissions.SAFE_METHODS: # Allow read-only access to any authenticated user return True elif request.method == 'PATCH': if not request.data: # A `PATCH` with no data is a check to see what's allowed, or # that's what the DRF "Browsable API" does, at least. We'll # wave it through for authenticated users return True elif request.data.keys() == ['interactions']: # Allow any authenticated user to update their own interactions return True # Sorry, buddy. return False
from rest_framework import exceptions, permissions class InAppMessagePermissions(permissions.BasePermission): def has_permission(self, request, view): if not request.user.is_authenticated: # Deny access to anonymous users return False if request.user.is_superuser: # Allow superusers to do anything return True if request.method in permissions.SAFE_METHODS: # Allow read-only access to any authenticated user return True elif request.method == 'PATCH': if not request.data: # A `PATCH` with no data is a check to see what's allowed, or # that's what the DRF "Browsable API" does, at least. We'll # wave it through for authenticated users return True elif list(request.data) == ['interactions']: # Allow any authenticated user to update their own interactions return True else: formatted_fields = ', '.join( [f'`{x}`' for x in request.data.keys()] ) raise exceptions.PermissionDenied( detail=( 'You may update only `interactions`, but your request ' f'contained {formatted_fields}.' ) ) # Sorry, buddy. return False
Fix Python 2-to-3 bug in in-app messages
Fix Python 2-to-3 bug in in-app messages …so that the permission check for `PATCH`ing `interactions` does not always fail. Fixes #2762
Python
agpl-3.0
kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi
- from rest_framework import permissions + from rest_framework import exceptions, permissions class InAppMessagePermissions(permissions.BasePermission): def has_permission(self, request, view): if not request.user.is_authenticated: # Deny access to anonymous users return False if request.user.is_superuser: # Allow superusers to do anything return True if request.method in permissions.SAFE_METHODS: # Allow read-only access to any authenticated user return True elif request.method == 'PATCH': if not request.data: # A `PATCH` with no data is a check to see what's allowed, or # that's what the DRF "Browsable API" does, at least. We'll # wave it through for authenticated users return True - elif request.data.keys() == ['interactions']: + elif list(request.data) == ['interactions']: # Allow any authenticated user to update their own interactions return True + else: + formatted_fields = ', '.join( + [f'`{x}`' for x in request.data.keys()] + ) + raise exceptions.PermissionDenied( + detail=( + 'You may update only `interactions`, but your request ' + f'contained {formatted_fields}.' + ) + ) # Sorry, buddy. return False
Fix Python 2-to-3 bug in in-app messages
## Code Before: from rest_framework import permissions class InAppMessagePermissions(permissions.BasePermission): def has_permission(self, request, view): if not request.user.is_authenticated: # Deny access to anonymous users return False if request.user.is_superuser: # Allow superusers to do anything return True if request.method in permissions.SAFE_METHODS: # Allow read-only access to any authenticated user return True elif request.method == 'PATCH': if not request.data: # A `PATCH` with no data is a check to see what's allowed, or # that's what the DRF "Browsable API" does, at least. We'll # wave it through for authenticated users return True elif request.data.keys() == ['interactions']: # Allow any authenticated user to update their own interactions return True # Sorry, buddy. return False ## Instruction: Fix Python 2-to-3 bug in in-app messages ## Code After: from rest_framework import exceptions, permissions class InAppMessagePermissions(permissions.BasePermission): def has_permission(self, request, view): if not request.user.is_authenticated: # Deny access to anonymous users return False if request.user.is_superuser: # Allow superusers to do anything return True if request.method in permissions.SAFE_METHODS: # Allow read-only access to any authenticated user return True elif request.method == 'PATCH': if not request.data: # A `PATCH` with no data is a check to see what's allowed, or # that's what the DRF "Browsable API" does, at least. We'll # wave it through for authenticated users return True elif list(request.data) == ['interactions']: # Allow any authenticated user to update their own interactions return True else: formatted_fields = ', '.join( [f'`{x}`' for x in request.data.keys()] ) raise exceptions.PermissionDenied( detail=( 'You may update only `interactions`, but your request ' f'contained {formatted_fields}.' ) ) # Sorry, buddy. return False
- from rest_framework import permissions + from rest_framework import exceptions, permissions ? ++++++++++++ class InAppMessagePermissions(permissions.BasePermission): def has_permission(self, request, view): if not request.user.is_authenticated: # Deny access to anonymous users return False if request.user.is_superuser: # Allow superusers to do anything return True if request.method in permissions.SAFE_METHODS: # Allow read-only access to any authenticated user return True elif request.method == 'PATCH': if not request.data: # A `PATCH` with no data is a check to see what's allowed, or # that's what the DRF "Browsable API" does, at least. We'll # wave it through for authenticated users return True - elif request.data.keys() == ['interactions']: ? ------ + elif list(request.data) == ['interactions']: ? +++++ # Allow any authenticated user to update their own interactions return True + else: + formatted_fields = ', '.join( + [f'`{x}`' for x in request.data.keys()] + ) + raise exceptions.PermissionDenied( + detail=( + 'You may update only `interactions`, but your request ' + f'contained {formatted_fields}.' + ) + ) # Sorry, buddy. return False
2181d63c279965e4e694cae508a236f51d66d49b
data_structures/bitorrent/server/announce/torrent.py
data_structures/bitorrent/server/announce/torrent.py
import struct import socket import time from trackpy.vendors.redis import redis class Torrent(object): def __init__(self, info_hash): self.info = redis.hgetall(info_hash) self.info_hash = info_hash def can_announce(self, peer_id): timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0) if not timestamp: return True now = int(time.time()) return False if now - timestamp > 5 * 60 else True def set_announce(self, peer_id): redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time())) @property def peers(self): return redis.smembers('%s_peers' % self.info_hash) @peers.setter def peers(self, peer): redis.sadd('%s_peers' % self.info_hash, peer) @property def seeders(self): return self.info['seeders'] if 'seeders' in self.info else [] @property def leechers(self): return self.info['leecher'] if 'leechers' in self.info else []
import struct import socket import time from trackpy.vendors.redis import redis class Torrent(object): def __init__(self, info_hash): self.info = redis.hgetall(info_hash) self.info_hash = info_hash def can_announce(self, peer_id): timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0) if not timestamp: return True now = int(time.time()) return False if now - timestamp > 5 * 60 else True def set_announce(self, peer_id): redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time())) @property def peers(self): return redis.smembers('%s_peers' % self.info_hash) @peers.setter def peers(self, peer): redis.sadd('%s_peers' % self.info_hash, peer) @property def seeders(self): return self.info['seeders'] if 'seeders' in self.info else 0 @property def leechers(self): return self.info['leecher'] if 'leechers' in self.info else 0 @property def binary_peers(self): binary_peers = '' for peer in self.peers: ip = peer.split(':')[0] port = peer.split(':')[1] ip = struct.unpack("!I", socket.inet_aton(ip))[0] binary_peers += struct.pack('!ih', ip, int(port)) return binary_peers
Implement binary representation of peers
Implement binary representation of peers
Python
apache-2.0
vtemian/university_projects,vtemian/university_projects,vtemian/university_projects
import struct import socket import time from trackpy.vendors.redis import redis class Torrent(object): def __init__(self, info_hash): self.info = redis.hgetall(info_hash) self.info_hash = info_hash def can_announce(self, peer_id): timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0) if not timestamp: return True now = int(time.time()) return False if now - timestamp > 5 * 60 else True def set_announce(self, peer_id): redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time())) @property def peers(self): return redis.smembers('%s_peers' % self.info_hash) @peers.setter def peers(self, peer): redis.sadd('%s_peers' % self.info_hash, peer) @property def seeders(self): - return self.info['seeders'] if 'seeders' in self.info else [] + return self.info['seeders'] if 'seeders' in self.info else 0 @property def leechers(self): - return self.info['leecher'] if 'leechers' in self.info else [] + return self.info['leecher'] if 'leechers' in self.info else 0 + @property + def binary_peers(self): + binary_peers = '' + for peer in self.peers: + ip = peer.split(':')[0] + port = peer.split(':')[1] + + ip = struct.unpack("!I", socket.inet_aton(ip))[0] + binary_peers += struct.pack('!ih', ip, int(port)) + return binary_peers +
Implement binary representation of peers
## Code Before: import struct import socket import time from trackpy.vendors.redis import redis class Torrent(object): def __init__(self, info_hash): self.info = redis.hgetall(info_hash) self.info_hash = info_hash def can_announce(self, peer_id): timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0) if not timestamp: return True now = int(time.time()) return False if now - timestamp > 5 * 60 else True def set_announce(self, peer_id): redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time())) @property def peers(self): return redis.smembers('%s_peers' % self.info_hash) @peers.setter def peers(self, peer): redis.sadd('%s_peers' % self.info_hash, peer) @property def seeders(self): return self.info['seeders'] if 'seeders' in self.info else [] @property def leechers(self): return self.info['leecher'] if 'leechers' in self.info else [] ## Instruction: Implement binary representation of peers ## Code After: import struct import socket import time from trackpy.vendors.redis import redis class Torrent(object): def __init__(self, info_hash): self.info = redis.hgetall(info_hash) self.info_hash = info_hash def can_announce(self, peer_id): timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0) if not timestamp: return True now = int(time.time()) return False if now - timestamp > 5 * 60 else True def set_announce(self, peer_id): redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time())) @property def peers(self): return redis.smembers('%s_peers' % self.info_hash) @peers.setter def peers(self, peer): redis.sadd('%s_peers' % self.info_hash, peer) @property def seeders(self): return self.info['seeders'] if 'seeders' in self.info else 0 @property def leechers(self): return self.info['leecher'] if 'leechers' in self.info else 0 @property def binary_peers(self): binary_peers = '' for peer in self.peers: ip = peer.split(':')[0] port = peer.split(':')[1] ip = struct.unpack("!I", socket.inet_aton(ip))[0] binary_peers += struct.pack('!ih', ip, 
int(port)) return binary_peers
import struct import socket import time from trackpy.vendors.redis import redis class Torrent(object): def __init__(self, info_hash): self.info = redis.hgetall(info_hash) self.info_hash = info_hash def can_announce(self, peer_id): timestamp = int(redis.get("%s_%s" % (self.info_hash, peer_id)) or 0) if not timestamp: return True now = int(time.time()) return False if now - timestamp > 5 * 60 else True def set_announce(self, peer_id): redis.set("%s_%s" % (self.info_hash, peer_id), int(time.time())) @property def peers(self): return redis.smembers('%s_peers' % self.info_hash) @peers.setter def peers(self, peer): redis.sadd('%s_peers' % self.info_hash, peer) @property def seeders(self): - return self.info['seeders'] if 'seeders' in self.info else [] ? ^^ + return self.info['seeders'] if 'seeders' in self.info else 0 ? ^ @property def leechers(self): - return self.info['leecher'] if 'leechers' in self.info else [] ? ^^ + return self.info['leecher'] if 'leechers' in self.info else 0 ? ^ + + @property + def binary_peers(self): + binary_peers = '' + for peer in self.peers: + ip = peer.split(':')[0] + port = peer.split(':')[1] + + ip = struct.unpack("!I", socket.inet_aton(ip))[0] + binary_peers += struct.pack('!ih', ip, int(port)) + return binary_peers
8258848f42142a9b539e3674e3cdaae2ffac09a1
app/main/views/root.py
app/main/views/root.py
import os import markdown import yaml from flask import render_template, render_template_string, session from .. import main from application import application @main.route('/', methods=['GET']) def root(): return render_template('index.html') @main.route('/patterns/') @main.route('/patterns/<pattern>') def patterns(pattern = 'index'): sections = [] pattern_name = 'grid-system' if (pattern == 'index') else pattern for root, dirs, files in os.walk('app/templates/patterns/components'): for file in files: if file.endswith('.html'): with open(os.path.join(root, file), 'r') as f: title = file.replace('.html', '') sections.append({ 'title': title, 'current': True if (title == pattern) else False }) return render_template('patterns/index.html', sections=sections, pattern_include='patterns/components/' + pattern_name + '.html', title=pattern)
import os import markdown import yaml from flask import render_template, render_template_string, session from .. import main from application import application @main.route('/', methods=['GET']) def root(): return render_template('index.html') @main.route('/patterns/') @main.route('/patterns/<pattern>') def patterns(pattern='index'): sections = [] pattern_name = 'grid-system' if (pattern == 'index') else pattern for root, dirs, files in os.walk('app/templates/patterns/components'): for file in files: if file.endswith('.html'): with open(os.path.join(root, file), 'r') as f: title = file.replace('.html', '') sections.append({ 'title': title, 'current': True if (title == pattern) else False }) return render_template('patterns/index.html', sections=sections, pattern_include='patterns/components/' + pattern_name + '.html', title=pattern)
Fix to pass python linter, this slipped through as the master branch wasnt protected
Fix to pass python linter, this slipped through as the master branch wasnt protected
Python
mit
ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner,ONSdigital/eq-survey-runner
import os import markdown import yaml from flask import render_template, render_template_string, session from .. import main from application import application + @main.route('/', methods=['GET']) def root(): return render_template('index.html') + @main.route('/patterns/') @main.route('/patterns/<pattern>') - def patterns(pattern = 'index'): + def patterns(pattern='index'): sections = [] pattern_name = 'grid-system' if (pattern == 'index') else pattern - for root, dirs, files in os.walk('app/templates/patterns/components'): for file in files: if file.endswith('.html'): - with open(os.path.join(root, file), 'r') as f: + with open(os.path.join(root, file), 'r') as f: - title = file.replace('.html', '') + title = file.replace('.html', '') - sections.append({ + sections.append({ - 'title': title, + 'title': title, - 'current': True if (title == pattern) else False + 'current': True if (title == pattern) else False - }) + }) - return render_template('patterns/index.html', sections=sections, pattern_include='patterns/components/' + pattern_name + '.html', title=pattern)
Fix to pass python linter, this slipped through as the master branch wasnt protected
## Code Before: import os import markdown import yaml from flask import render_template, render_template_string, session from .. import main from application import application @main.route('/', methods=['GET']) def root(): return render_template('index.html') @main.route('/patterns/') @main.route('/patterns/<pattern>') def patterns(pattern = 'index'): sections = [] pattern_name = 'grid-system' if (pattern == 'index') else pattern for root, dirs, files in os.walk('app/templates/patterns/components'): for file in files: if file.endswith('.html'): with open(os.path.join(root, file), 'r') as f: title = file.replace('.html', '') sections.append({ 'title': title, 'current': True if (title == pattern) else False }) return render_template('patterns/index.html', sections=sections, pattern_include='patterns/components/' + pattern_name + '.html', title=pattern) ## Instruction: Fix to pass python linter, this slipped through as the master branch wasnt protected ## Code After: import os import markdown import yaml from flask import render_template, render_template_string, session from .. import main from application import application @main.route('/', methods=['GET']) def root(): return render_template('index.html') @main.route('/patterns/') @main.route('/patterns/<pattern>') def patterns(pattern='index'): sections = [] pattern_name = 'grid-system' if (pattern == 'index') else pattern for root, dirs, files in os.walk('app/templates/patterns/components'): for file in files: if file.endswith('.html'): with open(os.path.join(root, file), 'r') as f: title = file.replace('.html', '') sections.append({ 'title': title, 'current': True if (title == pattern) else False }) return render_template('patterns/index.html', sections=sections, pattern_include='patterns/components/' + pattern_name + '.html', title=pattern)
import os import markdown import yaml from flask import render_template, render_template_string, session from .. import main from application import application + @main.route('/', methods=['GET']) def root(): return render_template('index.html') + @main.route('/patterns/') @main.route('/patterns/<pattern>') - def patterns(pattern = 'index'): ? - - + def patterns(pattern='index'): sections = [] pattern_name = 'grid-system' if (pattern == 'index') else pattern - for root, dirs, files in os.walk('app/templates/patterns/components'): for file in files: if file.endswith('.html'): - with open(os.path.join(root, file), 'r') as f: + with open(os.path.join(root, file), 'r') as f: ? ++ - title = file.replace('.html', '') + title = file.replace('.html', '') ? ++++ - sections.append({ + sections.append({ ? ++++ - 'title': title, + 'title': title, ? ++++++ - 'current': True if (title == pattern) else False + 'current': True if (title == pattern) else False ? ++++++ - }) + }) ? ++++ - return render_template('patterns/index.html', sections=sections, pattern_include='patterns/components/' + pattern_name + '.html', title=pattern)
1a39eea8225ebdf7f654df9ba5b87479e9dbc867
minify.py
minify.py
import sys import re args = sys.argv[1:] def minify(filepath, comments=False): """ Minifies/uglifies file :param file_: comments: Boolean. If False, deletes comments during output. :return: Minified string. """ pattern = re.compile(r""" \s | # matches all whitespace characters OR ( # /\* # /* [ \w\s (?=\*) :@!"'~\.\^\$\+\?\{\}\[\]\\\|\(\) ]* # AND \*/ # */ ) # | //.*\n # OR any character from // until end-line (inclusive) """, re.VERBOSE) with open(filepath, "r") as file_: temp = [] for line in file_: temp.append(line) output = ''.join(temp) return pattern.sub('', output) if __name__ == "__main__": print(minify('./test/stylesheet.css'))
import sys import re args = sys.argv[1:] def minify(input_path, output_path, comments=False): """ Minifies/uglifies file args: input_path: input file path output_path: write-out file path comments: Boolean. If False, deletes comments during output. returns: Minified string. example: `$ python minify.py ./src/styles.css ./src/output.css` """ pattern = re.compile(r""" \s | # matches all whitespace characters OR ( # /\* # /* AND [ # 0 or more of any character \w\s # (?=\*) # (positive lookahead: doesn't make * part of the match) :@!"'~,#%&-=;<>` # \.\^\$\+\{\[\]\\\| # ]* # \*/ # AND */ ) # | //.*\n # OR any character from // until end-line (inclusive) """, re.VERBOSE) # read file and apply regex: with open(input_path, "r") as file_in: temp = [] for line in file_in: temp.append(line) output = ''.join(temp) output = pattern.sub('', output) # write to file: # (`w+` mode: writing/reading; overwrites existing files; creates file if doesn't exit) with open(output_path, "w+") as file_out: file_out.write(output) ############################# # Main # ############################# if __name__ == "__main__": # specify input and output paths in args: minify(args[0], args[1])
Update regex. Add file write-out.
feat: Update regex. Add file write-out. Regex is bugged, however. Need to fix regex pattern.
Python
apache-2.0
Deesus/Punt
import sys import re args = sys.argv[1:] - def minify(filepath, comments=False): + def minify(input_path, output_path, comments=False): """ Minifies/uglifies file - :param - file_: + args: + input_path: input file path + output_path: write-out file path comments: Boolean. If False, deletes comments during output. - :return: + returns: Minified string. + example: + `$ python minify.py ./src/styles.css ./src/output.css` """ pattern = re.compile(r""" - \s | # matches all whitespace characters OR + \s | # matches all whitespace characters OR + ( # + /\* # /* AND + [ # 0 or more of any character - ( # + \w\s # + (?=\*) # (positive lookahead: doesn't make * part of the match) + :@!"'~,#%&-=;<>` # + \.\^\$\+\{\[\]\\\| # - /\* # /* - [ - \w\s - (?=\*) - :@!"'~\.\^\$\+\?\{\}\[\]\\\|\(\) - ]* # AND - \*/ # */ - ) # + ]* # + \*/ # AND */ + ) # - | //.*\n # OR any character from // until end-line (inclusive) + | //.*\n # OR any character from // until end-line (inclusive) """, re.VERBOSE) + # read file and apply regex: - with open(filepath, "r") as file_: + with open(input_path, "r") as file_in: temp = [] - for line in file_: + for line in file_in: temp.append(line) output = ''.join(temp) - return pattern.sub('', output) + output = pattern.sub('', output) + # write to file: + # (`w+` mode: writing/reading; overwrites existing files; creates file if doesn't exit) + with open(output_path, "w+") as file_out: + file_out.write(output) + ############################# + # Main # + ############################# if __name__ == "__main__": - print(minify('./test/stylesheet.css')) + # specify input and output paths in args: + minify(args[0], args[1])
Update regex. Add file write-out.
## Code Before: import sys import re args = sys.argv[1:] def minify(filepath, comments=False): """ Minifies/uglifies file :param file_: comments: Boolean. If False, deletes comments during output. :return: Minified string. """ pattern = re.compile(r""" \s | # matches all whitespace characters OR ( # /\* # /* [ \w\s (?=\*) :@!"'~\.\^\$\+\?\{\}\[\]\\\|\(\) ]* # AND \*/ # */ ) # | //.*\n # OR any character from // until end-line (inclusive) """, re.VERBOSE) with open(filepath, "r") as file_: temp = [] for line in file_: temp.append(line) output = ''.join(temp) return pattern.sub('', output) if __name__ == "__main__": print(minify('./test/stylesheet.css')) ## Instruction: Update regex. Add file write-out. ## Code After: import sys import re args = sys.argv[1:] def minify(input_path, output_path, comments=False): """ Minifies/uglifies file args: input_path: input file path output_path: write-out file path comments: Boolean. If False, deletes comments during output. returns: Minified string. example: `$ python minify.py ./src/styles.css ./src/output.css` """ pattern = re.compile(r""" \s | # matches all whitespace characters OR ( # /\* # /* AND [ # 0 or more of any character \w\s # (?=\*) # (positive lookahead: doesn't make * part of the match) :@!"'~,#%&-=;<>` # \.\^\$\+\{\[\]\\\| # ]* # \*/ # AND */ ) # | //.*\n # OR any character from // until end-line (inclusive) """, re.VERBOSE) # read file and apply regex: with open(input_path, "r") as file_in: temp = [] for line in file_in: temp.append(line) output = ''.join(temp) output = pattern.sub('', output) # write to file: # (`w+` mode: writing/reading; overwrites existing files; creates file if doesn't exit) with open(output_path, "w+") as file_out: file_out.write(output) ############################# # Main # ############################# if __name__ == "__main__": # specify input and output paths in args: minify(args[0], args[1])
import sys import re args = sys.argv[1:] - def minify(filepath, comments=False): ? - ^^ + def minify(input_path, output_path, comments=False): ? ^^^^^^^^^^^^^^^^^^ """ Minifies/uglifies file - :param - file_: + args: + input_path: input file path + output_path: write-out file path comments: Boolean. If False, deletes comments during output. - :return: ? - + returns: ? + Minified string. + example: + `$ python minify.py ./src/styles.css ./src/output.css` """ pattern = re.compile(r""" - \s | # matches all whitespace characters OR ? -- ^ + \s | # matches all whitespace characters OR ? ++++++++ ^^^ - ( # - /\* # /* - [ - \w\s - (?=\*) - :@!"'~\.\^\$\+\?\{\}\[\]\\\|\(\) - ]* # AND - \*/ # */ - ) # ? ^ + ( # ? ++++++ ^^^ + /\* # /* AND + [ # 0 or more of any character + \w\s # + (?=\*) # (positive lookahead: doesn't make * part of the match) + :@!"'~,#%&-=;<>` # + \.\^\$\+\{\[\]\\\| # + ]* # + \*/ # AND */ + ) # - | //.*\n # OR any character from // until end-line (inclusive) ? ---- + | //.*\n # OR any character from // until end-line (inclusive) ? ++++++++++++ """, re.VERBOSE) + # read file and apply regex: - with open(filepath, "r") as file_: ? - ^^ + with open(input_path, "r") as file_in: ? ^^^^^ ++ temp = [] - for line in file_: + for line in file_in: ? ++ temp.append(line) output = ''.join(temp) - return pattern.sub('', output) ? ^^ ^^ + output = pattern.sub('', output) ? ^^ + ^^^ + # write to file: + # (`w+` mode: writing/reading; overwrites existing files; creates file if doesn't exit) + with open(output_path, "w+") as file_out: + file_out.write(output) + ############################# + # Main # + ############################# if __name__ == "__main__": - print(minify('./test/stylesheet.css')) + # specify input and output paths in args: + minify(args[0], args[1])
c4de9152f34d2831d43dfa3769a7a6452bba5814
blockbuster/bb_security.py
blockbuster/bb_security.py
__author__ = 'matt' from blockbuster import bb_dbconnector_factory def credentials_are_valid(username, password): db = bb_dbconnector_factory.DBConnectorInterfaceFactory().create() print(username) result = db.api_username_exists(username) print (result) return result
__author__ = 'matt' from blockbuster import bb_dbconnector_factory def credentials_are_valid(username, password): db = bb_dbconnector_factory.DBConnectorInterfaceFactory().create() print(username) result = db.api_credentials_are_valid(username, password) print (result) return result
Update method to check both username and password
Update method to check both username and password
Python
mit
mattstibbs/blockbuster-server,mattstibbs/blockbuster-server
__author__ = 'matt' from blockbuster import bb_dbconnector_factory def credentials_are_valid(username, password): db = bb_dbconnector_factory.DBConnectorInterfaceFactory().create() print(username) - result = db.api_username_exists(username) + result = db.api_credentials_are_valid(username, password) print (result) return result
Update method to check both username and password
## Code Before: __author__ = 'matt' from blockbuster import bb_dbconnector_factory def credentials_are_valid(username, password): db = bb_dbconnector_factory.DBConnectorInterfaceFactory().create() print(username) result = db.api_username_exists(username) print (result) return result ## Instruction: Update method to check both username and password ## Code After: __author__ = 'matt' from blockbuster import bb_dbconnector_factory def credentials_are_valid(username, password): db = bb_dbconnector_factory.DBConnectorInterfaceFactory().create() print(username) result = db.api_credentials_are_valid(username, password) print (result) return result
__author__ = 'matt' from blockbuster import bb_dbconnector_factory def credentials_are_valid(username, password): db = bb_dbconnector_factory.DBConnectorInterfaceFactory().create() print(username) - result = db.api_username_exists(username) + result = db.api_credentials_are_valid(username, password) print (result) return result
1525d327adf76a37bdbd6b0b9f63308ad55c5dbc
setup.py
setup.py
from distutils.core import setup setup( name='django-databrowse', version='1.3', packages=['django_databrowse', 'django_databrowse.plugins'], package_dir={'django_databrowse': 'django_databrowse'}, package_data={ 'django_databrowse': [ 'templates/databrowse/*.html', 'templates/databrowse/include/*.html' ] }, provides=['django_databrowse'], include_package_data=True, url='http://pypi.python.org/pypi/django-databrowse', license=open('LICENSE').read(), author='Alireza Savand', author_email='alireza.savand@gmail.com', description='Databrowse is a Django application that lets you browse your data.', long_description=open('README.rst').read(), install_requires=['django', ], keywords=[ 'django', 'web', 'databrowse', 'data' ], platforms='OS Independent', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Programming Language :: Python', 'Framework :: Django', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: Software Development' ], )
from distutils.core import setup setup( name='django-databrowse', version='1.3', packages=['django_databrowse', 'django_databrowse.plugins'], package_dir={'django_databrowse': 'django_databrowse'}, package_data={ 'django_databrowse': [ 'templates/databrowse/*.html', 'templates/databrowse/include/*.html' ] }, provides=['django_databrowse'], include_package_data=True, url='https://github.com/Alir3z4/django-databrowse', license=open('LICENSE').read(), author='Alireza Savand', author_email='alireza.savand@gmail.com', description='Databrowse is a Django application that lets you browse your data.', long_description=open('README.rst').read(), install_requires=['django', ], keywords=[ 'django', 'web', 'databrowse', 'data' ], platforms='OS Independent', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Programming Language :: Python', 'Framework :: Django', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: Software Development' ], )
Change the pkg url to its github repo
Change the pkg url to its github repo
Python
bsd-3-clause
Alir3z4/django-databrowse,Alir3z4/django-databrowse
from distutils.core import setup setup( name='django-databrowse', version='1.3', packages=['django_databrowse', 'django_databrowse.plugins'], package_dir={'django_databrowse': 'django_databrowse'}, package_data={ 'django_databrowse': [ 'templates/databrowse/*.html', 'templates/databrowse/include/*.html' ] }, provides=['django_databrowse'], include_package_data=True, - url='http://pypi.python.org/pypi/django-databrowse', + url='https://github.com/Alir3z4/django-databrowse', license=open('LICENSE').read(), author='Alireza Savand', author_email='alireza.savand@gmail.com', description='Databrowse is a Django application that lets you browse your data.', long_description=open('README.rst').read(), install_requires=['django', ], keywords=[ 'django', 'web', 'databrowse', 'data' ], platforms='OS Independent', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Programming Language :: Python', 'Framework :: Django', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: Software Development' ], )
Change the pkg url to its github repo
## Code Before: from distutils.core import setup setup( name='django-databrowse', version='1.3', packages=['django_databrowse', 'django_databrowse.plugins'], package_dir={'django_databrowse': 'django_databrowse'}, package_data={ 'django_databrowse': [ 'templates/databrowse/*.html', 'templates/databrowse/include/*.html' ] }, provides=['django_databrowse'], include_package_data=True, url='http://pypi.python.org/pypi/django-databrowse', license=open('LICENSE').read(), author='Alireza Savand', author_email='alireza.savand@gmail.com', description='Databrowse is a Django application that lets you browse your data.', long_description=open('README.rst').read(), install_requires=['django', ], keywords=[ 'django', 'web', 'databrowse', 'data' ], platforms='OS Independent', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Programming Language :: Python', 'Framework :: Django', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: Software Development' ], ) ## Instruction: Change the pkg url to its github repo ## Code After: from distutils.core import setup setup( name='django-databrowse', version='1.3', packages=['django_databrowse', 'django_databrowse.plugins'], package_dir={'django_databrowse': 'django_databrowse'}, package_data={ 'django_databrowse': [ 'templates/databrowse/*.html', 'templates/databrowse/include/*.html' ] }, provides=['django_databrowse'], include_package_data=True, url='https://github.com/Alir3z4/django-databrowse', license=open('LICENSE').read(), author='Alireza Savand', author_email='alireza.savand@gmail.com', description='Databrowse is a Django application that lets you browse your data.', long_description=open('README.rst').read(), install_requires=['django', ], keywords=[ 'django', 'web', 'databrowse', 'data' ], platforms='OS Independent', classifiers=[ 'Development Status :: 5 - Production/Stable', 
'Programming Language :: Python', 'Framework :: Django', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: Software Development' ], )
from distutils.core import setup setup( name='django-databrowse', version='1.3', packages=['django_databrowse', 'django_databrowse.plugins'], package_dir={'django_databrowse': 'django_databrowse'}, package_data={ 'django_databrowse': [ 'templates/databrowse/*.html', 'templates/databrowse/include/*.html' ] }, provides=['django_databrowse'], include_package_data=True, - url='http://pypi.python.org/pypi/django-databrowse', + url='https://github.com/Alir3z4/django-databrowse', license=open('LICENSE').read(), author='Alireza Savand', author_email='alireza.savand@gmail.com', description='Databrowse is a Django application that lets you browse your data.', long_description=open('README.rst').read(), install_requires=['django', ], keywords=[ 'django', 'web', 'databrowse', 'data' ], platforms='OS Independent', classifiers=[ 'Development Status :: 5 - Production/Stable', 'Programming Language :: Python', 'Framework :: Django', 'Environment :: Web Environment', 'Intended Audience :: Developers', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2.7', 'Topic :: Software Development' ], )
724d7235e546fb79009800700fd74328f8171b8c
src/etc/tidy.py
src/etc/tidy.py
import sys, fileinput, subprocess err=0 cols=78 try: result=subprocess.check_output([ "git", "config", "core.autocrlf" ]) autocrlf=result.strip() == b"true" except CalledProcessError: autocrlf=False def report_err(s): global err print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), s)) err=1 for line in fileinput.input(openhook=fileinput.hook_encoded("utf-8")): if line.find('\t') != -1 and fileinput.filename().find("Makefile") == -1: report_err("tab character") if not autocrlf and line.find('\r') != -1: report_err("CR character") line_len = len(line)-2 if autocrlf else len(line)-1 if line_len > cols: report_err("line longer than %d chars" % cols) sys.exit(err)
import sys, fileinput err=0 cols=78 def report_err(s): global err print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), s)) err=1 for line in fileinput.input(openhook=fileinput.hook_encoded("utf-8")): if line.find('\t') != -1 and fileinput.filename().find("Makefile") == -1: report_err("tab character") if line.find('\r') != -1: report_err("CR character") if len(line)-1 > cols: report_err("line longer than %d chars" % cols) sys.exit(err)
Revert "Don't complain about \r when core.autocrlf is on in Git"
Revert "Don't complain about \r when core.autocrlf is on in Git" This reverts commit 828afaa2fa4cc9e3e53bda0ae3073abfcfa151ca.
Python
apache-2.0
SiegeLord/rust,gifnksm/rust,kwantam/rust,defuz/rust,aidancully/rust,avdi/rust,sae-bom/rust,erickt/rust,pelmers/rust,avdi/rust,pythonesque/rust,Ryman/rust,carols10cents/rust,robertg/rust,aepsil0n/rust,kwantam/rust,jbclements/rust,aepsil0n/rust,kmcallister/rust,mihneadb/rust,andars/rust,pczarn/rust,pczarn/rust,krzysz00/rust,barosl/rust,richo/rust,omasanori/rust,LeoTestard/rust,rprichard/rust,0x73/rust,michaelballantyne/rust-gpu,sarojaba/rust-doc-korean,aneeshusa/rust,pelmers/rust,j16r/rust,P1start/rust,jroesch/rust,victorvde/rust,AerialX/rust,pshc/rust,SiegeLord/rust,kimroen/rust,mihneadb/rust,dinfuehr/rust,miniupnp/rust,gifnksm/rust,ktossell/rust,mdinger/rust,bombless/rust-docs-chinese,robertg/rust,michaelballantyne/rust-gpu,omasanori/rust,jashank/rust,KokaKiwi/rust,vhbit/rust,victorvde/rust,pythonesque/rust,seanrivera/rust,gifnksm/rust,servo/rust,j16r/rust,barosl/rust,P1start/rust,sae-bom/rust,TheNeikos/rust,AerialX/rust,michaelballantyne/rust-gpu,rohitjoshi/rust,robertg/rust,seanrivera/rust,l0kod/rust,michaelballantyne/rust-gpu,pshc/rust,vhbit/rust,hauleth/rust,aepsil0n/rust,pshc/rust,l0kod/rust,mdinger/rust,untitaker/rust,graydon/rust,aneeshusa/rust,ktossell/rust,defuz/rust,defuz/rust,aepsil0n/rust,pythonesque/rust,andars/rust,ebfull/rand,GBGamer/rust,KokaKiwi/rust,ktossell/rust,SiegeLord/rust,mihneadb/rust,barosl/rust,vhbit/rust,miniupnp/rust,reem/rust,aturon/rust,aturon/rust,Ryman/rust,carols10cents/rust,dwillmer/rust,GBGamer/rust,erickt/rust,quornian/rust,erickt/rust,P1start/rust,ebfull/rust,quornian/rust,ruud-v-a/rust,krzysz00/rust,omasanori/rust,aneeshusa/rust,vhbit/rust,cllns/rust,Ryman/rust,philyoon/rust,zaeleus/rust,mvdnes/rust,zaeleus/rust,jashank/rust,aneeshusa/rust,andars/rust,nwin/rust,kimroen/rust,jbclements/rust,miniupnp/rust,ejjeong/rust,XMPPwocky/rust,aturon/rust,nwin/rust,dwillmer/rust,kimroen/rust,kimroen/rust,nwin/rust,P1start/rust,quornian/rust,bombless/rust,aturon/rust,defuz/rust,barosl/rust,cllns/rust,jbclements/rust,ktossell/rust,j16r/rust,r
eem/rust,krzysz00/rust,richo/rust,ruud-v-a/rust,Ryman/rust,jroesch/rust,pczarn/rust,aturon/rust,XMPPwocky/rust,bombless/rust,TheNeikos/rust,pythonesque/rust,zachwick/rust,aneeshusa/rust,gifnksm/rust,sarojaba/rust-doc-korean,seanrivera/rust,AerialX/rust-rt-minimal,P1start/rust,KokaKiwi/rust,XMPPwocky/rust,dwillmer/rust,TheNeikos/rust,GBGamer/rust,waynenilsen/rand,servo/rust,aturon/rust,ruud-v-a/rust,kimroen/rust,aidancully/rust,emk/rust,AerialX/rust-rt-minimal,zachwick/rust,zubron/rust,mitsuhiko/rust,servo/rust,krzysz00/rust,miniupnp/rust,zubron/rust,robertg/rust,LeoTestard/rust,pczarn/rust,reem/rust,erickt/rust,zaeleus/rust,rprichard/rust,seanrivera/rust,omasanori/rust,AerialX/rust,fabricedesre/rust,jroesch/rust,philyoon/rust,mvdnes/rust,stepancheg/rust-ide-rust,AerialX/rust,LeoTestard/rust,stepancheg/rust-ide-rust,omasanori/rust,stepancheg/rust-ide-rust,GrahamDennis/rand,victorvde/rust,krzysz00/rust,hauleth/rust,nham/rust,jashank/rust,graydon/rust,jbclements/rust,servo/rust,jbclements/rust,ebfull/rust,servo/rust,pelmers/rust,krzysz00/rust,LeoTestard/rust,GBGamer/rust,sarojaba/rust-doc-korean,dwillmer/rust,stepancheg/rust-ide-rust,hauleth/rust,fabricedesre/rust,0x73/rust,jroesch/rust,mvdnes/rust,ruud-v-a/rust,michaelballantyne/rust-gpu,cllns/rust,miniupnp/rust,kwantam/rust,miniupnp/rust,untitaker/rust,richo/rust,hauleth/rust,mahkoh/rust,kimroen/rust,mahkoh/rust,jashank/rust,graydon/rust,mdinger/rust,defuz/rust,AerialX/rust-rt-minimal,aidancully/rust,LeoTestard/rust,kwantam/rust,quornian/rust,pelmers/rust,erickt/rust,avdi/rust,bhickey/rand,stepancheg/rust-ide-rust,carols10cents/rust,huonw/rand,cllns/rust,graydon/rust,andars/rust,kmcallister/rust,zubron/rust,dwillmer/rust,cllns/rust,vhbit/rust,zaeleus/rust,XMPPwocky/rust,cllns/rust,mahkoh/rust,aidancully/rust,mahkoh/rust,stepancheg/rust-ide-rust,jashank/rust,P1start/rust,GBGamer/rust,untitaker/rust,emk/rust,mihneadb/rust,SiegeLord/rust,bombless/rust,pczarn/rust,bombless/rust,carols10cents/rust,stepancheg/rust-ide-rust
,jroesch/rust,jbclements/rust,emk/rust,kwantam/rust,j16r/rust,robertg/rust,seanrivera/rust,SiegeLord/rust,Ryman/rust,jroesch/rust,mvdnes/rust,aidancully/rust,richo/rust,untitaker/rust,hauleth/rust,0x73/rust,aepsil0n/rust,kmcallister/rust,AerialX/rust,emk/rust,pelmers/rust,emk/rust,zubron/rust,jbclements/rust,0x73/rust,LeoTestard/rust,dwillmer/rust,fabricedesre/rust,emk/rust,retep998/rand,jbclements/rust,GBGamer/rust,untitaker/rust,kmcallister/rust,jbclements/rust,jashank/rust,pythonesque/rust,AerialX/rust-rt-minimal,l0kod/rust,reem/rust,mitsuhiko/rust,nwin/rust,mitsuhiko/rust,kimroen/rust,rohitjoshi/rust,kmcallister/rust,AerialX/rust,erickt/rust,bombless/rust,bombless/rust,rprichard/rust,omasanori/rust,sarojaba/rust-doc-korean,achanda/rand,sae-bom/rust,andars/rust,nham/rust,kmcallister/rust,mitsuhiko/rust,ejjeong/rust,graydon/rust,philyoon/rust,zachwick/rust,ktossell/rust,victorvde/rust,dinfuehr/rust,AerialX/rust-rt-minimal,zachwick/rust,pelmers/rust,avdi/rust,pythonesque/rust,dinfuehr/rust,GBGamer/rust,mvdnes/rust,rohitjoshi/rust,quornian/rust,TheNeikos/rust,barosl/rust,pshc/rust,l0kod/rust,carols10cents/rust,mitsuhiko/rust,aturon/rust,P1start/rust,GBGamer/rust,defuz/rust,zaeleus/rust,nham/rust,ejjeong/rust,pythonesque/rust,reem/rust,jashank/rust,Ryman/rust,hauleth/rust,zaeleus/rust,SiegeLord/rust,barosl/rust,jashank/rust,servo/rust,sarojaba/rust-doc-korean,kmcallister/rust,aidancully/rust,mdinger/rust,nham/rust,philyoon/rust,servo/rust,SiegeLord/rust,jroesch/rust,KokaKiwi/rust,ktossell/rust,mdinger/rust,nwin/rust,untitaker/rust,avdi/rust,andars/rust,robertg/rust,sarojaba/rust-doc-korean,nham/rust,mitsuhiko/rust,dinfuehr/rust,j16r/rust,sae-bom/rust,mdinger/rust,philyoon/rust,pshc/rust,sae-bom/rust,nham/rust,Ryman/rust,zubron/rust,rprichard/rust,ejjeong/rust,mahkoh/rust,erickt/rust,ruud-v-a/rust,emk/rust,dwillmer/rust,mihneadb/rust,rohitjoshi/rust,pczarn/rust,sarojaba/rust-doc-korean,j16r/rust,TheNeikos/rust,AerialX/rust-rt-minimal,XMPPwocky/rust,dinfuehr/rust,nwin/
rust,seanrivera/rust,0x73/rust,rprichard/rust,zubron/rust,shepmaster/rand,miniupnp/rust,l0kod/rust,KokaKiwi/rust,l0kod/rust,arthurprs/rand,0x73/rust,gifnksm/rust,mahkoh/rust,l0kod/rust,quornian/rust,KokaKiwi/rust,sae-bom/rust,nham/rust,fabricedesre/rust,richo/rust,pshc/rust,richo/rust,zachwick/rust,pshc/rust,fabricedesre/rust,ebfull/rust,barosl/rust,kwantam/rust,XMPPwocky/rust,nwin/rust,j16r/rust,reem/rust,victorvde/rust,ebfull/rust,rohitjoshi/rust,ejjeong/rust,gifnksm/rust,nwin/rust,miniupnp/rust,ebfull/rust,mitsuhiko/rust,philyoon/rust,l0kod/rust,vhbit/rust,jroesch/rust,rohitjoshi/rust,michaelballantyne/rust-gpu,zachwick/rust,pshc/rust,zubron/rust,bluss/rand,pczarn/rust,vhbit/rust,mihneadb/rust,ktossell/rust,michaelballantyne/rust-gpu,zubron/rust,dwillmer/rust,dinfuehr/rust,fabricedesre/rust,aneeshusa/rust,TheNeikos/rust,rprichard/rust,LeoTestard/rust,ejjeong/rust,carols10cents/rust,mvdnes/rust,graydon/rust,aepsil0n/rust,avdi/rust,fabricedesre/rust,ebfull/rust,victorvde/rust,0x73/rust,quornian/rust,ruud-v-a/rust,vhbit/rust
- import sys, fileinput, subprocess + import sys, fileinput err=0 cols=78 - - try: - result=subprocess.check_output([ "git", "config", "core.autocrlf" ]) - autocrlf=result.strip() == b"true" - except CalledProcessError: - autocrlf=False def report_err(s): global err print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), s)) err=1 for line in fileinput.input(openhook=fileinput.hook_encoded("utf-8")): if line.find('\t') != -1 and fileinput.filename().find("Makefile") == -1: report_err("tab character") - if not autocrlf and line.find('\r') != -1: + if line.find('\r') != -1: report_err("CR character") - line_len = len(line)-2 if autocrlf else len(line)-1 - if line_len > cols: + if len(line)-1 > cols: report_err("line longer than %d chars" % cols) sys.exit(err)
Revert "Don't complain about \r when core.autocrlf is on in Git"
## Code Before: import sys, fileinput, subprocess err=0 cols=78 try: result=subprocess.check_output([ "git", "config", "core.autocrlf" ]) autocrlf=result.strip() == b"true" except CalledProcessError: autocrlf=False def report_err(s): global err print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), s)) err=1 for line in fileinput.input(openhook=fileinput.hook_encoded("utf-8")): if line.find('\t') != -1 and fileinput.filename().find("Makefile") == -1: report_err("tab character") if not autocrlf and line.find('\r') != -1: report_err("CR character") line_len = len(line)-2 if autocrlf else len(line)-1 if line_len > cols: report_err("line longer than %d chars" % cols) sys.exit(err) ## Instruction: Revert "Don't complain about \r when core.autocrlf is on in Git" ## Code After: import sys, fileinput err=0 cols=78 def report_err(s): global err print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), s)) err=1 for line in fileinput.input(openhook=fileinput.hook_encoded("utf-8")): if line.find('\t') != -1 and fileinput.filename().find("Makefile") == -1: report_err("tab character") if line.find('\r') != -1: report_err("CR character") if len(line)-1 > cols: report_err("line longer than %d chars" % cols) sys.exit(err)
- import sys, fileinput, subprocess ? ------------ + import sys, fileinput err=0 cols=78 - - try: - result=subprocess.check_output([ "git", "config", "core.autocrlf" ]) - autocrlf=result.strip() == b"true" - except CalledProcessError: - autocrlf=False def report_err(s): global err print("%s:%d: %s" % (fileinput.filename(), fileinput.filelineno(), s)) err=1 for line in fileinput.input(openhook=fileinput.hook_encoded("utf-8")): if line.find('\t') != -1 and fileinput.filename().find("Makefile") == -1: report_err("tab character") - if not autocrlf and line.find('\r') != -1: ? ----------------- + if line.find('\r') != -1: report_err("CR character") - line_len = len(line)-2 if autocrlf else len(line)-1 - if line_len > cols: ? ^^^^ + if len(line)-1 > cols: ? ++++ ^^^ report_err("line longer than %d chars" % cols) sys.exit(err)
d89715196ba79da02a997688414dfa283bee5aeb
profiles/tests/test_views.py
profiles/tests/test_views.py
from django.core.urlresolvers import reverse from django.test import TestCase from django.test.client import RequestFactory from utils.factories import UserFactory from profiles.views import ProfileView class ProfileViewTests(TestCase): def setUp(self): request_factory = RequestFactory() request = request_factory.get(reverse('profile')) request.user = UserFactory.create() self.response = ProfileView.as_view()(request) def test_profile_view_200(self): self.assertEqual(self.response.status_code, 200) def test_profile_view_renders(self): self.response.render()
from django.core.urlresolvers import reverse from django.test import TestCase from django.test.client import RequestFactory from utils.factories import UserFactory from profiles.views import ( ProfileView, ReviewUserView, ) class ProfileViewTests(TestCase): def setUp(self): request_factory = RequestFactory() request = request_factory.get(reverse('profile')) request.user = UserFactory.create() self.response = ProfileView.as_view()(request) def test_profile_view_200(self): self.assertEqual(self.response.status_code, 200) def test_profile_view_renders(self): self.response.render() class ReviewUserViewTests(TestCase): def setUp(self): request_factory = RequestFactory() self.request = request_factory.get('/admin/dashboard/') def test_review_user_view_200(self): user = UserFactory.create() user.is_staff = True self.request.user = user response = ReviewUserView.as_view()(self.request) self.assertEqual(response.status_code, 200) def test_review_user_view_200(self): user = UserFactory.create() user.is_staff = True self.request.user = user response = ReviewUserView.as_view()(self.request) response.render() def test_review_user_view_not_staff(self): user = UserFactory.create() self.request.user = user response = ReviewUserView.as_view()(self.request) self.assertEqual(response.status_code, 302)
Add tests for user review view
Add tests for user review view
Python
mit
phildini/logtacts,phildini/logtacts,phildini/logtacts,phildini/logtacts,phildini/logtacts
from django.core.urlresolvers import reverse from django.test import TestCase from django.test.client import RequestFactory from utils.factories import UserFactory - from profiles.views import ProfileView + from profiles.views import ( + ProfileView, + ReviewUserView, + ) class ProfileViewTests(TestCase): def setUp(self): request_factory = RequestFactory() request = request_factory.get(reverse('profile')) request.user = UserFactory.create() self.response = ProfileView.as_view()(request) def test_profile_view_200(self): self.assertEqual(self.response.status_code, 200) def test_profile_view_renders(self): self.response.render() + + class ReviewUserViewTests(TestCase): + + def setUp(self): + request_factory = RequestFactory() + self.request = request_factory.get('/admin/dashboard/') + + def test_review_user_view_200(self): + user = UserFactory.create() + user.is_staff = True + self.request.user = user + response = ReviewUserView.as_view()(self.request) + self.assertEqual(response.status_code, 200) + + def test_review_user_view_200(self): + user = UserFactory.create() + user.is_staff = True + self.request.user = user + response = ReviewUserView.as_view()(self.request) + response.render() + + def test_review_user_view_not_staff(self): + user = UserFactory.create() + self.request.user = user + response = ReviewUserView.as_view()(self.request) + self.assertEqual(response.status_code, 302) +
Add tests for user review view
## Code Before: from django.core.urlresolvers import reverse from django.test import TestCase from django.test.client import RequestFactory from utils.factories import UserFactory from profiles.views import ProfileView class ProfileViewTests(TestCase): def setUp(self): request_factory = RequestFactory() request = request_factory.get(reverse('profile')) request.user = UserFactory.create() self.response = ProfileView.as_view()(request) def test_profile_view_200(self): self.assertEqual(self.response.status_code, 200) def test_profile_view_renders(self): self.response.render() ## Instruction: Add tests for user review view ## Code After: from django.core.urlresolvers import reverse from django.test import TestCase from django.test.client import RequestFactory from utils.factories import UserFactory from profiles.views import ( ProfileView, ReviewUserView, ) class ProfileViewTests(TestCase): def setUp(self): request_factory = RequestFactory() request = request_factory.get(reverse('profile')) request.user = UserFactory.create() self.response = ProfileView.as_view()(request) def test_profile_view_200(self): self.assertEqual(self.response.status_code, 200) def test_profile_view_renders(self): self.response.render() class ReviewUserViewTests(TestCase): def setUp(self): request_factory = RequestFactory() self.request = request_factory.get('/admin/dashboard/') def test_review_user_view_200(self): user = UserFactory.create() user.is_staff = True self.request.user = user response = ReviewUserView.as_view()(self.request) self.assertEqual(response.status_code, 200) def test_review_user_view_200(self): user = UserFactory.create() user.is_staff = True self.request.user = user response = ReviewUserView.as_view()(self.request) response.render() def test_review_user_view_not_staff(self): user = UserFactory.create() self.request.user = user response = ReviewUserView.as_view()(self.request) self.assertEqual(response.status_code, 302)
from django.core.urlresolvers import reverse from django.test import TestCase from django.test.client import RequestFactory from utils.factories import UserFactory - from profiles.views import ProfileView ? ^^^^^^^^^^^ + from profiles.views import ( ? ^ + ProfileView, + ReviewUserView, + ) class ProfileViewTests(TestCase): def setUp(self): request_factory = RequestFactory() request = request_factory.get(reverse('profile')) request.user = UserFactory.create() self.response = ProfileView.as_view()(request) def test_profile_view_200(self): self.assertEqual(self.response.status_code, 200) def test_profile_view_renders(self): self.response.render() + + + class ReviewUserViewTests(TestCase): + + def setUp(self): + request_factory = RequestFactory() + self.request = request_factory.get('/admin/dashboard/') + + def test_review_user_view_200(self): + user = UserFactory.create() + user.is_staff = True + self.request.user = user + response = ReviewUserView.as_view()(self.request) + self.assertEqual(response.status_code, 200) + + def test_review_user_view_200(self): + user = UserFactory.create() + user.is_staff = True + self.request.user = user + response = ReviewUserView.as_view()(self.request) + response.render() + + def test_review_user_view_not_staff(self): + user = UserFactory.create() + self.request.user = user + response = ReviewUserView.as_view()(self.request) + self.assertEqual(response.status_code, 302)
87bb90370b8d7439989072ae17634dd30276f24c
yanico/config.py
yanico/config.py
"""Handle yanico configuration.""" import configparser import os.path CONFIG_FILENAME = '.yanico.conf' def user_path(): """Return user configuration filepath. The filepath depends home directory and CONFIG_FILENAME constants. """ return os.path.join(os.path.expanduser('~'), CONFIG_FILENAME) def load(*filepaths): parser = configparser.ConfigParser() parser.read((user_path(),) + filepaths) return parser
"""Handle yanico configuration.""" import configparser import os.path CONFIG_FILENAME = '.yanico.conf' def user_path(): """Return user configuration filepath. The filepath depends home directory and CONFIG_FILENAME constants. """ return os.path.join(os.path.expanduser('~'), CONFIG_FILENAME) def load(*filepaths): """Return configration object. Object parses home directory config file. Args: filepaths (Tuple[str]): configuration file paths Returns: ConfigParser: object expects some configurations are loaded. """ parser = configparser.ConfigParser() parser.read((user_path(),) + filepaths) return parser
Add docstring into load function
Add docstring into load function Describe which file parse at least.
Python
apache-2.0
ma8ma/yanico
"""Handle yanico configuration.""" import configparser import os.path CONFIG_FILENAME = '.yanico.conf' def user_path(): """Return user configuration filepath. The filepath depends home directory and CONFIG_FILENAME constants. """ return os.path.join(os.path.expanduser('~'), CONFIG_FILENAME) def load(*filepaths): + """Return configration object. + + Object parses home directory config file. + + Args: + filepaths (Tuple[str]): configuration file paths + + Returns: + ConfigParser: object expects some configurations are loaded. + """ parser = configparser.ConfigParser() parser.read((user_path(),) + filepaths) return parser
Add docstring into load function
## Code Before: """Handle yanico configuration.""" import configparser import os.path CONFIG_FILENAME = '.yanico.conf' def user_path(): """Return user configuration filepath. The filepath depends home directory and CONFIG_FILENAME constants. """ return os.path.join(os.path.expanduser('~'), CONFIG_FILENAME) def load(*filepaths): parser = configparser.ConfigParser() parser.read((user_path(),) + filepaths) return parser ## Instruction: Add docstring into load function ## Code After: """Handle yanico configuration.""" import configparser import os.path CONFIG_FILENAME = '.yanico.conf' def user_path(): """Return user configuration filepath. The filepath depends home directory and CONFIG_FILENAME constants. """ return os.path.join(os.path.expanduser('~'), CONFIG_FILENAME) def load(*filepaths): """Return configration object. Object parses home directory config file. Args: filepaths (Tuple[str]): configuration file paths Returns: ConfigParser: object expects some configurations are loaded. """ parser = configparser.ConfigParser() parser.read((user_path(),) + filepaths) return parser
"""Handle yanico configuration.""" import configparser import os.path CONFIG_FILENAME = '.yanico.conf' def user_path(): """Return user configuration filepath. The filepath depends home directory and CONFIG_FILENAME constants. """ return os.path.join(os.path.expanduser('~'), CONFIG_FILENAME) def load(*filepaths): + """Return configration object. + + Object parses home directory config file. + + Args: + filepaths (Tuple[str]): configuration file paths + + Returns: + ConfigParser: object expects some configurations are loaded. + """ parser = configparser.ConfigParser() parser.read((user_path(),) + filepaths) return parser
efaa172668b8961734fa8a10650dc3191b4a7348
website/project/metadata/authorizers/__init__.py
website/project/metadata/authorizers/__init__.py
import json import os import logging logger = logging.getLogger(__name__) HERE = os.path.dirname(os.path.realpath(__file__)) groups = json.load(open('{0}/defaults.json'.format(HERE))) try: fp = open('{0}/local.json'.format(HERE)) except IOError: logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.') for group, members in json.load(fp).iteritems(): if group not in groups: groups[group] = members else: groups[group] = set(groups[group]) | set(members) def members_for(group): global_members = set(groups['global']) return global_members | set(groups.get(group, []))
import json import os import logging logger = logging.getLogger(__name__) HERE = os.path.dirname(os.path.realpath(__file__)) groups = json.load(open('{0}/defaults.json'.format(HERE))) fp = None try: fp = open('{0}/local.json'.format(HERE)) except IOError: logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.') if fp: for group, members in json.load(fp).iteritems(): if group not in groups: groups[group] = members else: groups[group] = set(groups[group]) | set(members) def members_for(group): global_members = set(groups['global']) return global_members | set(groups.get(group, []))
Allow local.json to be missing
Allow local.json to be missing
Python
apache-2.0
kch8qx/osf.io,acshi/osf.io,binoculars/osf.io,abought/osf.io,mluo613/osf.io,cslzchen/osf.io,chrisseto/osf.io,ticklemepierce/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,kwierman/osf.io,brandonPurvis/osf.io,icereval/osf.io,TomBaxter/osf.io,doublebits/osf.io,mluke93/osf.io,wearpants/osf.io,alexschiller/osf.io,billyhunt/osf.io,danielneis/osf.io,rdhyee/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,Ghalko/osf.io,mattclark/osf.io,rdhyee/osf.io,kwierman/osf.io,cwisecarver/osf.io,mluke93/osf.io,Johnetordoff/osf.io,GageGaskins/osf.io,abought/osf.io,RomanZWang/osf.io,acshi/osf.io,alexschiller/osf.io,cslzchen/osf.io,GageGaskins/osf.io,SSJohns/osf.io,KAsante95/osf.io,hmoco/osf.io,saradbowman/osf.io,adlius/osf.io,zamattiac/osf.io,binoculars/osf.io,monikagrabowska/osf.io,CenterForOpenScience/osf.io,DanielSBrown/osf.io,emetsger/osf.io,SSJohns/osf.io,brianjgeiger/osf.io,brandonPurvis/osf.io,zachjanicki/osf.io,samanehsan/osf.io,KAsante95/osf.io,emetsger/osf.io,TomBaxter/osf.io,monikagrabowska/osf.io,danielneis/osf.io,samanehsan/osf.io,mfraezz/osf.io,chennan47/osf.io,kch8qx/osf.io,GageGaskins/osf.io,kwierman/osf.io,asanfilippo7/osf.io,caneruguz/osf.io,acshi/osf.io,Nesiehr/osf.io,caseyrollins/osf.io,GageGaskins/osf.io,doublebits/osf.io,sloria/osf.io,adlius/osf.io,mluke93/osf.io,samanehsan/osf.io,samchrisinger/osf.io,jnayak1/osf.io,jnayak1/osf.io,samchrisinger/osf.io,TomHeatwole/osf.io,binoculars/osf.io,samchrisinger/osf.io,billyhunt/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,doublebits/osf.io,RomanZWang/osf.io,KAsante95/osf.io,icereval/osf.io,amyshi188/osf.io,aaxelb/osf.io,leb2dg/osf.io,zamattiac/osf.io,monikagrabowska/osf.io,zamattiac/osf.io,chrisseto/osf.io,alexschiller/osf.io,TomHeatwole/osf.io,brandonPurvis/osf.io,zachjanicki/osf.io,alexschiller/osf.io,mluo613/osf.io,TomBaxter/osf.io,hmoco/osf.io,chrisseto/osf.io,billyhunt/osf.io,cwisecarver/osf.io,mluo613/osf.io,emetsger/osf.io,Nesiehr/osf.io,Ghalko/osf.io,HalcyonChimera/osf.io,adlius/osf.io,KAsante95/osf.io,felliott/osf.io,aaxelb/osf
.io,caneruguz/osf.io,ticklemepierce/osf.io,leb2dg/osf.io,felliott/osf.io,caseyrollins/osf.io,danielneis/osf.io,cslzchen/osf.io,chennan47/osf.io,Nesiehr/osf.io,asanfilippo7/osf.io,crcresearch/osf.io,rdhyee/osf.io,baylee-d/osf.io,alexschiller/osf.io,samanehsan/osf.io,jnayak1/osf.io,danielneis/osf.io,amyshi188/osf.io,DanielSBrown/osf.io,HalcyonChimera/osf.io,wearpants/osf.io,mluo613/osf.io,baylee-d/osf.io,crcresearch/osf.io,mluo613/osf.io,aaxelb/osf.io,laurenrevere/osf.io,erinspace/osf.io,kch8qx/osf.io,chrisseto/osf.io,chennan47/osf.io,doublebits/osf.io,monikagrabowska/osf.io,pattisdr/osf.io,laurenrevere/osf.io,caseyrollins/osf.io,icereval/osf.io,abought/osf.io,emetsger/osf.io,TomHeatwole/osf.io,brianjgeiger/osf.io,RomanZWang/osf.io,brianjgeiger/osf.io,erinspace/osf.io,caneruguz/osf.io,amyshi188/osf.io,RomanZWang/osf.io,asanfilippo7/osf.io,acshi/osf.io,hmoco/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,wearpants/osf.io,zamattiac/osf.io,billyhunt/osf.io,kwierman/osf.io,amyshi188/osf.io,rdhyee/osf.io,Johnetordoff/osf.io,erinspace/osf.io,zachjanicki/osf.io,kch8qx/osf.io,doublebits/osf.io,mfraezz/osf.io,GageGaskins/osf.io,brandonPurvis/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,samchrisinger/osf.io,mfraezz/osf.io,hmoco/osf.io,RomanZWang/osf.io,wearpants/osf.io,brandonPurvis/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,KAsante95/osf.io,mattclark/osf.io,leb2dg/osf.io,DanielSBrown/osf.io,pattisdr/osf.io,jnayak1/osf.io,mluke93/osf.io,asanfilippo7/osf.io,adlius/osf.io,cwisecarver/osf.io,mfraezz/osf.io,Nesiehr/osf.io,caneruguz/osf.io,zachjanicki/osf.io,SSJohns/osf.io,felliott/osf.io,TomHeatwole/osf.io,mattclark/osf.io,Ghalko/osf.io,cwisecarver/osf.io,ticklemepierce/osf.io,abought/osf.io,ticklemepierce/osf.io,sloria/osf.io,billyhunt/osf.io,felliott/osf.io,SSJohns/osf.io,Ghalko/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,baylee-d/osf.io,aaxelb/osf.io,saradbowman/osf.io,monikagrabowska/osf.io,kch8qx/osf.io,HalcyonChimera/osf.io
import json import os import logging logger = logging.getLogger(__name__) HERE = os.path.dirname(os.path.realpath(__file__)) groups = json.load(open('{0}/defaults.json'.format(HERE))) + fp = None try: fp = open('{0}/local.json'.format(HERE)) except IOError: logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.') + if fp: - for group, members in json.load(fp).iteritems(): + for group, members in json.load(fp).iteritems(): - if group not in groups: + if group not in groups: - groups[group] = members + groups[group] = members - else: + else: - groups[group] = set(groups[group]) | set(members) + groups[group] = set(groups[group]) | set(members) def members_for(group): global_members = set(groups['global']) return global_members | set(groups.get(group, []))
Allow local.json to be missing
## Code Before: import json import os import logging logger = logging.getLogger(__name__) HERE = os.path.dirname(os.path.realpath(__file__)) groups = json.load(open('{0}/defaults.json'.format(HERE))) try: fp = open('{0}/local.json'.format(HERE)) except IOError: logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.') for group, members in json.load(fp).iteritems(): if group not in groups: groups[group] = members else: groups[group] = set(groups[group]) | set(members) def members_for(group): global_members = set(groups['global']) return global_members | set(groups.get(group, [])) ## Instruction: Allow local.json to be missing ## Code After: import json import os import logging logger = logging.getLogger(__name__) HERE = os.path.dirname(os.path.realpath(__file__)) groups = json.load(open('{0}/defaults.json'.format(HERE))) fp = None try: fp = open('{0}/local.json'.format(HERE)) except IOError: logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.') if fp: for group, members in json.load(fp).iteritems(): if group not in groups: groups[group] = members else: groups[group] = set(groups[group]) | set(members) def members_for(group): global_members = set(groups['global']) return global_members | set(groups.get(group, []))
import json import os import logging logger = logging.getLogger(__name__) HERE = os.path.dirname(os.path.realpath(__file__)) groups = json.load(open('{0}/defaults.json'.format(HERE))) + fp = None try: fp = open('{0}/local.json'.format(HERE)) except IOError: logger.info('No local.json found to populate lists of DraftRegistrationApproval authorizers.') + if fp: - for group, members in json.load(fp).iteritems(): + for group, members in json.load(fp).iteritems(): ? ++++ - if group not in groups: + if group not in groups: ? ++++ - groups[group] = members + groups[group] = members ? ++++ - else: + else: ? ++++ - groups[group] = set(groups[group]) | set(members) + groups[group] = set(groups[group]) | set(members) ? ++++ def members_for(group): global_members = set(groups['global']) return global_members | set(groups.get(group, []))
62ad2eb82c037350f25d3e575e59f16740365159
pies/ast.py
pies/ast.py
from __future__ import absolute_import from ast import * from .version_info import PY2 if PY2: Try = TryExcept def argument_names(node): return [isinstance(arg, Name) and arg.id or None for arg in node.args.args] def kw_only_argument_names(node): return [] def kw_only_default_count(node): return 0 else: TryFinally = () def argument_names(node): return [arg.arg for arg in node.args.args] def kw_only_argument_names(node): return [arg.arg for arg in node.args.kwonlyargs] def kw_only_default_count(node): return sum(1 for n in node.args.kw_defaults if n is not None)
from __future__ import absolute_import import sys from ast import * from .version_info import PY2 if PY2 or sys.version_info[1] <= 2: Try = TryExcept else: TryFinally = () if PY2: def argument_names(node): return [isinstance(arg, Name) and arg.id or None for arg in node.args.args] def kw_only_argument_names(node): return [] def kw_only_default_count(node): return 0 else: def argument_names(node): return [arg.arg for arg in node.args.args] def kw_only_argument_names(node): return [arg.arg for arg in node.args.kwonlyargs] def kw_only_default_count(node): return sum(1 for n in node.args.kw_defaults if n is not None)
Fix small incompatibility with Python 3.2
Fix small incompatibility with Python 3.2
Python
mit
lisongmin/pies,AbsoluteMSTR/pies,timothycrosley/pies,AbsoluteMSTR/pies,timothycrosley/pies,lisongmin/pies
from __future__ import absolute_import + import sys from ast import * from .version_info import PY2 + if PY2 or sys.version_info[1] <= 2: + Try = TryExcept + else: + TryFinally = () + if PY2: - Try = TryExcept - def argument_names(node): return [isinstance(arg, Name) and arg.id or None for arg in node.args.args] def kw_only_argument_names(node): return [] def kw_only_default_count(node): return 0 else: - TryFinally = () - def argument_names(node): return [arg.arg for arg in node.args.args] def kw_only_argument_names(node): return [arg.arg for arg in node.args.kwonlyargs] def kw_only_default_count(node): return sum(1 for n in node.args.kw_defaults if n is not None)
Fix small incompatibility with Python 3.2
## Code Before: from __future__ import absolute_import from ast import * from .version_info import PY2 if PY2: Try = TryExcept def argument_names(node): return [isinstance(arg, Name) and arg.id or None for arg in node.args.args] def kw_only_argument_names(node): return [] def kw_only_default_count(node): return 0 else: TryFinally = () def argument_names(node): return [arg.arg for arg in node.args.args] def kw_only_argument_names(node): return [arg.arg for arg in node.args.kwonlyargs] def kw_only_default_count(node): return sum(1 for n in node.args.kw_defaults if n is not None) ## Instruction: Fix small incompatibility with Python 3.2 ## Code After: from __future__ import absolute_import import sys from ast import * from .version_info import PY2 if PY2 or sys.version_info[1] <= 2: Try = TryExcept else: TryFinally = () if PY2: def argument_names(node): return [isinstance(arg, Name) and arg.id or None for arg in node.args.args] def kw_only_argument_names(node): return [] def kw_only_default_count(node): return 0 else: def argument_names(node): return [arg.arg for arg in node.args.args] def kw_only_argument_names(node): return [arg.arg for arg in node.args.kwonlyargs] def kw_only_default_count(node): return sum(1 for n in node.args.kw_defaults if n is not None)
from __future__ import absolute_import + import sys from ast import * from .version_info import PY2 + if PY2 or sys.version_info[1] <= 2: + Try = TryExcept + else: + TryFinally = () + if PY2: - Try = TryExcept - def argument_names(node): return [isinstance(arg, Name) and arg.id or None for arg in node.args.args] def kw_only_argument_names(node): return [] def kw_only_default_count(node): return 0 else: - TryFinally = () - def argument_names(node): return [arg.arg for arg in node.args.args] def kw_only_argument_names(node): return [arg.arg for arg in node.args.kwonlyargs] def kw_only_default_count(node): return sum(1 for n in node.args.kw_defaults if n is not None)
58be36ca646c4bb7fd4263a592cf3a240fbca64f
post_tag.py
post_tag.py
from common import init, globaldata, tag_clean, tag_prefix, tag_post, tagtypes from bottle import post, request, redirect, mako_view as view @post("/post-tag") @view("post-tag") def r_post_tag(): client = init() m = request.forms.post post = client.get_post(m) tags = request.forms.tags create = request.forms.getall("create") ctype = request.forms.getall("ctype") full = set() weak = set() remove = set() failed = [] for n, t in zip(create, ctype): if t: client.add_tag(tag_clean(n), t) tags += u' ' + n for t in tags.split(): tag = client.find_tag(tag_clean(t)) if tag: p = tag_prefix(t) if p == "~": weak.add(tag) elif p == "-": remove.add(tag) else: full.add(tag) else: failed.append(t) tag_post(post, full, weak, remove) if not failed: redirect("post/" + m) data = globaldata() data.tagtypes = tagtypes() data.failed = failed data.m = m return data
from common import init, globaldata, tag_clean, tag_prefix, tag_post, tagtypes from bottle import post, request, redirect, mako_view as view @post("/post-tag") @view("post-tag") def r_post_tag(): client = init() m = request.forms.post post = client.get_post(m) tags = request.forms.tags create = [a.decode("utf-8") for a in request.forms.getall("create")] ctype = [a.decode("utf-8") for a in request.forms.getall("ctype")] full = set() weak = set() remove = set() failed = [] for n, t in zip(create, ctype): if t: client.add_tag(tag_clean(n), t) tags += u' ' + n for t in tags.split(): tag = client.find_tag(tag_clean(t)) if tag: p = tag_prefix(t) if p == "~": weak.add(tag) elif p == "-": remove.add(tag) else: full.add(tag) else: failed.append(t) tag_post(post, full, weak, remove) if not failed: redirect("post/" + m) data = globaldata() data.tagtypes = tagtypes() data.failed = failed data.m = m return data
Fix tag creation with non-ascii chars. (Dammit bottle!)
Fix tag creation with non-ascii chars. (Dammit bottle!)
Python
mit
drougge/wwwwellpapp,drougge/wwwwellpapp,drougge/wwwwellpapp
from common import init, globaldata, tag_clean, tag_prefix, tag_post, tagtypes from bottle import post, request, redirect, mako_view as view @post("/post-tag") @view("post-tag") def r_post_tag(): client = init() m = request.forms.post post = client.get_post(m) tags = request.forms.tags - create = request.forms.getall("create") - ctype = request.forms.getall("ctype") + create = [a.decode("utf-8") for a in request.forms.getall("create")] + ctype = [a.decode("utf-8") for a in request.forms.getall("ctype")] full = set() weak = set() remove = set() failed = [] for n, t in zip(create, ctype): if t: client.add_tag(tag_clean(n), t) tags += u' ' + n for t in tags.split(): tag = client.find_tag(tag_clean(t)) if tag: p = tag_prefix(t) if p == "~": weak.add(tag) elif p == "-": remove.add(tag) else: full.add(tag) else: failed.append(t) tag_post(post, full, weak, remove) if not failed: redirect("post/" + m) data = globaldata() data.tagtypes = tagtypes() data.failed = failed data.m = m return data
Fix tag creation with non-ascii chars. (Dammit bottle!)
## Code Before: from common import init, globaldata, tag_clean, tag_prefix, tag_post, tagtypes from bottle import post, request, redirect, mako_view as view @post("/post-tag") @view("post-tag") def r_post_tag(): client = init() m = request.forms.post post = client.get_post(m) tags = request.forms.tags create = request.forms.getall("create") ctype = request.forms.getall("ctype") full = set() weak = set() remove = set() failed = [] for n, t in zip(create, ctype): if t: client.add_tag(tag_clean(n), t) tags += u' ' + n for t in tags.split(): tag = client.find_tag(tag_clean(t)) if tag: p = tag_prefix(t) if p == "~": weak.add(tag) elif p == "-": remove.add(tag) else: full.add(tag) else: failed.append(t) tag_post(post, full, weak, remove) if not failed: redirect("post/" + m) data = globaldata() data.tagtypes = tagtypes() data.failed = failed data.m = m return data ## Instruction: Fix tag creation with non-ascii chars. (Dammit bottle!) ## Code After: from common import init, globaldata, tag_clean, tag_prefix, tag_post, tagtypes from bottle import post, request, redirect, mako_view as view @post("/post-tag") @view("post-tag") def r_post_tag(): client = init() m = request.forms.post post = client.get_post(m) tags = request.forms.tags create = [a.decode("utf-8") for a in request.forms.getall("create")] ctype = [a.decode("utf-8") for a in request.forms.getall("ctype")] full = set() weak = set() remove = set() failed = [] for n, t in zip(create, ctype): if t: client.add_tag(tag_clean(n), t) tags += u' ' + n for t in tags.split(): tag = client.find_tag(tag_clean(t)) if tag: p = tag_prefix(t) if p == "~": weak.add(tag) elif p == "-": remove.add(tag) else: full.add(tag) else: failed.append(t) tag_post(post, full, weak, remove) if not failed: redirect("post/" + m) data = globaldata() data.tagtypes = tagtypes() data.failed = failed data.m = m return data
from common import init, globaldata, tag_clean, tag_prefix, tag_post, tagtypes from bottle import post, request, redirect, mako_view as view @post("/post-tag") @view("post-tag") def r_post_tag(): client = init() m = request.forms.post post = client.get_post(m) tags = request.forms.tags - create = request.forms.getall("create") - ctype = request.forms.getall("ctype") + create = [a.decode("utf-8") for a in request.forms.getall("create")] + ctype = [a.decode("utf-8") for a in request.forms.getall("ctype")] full = set() weak = set() remove = set() failed = [] for n, t in zip(create, ctype): if t: client.add_tag(tag_clean(n), t) tags += u' ' + n for t in tags.split(): tag = client.find_tag(tag_clean(t)) if tag: p = tag_prefix(t) if p == "~": weak.add(tag) elif p == "-": remove.add(tag) else: full.add(tag) else: failed.append(t) tag_post(post, full, weak, remove) if not failed: redirect("post/" + m) data = globaldata() data.tagtypes = tagtypes() data.failed = failed data.m = m return data
943d2648c17facb9dbfd4f26d335beef341e9c49
fabfile.py
fabfile.py
from fabric.api import local __author__ = 'derek' def deploy(version): local('python runtests.py') local("git tag -a %s -m %s" % (version, version)) local('python setup.py sdist upload')
from fabric.api import local __author__ = 'derek' def deploy(version): local('python runtests.py') local("git tag -a %s -m %s" % (version, version)) local("git push origin --tags") local('python setup.py sdist upload')
Make sure to push the tags
Make sure to push the tags
Python
mit
winfieldco/django-mail-queue,Goury/django-mail-queue,Goury/django-mail-queue,dstegelman/django-mail-queue,dstegelman/django-mail-queue,styrmis/django-mail-queue
from fabric.api import local __author__ = 'derek' def deploy(version): local('python runtests.py') local("git tag -a %s -m %s" % (version, version)) + local("git push origin --tags") local('python setup.py sdist upload')
Make sure to push the tags
## Code Before: from fabric.api import local __author__ = 'derek' def deploy(version): local('python runtests.py') local("git tag -a %s -m %s" % (version, version)) local('python setup.py sdist upload') ## Instruction: Make sure to push the tags ## Code After: from fabric.api import local __author__ = 'derek' def deploy(version): local('python runtests.py') local("git tag -a %s -m %s" % (version, version)) local("git push origin --tags") local('python setup.py sdist upload')
from fabric.api import local __author__ = 'derek' def deploy(version): local('python runtests.py') local("git tag -a %s -m %s" % (version, version)) + local("git push origin --tags") local('python setup.py sdist upload')
e4798424b22a38cfca519e5e792644ae7757a4f5
api/base/pagination.py
api/base/pagination.py
from collections import OrderedDict from rest_framework import pagination from rest_framework.response import Response from rest_framework.utils.urls import ( replace_query_param, remove_query_param ) class JSONAPIPagination(pagination.PageNumberPagination): """Custom paginator that formats responses in a JSON-API compatible format.""" def get_first_link(self): url = self.request.build_absolute_uri() return remove_query_param(url, self.page_query_param) def get_last_link(self): url = self.request.build_absolute_uri() page_number = self.page.paginator.num_pages return replace_query_param(url, self.page_query_param, page_number) def get_paginated_response(self, data): response_dict = OrderedDict([ ('data', data), ('links', OrderedDict([ ('first', self.get_first_link()), ('last', self.get_last_link()), ('prev', self.get_previous_link()), ('next', self.get_next_link()), ('meta', OrderedDict([ ('total', self.page.paginator.count), ])) ])), ]) return Response(response_dict)
from collections import OrderedDict from rest_framework import pagination from rest_framework.response import Response from rest_framework.utils.urls import ( replace_query_param, remove_query_param ) class JSONAPIPagination(pagination.PageNumberPagination): """Custom paginator that formats responses in a JSON-API compatible format.""" page_size_query_param = 'page[size]' def get_first_link(self): url = self.request.build_absolute_uri() return remove_query_param(url, self.page_query_param) def get_last_link(self): url = self.request.build_absolute_uri() page_number = self.page.paginator.num_pages return replace_query_param(url, self.page_query_param, page_number) def get_paginated_response(self, data): response_dict = OrderedDict([ ('data', data), ('links', OrderedDict([ ('first', self.get_first_link()), ('last', self.get_last_link()), ('prev', self.get_previous_link()), ('next', self.get_next_link()), ('meta', OrderedDict([ ('total', self.page.paginator.count), ('per_page', self.page.paginator.per_page), ])) ])), ]) return Response(response_dict)
Allow client to customize page size using page[size] query param
Allow client to customize page size using page[size] query param
Python
apache-2.0
samchrisinger/osf.io,fabianvf/osf.io,erinspace/osf.io,lyndsysimon/osf.io,samanehsan/osf.io,brandonPurvis/osf.io,billyhunt/osf.io,baylee-d/osf.io,billyhunt/osf.io,erinspace/osf.io,ZobairAlijan/osf.io,mfraezz/osf.io,adlius/osf.io,brandonPurvis/osf.io,HalcyonChimera/osf.io,GageGaskins/osf.io,jnayak1/osf.io,jinluyuan/osf.io,acshi/osf.io,TomBaxter/osf.io,cwisecarver/osf.io,alexschiller/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,ZobairAlijan/osf.io,zamattiac/osf.io,Johnetordoff/osf.io,jolene-esposito/osf.io,kch8qx/osf.io,ckc6cz/osf.io,doublebits/osf.io,mattclark/osf.io,njantrania/osf.io,cwisecarver/osf.io,zamattiac/osf.io,sloria/osf.io,cslzchen/osf.io,TomHeatwole/osf.io,alexschiller/osf.io,felliott/osf.io,aaxelb/osf.io,doublebits/osf.io,bdyetton/prettychart,sbt9uc/osf.io,amyshi188/osf.io,TomBaxter/osf.io,SSJohns/osf.io,barbour-em/osf.io,cslzchen/osf.io,jmcarp/osf.io,Ghalko/osf.io,arpitar/osf.io,TomHeatwole/osf.io,petermalcolm/osf.io,brianjgeiger/osf.io,RomanZWang/osf.io,DanielSBrown/osf.io,baylee-d/osf.io,jmcarp/osf.io,adlius/osf.io,TomHeatwole/osf.io,samanehsan/osf.io,MerlinZhang/osf.io,cldershem/osf.io,jnayak1/osf.io,monikagrabowska/osf.io,caneruguz/osf.io,Ghalko/osf.io,mattclark/osf.io,kch8qx/osf.io,samanehsan/osf.io,samchrisinger/osf.io,bdyetton/prettychart,ZobairAlijan/osf.io,icereval/osf.io,sbt9uc/osf.io,arpitar/osf.io,wearpants/osf.io,kwierman/osf.io,caneruguz/osf.io,dplorimer/osf,mluo613/osf.io,jmcarp/osf.io,cldershem/osf.io,chrisseto/osf.io,Ghalko/osf.io,bdyetton/prettychart,mattclark/osf.io,rdhyee/osf.io,kch8qx/osf.io,hmoco/osf.io,mluo613/osf.io,DanielSBrown/osf.io,sloria/osf.io,adlius/osf.io,wearpants/osf.io,emetsger/osf.io,kwierman/osf.io,petermalcolm/osf.io,kch8qx/osf.io,barbour-em/osf.io,cslzchen/osf.io,reinaH/osf.io,leb2dg/osf.io,aaxelb/osf.io,cldershem/osf.io,KAsante95/osf.io,doublebits/osf.io,jeffreyliu3230/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,ticklemepierce/osf.io,adlius/osf.io,monikagrabowska/osf.io,petermalcolm/osf.io,abought/osf.io,emetsge
r/osf.io,binoculars/osf.io,danielneis/osf.io,mluo613/osf.io,reinaH/osf.io,njantrania/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,TomHeatwole/osf.io,KAsante95/osf.io,sbt9uc/osf.io,CenterForOpenScience/osf.io,leb2dg/osf.io,MerlinZhang/osf.io,haoyuchen1992/osf.io,cosenal/osf.io,chrisseto/osf.io,wearpants/osf.io,Johnetordoff/osf.io,caseyrygt/osf.io,dplorimer/osf,doublebits/osf.io,dplorimer/osf,GageGaskins/osf.io,jinluyuan/osf.io,asanfilippo7/osf.io,njantrania/osf.io,laurenrevere/osf.io,binoculars/osf.io,samchrisinger/osf.io,billyhunt/osf.io,aaxelb/osf.io,brandonPurvis/osf.io,crcresearch/osf.io,ticklemepierce/osf.io,jeffreyliu3230/osf.io,Nesiehr/osf.io,cosenal/osf.io,jnayak1/osf.io,asanfilippo7/osf.io,MerlinZhang/osf.io,lyndsysimon/osf.io,mluo613/osf.io,caseyrygt/osf.io,rdhyee/osf.io,brianjgeiger/osf.io,chennan47/osf.io,leb2dg/osf.io,binoculars/osf.io,jolene-esposito/osf.io,kwierman/osf.io,acshi/osf.io,hmoco/osf.io,jolene-esposito/osf.io,zachjanicki/osf.io,Ghalko/osf.io,zachjanicki/osf.io,samchrisinger/osf.io,RomanZWang/osf.io,ticklemepierce/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,danielneis/osf.io,asanfilippo7/osf.io,petermalcolm/osf.io,Nesiehr/osf.io,cosenal/osf.io,mluo613/osf.io,TomBaxter/osf.io,ZobairAlijan/osf.io,rdhyee/osf.io,wearpants/osf.io,dplorimer/osf,SSJohns/osf.io,chrisseto/osf.io,HarryRybacki/osf.io,KAsante95/osf.io,laurenrevere/osf.io,HarryRybacki/osf.io,zachjanicki/osf.io,abought/osf.io,cldershem/osf.io,mluke93/osf.io,arpitar/osf.io,brandonPurvis/osf.io,jeffreyliu3230/osf.io,caneruguz/osf.io,jeffreyliu3230/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,njantrania/osf.io,cosenal/osf.io,alexschiller/osf.io,mfraezz/osf.io,saradbowman/osf.io,icereval/osf.io,pattisdr/osf.io,asanfilippo7/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,brandonPurvis/osf.io,caseyrollins/osf.io,acshi/osf.io,alexschiller/osf.io,CenterForOpenScience/osf.io,fabianvf/osf.io,abought/osf.io,RomanZWang/osf.io,GageGaskins/osf.io,jinluyuan/osf.io,alexschiller/osf.io,rein
aH/osf.io,abought/osf.io,caseyrygt/osf.io,zamattiac/osf.io,RomanZWang/osf.io,sloria/osf.io,aaxelb/osf.io,HarryRybacki/osf.io,felliott/osf.io,jmcarp/osf.io,pattisdr/osf.io,crcresearch/osf.io,chrisseto/osf.io,ckc6cz/osf.io,jolene-esposito/osf.io,icereval/osf.io,chennan47/osf.io,bdyetton/prettychart,cwisecarver/osf.io,cslzchen/osf.io,KAsante95/osf.io,jinluyuan/osf.io,SSJohns/osf.io,HalcyonChimera/osf.io,doublebits/osf.io,RomanZWang/osf.io,haoyuchen1992/osf.io,felliott/osf.io,crcresearch/osf.io,cwisecarver/osf.io,KAsante95/osf.io,Johnetordoff/osf.io,lyndsysimon/osf.io,DanielSBrown/osf.io,billyhunt/osf.io,kch8qx/osf.io,mluke93/osf.io,amyshi188/osf.io,caseyrollins/osf.io,mfraezz/osf.io,laurenrevere/osf.io,felliott/osf.io,brianjgeiger/osf.io,acshi/osf.io,hmoco/osf.io,ticklemepierce/osf.io,chennan47/osf.io,mluke93/osf.io,haoyuchen1992/osf.io,caneruguz/osf.io,danielneis/osf.io,haoyuchen1992/osf.io,hmoco/osf.io,sbt9uc/osf.io,HarryRybacki/osf.io,danielneis/osf.io,fabianvf/osf.io,SSJohns/osf.io,ckc6cz/osf.io,zamattiac/osf.io,caseyrygt/osf.io,MerlinZhang/osf.io,billyhunt/osf.io,acshi/osf.io,arpitar/osf.io,mluke93/osf.io,Nesiehr/osf.io,emetsger/osf.io,lyndsysimon/osf.io,zachjanicki/osf.io,ckc6cz/osf.io,fabianvf/osf.io,DanielSBrown/osf.io,barbour-em/osf.io,GageGaskins/osf.io,amyshi188/osf.io,amyshi188/osf.io,emetsger/osf.io,jnayak1/osf.io,Nesiehr/osf.io,barbour-em/osf.io,kwierman/osf.io,pattisdr/osf.io,baylee-d/osf.io,rdhyee/osf.io,caseyrollins/osf.io,CenterForOpenScience/osf.io,samanehsan/osf.io,reinaH/osf.io,erinspace/osf.io
from collections import OrderedDict from rest_framework import pagination from rest_framework.response import Response from rest_framework.utils.urls import ( replace_query_param, remove_query_param ) class JSONAPIPagination(pagination.PageNumberPagination): """Custom paginator that formats responses in a JSON-API compatible format.""" + + page_size_query_param = 'page[size]' def get_first_link(self): url = self.request.build_absolute_uri() return remove_query_param(url, self.page_query_param) def get_last_link(self): url = self.request.build_absolute_uri() page_number = self.page.paginator.num_pages return replace_query_param(url, self.page_query_param, page_number) def get_paginated_response(self, data): response_dict = OrderedDict([ ('data', data), ('links', OrderedDict([ ('first', self.get_first_link()), ('last', self.get_last_link()), ('prev', self.get_previous_link()), ('next', self.get_next_link()), ('meta', OrderedDict([ ('total', self.page.paginator.count), + ('per_page', self.page.paginator.per_page), ])) ])), ]) return Response(response_dict)
Allow client to customize page size using page[size] query param
## Code Before: from collections import OrderedDict from rest_framework import pagination from rest_framework.response import Response from rest_framework.utils.urls import ( replace_query_param, remove_query_param ) class JSONAPIPagination(pagination.PageNumberPagination): """Custom paginator that formats responses in a JSON-API compatible format.""" def get_first_link(self): url = self.request.build_absolute_uri() return remove_query_param(url, self.page_query_param) def get_last_link(self): url = self.request.build_absolute_uri() page_number = self.page.paginator.num_pages return replace_query_param(url, self.page_query_param, page_number) def get_paginated_response(self, data): response_dict = OrderedDict([ ('data', data), ('links', OrderedDict([ ('first', self.get_first_link()), ('last', self.get_last_link()), ('prev', self.get_previous_link()), ('next', self.get_next_link()), ('meta', OrderedDict([ ('total', self.page.paginator.count), ])) ])), ]) return Response(response_dict) ## Instruction: Allow client to customize page size using page[size] query param ## Code After: from collections import OrderedDict from rest_framework import pagination from rest_framework.response import Response from rest_framework.utils.urls import ( replace_query_param, remove_query_param ) class JSONAPIPagination(pagination.PageNumberPagination): """Custom paginator that formats responses in a JSON-API compatible format.""" page_size_query_param = 'page[size]' def get_first_link(self): url = self.request.build_absolute_uri() return remove_query_param(url, self.page_query_param) def get_last_link(self): url = self.request.build_absolute_uri() page_number = self.page.paginator.num_pages return replace_query_param(url, self.page_query_param, page_number) def get_paginated_response(self, data): response_dict = OrderedDict([ ('data', data), ('links', OrderedDict([ ('first', self.get_first_link()), ('last', self.get_last_link()), ('prev', self.get_previous_link()), ('next', 
self.get_next_link()), ('meta', OrderedDict([ ('total', self.page.paginator.count), ('per_page', self.page.paginator.per_page), ])) ])), ]) return Response(response_dict)
from collections import OrderedDict from rest_framework import pagination from rest_framework.response import Response from rest_framework.utils.urls import ( replace_query_param, remove_query_param ) class JSONAPIPagination(pagination.PageNumberPagination): """Custom paginator that formats responses in a JSON-API compatible format.""" + + page_size_query_param = 'page[size]' def get_first_link(self): url = self.request.build_absolute_uri() return remove_query_param(url, self.page_query_param) def get_last_link(self): url = self.request.build_absolute_uri() page_number = self.page.paginator.num_pages return replace_query_param(url, self.page_query_param, page_number) def get_paginated_response(self, data): response_dict = OrderedDict([ ('data', data), ('links', OrderedDict([ ('first', self.get_first_link()), ('last', self.get_last_link()), ('prev', self.get_previous_link()), ('next', self.get_next_link()), ('meta', OrderedDict([ ('total', self.page.paginator.count), + ('per_page', self.page.paginator.per_page), ])) ])), ]) return Response(response_dict)
b73b8797c3c9c6c9aa92bd6873e15a5b717f4142
test/test_nap.py
test/test_nap.py
import unittest import requests from nap.api import Api class TestNap(unittest.TestCase): def test_unallowed_method(self): """Tries to use non-existent HTTP method""" api = Api('') # lambda trickery is necessary, because otherwise it would raise # AttributeError uncontrolled self.assertRaises(AttributeError, lambda: api.resource.nonexisting) def test_requests_raises_error(self): """Test that requests properly raises its own errors >>> requests.get('/kk') requests.exceptions.MissingSchema: Invalid URL u'/kk': No schema supplied. Perhaps you meant http:///kk? """ api = Api('') self.assertRaises(requests.exceptions.MissingSchema, api.resource.get) def test_resource_not_callable(self): """Make sure resource can't be called directly""" api = Api('') self.assertRaises(TypeError, api.resource)
from mock import MagicMock, patch import unittest import requests from nap.api import Api class TestNap(unittest.TestCase): def test_unallowed_method(self): """Tries to use non-existent HTTP method""" api = Api('') # lambda trickery is necessary, because otherwise it would raise # AttributeError uncontrolled self.assertRaises(AttributeError, lambda: api.resource.nonexisting) def test_requests_raises_error(self): """Test that requests properly raises its own errors >>> requests.get('/kk') requests.exceptions.MissingSchema: Invalid URL u'/kk': No schema supplied. Perhaps you meant http:///kk? """ api = Api('') self.assertRaises(requests.exceptions.MissingSchema, api.resource.get) def test_resource_not_callable(self): """Make sure resource can't be called directly""" api = Api('') self.assertRaises(TypeError, api.resource) @patch('requests.get') def test_default_parameters(self, requests_get): """Test default parameter behavior""" api = Api('', auth=('user', 'password')) requests.get = MagicMock(return_value=None) # Make sure defaults are passed for each request api.resource.get() requests.get.assert_called_with('/resource', auth=('user', 'password')) # Make sure single calls can override defaults api.resource.get(auth=('defaults', 'overriden')) requests.get.assert_called_with( '/resource', auth=('defaults', 'overriden') )
Add tests which test default parameters for nap api
Add tests which test default parameters for nap api
Python
mit
kimmobrunfeldt/nap
+ from mock import MagicMock, patch import unittest import requests from nap.api import Api class TestNap(unittest.TestCase): def test_unallowed_method(self): """Tries to use non-existent HTTP method""" api = Api('') # lambda trickery is necessary, because otherwise it would raise # AttributeError uncontrolled self.assertRaises(AttributeError, lambda: api.resource.nonexisting) def test_requests_raises_error(self): """Test that requests properly raises its own errors >>> requests.get('/kk') requests.exceptions.MissingSchema: Invalid URL u'/kk': No schema supplied. Perhaps you meant http:///kk? """ api = Api('') self.assertRaises(requests.exceptions.MissingSchema, api.resource.get) def test_resource_not_callable(self): """Make sure resource can't be called directly""" api = Api('') self.assertRaises(TypeError, api.resource) + @patch('requests.get') + def test_default_parameters(self, requests_get): + """Test default parameter behavior""" + api = Api('', auth=('user', 'password')) + requests.get = MagicMock(return_value=None) + + # Make sure defaults are passed for each request + api.resource.get() + requests.get.assert_called_with('/resource', auth=('user', 'password')) + + # Make sure single calls can override defaults + api.resource.get(auth=('defaults', 'overriden')) + requests.get.assert_called_with( + '/resource', + auth=('defaults', 'overriden') + ) +
Add tests which test default parameters for nap api
## Code Before: import unittest import requests from nap.api import Api class TestNap(unittest.TestCase): def test_unallowed_method(self): """Tries to use non-existent HTTP method""" api = Api('') # lambda trickery is necessary, because otherwise it would raise # AttributeError uncontrolled self.assertRaises(AttributeError, lambda: api.resource.nonexisting) def test_requests_raises_error(self): """Test that requests properly raises its own errors >>> requests.get('/kk') requests.exceptions.MissingSchema: Invalid URL u'/kk': No schema supplied. Perhaps you meant http:///kk? """ api = Api('') self.assertRaises(requests.exceptions.MissingSchema, api.resource.get) def test_resource_not_callable(self): """Make sure resource can't be called directly""" api = Api('') self.assertRaises(TypeError, api.resource) ## Instruction: Add tests which test default parameters for nap api ## Code After: from mock import MagicMock, patch import unittest import requests from nap.api import Api class TestNap(unittest.TestCase): def test_unallowed_method(self): """Tries to use non-existent HTTP method""" api = Api('') # lambda trickery is necessary, because otherwise it would raise # AttributeError uncontrolled self.assertRaises(AttributeError, lambda: api.resource.nonexisting) def test_requests_raises_error(self): """Test that requests properly raises its own errors >>> requests.get('/kk') requests.exceptions.MissingSchema: Invalid URL u'/kk': No schema supplied. Perhaps you meant http:///kk? 
""" api = Api('') self.assertRaises(requests.exceptions.MissingSchema, api.resource.get) def test_resource_not_callable(self): """Make sure resource can't be called directly""" api = Api('') self.assertRaises(TypeError, api.resource) @patch('requests.get') def test_default_parameters(self, requests_get): """Test default parameter behavior""" api = Api('', auth=('user', 'password')) requests.get = MagicMock(return_value=None) # Make sure defaults are passed for each request api.resource.get() requests.get.assert_called_with('/resource', auth=('user', 'password')) # Make sure single calls can override defaults api.resource.get(auth=('defaults', 'overriden')) requests.get.assert_called_with( '/resource', auth=('defaults', 'overriden') )
+ from mock import MagicMock, patch import unittest import requests from nap.api import Api class TestNap(unittest.TestCase): def test_unallowed_method(self): """Tries to use non-existent HTTP method""" api = Api('') # lambda trickery is necessary, because otherwise it would raise # AttributeError uncontrolled self.assertRaises(AttributeError, lambda: api.resource.nonexisting) def test_requests_raises_error(self): """Test that requests properly raises its own errors >>> requests.get('/kk') requests.exceptions.MissingSchema: Invalid URL u'/kk': No schema supplied. Perhaps you meant http:///kk? """ api = Api('') self.assertRaises(requests.exceptions.MissingSchema, api.resource.get) def test_resource_not_callable(self): """Make sure resource can't be called directly""" api = Api('') self.assertRaises(TypeError, api.resource) + + @patch('requests.get') + def test_default_parameters(self, requests_get): + """Test default parameter behavior""" + api = Api('', auth=('user', 'password')) + requests.get = MagicMock(return_value=None) + + # Make sure defaults are passed for each request + api.resource.get() + requests.get.assert_called_with('/resource', auth=('user', 'password')) + + # Make sure single calls can override defaults + api.resource.get(auth=('defaults', 'overriden')) + requests.get.assert_called_with( + '/resource', + auth=('defaults', 'overriden') + )
3ca4a7334a3a759762d309bcff94ddde62d5a48b
accounts/management/__init__.py
accounts/management/__init__.py
from django.db.models.signals import post_syncdb from accounts import models, names def ensure_core_accounts_exists(sender, **kwargs): # We only create core accounts the first time syncdb is run if models.Account.objects.all().count() > 0: return # Create asset accounts assets = models.AccountType.add_root(name='Assets') assets.accounts.create(name=names.REDEMPTIONS) assets.accounts.create(name=names.LAPSED) # Create liability accounts liabilities = models.AccountType.add_root(name='Liabilities') liabilities.accounts.create(name=names.MERCHANT_SOURCE, credit_limit=None) liabilities.add_child(name="Giftcards") liabilities.add_child(name="User accounts") post_syncdb.connect(ensure_core_accounts_exists, sender=models)
from accounts import models, names def ensure_core_accounts_exists(sender, **kwargs): # We only create core accounts the first time syncdb is run if models.Account.objects.all().count() > 0: return # Create asset accounts assets = models.AccountType.add_root(name='Assets') assets.accounts.create(name=names.REDEMPTIONS) assets.accounts.create(name=names.LAPSED) # Create liability accounts liabilities = models.AccountType.add_root(name='Liabilities') liabilities.accounts.create(name=names.MERCHANT_SOURCE, credit_limit=None) liabilities.add_child(name="Giftcards") liabilities.add_child(name="User accounts") #post_syncdb.connect(ensure_core_accounts_exists, sender=models)
Remove syncdb signal - will move to migration shortly
Remove syncdb signal - will move to migration shortly
Python
bsd-3-clause
Jannes123/django-oscar-accounts,machtfit/django-oscar-accounts,michaelkuty/django-oscar-accounts,Mariana-Tek/django-oscar-accounts,amsys/django-account-balances,michaelkuty/django-oscar-accounts,Jannes123/django-oscar-accounts,carver/django-account-balances,Mariana-Tek/django-oscar-accounts,amsys/django-account-balances,machtfit/django-oscar-accounts,django-oscar/django-oscar-accounts,django-oscar/django-oscar-accounts
- from django.db.models.signals import post_syncdb - from accounts import models, names def ensure_core_accounts_exists(sender, **kwargs): # We only create core accounts the first time syncdb is run if models.Account.objects.all().count() > 0: return # Create asset accounts assets = models.AccountType.add_root(name='Assets') assets.accounts.create(name=names.REDEMPTIONS) assets.accounts.create(name=names.LAPSED) # Create liability accounts liabilities = models.AccountType.add_root(name='Liabilities') liabilities.accounts.create(name=names.MERCHANT_SOURCE, credit_limit=None) liabilities.add_child(name="Giftcards") liabilities.add_child(name="User accounts") - post_syncdb.connect(ensure_core_accounts_exists, sender=models) + #post_syncdb.connect(ensure_core_accounts_exists, sender=models)
Remove syncdb signal - will move to migration shortly
## Code Before: from django.db.models.signals import post_syncdb from accounts import models, names def ensure_core_accounts_exists(sender, **kwargs): # We only create core accounts the first time syncdb is run if models.Account.objects.all().count() > 0: return # Create asset accounts assets = models.AccountType.add_root(name='Assets') assets.accounts.create(name=names.REDEMPTIONS) assets.accounts.create(name=names.LAPSED) # Create liability accounts liabilities = models.AccountType.add_root(name='Liabilities') liabilities.accounts.create(name=names.MERCHANT_SOURCE, credit_limit=None) liabilities.add_child(name="Giftcards") liabilities.add_child(name="User accounts") post_syncdb.connect(ensure_core_accounts_exists, sender=models) ## Instruction: Remove syncdb signal - will move to migration shortly ## Code After: from accounts import models, names def ensure_core_accounts_exists(sender, **kwargs): # We only create core accounts the first time syncdb is run if models.Account.objects.all().count() > 0: return # Create asset accounts assets = models.AccountType.add_root(name='Assets') assets.accounts.create(name=names.REDEMPTIONS) assets.accounts.create(name=names.LAPSED) # Create liability accounts liabilities = models.AccountType.add_root(name='Liabilities') liabilities.accounts.create(name=names.MERCHANT_SOURCE, credit_limit=None) liabilities.add_child(name="Giftcards") liabilities.add_child(name="User accounts") #post_syncdb.connect(ensure_core_accounts_exists, sender=models)
- from django.db.models.signals import post_syncdb - from accounts import models, names def ensure_core_accounts_exists(sender, **kwargs): # We only create core accounts the first time syncdb is run if models.Account.objects.all().count() > 0: return # Create asset accounts assets = models.AccountType.add_root(name='Assets') assets.accounts.create(name=names.REDEMPTIONS) assets.accounts.create(name=names.LAPSED) # Create liability accounts liabilities = models.AccountType.add_root(name='Liabilities') liabilities.accounts.create(name=names.MERCHANT_SOURCE, credit_limit=None) liabilities.add_child(name="Giftcards") liabilities.add_child(name="User accounts") - post_syncdb.connect(ensure_core_accounts_exists, sender=models) + #post_syncdb.connect(ensure_core_accounts_exists, sender=models) ? +
b0e5dff69b9e40b916ad8a6655624de7fa85d247
chmvh_website/team/migrations/0002_auto_20161024_2338.py
chmvh_website/team/migrations/0002_auto_20161024_2338.py
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('team', '0001_initial'), ] operations = [ migrations.AlterModelOptions( name='teammember', options={'ordering': ('order',)}, ), migrations.AddField( model_name='teammember', name='order', field=models.PositiveSmallIntegerField(default=0), ), ]
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('team', '0001_initial'), ] operations = [ migrations.AddField( model_name='teammember', name='order', field=models.PositiveSmallIntegerField(default=0), ), migrations.AlterModelOptions( name='teammember', options={'ordering': ('order',)}, ), ]
Change order of migration operations.
Change order of migration operations.
Python
mit
cdriehuys/chmvh-website,cdriehuys/chmvh-website,cdriehuys/chmvh-website
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('team', '0001_initial'), ] operations = [ - migrations.AlterModelOptions( - name='teammember', - options={'ordering': ('order',)}, - ), migrations.AddField( model_name='teammember', name='order', field=models.PositiveSmallIntegerField(default=0), ), + migrations.AlterModelOptions( + name='teammember', + options={'ordering': ('order',)}, + ), ]
Change order of migration operations.
## Code Before: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('team', '0001_initial'), ] operations = [ migrations.AlterModelOptions( name='teammember', options={'ordering': ('order',)}, ), migrations.AddField( model_name='teammember', name='order', field=models.PositiveSmallIntegerField(default=0), ), ] ## Instruction: Change order of migration operations. ## Code After: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('team', '0001_initial'), ] operations = [ migrations.AddField( model_name='teammember', name='order', field=models.PositiveSmallIntegerField(default=0), ), migrations.AlterModelOptions( name='teammember', options={'ordering': ('order',)}, ), ]
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('team', '0001_initial'), ] operations = [ - migrations.AlterModelOptions( - name='teammember', - options={'ordering': ('order',)}, - ), migrations.AddField( model_name='teammember', name='order', field=models.PositiveSmallIntegerField(default=0), ), + migrations.AlterModelOptions( + name='teammember', + options={'ordering': ('order',)}, + ), ]
c0c67c14cb9c91c8cd07bfe6d013639121d1c5f7
crm/tests/test_contact_user.py
crm/tests/test_contact_user.py
from django.contrib.auth.models import User from django.db import IntegrityError from django.test import TestCase from crm.tests.model_maker import ( make_contact, make_user_contact, ) from login.tests.model_maker import make_user class TestContactUser(TestCase): def test_link_user_to_contact(self): """Create a contact and link it to a user""" contact = make_contact( 'pkimber', 'Patrick Kimber', ) make_user_contact(make_user('fred'), contact) user = User.objects.get(username='fred') user_contacts = user.usercontact_set.all() self.assertIn('Kimber', user_contacts[0].contact.name) def test_one_contact_per_user(self): """Make sure a user can only link to one contact""" fred = make_user('fred') jsmith = make_contact('jsmith', 'John Smith') pkimber = make_contact('pkimber', 'Patrick Kimber') make_user_contact(fred, pkimber) self.assertRaises( IntegrityError, make_user_contact, fred, jsmith, )
from django.db import IntegrityError from django.test import TestCase from crm.tests.model_maker import ( make_contact, make_user_contact, ) from crm.tests.scenario import ( contact_contractor, ) from login.tests.scenario import ( get_fred, get_sara, user_contractor, ) class TestContactUser(TestCase): def test_link_user_to_contact(self): """Create a contact and link it to a user""" user_contractor() contact_contractor() user_contacts = get_fred().usercontact_set.all() self.assertIn("Fred's Farm", user_contacts[0].contact.name) def test_one_contact_per_user(self): """Make sure a user can only link to one contact""" user_contractor() contact_contractor() self.assertRaises( IntegrityError, make_user_contact, get_sara(), make_contact('zoo', 'Bristol Zoo') )
Update test to use standard scenario
Update test to use standard scenario
Python
apache-2.0
pkimber/crm,pkimber/crm,pkimber/crm
- from django.contrib.auth.models import User from django.db import IntegrityError from django.test import TestCase from crm.tests.model_maker import ( make_contact, make_user_contact, ) - from login.tests.model_maker import make_user + from crm.tests.scenario import ( + contact_contractor, + ) + from login.tests.scenario import ( + get_fred, + get_sara, + user_contractor, + ) class TestContactUser(TestCase): def test_link_user_to_contact(self): """Create a contact and link it to a user""" + user_contractor() - contact = make_contact( + contact_contractor() - 'pkimber', - 'Patrick Kimber', - ) - make_user_contact(make_user('fred'), contact) - user = User.objects.get(username='fred') - user_contacts = user.usercontact_set.all() + user_contacts = get_fred().usercontact_set.all() - self.assertIn('Kimber', user_contacts[0].contact.name) + self.assertIn("Fred's Farm", user_contacts[0].contact.name) def test_one_contact_per_user(self): """Make sure a user can only link to one contact""" + user_contractor() + contact_contractor() - fred = make_user('fred') - jsmith = make_contact('jsmith', 'John Smith') - pkimber = make_contact('pkimber', 'Patrick Kimber') - make_user_contact(fred, pkimber) self.assertRaises( IntegrityError, make_user_contact, - fred, - jsmith, + get_sara(), + make_contact('zoo', 'Bristol Zoo') )
Update test to use standard scenario
## Code Before: from django.contrib.auth.models import User from django.db import IntegrityError from django.test import TestCase from crm.tests.model_maker import ( make_contact, make_user_contact, ) from login.tests.model_maker import make_user class TestContactUser(TestCase): def test_link_user_to_contact(self): """Create a contact and link it to a user""" contact = make_contact( 'pkimber', 'Patrick Kimber', ) make_user_contact(make_user('fred'), contact) user = User.objects.get(username='fred') user_contacts = user.usercontact_set.all() self.assertIn('Kimber', user_contacts[0].contact.name) def test_one_contact_per_user(self): """Make sure a user can only link to one contact""" fred = make_user('fred') jsmith = make_contact('jsmith', 'John Smith') pkimber = make_contact('pkimber', 'Patrick Kimber') make_user_contact(fred, pkimber) self.assertRaises( IntegrityError, make_user_contact, fred, jsmith, ) ## Instruction: Update test to use standard scenario ## Code After: from django.db import IntegrityError from django.test import TestCase from crm.tests.model_maker import ( make_contact, make_user_contact, ) from crm.tests.scenario import ( contact_contractor, ) from login.tests.scenario import ( get_fred, get_sara, user_contractor, ) class TestContactUser(TestCase): def test_link_user_to_contact(self): """Create a contact and link it to a user""" user_contractor() contact_contractor() user_contacts = get_fred().usercontact_set.all() self.assertIn("Fred's Farm", user_contacts[0].contact.name) def test_one_contact_per_user(self): """Make sure a user can only link to one contact""" user_contractor() contact_contractor() self.assertRaises( IntegrityError, make_user_contact, get_sara(), make_contact('zoo', 'Bristol Zoo') )
- from django.contrib.auth.models import User from django.db import IntegrityError from django.test import TestCase from crm.tests.model_maker import ( make_contact, make_user_contact, ) - from login.tests.model_maker import make_user + from crm.tests.scenario import ( + contact_contractor, + ) + from login.tests.scenario import ( + get_fred, + get_sara, + user_contractor, + ) class TestContactUser(TestCase): def test_link_user_to_contact(self): """Create a contact and link it to a user""" + user_contractor() - contact = make_contact( ? ------- + contact_contractor() ? + ++ + - 'pkimber', - 'Patrick Kimber', - ) - make_user_contact(make_user('fred'), contact) - user = User.objects.get(username='fred') - user_contacts = user.usercontact_set.all() ? ^^ + user_contacts = get_fred().usercontact_set.all() ? ^ +++ ++++ - self.assertIn('Kimber', user_contacts[0].contact.name) ? ^^ ^^^^ + self.assertIn("Fred's Farm", user_contacts[0].contact.name) ? +++++ ^^^^^ ^ def test_one_contact_per_user(self): """Make sure a user can only link to one contact""" + user_contractor() + contact_contractor() - fred = make_user('fred') - jsmith = make_contact('jsmith', 'John Smith') - pkimber = make_contact('pkimber', 'Patrick Kimber') - make_user_contact(fred, pkimber) self.assertRaises( IntegrityError, make_user_contact, - fred, - jsmith, + get_sara(), + make_contact('zoo', 'Bristol Zoo') )
6740f677903c7d48748fd0a595762b8bf2c7dcb3
test_connector/components/components.py
test_connector/components/components.py
from odoo.addons.connector.components.core import Component from odoo.addons.connector.components.collection import use class BaseComponent(Component): _inherit = 'base' def test_inherit_base(self): return 'test_inherit_base' class Mapper(Component): _name = 'mapper' def test_inherit_component(self): return 'test_inherit_component' class TestMapper(Component): _name = 'test.mapper' _inherit = 'mapper' def name(self): return 'test.mapper'
from odoo.addons.connector.components.core import Component class BaseComponent(Component): _inherit = 'base' def test_inherit_base(self): return 'test_inherit_base' class Mapper(Component): _name = 'mapper' def test_inherit_component(self): return 'test_inherit_component' class TestMapper(Component): _name = 'test.mapper' _inherit = 'mapper' def name(self): return 'test.mapper'
Improve on the collections, work, ...
Improve on the collections, work, ...
Python
agpl-3.0
OCA/connector,OCA/connector
from odoo.addons.connector.components.core import Component - from odoo.addons.connector.components.collection import use class BaseComponent(Component): _inherit = 'base' def test_inherit_base(self): return 'test_inherit_base' class Mapper(Component): _name = 'mapper' def test_inherit_component(self): return 'test_inherit_component' class TestMapper(Component): _name = 'test.mapper' _inherit = 'mapper' def name(self): return 'test.mapper'
Improve on the collections, work, ...
## Code Before: from odoo.addons.connector.components.core import Component from odoo.addons.connector.components.collection import use class BaseComponent(Component): _inherit = 'base' def test_inherit_base(self): return 'test_inherit_base' class Mapper(Component): _name = 'mapper' def test_inherit_component(self): return 'test_inherit_component' class TestMapper(Component): _name = 'test.mapper' _inherit = 'mapper' def name(self): return 'test.mapper' ## Instruction: Improve on the collections, work, ... ## Code After: from odoo.addons.connector.components.core import Component class BaseComponent(Component): _inherit = 'base' def test_inherit_base(self): return 'test_inherit_base' class Mapper(Component): _name = 'mapper' def test_inherit_component(self): return 'test_inherit_component' class TestMapper(Component): _name = 'test.mapper' _inherit = 'mapper' def name(self): return 'test.mapper'
from odoo.addons.connector.components.core import Component - from odoo.addons.connector.components.collection import use class BaseComponent(Component): _inherit = 'base' def test_inherit_base(self): return 'test_inherit_base' class Mapper(Component): _name = 'mapper' def test_inherit_component(self): return 'test_inherit_component' class TestMapper(Component): _name = 'test.mapper' _inherit = 'mapper' def name(self): return 'test.mapper'
0c30fe72179a125b41ffb88fec387862c78e6c7c
flaskrst/modules/atom.py
flaskrst/modules/atom.py
from flask import Blueprint, request, current_app from werkzeug.contrib.atom import AtomFeed, FeedEntry from flaskrst.modules.blog import get_posts atom = Blueprint('atom', __name__) @atom.route("/atom.xml") def atom_feed(): feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"), feed_url=request.url, url=request.host_url, subtitle=current_app.config.get('SITE_SUBTITLE', None)) for post in get_posts(): entry = FeedEntry(post.title, url=post.external_url, updated=post.pub_date) feed.add(entry) return feed.to_string(), 200, {}, "application/atom+xml" def setup(app, cfg): app.register_blueprint(atom)
from flask import Blueprint, request, current_app from werkzeug.contrib.atom import AtomFeed, FeedEntry from flaskrst.modules.blog import get_posts atom = Blueprint('atom', __name__) @atom.route("/atom.xml") def atom_feed(): feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"), feed_url=request.url, url=request.host_url, subtitle=current_app.config.get('SITE_SUBTITLE', None)) for post in get_posts(): entry = FeedEntry(post.title, url=post.external_url, updated=post.pub_date, content=post.body, summary=post.config.get('summary', None), author={ 'name': current_app.config.get('AUTHOR_NAME'), 'email': current_app.config.get('AUTHOR_EMAIL') }) feed.add(entry) return feed.to_string(), 200, {}, "application/atom+xml" def setup(app, cfg): app.register_blueprint(atom)
Add more information to the feed entry
Add more information to the feed entry
Python
bsd-3-clause
jarus/flask-rst
from flask import Blueprint, request, current_app from werkzeug.contrib.atom import AtomFeed, FeedEntry from flaskrst.modules.blog import get_posts atom = Blueprint('atom', __name__) @atom.route("/atom.xml") def atom_feed(): feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"), feed_url=request.url, url=request.host_url, subtitle=current_app.config.get('SITE_SUBTITLE', None)) for post in get_posts(): - entry = FeedEntry(post.title, url=post.external_url, + entry = FeedEntry(post.title, + url=post.external_url, - updated=post.pub_date) + updated=post.pub_date, + content=post.body, + summary=post.config.get('summary', None), + author={ + 'name': current_app.config.get('AUTHOR_NAME'), + 'email': current_app.config.get('AUTHOR_EMAIL') + }) feed.add(entry) return feed.to_string(), 200, {}, "application/atom+xml" def setup(app, cfg): app.register_blueprint(atom)
Add more information to the feed entry
## Code Before: from flask import Blueprint, request, current_app from werkzeug.contrib.atom import AtomFeed, FeedEntry from flaskrst.modules.blog import get_posts atom = Blueprint('atom', __name__) @atom.route("/atom.xml") def atom_feed(): feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"), feed_url=request.url, url=request.host_url, subtitle=current_app.config.get('SITE_SUBTITLE', None)) for post in get_posts(): entry = FeedEntry(post.title, url=post.external_url, updated=post.pub_date) feed.add(entry) return feed.to_string(), 200, {}, "application/atom+xml" def setup(app, cfg): app.register_blueprint(atom) ## Instruction: Add more information to the feed entry ## Code After: from flask import Blueprint, request, current_app from werkzeug.contrib.atom import AtomFeed, FeedEntry from flaskrst.modules.blog import get_posts atom = Blueprint('atom', __name__) @atom.route("/atom.xml") def atom_feed(): feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"), feed_url=request.url, url=request.host_url, subtitle=current_app.config.get('SITE_SUBTITLE', None)) for post in get_posts(): entry = FeedEntry(post.title, url=post.external_url, updated=post.pub_date, content=post.body, summary=post.config.get('summary', None), author={ 'name': current_app.config.get('AUTHOR_NAME'), 'email': current_app.config.get('AUTHOR_EMAIL') }) feed.add(entry) return feed.to_string(), 200, {}, "application/atom+xml" def setup(app, cfg): app.register_blueprint(atom)
from flask import Blueprint, request, current_app from werkzeug.contrib.atom import AtomFeed, FeedEntry from flaskrst.modules.blog import get_posts atom = Blueprint('atom', __name__) @atom.route("/atom.xml") def atom_feed(): feed = AtomFeed(current_app.config.get('SITE_NAME', "My Site"), feed_url=request.url, url=request.host_url, subtitle=current_app.config.get('SITE_SUBTITLE', None)) for post in get_posts(): - entry = FeedEntry(post.title, url=post.external_url, ? ---------------------- + entry = FeedEntry(post.title, + url=post.external_url, - updated=post.pub_date) ? ^ + updated=post.pub_date, ? ^^ + content=post.body, + summary=post.config.get('summary', None), + author={ + 'name': current_app.config.get('AUTHOR_NAME'), + 'email': current_app.config.get('AUTHOR_EMAIL') + }) feed.add(entry) return feed.to_string(), 200, {}, "application/atom+xml" def setup(app, cfg): app.register_blueprint(atom)
87d868283d1972330da593fa605bd05e574cf2fd
sslyze/cli/output_generator.py
sslyze/cli/output_generator.py
from abc import ABCMeta, abstractmethod from sslyze.cli import CompletedServerScan from sslyze.cli import FailedServerScan from sslyze.server_connectivity import ServerConnectivityInfo class OutputGenerator(object): """The abstract class output generator classes should inherit from. Each method must be implemented and will be called in the order below, as the SSLyze CLI runs scans. """ __metaclass__ = ABCMeta def __init__(self, file_to): # type: (file) -> None self._file_to = file_to def close(self): # type: (None) -> None self._file_to.close() @abstractmethod def command_line_parsed(self, available_plugins, args_command_list): pass @abstractmethod def server_connectivity_test_failed(self, failed_scan): # type: (FailedServerScan) -> None pass @abstractmethod def server_connectivity_test_succeeded(self, server_connectivity_info): # type: (ServerConnectivityInfo) -> None pass @abstractmethod def scans_started(self): # type: (None) -> None pass @abstractmethod def server_scan_completed(self, server_scan_result): # type: (CompletedServerScan) -> None pass @abstractmethod def scans_completed(self, total_scan_time): # type: (float) -> None pass
from abc import ABCMeta, abstractmethod from sslyze.cli import CompletedServerScan from sslyze.cli import FailedServerScan from sslyze.server_connectivity import ServerConnectivityInfo class OutputGenerator(object): """The abstract class output generator classes should inherit from. Each method must be implemented and will be called in the order below, as the SSLyze CLI runs scans. """ __metaclass__ = ABCMeta def __init__(self, file_to): # type: (file) -> None self._file_to = file_to def close(self): # type: (None) -> None self._file_to.close() @abstractmethod def command_line_parsed(self, available_plugins, args_command_list): """The CLI was just started and successfully parsed the command line. """ @abstractmethod def server_connectivity_test_failed(self, failed_scan): # type: (FailedServerScan) -> None """The CLI found a server that it could not connect to; no scans will be performed against this server. """ @abstractmethod def server_connectivity_test_succeeded(self, server_connectivity_info): # type: (ServerConnectivityInfo) -> None """The CLI found a server that it was able to connect to; scans will be run against this server. """ @abstractmethod def scans_started(self): # type: (None) -> None """The CLI has finished testing connectivity with the supplied servers and will now start the scans. """ @abstractmethod def server_scan_completed(self, server_scan_result): # type: (CompletedServerScan) -> None """The CLI has finished scanning one single server. """ @abstractmethod def scans_completed(self, total_scan_time): # type: (float) -> None """The CLI has finished scanning all the supplied servers and will now exit. """
Document how output generators work
Document how output generators work
Python
agpl-3.0
nabla-c0d3/sslyze
from abc import ABCMeta, abstractmethod from sslyze.cli import CompletedServerScan from sslyze.cli import FailedServerScan from sslyze.server_connectivity import ServerConnectivityInfo class OutputGenerator(object): """The abstract class output generator classes should inherit from. Each method must be implemented and will be called in the order below, as the SSLyze CLI runs scans. """ __metaclass__ = ABCMeta def __init__(self, file_to): # type: (file) -> None self._file_to = file_to def close(self): # type: (None) -> None self._file_to.close() @abstractmethod def command_line_parsed(self, available_plugins, args_command_list): - pass + """The CLI was just started and successfully parsed the command line. + """ @abstractmethod def server_connectivity_test_failed(self, failed_scan): # type: (FailedServerScan) -> None - pass + """The CLI found a server that it could not connect to; no scans will be performed against this server. + """ @abstractmethod def server_connectivity_test_succeeded(self, server_connectivity_info): # type: (ServerConnectivityInfo) -> None - pass + """The CLI found a server that it was able to connect to; scans will be run against this server. + """ @abstractmethod def scans_started(self): # type: (None) -> None - pass + """The CLI has finished testing connectivity with the supplied servers and will now start the scans. + """ @abstractmethod def server_scan_completed(self, server_scan_result): # type: (CompletedServerScan) -> None - pass + """The CLI has finished scanning one single server. + """ @abstractmethod def scans_completed(self, total_scan_time): # type: (float) -> None - pass + """The CLI has finished scanning all the supplied servers and will now exit. + """
Document how output generators work
## Code Before: from abc import ABCMeta, abstractmethod from sslyze.cli import CompletedServerScan from sslyze.cli import FailedServerScan from sslyze.server_connectivity import ServerConnectivityInfo class OutputGenerator(object): """The abstract class output generator classes should inherit from. Each method must be implemented and will be called in the order below, as the SSLyze CLI runs scans. """ __metaclass__ = ABCMeta def __init__(self, file_to): # type: (file) -> None self._file_to = file_to def close(self): # type: (None) -> None self._file_to.close() @abstractmethod def command_line_parsed(self, available_plugins, args_command_list): pass @abstractmethod def server_connectivity_test_failed(self, failed_scan): # type: (FailedServerScan) -> None pass @abstractmethod def server_connectivity_test_succeeded(self, server_connectivity_info): # type: (ServerConnectivityInfo) -> None pass @abstractmethod def scans_started(self): # type: (None) -> None pass @abstractmethod def server_scan_completed(self, server_scan_result): # type: (CompletedServerScan) -> None pass @abstractmethod def scans_completed(self, total_scan_time): # type: (float) -> None pass ## Instruction: Document how output generators work ## Code After: from abc import ABCMeta, abstractmethod from sslyze.cli import CompletedServerScan from sslyze.cli import FailedServerScan from sslyze.server_connectivity import ServerConnectivityInfo class OutputGenerator(object): """The abstract class output generator classes should inherit from. Each method must be implemented and will be called in the order below, as the SSLyze CLI runs scans. """ __metaclass__ = ABCMeta def __init__(self, file_to): # type: (file) -> None self._file_to = file_to def close(self): # type: (None) -> None self._file_to.close() @abstractmethod def command_line_parsed(self, available_plugins, args_command_list): """The CLI was just started and successfully parsed the command line. 
""" @abstractmethod def server_connectivity_test_failed(self, failed_scan): # type: (FailedServerScan) -> None """The CLI found a server that it could not connect to; no scans will be performed against this server. """ @abstractmethod def server_connectivity_test_succeeded(self, server_connectivity_info): # type: (ServerConnectivityInfo) -> None """The CLI found a server that it was able to connect to; scans will be run against this server. """ @abstractmethod def scans_started(self): # type: (None) -> None """The CLI has finished testing connectivity with the supplied servers and will now start the scans. """ @abstractmethod def server_scan_completed(self, server_scan_result): # type: (CompletedServerScan) -> None """The CLI has finished scanning one single server. """ @abstractmethod def scans_completed(self, total_scan_time): # type: (float) -> None """The CLI has finished scanning all the supplied servers and will now exit. """
from abc import ABCMeta, abstractmethod from sslyze.cli import CompletedServerScan from sslyze.cli import FailedServerScan from sslyze.server_connectivity import ServerConnectivityInfo class OutputGenerator(object): """The abstract class output generator classes should inherit from. Each method must be implemented and will be called in the order below, as the SSLyze CLI runs scans. """ __metaclass__ = ABCMeta def __init__(self, file_to): # type: (file) -> None self._file_to = file_to def close(self): # type: (None) -> None self._file_to.close() @abstractmethod def command_line_parsed(self, available_plugins, args_command_list): - pass + """The CLI was just started and successfully parsed the command line. + """ @abstractmethod def server_connectivity_test_failed(self, failed_scan): # type: (FailedServerScan) -> None - pass + """The CLI found a server that it could not connect to; no scans will be performed against this server. + """ @abstractmethod def server_connectivity_test_succeeded(self, server_connectivity_info): # type: (ServerConnectivityInfo) -> None - pass + """The CLI found a server that it was able to connect to; scans will be run against this server. + """ @abstractmethod def scans_started(self): # type: (None) -> None - pass + """The CLI has finished testing connectivity with the supplied servers and will now start the scans. + """ @abstractmethod def server_scan_completed(self, server_scan_result): # type: (CompletedServerScan) -> None - pass + """The CLI has finished scanning one single server. + """ @abstractmethod def scans_completed(self, total_scan_time): # type: (float) -> None - pass + """The CLI has finished scanning all the supplied servers and will now exit. + """
8be6b576007f89fad50ea1dfacad46614c0a97c5
apps/domain/src/main/core/exceptions.py
apps/domain/src/main/core/exceptions.py
"""Specific PyGrid exceptions.""" class PyGridError(Exception): def __init__(self, message): super().__init__(message) class AuthorizationError(PyGridError): def __init__(self, message=""): if not message: message = "User is not authorized for this operation!" super().__init__(message) class RoleNotFoundError(PyGridError): def __init__(self): message = "Role ID not found!" super().__init__(message) class UserNotFoundError(PyGridError): def __init__(self): message = "User not found!" super().__init__(message) class GroupNotFoundError(PyGridError): def __init__(self): message = "Group ID not found!" super().__init__(message) class InvalidRequestKeyError(PyGridError): def __init__(self): message = "Invalid request key!" super().__init__(message) class InvalidCredentialsError(PyGridError): def __init__(self): message = "Invalid credentials!" super().__init__(message) class MissingRequestKeyError(PyGridError): def __init__(self, message=""): if not message: message = "Missing request key!" super().__init__(message)
"""Specific PyGrid exceptions.""" class PyGridError(Exception): def __init__(self, message): super().__init__(message) class AuthorizationError(PyGridError): def __init__(self, message=""): if not message: message = "User is not authorized for this operation!" super().__init__(message) class RoleNotFoundError(PyGridError): def __init__(self): message = "Role ID not found!" super().__init__(message) class UserNotFoundError(PyGridError): def __init__(self): message = "User not found!" super().__init__(message) class EnvironmentNotFoundError(PyGridError): def __init__(self): message = "Environment not found!" super().__init__(message) class GroupNotFoundError(PyGridError): def __init__(self): message = "Group ID not found!" super().__init__(message) class InvalidRequestKeyError(PyGridError): def __init__(self): message = "Invalid request key!" super().__init__(message) class InvalidCredentialsError(PyGridError): def __init__(self): message = "Invalid credentials!" super().__init__(message) class MissingRequestKeyError(PyGridError): def __init__(self, message=""): if not message: message = "Missing request key!" super().__init__(message)
ADD new exception -> EnvironmentNotFound!
ADD new exception -> EnvironmentNotFound!
Python
apache-2.0
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
"""Specific PyGrid exceptions.""" class PyGridError(Exception): def __init__(self, message): super().__init__(message) class AuthorizationError(PyGridError): def __init__(self, message=""): if not message: message = "User is not authorized for this operation!" super().__init__(message) class RoleNotFoundError(PyGridError): def __init__(self): message = "Role ID not found!" super().__init__(message) class UserNotFoundError(PyGridError): def __init__(self): message = "User not found!" super().__init__(message) + class EnvironmentNotFoundError(PyGridError): + def __init__(self): + message = "Environment not found!" + super().__init__(message) + + class GroupNotFoundError(PyGridError): def __init__(self): message = "Group ID not found!" super().__init__(message) class InvalidRequestKeyError(PyGridError): def __init__(self): message = "Invalid request key!" super().__init__(message) class InvalidCredentialsError(PyGridError): def __init__(self): message = "Invalid credentials!" super().__init__(message) class MissingRequestKeyError(PyGridError): def __init__(self, message=""): if not message: message = "Missing request key!" super().__init__(message)
ADD new exception -> EnvironmentNotFound!
## Code Before: """Specific PyGrid exceptions.""" class PyGridError(Exception): def __init__(self, message): super().__init__(message) class AuthorizationError(PyGridError): def __init__(self, message=""): if not message: message = "User is not authorized for this operation!" super().__init__(message) class RoleNotFoundError(PyGridError): def __init__(self): message = "Role ID not found!" super().__init__(message) class UserNotFoundError(PyGridError): def __init__(self): message = "User not found!" super().__init__(message) class GroupNotFoundError(PyGridError): def __init__(self): message = "Group ID not found!" super().__init__(message) class InvalidRequestKeyError(PyGridError): def __init__(self): message = "Invalid request key!" super().__init__(message) class InvalidCredentialsError(PyGridError): def __init__(self): message = "Invalid credentials!" super().__init__(message) class MissingRequestKeyError(PyGridError): def __init__(self, message=""): if not message: message = "Missing request key!" super().__init__(message) ## Instruction: ADD new exception -> EnvironmentNotFound! ## Code After: """Specific PyGrid exceptions.""" class PyGridError(Exception): def __init__(self, message): super().__init__(message) class AuthorizationError(PyGridError): def __init__(self, message=""): if not message: message = "User is not authorized for this operation!" super().__init__(message) class RoleNotFoundError(PyGridError): def __init__(self): message = "Role ID not found!" super().__init__(message) class UserNotFoundError(PyGridError): def __init__(self): message = "User not found!" super().__init__(message) class EnvironmentNotFoundError(PyGridError): def __init__(self): message = "Environment not found!" super().__init__(message) class GroupNotFoundError(PyGridError): def __init__(self): message = "Group ID not found!" super().__init__(message) class InvalidRequestKeyError(PyGridError): def __init__(self): message = "Invalid request key!" 
super().__init__(message) class InvalidCredentialsError(PyGridError): def __init__(self): message = "Invalid credentials!" super().__init__(message) class MissingRequestKeyError(PyGridError): def __init__(self, message=""): if not message: message = "Missing request key!" super().__init__(message)
"""Specific PyGrid exceptions.""" class PyGridError(Exception): def __init__(self, message): super().__init__(message) class AuthorizationError(PyGridError): def __init__(self, message=""): if not message: message = "User is not authorized for this operation!" super().__init__(message) class RoleNotFoundError(PyGridError): def __init__(self): message = "Role ID not found!" super().__init__(message) class UserNotFoundError(PyGridError): def __init__(self): message = "User not found!" super().__init__(message) + class EnvironmentNotFoundError(PyGridError): + def __init__(self): + message = "Environment not found!" + super().__init__(message) + + class GroupNotFoundError(PyGridError): def __init__(self): message = "Group ID not found!" super().__init__(message) class InvalidRequestKeyError(PyGridError): def __init__(self): message = "Invalid request key!" super().__init__(message) class InvalidCredentialsError(PyGridError): def __init__(self): message = "Invalid credentials!" super().__init__(message) class MissingRequestKeyError(PyGridError): def __init__(self, message=""): if not message: message = "Missing request key!" super().__init__(message)
85d0bc9fbb20daeff9aa48a83be1823fa346cb9c
tests/test_helpers.py
tests/test_helpers.py
from __future__ import unicode_literals import pytest import types from rakuten_ws.webservice import RakutenWebService from rakuten_ws.base import RakutenAPIResponse @pytest.mark.online def test_response(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") assert isinstance(response, RakutenAPIResponse) @pytest.mark.online def test_single_item(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") item = response['Items'][0] assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa @pytest.mark.online def test_item_pages(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") items = response.pages() # search should also allow to retrieve all the available responses # within a generator assert isinstance(items, types.GeneratorType) # The iteration should switch to the next page assert items.next()['page'] == 1 assert items.next()['page'] == 2
from __future__ import unicode_literals import pytest import types from rakuten_ws.webservice import RakutenWebService from rakuten_ws.base import RakutenAPIResponse @pytest.mark.online def test_response(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") assert isinstance(response, RakutenAPIResponse) @pytest.mark.online def test_single_item(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") item = response['Items'][0] assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa @pytest.mark.online def test_item_pages(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") items = response.pages() # search should also allow to retrieve all the available responses # within a generator assert isinstance(items, types.GeneratorType) # The iteration should switch to the next page assert next(items)['page'] == 1 assert next(items)['page'] == 2
Fix tests for Python 3
Fix tests for Python 3
Python
mit
alexandriagroup/rakuten-ws
from __future__ import unicode_literals import pytest import types from rakuten_ws.webservice import RakutenWebService from rakuten_ws.base import RakutenAPIResponse @pytest.mark.online def test_response(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") assert isinstance(response, RakutenAPIResponse) @pytest.mark.online def test_single_item(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") item = response['Items'][0] assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa @pytest.mark.online def test_item_pages(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") items = response.pages() # search should also allow to retrieve all the available responses # within a generator assert isinstance(items, types.GeneratorType) # The iteration should switch to the next page - assert items.next()['page'] == 1 + assert next(items)['page'] == 1 - assert items.next()['page'] == 2 + assert next(items)['page'] == 2
Fix tests for Python 3
## Code Before: from __future__ import unicode_literals import pytest import types from rakuten_ws.webservice import RakutenWebService from rakuten_ws.base import RakutenAPIResponse @pytest.mark.online def test_response(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") assert isinstance(response, RakutenAPIResponse) @pytest.mark.online def test_single_item(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") item = response['Items'][0] assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa @pytest.mark.online def test_item_pages(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") items = response.pages() # search should also allow to retrieve all the available responses # within a generator assert isinstance(items, types.GeneratorType) # The iteration should switch to the next page assert items.next()['page'] == 1 assert items.next()['page'] == 2 ## Instruction: Fix tests for Python 3 ## Code After: from __future__ import unicode_literals import pytest import types from rakuten_ws.webservice import RakutenWebService from rakuten_ws.base import RakutenAPIResponse @pytest.mark.online def test_response(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") assert isinstance(response, RakutenAPIResponse) @pytest.mark.online def test_single_item(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") item = response['Items'][0] assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa @pytest.mark.online def test_item_pages(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") items = response.pages() # search should also allow to retrieve all the available responses # within a generator assert isinstance(items, types.GeneratorType) # The 
iteration should switch to the next page assert next(items)['page'] == 1 assert next(items)['page'] == 2
from __future__ import unicode_literals import pytest import types from rakuten_ws.webservice import RakutenWebService from rakuten_ws.base import RakutenAPIResponse @pytest.mark.online def test_response(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") assert isinstance(response, RakutenAPIResponse) @pytest.mark.online def test_single_item(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") item = response['Items'][0] assert item['itemName'] == 'NARUTO THE BEST (期間生産限定盤) [ (アニメーション) ]' # noqa @pytest.mark.online def test_item_pages(credentials): ws = RakutenWebService(**credentials) response = ws.ichiba.item.search(keyword="Naruto") items = response.pages() # search should also allow to retrieve all the available responses # within a generator assert isinstance(items, types.GeneratorType) # The iteration should switch to the next page - assert items.next()['page'] == 1 ? ------ + assert next(items)['page'] == 1 ? +++++ - assert items.next()['page'] == 2 ? ------ + assert next(items)['page'] == 2 ? +++++