commit
stringlengths
40
40
old_file
stringlengths
4
106
new_file
stringlengths
4
106
old_contents
stringlengths
10
2.94k
new_contents
stringlengths
21
2.95k
subject
stringlengths
16
444
message
stringlengths
17
2.63k
lang
stringclasses
1 value
license
stringclasses
13 values
repos
stringlengths
7
43k
ndiff
stringlengths
52
3.31k
instruction
stringlengths
16
444
content
stringlengths
133
4.32k
diff
stringlengths
49
3.61k
ee53ec51d98802bf0bc55e70c39cc0918f2bb274
icekit/plugins/blog_post/content_plugins.py
icekit/plugins/blog_post/content_plugins.py
from django.apps import apps from django.conf import settings from django.db.models.loading import get_model from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool default_blog_model = 'blog_tools.BlogPost' icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model) BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1)) if icekit_blog_model != default_blog_model: @plugin_pool.register class BlogPostPlugin(ContentPlugin): model = get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.PostItem')) category = _('Blog') render_template = 'icekit/plugins/post/default.html' raw_id_fields = ['post', ]
from django.apps import apps from django.conf import settings from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool default_blog_model = 'blog_tools.BlogPost' icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model) BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1)) if icekit_blog_model != default_blog_model: @plugin_pool.register class BlogPostPlugin(ContentPlugin): model = apps.get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.BlogPostItem')) category = _('Blog') render_template = 'icekit/plugins/post/default.html' raw_id_fields = ['post', ]
Update Blog model and content item matching
Update Blog model and content item matching
Python
mit
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
from django.apps import apps from django.conf import settings - from django.db.models.loading import get_model from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool default_blog_model = 'blog_tools.BlogPost' icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model) BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1)) if icekit_blog_model != default_blog_model: @plugin_pool.register class BlogPostPlugin(ContentPlugin): - model = get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.PostItem')) + model = apps.get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.BlogPostItem')) category = _('Blog') render_template = 'icekit/plugins/post/default.html' raw_id_fields = ['post', ]
Update Blog model and content item matching
## Code Before: from django.apps import apps from django.conf import settings from django.db.models.loading import get_model from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool default_blog_model = 'blog_tools.BlogPost' icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model) BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1)) if icekit_blog_model != default_blog_model: @plugin_pool.register class BlogPostPlugin(ContentPlugin): model = get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.PostItem')) category = _('Blog') render_template = 'icekit/plugins/post/default.html' raw_id_fields = ['post', ] ## Instruction: Update Blog model and content item matching ## Code After: from django.apps import apps from django.conf import settings from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool default_blog_model = 'blog_tools.BlogPost' icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model) BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1)) if icekit_blog_model != default_blog_model: @plugin_pool.register class BlogPostPlugin(ContentPlugin): model = apps.get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.BlogPostItem')) category = _('Blog') render_template = 'icekit/plugins/post/default.html' raw_id_fields = ['post', ]
from django.apps import apps from django.conf import settings - from django.db.models.loading import get_model from django.utils.translation import ugettext_lazy as _ from fluent_contents.extensions import ContentPlugin, plugin_pool default_blog_model = 'blog_tools.BlogPost' icekit_blog_model = getattr(settings, 'ICEKIT_BLOG_MODEL', default_blog_model) BLOG_MODEL = apps.get_model(*icekit_blog_model.rsplit('.', 1)) if icekit_blog_model != default_blog_model: @plugin_pool.register class BlogPostPlugin(ContentPlugin): - model = get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.PostItem')) + model = apps.get_model(getattr(settings, 'ICEKIT_BLOG_CONTENT_ITEM', 'blog_post.BlogPostItem')) ? +++++ ++++ category = _('Blog') render_template = 'icekit/plugins/post/default.html' raw_id_fields = ['post', ]
6cfb0ca69b43784d495920865f0a250f7d16ff84
trump/extensions/loader.py
trump/extensions/loader.py
from imp import find_module, load_module import os class SourceExtension(object): def __init__(self, mod): self.initialized = False self.mod = mod self.renew = mod.renew self.Source = mod.Source def __call__(self, _ses, **kwargs): if not self.initialized or self.renew: self.fetcher = self.Source(_ses, **kwargs) self.initialized = True return self.fetcher.getseries(_ses, **kwargs) sources = {} curdir = os.path.dirname(os.path.realpath(__file__)) extension_names = os.listdir(os.path.join(curdir,'source')) for name in extension_names: ext = find_module(name, ['source']) mod = load_module(name, *ext) sources[mod.stype] = SourceExtension(mod)
from imp import find_module, load_module import os class SourceExtension(object): def __init__(self, mod): self.initialized = False self.mod = mod self.renew = mod.renew self.Source = mod.Source def __call__(self, _ses, **kwargs): if not self.initialized or self.renew: self.fetcher = self.Source(_ses, **kwargs) self.initialized = True return self.fetcher.getseries(_ses, **kwargs) sources = {} curdir = os.path.dirname(os.path.realpath(__file__)) sourcedir = os.path.join(curdir,'source') extension_names = os.listdir(sourcedir) for name in extension_names: ext = find_module(name, [sourcedir]) mod = load_module(name, *ext) sources[mod.stype] = SourceExtension(mod)
Use full path to find mods
Use full path to find mods
Python
bsd-3-clause
jnmclarty/trump,Equitable/trump
from imp import find_module, load_module import os class SourceExtension(object): def __init__(self, mod): self.initialized = False self.mod = mod self.renew = mod.renew self.Source = mod.Source def __call__(self, _ses, **kwargs): if not self.initialized or self.renew: self.fetcher = self.Source(_ses, **kwargs) self.initialized = True return self.fetcher.getseries(_ses, **kwargs) sources = {} curdir = os.path.dirname(os.path.realpath(__file__)) + sourcedir = os.path.join(curdir,'source') - extension_names = os.listdir(os.path.join(curdir,'source')) + extension_names = os.listdir(sourcedir) for name in extension_names: - ext = find_module(name, ['source']) + ext = find_module(name, [sourcedir]) mod = load_module(name, *ext) sources[mod.stype] = SourceExtension(mod)
Use full path to find mods
## Code Before: from imp import find_module, load_module import os class SourceExtension(object): def __init__(self, mod): self.initialized = False self.mod = mod self.renew = mod.renew self.Source = mod.Source def __call__(self, _ses, **kwargs): if not self.initialized or self.renew: self.fetcher = self.Source(_ses, **kwargs) self.initialized = True return self.fetcher.getseries(_ses, **kwargs) sources = {} curdir = os.path.dirname(os.path.realpath(__file__)) extension_names = os.listdir(os.path.join(curdir,'source')) for name in extension_names: ext = find_module(name, ['source']) mod = load_module(name, *ext) sources[mod.stype] = SourceExtension(mod) ## Instruction: Use full path to find mods ## Code After: from imp import find_module, load_module import os class SourceExtension(object): def __init__(self, mod): self.initialized = False self.mod = mod self.renew = mod.renew self.Source = mod.Source def __call__(self, _ses, **kwargs): if not self.initialized or self.renew: self.fetcher = self.Source(_ses, **kwargs) self.initialized = True return self.fetcher.getseries(_ses, **kwargs) sources = {} curdir = os.path.dirname(os.path.realpath(__file__)) sourcedir = os.path.join(curdir,'source') extension_names = os.listdir(sourcedir) for name in extension_names: ext = find_module(name, [sourcedir]) mod = load_module(name, *ext) sources[mod.stype] = SourceExtension(mod)
from imp import find_module, load_module import os class SourceExtension(object): def __init__(self, mod): self.initialized = False self.mod = mod self.renew = mod.renew self.Source = mod.Source def __call__(self, _ses, **kwargs): if not self.initialized or self.renew: self.fetcher = self.Source(_ses, **kwargs) self.initialized = True return self.fetcher.getseries(_ses, **kwargs) sources = {} curdir = os.path.dirname(os.path.realpath(__file__)) + sourcedir = os.path.join(curdir,'source') - extension_names = os.listdir(os.path.join(curdir,'source')) + extension_names = os.listdir(sourcedir) for name in extension_names: - ext = find_module(name, ['source']) ? - ^ + ext = find_module(name, [sourcedir]) ? ^^^ mod = load_module(name, *ext) sources[mod.stype] = SourceExtension(mod)
ae6ed4e7dc6510637d322eb6403f43b9d4aa5d25
karteikarten/helpers/exporters.py
karteikarten/helpers/exporters.py
from karteikarten.models import Card class AnkiExporter(object): ''' Exports cards to ANKI Text File. ''' @staticmethod def export(cardset): result = u'front\tback\n' for card in Card.objects.filter(parent_card_set = cardset): result += card.front.replace('\n', '<br />') + '\t' + \ card.back.replace('\n', '<br />') + '\n' return result @staticmethod def getExtension(): return '.txt'
from karteikarten.models import Card class AnkiExporter(object): ''' Exports cards to ANKI Text File. ''' @staticmethod def export(cardset): result = u'front\tback\n' for card in Card.objects.filter(parent_card_set = cardset): result += card.front.replace('\n', '<br />').replace('\r', '') + '\t' + \ card.back.replace('\n', '<br />').replace('\r', '') + '\n' return result @staticmethod def getExtension(): return '.txt'
Remove carriage returns in Anki exporter
Remove carriage returns in Anki exporter
Python
agpl-3.0
meoblast001/kksystem,meoblast001/kksystem,meoblast001/kksystem
from karteikarten.models import Card class AnkiExporter(object): ''' Exports cards to ANKI Text File. ''' @staticmethod def export(cardset): result = u'front\tback\n' for card in Card.objects.filter(parent_card_set = cardset): - result += card.front.replace('\n', '<br />') + '\t' + \ + result += card.front.replace('\n', '<br />').replace('\r', '') + '\t' + \ - card.back.replace('\n', '<br />') + '\n' + card.back.replace('\n', '<br />').replace('\r', '') + '\n' return result @staticmethod def getExtension(): return '.txt'
Remove carriage returns in Anki exporter
## Code Before: from karteikarten.models import Card class AnkiExporter(object): ''' Exports cards to ANKI Text File. ''' @staticmethod def export(cardset): result = u'front\tback\n' for card in Card.objects.filter(parent_card_set = cardset): result += card.front.replace('\n', '<br />') + '\t' + \ card.back.replace('\n', '<br />') + '\n' return result @staticmethod def getExtension(): return '.txt' ## Instruction: Remove carriage returns in Anki exporter ## Code After: from karteikarten.models import Card class AnkiExporter(object): ''' Exports cards to ANKI Text File. ''' @staticmethod def export(cardset): result = u'front\tback\n' for card in Card.objects.filter(parent_card_set = cardset): result += card.front.replace('\n', '<br />').replace('\r', '') + '\t' + \ card.back.replace('\n', '<br />').replace('\r', '') + '\n' return result @staticmethod def getExtension(): return '.txt'
from karteikarten.models import Card class AnkiExporter(object): ''' Exports cards to ANKI Text File. ''' @staticmethod def export(cardset): result = u'front\tback\n' for card in Card.objects.filter(parent_card_set = cardset): - result += card.front.replace('\n', '<br />') + '\t' + \ + result += card.front.replace('\n', '<br />').replace('\r', '') + '\t' + \ ? ++++++++++++++++++ - card.back.replace('\n', '<br />') + '\n' + card.back.replace('\n', '<br />').replace('\r', '') + '\n' ? ++++++++++++++++++ return result @staticmethod def getExtension(): return '.txt'
8c1b7f8a5a7403e464938aa0aa6876557ec6a2b3
daphne/server.py
daphne/server.py
import time from twisted.internet import reactor from .http_protocol import HTTPFactory class Server(object): def __init__(self, channel_layer, host="127.0.0.1", port=8000): self.channel_layer = channel_layer self.host = host self.port = port def run(self): self.factory = HTTPFactory(self.channel_layer) reactor.listenTCP(self.port, self.factory, interface=self.host) reactor.callInThread(self.backend_reader) reactor.run() def backend_reader(self): """ Run in a separate thread; reads messages from the backend. """ while True: channels = self.factory.reply_channels() # Quit if reactor is stopping if not reactor.running: return # Don't do anything if there's no channels to listen on if channels: channel, message = self.channel_layer.receive_many(channels, block=True) else: time.sleep(0.1) continue # Wait around if there's nothing received if channel is None: time.sleep(0.05) continue # Deal with the message self.factory.dispatch_reply(channel, message)
import time from twisted.internet import reactor from .http_protocol import HTTPFactory class Server(object): def __init__(self, channel_layer, host="127.0.0.1", port=8000, signal_handlers=True): self.channel_layer = channel_layer self.host = host self.port = port self.signal_handlers = signal_handlers def run(self): self.factory = HTTPFactory(self.channel_layer) reactor.listenTCP(self.port, self.factory, interface=self.host) reactor.callInThread(self.backend_reader) reactor.run(installSignalHandlers=self.signal_handlers) def backend_reader(self): """ Run in a separate thread; reads messages from the backend. """ while True: channels = self.factory.reply_channels() # Quit if reactor is stopping if not reactor.running: return # Don't do anything if there's no channels to listen on if channels: channel, message = self.channel_layer.receive_many(channels, block=True) else: time.sleep(0.1) continue # Wait around if there's nothing received if channel is None: time.sleep(0.05) continue # Deal with the message self.factory.dispatch_reply(channel, message)
Allow signal handlers to be disabled to run in subthread
Allow signal handlers to be disabled to run in subthread
Python
bsd-3-clause
django/daphne,maikhoepfel/daphne
import time from twisted.internet import reactor from .http_protocol import HTTPFactory class Server(object): - def __init__(self, channel_layer, host="127.0.0.1", port=8000): + def __init__(self, channel_layer, host="127.0.0.1", port=8000, signal_handlers=True): self.channel_layer = channel_layer self.host = host self.port = port + self.signal_handlers = signal_handlers def run(self): self.factory = HTTPFactory(self.channel_layer) reactor.listenTCP(self.port, self.factory, interface=self.host) reactor.callInThread(self.backend_reader) - reactor.run() + reactor.run(installSignalHandlers=self.signal_handlers) def backend_reader(self): """ Run in a separate thread; reads messages from the backend. """ while True: channels = self.factory.reply_channels() # Quit if reactor is stopping if not reactor.running: return # Don't do anything if there's no channels to listen on if channels: channel, message = self.channel_layer.receive_many(channels, block=True) else: time.sleep(0.1) continue # Wait around if there's nothing received if channel is None: time.sleep(0.05) continue # Deal with the message self.factory.dispatch_reply(channel, message)
Allow signal handlers to be disabled to run in subthread
## Code Before: import time from twisted.internet import reactor from .http_protocol import HTTPFactory class Server(object): def __init__(self, channel_layer, host="127.0.0.1", port=8000): self.channel_layer = channel_layer self.host = host self.port = port def run(self): self.factory = HTTPFactory(self.channel_layer) reactor.listenTCP(self.port, self.factory, interface=self.host) reactor.callInThread(self.backend_reader) reactor.run() def backend_reader(self): """ Run in a separate thread; reads messages from the backend. """ while True: channels = self.factory.reply_channels() # Quit if reactor is stopping if not reactor.running: return # Don't do anything if there's no channels to listen on if channels: channel, message = self.channel_layer.receive_many(channels, block=True) else: time.sleep(0.1) continue # Wait around if there's nothing received if channel is None: time.sleep(0.05) continue # Deal with the message self.factory.dispatch_reply(channel, message) ## Instruction: Allow signal handlers to be disabled to run in subthread ## Code After: import time from twisted.internet import reactor from .http_protocol import HTTPFactory class Server(object): def __init__(self, channel_layer, host="127.0.0.1", port=8000, signal_handlers=True): self.channel_layer = channel_layer self.host = host self.port = port self.signal_handlers = signal_handlers def run(self): self.factory = HTTPFactory(self.channel_layer) reactor.listenTCP(self.port, self.factory, interface=self.host) reactor.callInThread(self.backend_reader) reactor.run(installSignalHandlers=self.signal_handlers) def backend_reader(self): """ Run in a separate thread; reads messages from the backend. 
""" while True: channels = self.factory.reply_channels() # Quit if reactor is stopping if not reactor.running: return # Don't do anything if there's no channels to listen on if channels: channel, message = self.channel_layer.receive_many(channels, block=True) else: time.sleep(0.1) continue # Wait around if there's nothing received if channel is None: time.sleep(0.05) continue # Deal with the message self.factory.dispatch_reply(channel, message)
import time from twisted.internet import reactor from .http_protocol import HTTPFactory class Server(object): - def __init__(self, channel_layer, host="127.0.0.1", port=8000): + def __init__(self, channel_layer, host="127.0.0.1", port=8000, signal_handlers=True): ? ++++++++++++++++++++++ self.channel_layer = channel_layer self.host = host self.port = port + self.signal_handlers = signal_handlers def run(self): self.factory = HTTPFactory(self.channel_layer) reactor.listenTCP(self.port, self.factory, interface=self.host) reactor.callInThread(self.backend_reader) - reactor.run() + reactor.run(installSignalHandlers=self.signal_handlers) def backend_reader(self): """ Run in a separate thread; reads messages from the backend. """ while True: channels = self.factory.reply_channels() # Quit if reactor is stopping if not reactor.running: return # Don't do anything if there's no channels to listen on if channels: channel, message = self.channel_layer.receive_many(channels, block=True) else: time.sleep(0.1) continue # Wait around if there's nothing received if channel is None: time.sleep(0.05) continue # Deal with the message self.factory.dispatch_reply(channel, message)
4375e1d72832f9672eaba87019be9b769eb69e78
alg_hash_string.py
alg_hash_string.py
from __future__ import print_function def hash_str(a_str, table_size): """Hash a string by the folding method. - Get ordinal number for each char. - Sum all of the ordinal numbers. - Return the remainder of the sum with table_size. """ sum = 0 for c in a_str: sum += ord(c) return sum % table_size def weighted_hash_str(a_str, table_size): """Weighted-Hash a string by the folding method. - Get ordinal number for each char. - Weighted-sum all of the ordinal numbers. - Return the remainder of the sum with table_size. """ sum = 0 for i, c in enumerate(a_str): sum += (i + 1) * ord(c) return sum % table_size def main(): a_str = 'cat' print('For hash_str(): {}'.format(hash_str(a_str, 11))) print('For weighted_hash_str(): {}' .format(weighted_hash_str(a_str, 11))) if __name__ == '__main__': main()
from __future__ import absolute_import from __future__ import division from __future__ import print_function def hash_str(a_str, table_size): """Hash a string by the folding method. - Get ordinal number for each char. - Sum all of the ordinal numbers. - Return the remainder of the sum with table_size. """ sum = 0 for c in a_str: sum += ord(c) return sum % table_size def weighted_hash_str(a_str, table_size): """Weighted-Hash a string by the folding method. - Get ordinal number for each char. - Weighted-sum all of the ordinal numbers. - Return the remainder of the sum with table_size. """ sum = 0 for i, c in enumerate(a_str): sum += (i + 1) * ord(c) return sum % table_size def main(): a_str = 'cat' print('For hash_str(): {}'.format(hash_str(a_str, 11))) print('For weighted_hash_str(): {}' .format(weighted_hash_str(a_str, 11))) if __name__ == '__main__': main()
Add importing absolute_import & division from Prague
Add importing absolute_import & division from Prague
Python
bsd-2-clause
bowen0701/algorithms_data_structures
+ from __future__ import absolute_import + from __future__ import division from __future__ import print_function def hash_str(a_str, table_size): """Hash a string by the folding method. - Get ordinal number for each char. - Sum all of the ordinal numbers. - Return the remainder of the sum with table_size. """ sum = 0 for c in a_str: sum += ord(c) return sum % table_size def weighted_hash_str(a_str, table_size): """Weighted-Hash a string by the folding method. - Get ordinal number for each char. - Weighted-sum all of the ordinal numbers. - Return the remainder of the sum with table_size. """ sum = 0 for i, c in enumerate(a_str): sum += (i + 1) * ord(c) return sum % table_size def main(): a_str = 'cat' print('For hash_str(): {}'.format(hash_str(a_str, 11))) print('For weighted_hash_str(): {}' .format(weighted_hash_str(a_str, 11))) if __name__ == '__main__': main()
Add importing absolute_import & division from Prague
## Code Before: from __future__ import print_function def hash_str(a_str, table_size): """Hash a string by the folding method. - Get ordinal number for each char. - Sum all of the ordinal numbers. - Return the remainder of the sum with table_size. """ sum = 0 for c in a_str: sum += ord(c) return sum % table_size def weighted_hash_str(a_str, table_size): """Weighted-Hash a string by the folding method. - Get ordinal number for each char. - Weighted-sum all of the ordinal numbers. - Return the remainder of the sum with table_size. """ sum = 0 for i, c in enumerate(a_str): sum += (i + 1) * ord(c) return sum % table_size def main(): a_str = 'cat' print('For hash_str(): {}'.format(hash_str(a_str, 11))) print('For weighted_hash_str(): {}' .format(weighted_hash_str(a_str, 11))) if __name__ == '__main__': main() ## Instruction: Add importing absolute_import & division from Prague ## Code After: from __future__ import absolute_import from __future__ import division from __future__ import print_function def hash_str(a_str, table_size): """Hash a string by the folding method. - Get ordinal number for each char. - Sum all of the ordinal numbers. - Return the remainder of the sum with table_size. """ sum = 0 for c in a_str: sum += ord(c) return sum % table_size def weighted_hash_str(a_str, table_size): """Weighted-Hash a string by the folding method. - Get ordinal number for each char. - Weighted-sum all of the ordinal numbers. - Return the remainder of the sum with table_size. """ sum = 0 for i, c in enumerate(a_str): sum += (i + 1) * ord(c) return sum % table_size def main(): a_str = 'cat' print('For hash_str(): {}'.format(hash_str(a_str, 11))) print('For weighted_hash_str(): {}' .format(weighted_hash_str(a_str, 11))) if __name__ == '__main__': main()
+ from __future__ import absolute_import + from __future__ import division from __future__ import print_function def hash_str(a_str, table_size): """Hash a string by the folding method. - Get ordinal number for each char. - Sum all of the ordinal numbers. - Return the remainder of the sum with table_size. """ sum = 0 for c in a_str: sum += ord(c) return sum % table_size def weighted_hash_str(a_str, table_size): """Weighted-Hash a string by the folding method. - Get ordinal number for each char. - Weighted-sum all of the ordinal numbers. - Return the remainder of the sum with table_size. """ sum = 0 for i, c in enumerate(a_str): sum += (i + 1) * ord(c) return sum % table_size def main(): a_str = 'cat' print('For hash_str(): {}'.format(hash_str(a_str, 11))) print('For weighted_hash_str(): {}' .format(weighted_hash_str(a_str, 11))) if __name__ == '__main__': main()
e0dac0a621cbeed615553e5c3544f9c49de96eb2
metadata/FrostNumberModel/hooks/pre-stage.py
metadata/FrostNumberModel/hooks/pre-stage.py
"""A hook for modifying parameter values read from the WMT client.""" import os import shutil from wmt.utils.hook import find_simulation_input_file from topoflow_utils.hook import assign_parameters file_list = [] def execute(env): """Perform pre-stage tasks for running a component. Parameters ---------- env : dict A dict of component parameter values from WMT. """ env['end_year'] = long(env['start_year']) + long(env['_run_duration']) env['fn_out_filename'] = 'frostnumber_output.dat' assign_parameters(env, file_list) for fname in file_list: src = find_simulation_input_file(env[fname]) shutil.copy(src, os.curdir)
"""A hook for modifying parameter values read from the WMT client.""" import os import shutil from wmt.utils.hook import find_simulation_input_file, yaml_dump from topoflow_utils.hook import assign_parameters file_list = [] def execute(env): """Perform pre-stage tasks for running a component. Parameters ---------- env : dict A dict of component parameter values from WMT. """ env['end_year'] = long(env['start_year']) + long(env['_run_duration']) - 1 env['fn_out_filename'] = 'frostnumber_output.dat' assign_parameters(env, file_list) for fname in file_list: src = find_simulation_input_file(env[fname]) shutil.copy(src, os.curdir) yaml_dump('_env.yaml', env)
Subtract 1 from model end_year
Subtract 1 from model end_year This matches the behavior of the FrostNumberModel BMI.
Python
mit
csdms/wmt-metadata
"""A hook for modifying parameter values read from the WMT client.""" import os import shutil - from wmt.utils.hook import find_simulation_input_file + from wmt.utils.hook import find_simulation_input_file, yaml_dump from topoflow_utils.hook import assign_parameters file_list = [] def execute(env): """Perform pre-stage tasks for running a component. Parameters ---------- env : dict A dict of component parameter values from WMT. """ - env['end_year'] = long(env['start_year']) + long(env['_run_duration']) + env['end_year'] = long(env['start_year']) + long(env['_run_duration']) - 1 env['fn_out_filename'] = 'frostnumber_output.dat' assign_parameters(env, file_list) + for fname in file_list: src = find_simulation_input_file(env[fname]) shutil.copy(src, os.curdir) + yaml_dump('_env.yaml', env) +
Subtract 1 from model end_year
## Code Before: """A hook for modifying parameter values read from the WMT client.""" import os import shutil from wmt.utils.hook import find_simulation_input_file from topoflow_utils.hook import assign_parameters file_list = [] def execute(env): """Perform pre-stage tasks for running a component. Parameters ---------- env : dict A dict of component parameter values from WMT. """ env['end_year'] = long(env['start_year']) + long(env['_run_duration']) env['fn_out_filename'] = 'frostnumber_output.dat' assign_parameters(env, file_list) for fname in file_list: src = find_simulation_input_file(env[fname]) shutil.copy(src, os.curdir) ## Instruction: Subtract 1 from model end_year ## Code After: """A hook for modifying parameter values read from the WMT client.""" import os import shutil from wmt.utils.hook import find_simulation_input_file, yaml_dump from topoflow_utils.hook import assign_parameters file_list = [] def execute(env): """Perform pre-stage tasks for running a component. Parameters ---------- env : dict A dict of component parameter values from WMT. """ env['end_year'] = long(env['start_year']) + long(env['_run_duration']) - 1 env['fn_out_filename'] = 'frostnumber_output.dat' assign_parameters(env, file_list) for fname in file_list: src = find_simulation_input_file(env[fname]) shutil.copy(src, os.curdir) yaml_dump('_env.yaml', env)
"""A hook for modifying parameter values read from the WMT client.""" import os import shutil - from wmt.utils.hook import find_simulation_input_file + from wmt.utils.hook import find_simulation_input_file, yaml_dump ? +++++++++++ from topoflow_utils.hook import assign_parameters file_list = [] def execute(env): """Perform pre-stage tasks for running a component. Parameters ---------- env : dict A dict of component parameter values from WMT. """ - env['end_year'] = long(env['start_year']) + long(env['_run_duration']) + env['end_year'] = long(env['start_year']) + long(env['_run_duration']) - 1 ? ++++ env['fn_out_filename'] = 'frostnumber_output.dat' assign_parameters(env, file_list) + for fname in file_list: src = find_simulation_input_file(env[fname]) shutil.copy(src, os.curdir) + + yaml_dump('_env.yaml', env)
3b4af27a5e6a13e384852d31108449aa60f30fa2
tools/gdb/gdb_chrome.py
tools/gdb/gdb_chrome.py
import gdb import webkit class String16Printer(webkit.StringPrinter): def to_string(self): return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p']) class GURLPrinter(webkit.StringPrinter): def to_string(self): return self.val['spec_'] def lookup_function(val): typ = str(val.type) if typ == 'string16': return String16Printer(val) elif typ == 'GURL': return GURLPrinter(val) return None gdb.pretty_printers.append(lookup_function)
import gdb import webkit class String16Printer(webkit.StringPrinter): def to_string(self): return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p']) class GURLPrinter(webkit.StringPrinter): def to_string(self): return self.val['spec_'] class FilePathPrinter(object): def __init__(self, val): self.val = val def to_string(self): return self.val['path_']['_M_dataplus']['_M_p'] def lookup_function(val): type_to_printer = { 'string16': String16Printer, 'GURL': GURLPrinter, 'FilePath': FilePathPrinter, } printer = type_to_printer.get(str(val.type), None) if printer: return printer(val) return None gdb.pretty_printers.append(lookup_function)
Add FilePath to the gdb pretty printers.
Add FilePath to the gdb pretty printers. Review URL: http://codereview.chromium.org/6621017 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@76956 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
ropik/chromium,adobe/chromium,gavinp/chromium,yitian134/chromium,gavinp/chromium,ropik/chromium,ropik/chromium,Crystalnix/house-of-life-chromium,adobe/chromium,ropik/chromium,ropik/chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,yitian134/chromium,ropik/chromium,yitian134/chromium,ropik/chromium,gavinp/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,Crystalnix/house-of-life-chromium,adobe/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,gavinp/chromium,adobe/chromium,ropik/chromium,Crystalnix/house-of-life-chromium,adobe/chromium,adobe/chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,yitian134/chromium,adobe/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,adobe/chromium,gavinp/chromium,ropik/chromium,yitian134/chromium,Crystalnix/house-of-life-chromium,adobe/chromium,Crystalnix/house-of-life-chromium,gavinp/chromium,adobe/chromium,gavinp/chromium
import gdb import webkit class String16Printer(webkit.StringPrinter): def to_string(self): return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p']) class GURLPrinter(webkit.StringPrinter): def to_string(self): return self.val['spec_'] + class FilePathPrinter(object): + def __init__(self, val): + self.val = val + + def to_string(self): + return self.val['path_']['_M_dataplus']['_M_p'] + + def lookup_function(val): - typ = str(val.type) - if typ == 'string16': - return String16Printer(val) - elif typ == 'GURL': + type_to_printer = { + 'string16': String16Printer, + 'GURL': GURLPrinter, + 'FilePath': FilePathPrinter, + } + + printer = type_to_printer.get(str(val.type), None) + if printer: - return GURLPrinter(val) + return printer(val) return None gdb.pretty_printers.append(lookup_function)
Add FilePath to the gdb pretty printers.
## Code Before: import gdb import webkit class String16Printer(webkit.StringPrinter): def to_string(self): return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p']) class GURLPrinter(webkit.StringPrinter): def to_string(self): return self.val['spec_'] def lookup_function(val): typ = str(val.type) if typ == 'string16': return String16Printer(val) elif typ == 'GURL': return GURLPrinter(val) return None gdb.pretty_printers.append(lookup_function) ## Instruction: Add FilePath to the gdb pretty printers. ## Code After: import gdb import webkit class String16Printer(webkit.StringPrinter): def to_string(self): return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p']) class GURLPrinter(webkit.StringPrinter): def to_string(self): return self.val['spec_'] class FilePathPrinter(object): def __init__(self, val): self.val = val def to_string(self): return self.val['path_']['_M_dataplus']['_M_p'] def lookup_function(val): type_to_printer = { 'string16': String16Printer, 'GURL': GURLPrinter, 'FilePath': FilePathPrinter, } printer = type_to_printer.get(str(val.type), None) if printer: return printer(val) return None gdb.pretty_printers.append(lookup_function)
import gdb import webkit class String16Printer(webkit.StringPrinter): def to_string(self): return webkit.ustring_to_string(self.val['_M_dataplus']['_M_p']) class GURLPrinter(webkit.StringPrinter): def to_string(self): return self.val['spec_'] + class FilePathPrinter(object): + def __init__(self, val): + self.val = val + + def to_string(self): + return self.val['path_']['_M_dataplus']['_M_p'] + + def lookup_function(val): - typ = str(val.type) - if typ == 'string16': - return String16Printer(val) - elif typ == 'GURL': + type_to_printer = { + 'string16': String16Printer, + 'GURL': GURLPrinter, + 'FilePath': FilePathPrinter, + } + + printer = type_to_printer.get(str(val.type), None) + if printer: - return GURLPrinter(val) ? ^^^^^ + return printer(val) ? ^ return None gdb.pretty_printers.append(lookup_function)
847a88c579118f8a0d528284ab3ea029ccca7215
git_pre_commit_hook/builtin_plugins/rst_check.py
git_pre_commit_hook/builtin_plugins/rst_check.py
import os import fnmatch import restructuredtext_lint DEFAULTS = { 'files': '*.rst', } def make_message(error): return '%s %s:%s %s\n' % ( error.type, error.source, error.line, error.message, ) def check(file_staged_for_commit, options): basename = os.path.basename(file_staged_for_commit.path) if not fnmatch.fnmatch(basename, options.rst_files): return True errors = restructuredtext_lint.lint( file_staged_for_commit.contents, file_staged_for_commit.path, ) if errors: print('\n'.join(make_message(e) for e in errors)) return False else: return True
"""Check that files contains valid ReStructuredText.""" import os import fnmatch import restructuredtext_lint DEFAULTS = { 'files': '*.rst', } def make_message(error): return '%s %s:%s %s\n' % ( error.type, error.source, error.line, error.message, ) def check(file_staged_for_commit, options): basename = os.path.basename(file_staged_for_commit.path) if not fnmatch.fnmatch(basename, options.rst_files): return True errors = restructuredtext_lint.lint( file_staged_for_commit.contents, file_staged_for_commit.path, ) if errors: print('\n'.join(make_message(e) for e in errors)) return False else: return True
Add description to rst plugin
Add description to rst plugin
Python
mit
evvers/git-pre-commit-hook
+ """Check that files contains valid ReStructuredText.""" import os import fnmatch import restructuredtext_lint DEFAULTS = { 'files': '*.rst', } def make_message(error): return '%s %s:%s %s\n' % ( error.type, error.source, error.line, error.message, ) def check(file_staged_for_commit, options): basename = os.path.basename(file_staged_for_commit.path) if not fnmatch.fnmatch(basename, options.rst_files): return True errors = restructuredtext_lint.lint( file_staged_for_commit.contents, file_staged_for_commit.path, ) if errors: print('\n'.join(make_message(e) for e in errors)) return False else: return True
Add description to rst plugin
## Code Before: import os import fnmatch import restructuredtext_lint DEFAULTS = { 'files': '*.rst', } def make_message(error): return '%s %s:%s %s\n' % ( error.type, error.source, error.line, error.message, ) def check(file_staged_for_commit, options): basename = os.path.basename(file_staged_for_commit.path) if not fnmatch.fnmatch(basename, options.rst_files): return True errors = restructuredtext_lint.lint( file_staged_for_commit.contents, file_staged_for_commit.path, ) if errors: print('\n'.join(make_message(e) for e in errors)) return False else: return True ## Instruction: Add description to rst plugin ## Code After: """Check that files contains valid ReStructuredText.""" import os import fnmatch import restructuredtext_lint DEFAULTS = { 'files': '*.rst', } def make_message(error): return '%s %s:%s %s\n' % ( error.type, error.source, error.line, error.message, ) def check(file_staged_for_commit, options): basename = os.path.basename(file_staged_for_commit.path) if not fnmatch.fnmatch(basename, options.rst_files): return True errors = restructuredtext_lint.lint( file_staged_for_commit.contents, file_staged_for_commit.path, ) if errors: print('\n'.join(make_message(e) for e in errors)) return False else: return True
+ """Check that files contains valid ReStructuredText.""" import os import fnmatch import restructuredtext_lint DEFAULTS = { 'files': '*.rst', } def make_message(error): return '%s %s:%s %s\n' % ( error.type, error.source, error.line, error.message, ) def check(file_staged_for_commit, options): basename = os.path.basename(file_staged_for_commit.path) if not fnmatch.fnmatch(basename, options.rst_files): return True errors = restructuredtext_lint.lint( file_staged_for_commit.contents, file_staged_for_commit.path, ) if errors: print('\n'.join(make_message(e) for e in errors)) return False else: return True
0409580aed43b6a0556fcc4b8e6e9252d9f082ea
froide/publicbody/management/commands/validate_publicbodies.py
froide/publicbody/management/commands/validate_publicbodies.py
from io import StringIO from contextlib import contextmanager from django.conf import settings from django.core.management.base import BaseCommand from django.utils import translation from django.utils.translation import ugettext_lazy as _ from froide.helper.email_sending import send_mail from ...validators import PublicBodyValidator from ...models import PublicBody class Command(BaseCommand): help = "Validates public bodies" def add_arguments(self, parser): parser.add_argument('filename', type=str, nargs='?', default=None) @contextmanager def get_stream(self, filename): if filename is None: stream = StringIO() else: if filename == '-': stream = self.stdout else: stream = open(filename, 'w') yield stream if filename is not None and filename != '-': stream.close() def handle(self, *args, **options): translation.activate(settings.LANGUAGE_CODE) filename = options['filename'] pbs = PublicBody.objects.all().iterator() validator = PublicBodyValidator(pbs) with self.get_stream(filename) as stream: validator.write_csv(stream) if filename is None and not validator.is_valid: for name, email in settings.MANAGERS: send_mail( _('Public body validation results'), _('Please find attached the results of the public body validation'), email, attachments=[ ('validation_result.csv', stream.getvalue().encode('utf-8'), 'text/csv') ] )
from io import StringIO from contextlib import contextmanager from django.conf import settings from django.core.management.base import BaseCommand from django.utils import translation from django.utils.translation import ugettext_lazy as _ from froide.helper.email_sending import send_mail from ...validators import PublicBodyValidator from ...models import PublicBody class Command(BaseCommand): help = "Validates public bodies" def add_arguments(self, parser): parser.add_argument('filename', type=str, nargs='?', default=None) @contextmanager def get_stream(self, filename): if filename is None: stream = StringIO() else: if filename == '-': stream = self.stdout else: stream = open(filename, 'w') yield stream if filename is not None and filename != '-': stream.close() def handle(self, *args, **options): translation.activate(settings.LANGUAGE_CODE) filename = options['filename'] pbs = PublicBody.objects.all() validator = PublicBodyValidator(pbs) with self.get_stream(filename) as stream: validator.write_csv(stream) if filename is None and not validator.is_valid: for name, email in settings.MANAGERS: send_mail( _('Public body validation results'), _('Please find attached the results of the public body validation'), email, attachments=[ ('validation_result.csv', stream.getvalue().encode('utf-8'), 'text/csv') ] )
Use queryset in validate publicbodies command
Use queryset in validate publicbodies command
Python
mit
stefanw/froide,stefanw/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,stefanw/froide,fin/froide,fin/froide
from io import StringIO from contextlib import contextmanager from django.conf import settings from django.core.management.base import BaseCommand from django.utils import translation from django.utils.translation import ugettext_lazy as _ from froide.helper.email_sending import send_mail from ...validators import PublicBodyValidator from ...models import PublicBody class Command(BaseCommand): help = "Validates public bodies" def add_arguments(self, parser): parser.add_argument('filename', type=str, nargs='?', default=None) @contextmanager def get_stream(self, filename): if filename is None: stream = StringIO() else: if filename == '-': stream = self.stdout else: stream = open(filename, 'w') yield stream if filename is not None and filename != '-': stream.close() def handle(self, *args, **options): translation.activate(settings.LANGUAGE_CODE) filename = options['filename'] - pbs = PublicBody.objects.all().iterator() + pbs = PublicBody.objects.all() validator = PublicBodyValidator(pbs) with self.get_stream(filename) as stream: validator.write_csv(stream) if filename is None and not validator.is_valid: for name, email in settings.MANAGERS: send_mail( _('Public body validation results'), _('Please find attached the results of the public body validation'), email, attachments=[ ('validation_result.csv', stream.getvalue().encode('utf-8'), 'text/csv') ] )
Use queryset in validate publicbodies command
## Code Before: from io import StringIO from contextlib import contextmanager from django.conf import settings from django.core.management.base import BaseCommand from django.utils import translation from django.utils.translation import ugettext_lazy as _ from froide.helper.email_sending import send_mail from ...validators import PublicBodyValidator from ...models import PublicBody class Command(BaseCommand): help = "Validates public bodies" def add_arguments(self, parser): parser.add_argument('filename', type=str, nargs='?', default=None) @contextmanager def get_stream(self, filename): if filename is None: stream = StringIO() else: if filename == '-': stream = self.stdout else: stream = open(filename, 'w') yield stream if filename is not None and filename != '-': stream.close() def handle(self, *args, **options): translation.activate(settings.LANGUAGE_CODE) filename = options['filename'] pbs = PublicBody.objects.all().iterator() validator = PublicBodyValidator(pbs) with self.get_stream(filename) as stream: validator.write_csv(stream) if filename is None and not validator.is_valid: for name, email in settings.MANAGERS: send_mail( _('Public body validation results'), _('Please find attached the results of the public body validation'), email, attachments=[ ('validation_result.csv', stream.getvalue().encode('utf-8'), 'text/csv') ] ) ## Instruction: Use queryset in validate publicbodies command ## Code After: from io import StringIO from contextlib import contextmanager from django.conf import settings from django.core.management.base import BaseCommand from django.utils import translation from django.utils.translation import ugettext_lazy as _ from froide.helper.email_sending import send_mail from ...validators import PublicBodyValidator from ...models import PublicBody class Command(BaseCommand): help = "Validates public bodies" def add_arguments(self, parser): parser.add_argument('filename', type=str, nargs='?', default=None) @contextmanager def get_stream(self, 
filename): if filename is None: stream = StringIO() else: if filename == '-': stream = self.stdout else: stream = open(filename, 'w') yield stream if filename is not None and filename != '-': stream.close() def handle(self, *args, **options): translation.activate(settings.LANGUAGE_CODE) filename = options['filename'] pbs = PublicBody.objects.all() validator = PublicBodyValidator(pbs) with self.get_stream(filename) as stream: validator.write_csv(stream) if filename is None and not validator.is_valid: for name, email in settings.MANAGERS: send_mail( _('Public body validation results'), _('Please find attached the results of the public body validation'), email, attachments=[ ('validation_result.csv', stream.getvalue().encode('utf-8'), 'text/csv') ] )
from io import StringIO from contextlib import contextmanager from django.conf import settings from django.core.management.base import BaseCommand from django.utils import translation from django.utils.translation import ugettext_lazy as _ from froide.helper.email_sending import send_mail from ...validators import PublicBodyValidator from ...models import PublicBody class Command(BaseCommand): help = "Validates public bodies" def add_arguments(self, parser): parser.add_argument('filename', type=str, nargs='?', default=None) @contextmanager def get_stream(self, filename): if filename is None: stream = StringIO() else: if filename == '-': stream = self.stdout else: stream = open(filename, 'w') yield stream if filename is not None and filename != '-': stream.close() def handle(self, *args, **options): translation.activate(settings.LANGUAGE_CODE) filename = options['filename'] - pbs = PublicBody.objects.all().iterator() ? ----------- + pbs = PublicBody.objects.all() validator = PublicBodyValidator(pbs) with self.get_stream(filename) as stream: validator.write_csv(stream) if filename is None and not validator.is_valid: for name, email in settings.MANAGERS: send_mail( _('Public body validation results'), _('Please find attached the results of the public body validation'), email, attachments=[ ('validation_result.csv', stream.getvalue().encode('utf-8'), 'text/csv') ] )
36950cf9cffd5083408bc257e37a846835029d58
symposion/speakers/admin.py
symposion/speakers/admin.py
from django.contrib import admin from markedit.admin import MarkEditAdmin from symposion.speakers.models import Speaker class SpeakerAdmin(MarkEditAdmin): list_display = ["name", "email", "created", "twitter_username"] search_fields = ["name", "twitter_username"] class MarkEdit: fields = ['biography', ] options = { 'preview': 'below' } admin.site.register(Speaker, SpeakerAdmin)
from django.contrib import admin from markedit.admin import MarkEditAdmin from symposion.speakers.models import Speaker class SpeakerAdmin(MarkEditAdmin): list_display = ["name", "email", "created", "twitter_username"] raw_id_fields = ["user"] search_fields = ["name", "twitter_username"] class MarkEdit: fields = ['biography', ] options = { 'preview': 'below' } admin.site.register(Speaker, SpeakerAdmin)
Fix user selection for speaker add
Fix user selection for speaker add When adding a speaker in the admin, the staff person had to pick a user from a huge dropdown with all the users, unsorted. Change 'user' to a raw id field, meaning to pick a user, the staff member clicks a magnifying glass icon next to the field and gets a popup listing all the users in an admin list page with sortable columns and search.
Python
bsd-3-clause
PyCon/pycon,PyCon/pycon,PyCon/pycon,PyCon/pycon
from django.contrib import admin from markedit.admin import MarkEditAdmin from symposion.speakers.models import Speaker class SpeakerAdmin(MarkEditAdmin): list_display = ["name", "email", "created", "twitter_username"] + raw_id_fields = ["user"] search_fields = ["name", "twitter_username"] class MarkEdit: fields = ['biography', ] options = { 'preview': 'below' } admin.site.register(Speaker, SpeakerAdmin)
Fix user selection for speaker add
## Code Before: from django.contrib import admin from markedit.admin import MarkEditAdmin from symposion.speakers.models import Speaker class SpeakerAdmin(MarkEditAdmin): list_display = ["name", "email", "created", "twitter_username"] search_fields = ["name", "twitter_username"] class MarkEdit: fields = ['biography', ] options = { 'preview': 'below' } admin.site.register(Speaker, SpeakerAdmin) ## Instruction: Fix user selection for speaker add ## Code After: from django.contrib import admin from markedit.admin import MarkEditAdmin from symposion.speakers.models import Speaker class SpeakerAdmin(MarkEditAdmin): list_display = ["name", "email", "created", "twitter_username"] raw_id_fields = ["user"] search_fields = ["name", "twitter_username"] class MarkEdit: fields = ['biography', ] options = { 'preview': 'below' } admin.site.register(Speaker, SpeakerAdmin)
from django.contrib import admin from markedit.admin import MarkEditAdmin from symposion.speakers.models import Speaker class SpeakerAdmin(MarkEditAdmin): list_display = ["name", "email", "created", "twitter_username"] + raw_id_fields = ["user"] search_fields = ["name", "twitter_username"] class MarkEdit: fields = ['biography', ] options = { 'preview': 'below' } admin.site.register(Speaker, SpeakerAdmin)
5456ae0af9ad83b8e0339c671ce8954bb48d62cf
database.py
database.py
from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, sessionmaker from sqlalchemy.ext.declarative import declarative_base import config as cfg class DB(object): engine = None db_session = None Base = declarative_base() def __init__(self, dbstring): self.engine = create_engine(dbstring, convert_unicode=True) self.db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=self.engine)) self.Base.query = self.db_session.query_property() def get_session(self): return self.db_session def get_base(self): return self.Base
from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, sessionmaker, class_mapper from sqlalchemy.ext.declarative import declarative_base import config as cfg class DB(object): engine = None db_session = None Base = declarative_base() def __init__(self, dbstring): self.engine = create_engine(dbstring, convert_unicode=True) self.db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=self.engine)) self.Base.query = self.db_session.query_property() def get_session(self): return self.db_session def get_base(self): return self.Base class ImposterBase(object): """ Mixin class to provide additional generic functions for the sqlalchemy models """ def to_dict(obj): """Return dict containing all object data""" return dict((col.name, unicode(getattr(obj, col.name))) for col in class_mapper(obj.__class__).mapped_table.c) def get_public_dict(obj): """Return dict containing only public object data""" return dict((col.name, unicode(getattr(obj, col.name))) for col in obj.__class__.__public_columns__)
Add ImposterBase mixin class so we can add methods and properties to the sqlalchemy based models
Add ImposterBase mixin class so we can add methods and properties to the sqlalchemy based models
Python
bsd-2-clause
jkossen/imposter,jkossen/imposter
from sqlalchemy import create_engine - from sqlalchemy.orm import scoped_session, sessionmaker + from sqlalchemy.orm import scoped_session, sessionmaker, class_mapper from sqlalchemy.ext.declarative import declarative_base import config as cfg class DB(object): engine = None db_session = None Base = declarative_base() def __init__(self, dbstring): self.engine = create_engine(dbstring, convert_unicode=True) self.db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=self.engine)) self.Base.query = self.db_session.query_property() def get_session(self): return self.db_session def get_base(self): return self.Base + class ImposterBase(object): + """ Mixin class to provide additional generic functions for the sqlalchemy models """ + + def to_dict(obj): + """Return dict containing all object data""" + return dict((col.name, unicode(getattr(obj, col.name))) + for col in class_mapper(obj.__class__).mapped_table.c) + + def get_public_dict(obj): + """Return dict containing only public object data""" + return dict((col.name, unicode(getattr(obj, col.name))) + for col in obj.__class__.__public_columns__) +
Add ImposterBase mixin class so we can add methods and properties to the sqlalchemy based models
## Code Before: from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, sessionmaker from sqlalchemy.ext.declarative import declarative_base import config as cfg class DB(object): engine = None db_session = None Base = declarative_base() def __init__(self, dbstring): self.engine = create_engine(dbstring, convert_unicode=True) self.db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=self.engine)) self.Base.query = self.db_session.query_property() def get_session(self): return self.db_session def get_base(self): return self.Base ## Instruction: Add ImposterBase mixin class so we can add methods and properties to the sqlalchemy based models ## Code After: from sqlalchemy import create_engine from sqlalchemy.orm import scoped_session, sessionmaker, class_mapper from sqlalchemy.ext.declarative import declarative_base import config as cfg class DB(object): engine = None db_session = None Base = declarative_base() def __init__(self, dbstring): self.engine = create_engine(dbstring, convert_unicode=True) self.db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=self.engine)) self.Base.query = self.db_session.query_property() def get_session(self): return self.db_session def get_base(self): return self.Base class ImposterBase(object): """ Mixin class to provide additional generic functions for the sqlalchemy models """ def to_dict(obj): """Return dict containing all object data""" return dict((col.name, unicode(getattr(obj, col.name))) for col in class_mapper(obj.__class__).mapped_table.c) def get_public_dict(obj): """Return dict containing only public object data""" return dict((col.name, unicode(getattr(obj, col.name))) for col in obj.__class__.__public_columns__)
from sqlalchemy import create_engine - from sqlalchemy.orm import scoped_session, sessionmaker + from sqlalchemy.orm import scoped_session, sessionmaker, class_mapper ? ++++++++++++++ from sqlalchemy.ext.declarative import declarative_base import config as cfg class DB(object): engine = None db_session = None Base = declarative_base() def __init__(self, dbstring): self.engine = create_engine(dbstring, convert_unicode=True) self.db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=self.engine)) self.Base.query = self.db_session.query_property() def get_session(self): return self.db_session def get_base(self): return self.Base + + class ImposterBase(object): + """ Mixin class to provide additional generic functions for the sqlalchemy models """ + + def to_dict(obj): + """Return dict containing all object data""" + return dict((col.name, unicode(getattr(obj, col.name))) + for col in class_mapper(obj.__class__).mapped_table.c) + + def get_public_dict(obj): + """Return dict containing only public object data""" + return dict((col.name, unicode(getattr(obj, col.name))) + for col in obj.__class__.__public_columns__)
8b4fc00e7a5ac1d416d54952cbb6d09ef328a9c3
cache_keras_weights.py
cache_keras_weights.py
from keras.applications.resnet50 import ResNet50 from keras.applications.vgg16 import VGG16 from keras.applications.vgg19 import VGG19 from keras.applications.inception_v3 import InceptionV3 resnet = ResNet50(weights='imagenet') vgg16 = VGG16(weights='imagenet') vgg19 = VGG19(weights='imagenet') inception = InceptionV3(weights='imagenet')
from keras.applications.resnet50 import ResNet50 from keras.applications.vgg16 import VGG16 from keras.applications.vgg19 import VGG19 from keras.applications.inception_v3 import InceptionV3 from keras.applications.xception import Xception resnet = ResNet50(weights='imagenet') vgg16 = VGG16(weights='imagenet') vgg19 = VGG19(weights='imagenet') inception = InceptionV3(weights='imagenet') xception = Xception(weights='imagenet')
Add Xception to keras cache
Add Xception to keras cache
Python
apache-2.0
Kaggle/docker-python,Kaggle/docker-python
from keras.applications.resnet50 import ResNet50 from keras.applications.vgg16 import VGG16 from keras.applications.vgg19 import VGG19 from keras.applications.inception_v3 import InceptionV3 + from keras.applications.xception import Xception resnet = ResNet50(weights='imagenet') vgg16 = VGG16(weights='imagenet') vgg19 = VGG19(weights='imagenet') inception = InceptionV3(weights='imagenet') + xception = Xception(weights='imagenet')
Add Xception to keras cache
## Code Before: from keras.applications.resnet50 import ResNet50 from keras.applications.vgg16 import VGG16 from keras.applications.vgg19 import VGG19 from keras.applications.inception_v3 import InceptionV3 resnet = ResNet50(weights='imagenet') vgg16 = VGG16(weights='imagenet') vgg19 = VGG19(weights='imagenet') inception = InceptionV3(weights='imagenet') ## Instruction: Add Xception to keras cache ## Code After: from keras.applications.resnet50 import ResNet50 from keras.applications.vgg16 import VGG16 from keras.applications.vgg19 import VGG19 from keras.applications.inception_v3 import InceptionV3 from keras.applications.xception import Xception resnet = ResNet50(weights='imagenet') vgg16 = VGG16(weights='imagenet') vgg19 = VGG19(weights='imagenet') inception = InceptionV3(weights='imagenet') xception = Xception(weights='imagenet')
from keras.applications.resnet50 import ResNet50 from keras.applications.vgg16 import VGG16 from keras.applications.vgg19 import VGG19 from keras.applications.inception_v3 import InceptionV3 + from keras.applications.xception import Xception resnet = ResNet50(weights='imagenet') vgg16 = VGG16(weights='imagenet') vgg19 = VGG19(weights='imagenet') inception = InceptionV3(weights='imagenet') + xception = Xception(weights='imagenet')
1ba12783fca76247447d84013d91f5c3073386a4
web_scraper/core/html_fetchers.py
web_scraper/core/html_fetchers.py
import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import requests def fetch_html_document(url): """Fetch html from url and return html :param str url: an address to a resource on the Internet :return no except hit: status code and html of page (if exists) :rtype: tuple :return except hit: error :rtype: str """ try: response = requests.get(url) if response.status_code == requests.codes.ok: return response.status_code, response.text # html else: return response.status_code, response.text except Exception as err: return err
import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import requests def fetch_html_document(url, user_agent='python_requests.cli-ws'): """Fetch html from url and return html :param str url: an address to a resource on the Internet :opt param str user_agent: user agent that the request will be made with :return no except hit: status code and html of page (if exists) :rtype: tuple :return except hit: error :rtype: str """ try: response = requests.get(url, headers={'User-Agent': user_agent}) if response.status_code == requests.codes.ok: return response.status_code, response.text # html else: return response.status_code, response.text except Exception as err: return err
Add user-agent field to html_fetcher
Add user-agent field to html_fetcher
Python
mit
Samuel-L/cli-ws,Samuel-L/cli-ws
import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import requests - def fetch_html_document(url): + def fetch_html_document(url, user_agent='python_requests.cli-ws'): """Fetch html from url and return html :param str url: an address to a resource on the Internet + :opt param str user_agent: user agent that the request will be made with :return no except hit: status code and html of page (if exists) :rtype: tuple :return except hit: error :rtype: str """ try: - response = requests.get(url) + response = requests.get(url, headers={'User-Agent': user_agent}) if response.status_code == requests.codes.ok: return response.status_code, response.text # html else: return response.status_code, response.text except Exception as err: return err
Add user-agent field to html_fetcher
## Code Before: import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import requests def fetch_html_document(url): """Fetch html from url and return html :param str url: an address to a resource on the Internet :return no except hit: status code and html of page (if exists) :rtype: tuple :return except hit: error :rtype: str """ try: response = requests.get(url) if response.status_code == requests.codes.ok: return response.status_code, response.text # html else: return response.status_code, response.text except Exception as err: return err ## Instruction: Add user-agent field to html_fetcher ## Code After: import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import requests def fetch_html_document(url, user_agent='python_requests.cli-ws'): """Fetch html from url and return html :param str url: an address to a resource on the Internet :opt param str user_agent: user agent that the request will be made with :return no except hit: status code and html of page (if exists) :rtype: tuple :return except hit: error :rtype: str """ try: response = requests.get(url, headers={'User-Agent': user_agent}) if response.status_code == requests.codes.ok: return response.status_code, response.text # html else: return response.status_code, response.text except Exception as err: return err
import os import sys sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) import requests - def fetch_html_document(url): + def fetch_html_document(url, user_agent='python_requests.cli-ws'): """Fetch html from url and return html :param str url: an address to a resource on the Internet + :opt param str user_agent: user agent that the request will be made with :return no except hit: status code and html of page (if exists) :rtype: tuple :return except hit: error :rtype: str """ try: - response = requests.get(url) + response = requests.get(url, headers={'User-Agent': user_agent}) if response.status_code == requests.codes.ok: return response.status_code, response.text # html else: return response.status_code, response.text except Exception as err: return err
07549339c6b0e4b1c98a11799ca95e90cbf109cd
homedisplay/control_milight/management/commands/listen_433.py
homedisplay/control_milight/management/commands/listen_433.py
from control_milight.utils import process_automatic_trigger from django.conf import settings from django.core.management.base import BaseCommand, CommandError import serial import time import logging logger = logging.getLogger("%s.%s" % ("homecontroller", __name__)) class Command(BaseCommand): args = '' help = 'Listen for 433MHz radio messages' def handle(self, *args, **options): s = serial.Serial(settings.ARDUINO_433, 9600) ITEM_MAP = settings.ARDUINO_433_ITEM_MAP sent_event_map = {} while True: line = s.readline() if line.startswith("Received "): id = line.split(" ")[1] if id in self.ITEM_MAP: item_name = self.ITEM_MAP[id] if item_name in sent_event_map: if sent_event_map[item_name] > time.time() - 5: continue logger.info("Processing trigger %s (%s)", item_name, id) process_automatic_trigger(item_name) sent_event_map[item_name] = time.time() else: logger.warn("Unknown ID: %s", id)
from control_milight.utils import process_automatic_trigger from django.conf import settings from django.core.management.base import BaseCommand, CommandError import serial import time import logging logger = logging.getLogger("%s.%s" % ("homecontroller", __name__)) class Command(BaseCommand): args = '' help = 'Listen for 433MHz radio messages' def handle(self, *args, **options): s = serial.Serial(settings.ARDUINO_433, 9600) ITEM_MAP = settings.ARDUINO_433_ITEM_MAP sent_event_map = {} while True: line = s.readline() if line.startswith("Received "): id = line.split(" ")[1] if id in ITEM_MAP: item_name = ITEM_MAP[id] if item_name in sent_event_map: if sent_event_map[item_name] > time.time() - 5: continue logger.info("Processing trigger %s (%s)", item_name, id) process_automatic_trigger(item_name) sent_event_map[item_name] = time.time() else: logger.warn("Unknown ID: %s", id)
Move ITEM_MAP to method variable
Move ITEM_MAP to method variable
Python
bsd-3-clause
ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display,ojarva/home-info-display
from control_milight.utils import process_automatic_trigger from django.conf import settings from django.core.management.base import BaseCommand, CommandError import serial import time import logging logger = logging.getLogger("%s.%s" % ("homecontroller", __name__)) class Command(BaseCommand): args = '' help = 'Listen for 433MHz radio messages' def handle(self, *args, **options): s = serial.Serial(settings.ARDUINO_433, 9600) ITEM_MAP = settings.ARDUINO_433_ITEM_MAP sent_event_map = {} while True: line = s.readline() if line.startswith("Received "): id = line.split(" ")[1] - if id in self.ITEM_MAP: + if id in ITEM_MAP: - item_name = self.ITEM_MAP[id] + item_name = ITEM_MAP[id] if item_name in sent_event_map: if sent_event_map[item_name] > time.time() - 5: continue logger.info("Processing trigger %s (%s)", item_name, id) process_automatic_trigger(item_name) sent_event_map[item_name] = time.time() else: logger.warn("Unknown ID: %s", id)
Move ITEM_MAP to method variable
## Code Before: from control_milight.utils import process_automatic_trigger from django.conf import settings from django.core.management.base import BaseCommand, CommandError import serial import time import logging logger = logging.getLogger("%s.%s" % ("homecontroller", __name__)) class Command(BaseCommand): args = '' help = 'Listen for 433MHz radio messages' def handle(self, *args, **options): s = serial.Serial(settings.ARDUINO_433, 9600) ITEM_MAP = settings.ARDUINO_433_ITEM_MAP sent_event_map = {} while True: line = s.readline() if line.startswith("Received "): id = line.split(" ")[1] if id in self.ITEM_MAP: item_name = self.ITEM_MAP[id] if item_name in sent_event_map: if sent_event_map[item_name] > time.time() - 5: continue logger.info("Processing trigger %s (%s)", item_name, id) process_automatic_trigger(item_name) sent_event_map[item_name] = time.time() else: logger.warn("Unknown ID: %s", id) ## Instruction: Move ITEM_MAP to method variable ## Code After: from control_milight.utils import process_automatic_trigger from django.conf import settings from django.core.management.base import BaseCommand, CommandError import serial import time import logging logger = logging.getLogger("%s.%s" % ("homecontroller", __name__)) class Command(BaseCommand): args = '' help = 'Listen for 433MHz radio messages' def handle(self, *args, **options): s = serial.Serial(settings.ARDUINO_433, 9600) ITEM_MAP = settings.ARDUINO_433_ITEM_MAP sent_event_map = {} while True: line = s.readline() if line.startswith("Received "): id = line.split(" ")[1] if id in ITEM_MAP: item_name = ITEM_MAP[id] if item_name in sent_event_map: if sent_event_map[item_name] > time.time() - 5: continue logger.info("Processing trigger %s (%s)", item_name, id) process_automatic_trigger(item_name) sent_event_map[item_name] = time.time() else: logger.warn("Unknown ID: %s", id)
from control_milight.utils import process_automatic_trigger from django.conf import settings from django.core.management.base import BaseCommand, CommandError import serial import time import logging logger = logging.getLogger("%s.%s" % ("homecontroller", __name__)) class Command(BaseCommand): args = '' help = 'Listen for 433MHz radio messages' def handle(self, *args, **options): s = serial.Serial(settings.ARDUINO_433, 9600) ITEM_MAP = settings.ARDUINO_433_ITEM_MAP sent_event_map = {} while True: line = s.readline() if line.startswith("Received "): id = line.split(" ")[1] - if id in self.ITEM_MAP: ? ----- + if id in ITEM_MAP: - item_name = self.ITEM_MAP[id] ? ----- + item_name = ITEM_MAP[id] if item_name in sent_event_map: if sent_event_map[item_name] > time.time() - 5: continue logger.info("Processing trigger %s (%s)", item_name, id) process_automatic_trigger(item_name) sent_event_map[item_name] = time.time() else: logger.warn("Unknown ID: %s", id)
03491b6c11964f18f7c1867ef9f2612761a006ae
test/config/nsuserdefaults_config.py
test/config/nsuserdefaults_config.py
import unittest if sys.platform.startswith('darwin'): from nativeconfig.config import NSUserDefaultsConfig from test.config import TestConfigMixin class MyNSUserDefaultsConfig(NSUserDefaultsConfig): pass class TestMemoryConfig(unittest.TestCase, TestConfigMixin): CONFIG_TYPE = MyNSUserDefaultsConfig def tearDown(self): try: c = self.CONFIG_TYPE.get_instance() c.del_value_for_option_name('FirstName') c.del_value_for_option_name('LastName') c.del_value_for_option_name('LuckyNumber') except OSError: pass TestConfigMixin.tearDown(self) def test_config_is_created_if_not_found(self): pass
import sys import unittest if sys.platform.startswith('darwin'): from nativeconfig.config import NSUserDefaultsConfig from test.config import TestConfigMixin class MyNSUserDefaultsConfig(NSUserDefaultsConfig): pass class TestMemoryConfig(unittest.TestCase, TestConfigMixin): CONFIG_TYPE = MyNSUserDefaultsConfig def tearDown(self): try: c = self.CONFIG_TYPE.get_instance() c.del_value_for_option_name('FirstName') c.del_value_for_option_name('LastName') c.del_value_for_option_name('LuckyNumber') except OSError: pass TestConfigMixin.tearDown(self) def test_config_is_created_if_not_found(self): pass
Add missing import of sys.
Add missing import of sys.
Python
mit
GreatFruitOmsk/nativeconfig
+ import sys import unittest if sys.platform.startswith('darwin'): from nativeconfig.config import NSUserDefaultsConfig from test.config import TestConfigMixin class MyNSUserDefaultsConfig(NSUserDefaultsConfig): pass class TestMemoryConfig(unittest.TestCase, TestConfigMixin): CONFIG_TYPE = MyNSUserDefaultsConfig def tearDown(self): try: c = self.CONFIG_TYPE.get_instance() c.del_value_for_option_name('FirstName') c.del_value_for_option_name('LastName') c.del_value_for_option_name('LuckyNumber') except OSError: pass TestConfigMixin.tearDown(self) def test_config_is_created_if_not_found(self): pass
Add missing import of sys.
## Code Before: import unittest if sys.platform.startswith('darwin'): from nativeconfig.config import NSUserDefaultsConfig from test.config import TestConfigMixin class MyNSUserDefaultsConfig(NSUserDefaultsConfig): pass class TestMemoryConfig(unittest.TestCase, TestConfigMixin): CONFIG_TYPE = MyNSUserDefaultsConfig def tearDown(self): try: c = self.CONFIG_TYPE.get_instance() c.del_value_for_option_name('FirstName') c.del_value_for_option_name('LastName') c.del_value_for_option_name('LuckyNumber') except OSError: pass TestConfigMixin.tearDown(self) def test_config_is_created_if_not_found(self): pass ## Instruction: Add missing import of sys. ## Code After: import sys import unittest if sys.platform.startswith('darwin'): from nativeconfig.config import NSUserDefaultsConfig from test.config import TestConfigMixin class MyNSUserDefaultsConfig(NSUserDefaultsConfig): pass class TestMemoryConfig(unittest.TestCase, TestConfigMixin): CONFIG_TYPE = MyNSUserDefaultsConfig def tearDown(self): try: c = self.CONFIG_TYPE.get_instance() c.del_value_for_option_name('FirstName') c.del_value_for_option_name('LastName') c.del_value_for_option_name('LuckyNumber') except OSError: pass TestConfigMixin.tearDown(self) def test_config_is_created_if_not_found(self): pass
+ import sys import unittest if sys.platform.startswith('darwin'): from nativeconfig.config import NSUserDefaultsConfig from test.config import TestConfigMixin class MyNSUserDefaultsConfig(NSUserDefaultsConfig): pass class TestMemoryConfig(unittest.TestCase, TestConfigMixin): CONFIG_TYPE = MyNSUserDefaultsConfig def tearDown(self): try: c = self.CONFIG_TYPE.get_instance() c.del_value_for_option_name('FirstName') c.del_value_for_option_name('LastName') c.del_value_for_option_name('LuckyNumber') except OSError: pass TestConfigMixin.tearDown(self) def test_config_is_created_if_not_found(self): pass
ec9c671bc4140590c17b00277c424f93e20a5a5e
hvac/api/secrets_engines/__init__.py
hvac/api/secrets_engines/__init__.py
"""Vault secrets engines endpoints""" from hvac.api.secrets_engines.aws import Aws from hvac.api.secrets_engines.azure import Azure from hvac.api.secrets_engines.gcp import Gcp from hvac.api.secrets_engines.identity import Identity from hvac.api.secrets_engines.kv import Kv from hvac.api.secrets_engines.pki import Pki from hvac.api.secrets_engines.kv_v1 import KvV1 from hvac.api.secrets_engines.kv_v2 import KvV2 from hvac.api.secrets_engines.transit import Transit from hvac.api.secrets_engines.database import Database from hvac.api.vault_api_category import VaultApiCategory __all__ = ( 'Aws', 'Azure', 'Gcp', 'Identity', 'Kv', 'KvV1', 'KvV2', 'Pki', 'Transit', 'SecretsEngines', 'Database' ) class SecretsEngines(VaultApiCategory): """Secrets Engines.""" implemented_classes = [ Aws, Azure, Gcp, Identity, Kv, Pki, Transit, Database, ] unimplemented_classes = [ 'Ad', 'AliCloud', 'Azure', 'Consul', 'GcpKms', 'Nomad', 'RabbitMq', 'Ssh', 'TOTP', 'Cassandra', 'MongoDb', 'Mssql', 'MySql', 'PostgreSql', ]
"""Vault secrets engines endpoints""" from hvac.api.secrets_engines.aws import Aws from hvac.api.secrets_engines.azure import Azure from hvac.api.secrets_engines.gcp import Gcp from hvac.api.secrets_engines.identity import Identity from hvac.api.secrets_engines.kv import Kv from hvac.api.secrets_engines.pki import Pki from hvac.api.secrets_engines.kv_v1 import KvV1 from hvac.api.secrets_engines.kv_v2 import KvV2 from hvac.api.secrets_engines.transit import Transit from hvac.api.secrets_engines.database import Database from hvac.api.secrets_engines.consul import Consul from hvac.api.vault_api_category import VaultApiCategory __all__ = ( 'Aws', 'Azure', 'Gcp', 'Identity', 'Kv', 'KvV1', 'KvV2', 'Pki', 'Transit', 'SecretsEngines', 'Database' ) class SecretsEngines(VaultApiCategory): """Secrets Engines.""" implemented_classes = [ Aws, Azure, Gcp, Identity, Kv, Pki, Transit, Database, Consul, ] unimplemented_classes = [ 'Ad', 'AliCloud', 'Azure', 'GcpKms', 'Nomad', 'RabbitMq', 'Ssh', 'TOTP', 'Cassandra', 'MongoDb', 'Mssql', 'MySql', 'PostgreSql', ]
Enable the consul secret engine
Enable the consul secret engine
Python
apache-2.0
ianunruh/hvac,ianunruh/hvac
"""Vault secrets engines endpoints""" from hvac.api.secrets_engines.aws import Aws from hvac.api.secrets_engines.azure import Azure from hvac.api.secrets_engines.gcp import Gcp from hvac.api.secrets_engines.identity import Identity from hvac.api.secrets_engines.kv import Kv from hvac.api.secrets_engines.pki import Pki from hvac.api.secrets_engines.kv_v1 import KvV1 from hvac.api.secrets_engines.kv_v2 import KvV2 from hvac.api.secrets_engines.transit import Transit from hvac.api.secrets_engines.database import Database + from hvac.api.secrets_engines.consul import Consul from hvac.api.vault_api_category import VaultApiCategory __all__ = ( 'Aws', 'Azure', 'Gcp', 'Identity', 'Kv', 'KvV1', 'KvV2', 'Pki', 'Transit', 'SecretsEngines', 'Database' ) class SecretsEngines(VaultApiCategory): """Secrets Engines.""" implemented_classes = [ Aws, Azure, Gcp, Identity, Kv, Pki, Transit, Database, + Consul, ] unimplemented_classes = [ 'Ad', 'AliCloud', 'Azure', - 'Consul', 'GcpKms', 'Nomad', 'RabbitMq', 'Ssh', 'TOTP', 'Cassandra', 'MongoDb', 'Mssql', 'MySql', 'PostgreSql', ]
Enable the consul secret engine
## Code Before: """Vault secrets engines endpoints""" from hvac.api.secrets_engines.aws import Aws from hvac.api.secrets_engines.azure import Azure from hvac.api.secrets_engines.gcp import Gcp from hvac.api.secrets_engines.identity import Identity from hvac.api.secrets_engines.kv import Kv from hvac.api.secrets_engines.pki import Pki from hvac.api.secrets_engines.kv_v1 import KvV1 from hvac.api.secrets_engines.kv_v2 import KvV2 from hvac.api.secrets_engines.transit import Transit from hvac.api.secrets_engines.database import Database from hvac.api.vault_api_category import VaultApiCategory __all__ = ( 'Aws', 'Azure', 'Gcp', 'Identity', 'Kv', 'KvV1', 'KvV2', 'Pki', 'Transit', 'SecretsEngines', 'Database' ) class SecretsEngines(VaultApiCategory): """Secrets Engines.""" implemented_classes = [ Aws, Azure, Gcp, Identity, Kv, Pki, Transit, Database, ] unimplemented_classes = [ 'Ad', 'AliCloud', 'Azure', 'Consul', 'GcpKms', 'Nomad', 'RabbitMq', 'Ssh', 'TOTP', 'Cassandra', 'MongoDb', 'Mssql', 'MySql', 'PostgreSql', ] ## Instruction: Enable the consul secret engine ## Code After: """Vault secrets engines endpoints""" from hvac.api.secrets_engines.aws import Aws from hvac.api.secrets_engines.azure import Azure from hvac.api.secrets_engines.gcp import Gcp from hvac.api.secrets_engines.identity import Identity from hvac.api.secrets_engines.kv import Kv from hvac.api.secrets_engines.pki import Pki from hvac.api.secrets_engines.kv_v1 import KvV1 from hvac.api.secrets_engines.kv_v2 import KvV2 from hvac.api.secrets_engines.transit import Transit from hvac.api.secrets_engines.database import Database from hvac.api.secrets_engines.consul import Consul from hvac.api.vault_api_category import VaultApiCategory __all__ = ( 'Aws', 'Azure', 'Gcp', 'Identity', 'Kv', 'KvV1', 'KvV2', 'Pki', 'Transit', 'SecretsEngines', 'Database' ) class SecretsEngines(VaultApiCategory): """Secrets Engines.""" implemented_classes = [ Aws, Azure, Gcp, Identity, Kv, Pki, Transit, Database, Consul, ] 
unimplemented_classes = [ 'Ad', 'AliCloud', 'Azure', 'GcpKms', 'Nomad', 'RabbitMq', 'Ssh', 'TOTP', 'Cassandra', 'MongoDb', 'Mssql', 'MySql', 'PostgreSql', ]
"""Vault secrets engines endpoints""" from hvac.api.secrets_engines.aws import Aws from hvac.api.secrets_engines.azure import Azure from hvac.api.secrets_engines.gcp import Gcp from hvac.api.secrets_engines.identity import Identity from hvac.api.secrets_engines.kv import Kv from hvac.api.secrets_engines.pki import Pki from hvac.api.secrets_engines.kv_v1 import KvV1 from hvac.api.secrets_engines.kv_v2 import KvV2 from hvac.api.secrets_engines.transit import Transit from hvac.api.secrets_engines.database import Database + from hvac.api.secrets_engines.consul import Consul from hvac.api.vault_api_category import VaultApiCategory __all__ = ( 'Aws', 'Azure', 'Gcp', 'Identity', 'Kv', 'KvV1', 'KvV2', 'Pki', 'Transit', 'SecretsEngines', 'Database' ) class SecretsEngines(VaultApiCategory): """Secrets Engines.""" implemented_classes = [ Aws, Azure, Gcp, Identity, Kv, Pki, Transit, Database, + Consul, ] unimplemented_classes = [ 'Ad', 'AliCloud', 'Azure', - 'Consul', 'GcpKms', 'Nomad', 'RabbitMq', 'Ssh', 'TOTP', 'Cassandra', 'MongoDb', 'Mssql', 'MySql', 'PostgreSql', ]
b180c7e3907df74252ee3270468a768036dc4467
tests/test_timeseries.py
tests/test_timeseries.py
import unittest from datetime import datetime, timedelta import sys sys.path.append(r"..") from daymetpy import download_Daymet class TimeseriesTest(unittest.TestCase): def setUp(self): pass def test_ornl_df(self): ornl_lat, ornl_long = 35.9313167, -84.3104124 df = download_Daymet(lon=ornl_long, lat=ornl_lat, start_yr=2012, end_yr=2013) self.assertTrue(df.year.count() == 365) self.assertTrue("tmax" in df.columns) self.assertTrue("tmin" in df.columns) self.assertTrue("prcp" in df.columns) def test_out_of_bounds(self): london_lat, london_long = 51.5072, 0.1275 with self.assertRaises(NameError): df = download_Daymet(lon=london_long, lat=london_lat, start_yr=2012, end_yr=2013) if __name__ == '__main__': unittest.main()
import unittest from datetime import datetime, timedelta import sys sys.path.append(r"../..") from daymetpy import daymet_timeseries class TimeseriesTest(unittest.TestCase): def setUp(self): pass def test_ornl_df(self): ornl_lat, ornl_long = 35.9313167, -84.3104124 df = daymet_timeseries(lon=ornl_long, lat=ornl_lat, start_year=2012, end_year=2012) self.assertTrue(df.year.count() == 365) self.assertTrue("tmax" in df.columns) self.assertTrue("tmin" in df.columns) self.assertTrue("prcp" in df.columns) def test_out_of_bounds(self): london_lat, london_long = 51.5072, 0.1275 with self.assertRaises(NameError): df = daymet_timeseries(lon=london_long, lat=london_lat, start_year=2012, end_year=2012) if __name__ == '__main__': unittest.main()
Update test to new package structure
Update test to new package structure
Python
agpl-3.0
khufkens/daymetpy
import unittest from datetime import datetime, timedelta import sys - sys.path.append(r"..") + sys.path.append(r"../..") - from daymetpy import download_Daymet + from daymetpy import daymet_timeseries class TimeseriesTest(unittest.TestCase): def setUp(self): pass def test_ornl_df(self): ornl_lat, ornl_long = 35.9313167, -84.3104124 - df = download_Daymet(lon=ornl_long, lat=ornl_lat, start_yr=2012, end_yr=2013) + df = daymet_timeseries(lon=ornl_long, lat=ornl_lat, start_year=2012, end_year=2012) self.assertTrue(df.year.count() == 365) self.assertTrue("tmax" in df.columns) self.assertTrue("tmin" in df.columns) self.assertTrue("prcp" in df.columns) def test_out_of_bounds(self): london_lat, london_long = 51.5072, 0.1275 with self.assertRaises(NameError): - df = download_Daymet(lon=london_long, lat=london_lat, start_yr=2012, end_yr=2013) + df = daymet_timeseries(lon=london_long, lat=london_lat, start_year=2012, end_year=2012) if __name__ == '__main__': unittest.main()
Update test to new package structure
## Code Before: import unittest from datetime import datetime, timedelta import sys sys.path.append(r"..") from daymetpy import download_Daymet class TimeseriesTest(unittest.TestCase): def setUp(self): pass def test_ornl_df(self): ornl_lat, ornl_long = 35.9313167, -84.3104124 df = download_Daymet(lon=ornl_long, lat=ornl_lat, start_yr=2012, end_yr=2013) self.assertTrue(df.year.count() == 365) self.assertTrue("tmax" in df.columns) self.assertTrue("tmin" in df.columns) self.assertTrue("prcp" in df.columns) def test_out_of_bounds(self): london_lat, london_long = 51.5072, 0.1275 with self.assertRaises(NameError): df = download_Daymet(lon=london_long, lat=london_lat, start_yr=2012, end_yr=2013) if __name__ == '__main__': unittest.main() ## Instruction: Update test to new package structure ## Code After: import unittest from datetime import datetime, timedelta import sys sys.path.append(r"../..") from daymetpy import daymet_timeseries class TimeseriesTest(unittest.TestCase): def setUp(self): pass def test_ornl_df(self): ornl_lat, ornl_long = 35.9313167, -84.3104124 df = daymet_timeseries(lon=ornl_long, lat=ornl_lat, start_year=2012, end_year=2012) self.assertTrue(df.year.count() == 365) self.assertTrue("tmax" in df.columns) self.assertTrue("tmin" in df.columns) self.assertTrue("prcp" in df.columns) def test_out_of_bounds(self): london_lat, london_long = 51.5072, 0.1275 with self.assertRaises(NameError): df = daymet_timeseries(lon=london_long, lat=london_lat, start_year=2012, end_year=2012) if __name__ == '__main__': unittest.main()
import unittest from datetime import datetime, timedelta import sys - sys.path.append(r"..") + sys.path.append(r"../..") ? +++ - from daymetpy import download_Daymet + from daymetpy import daymet_timeseries class TimeseriesTest(unittest.TestCase): def setUp(self): pass def test_ornl_df(self): ornl_lat, ornl_long = 35.9313167, -84.3104124 - df = download_Daymet(lon=ornl_long, lat=ornl_lat, start_yr=2012, end_yr=2013) ? --------- ^ + df = daymet_timeseries(lon=ornl_long, lat=ornl_lat, start_year=2012, end_year=2012) ? +++++++++++ ++ ++ ^ self.assertTrue(df.year.count() == 365) self.assertTrue("tmax" in df.columns) self.assertTrue("tmin" in df.columns) self.assertTrue("prcp" in df.columns) def test_out_of_bounds(self): london_lat, london_long = 51.5072, 0.1275 with self.assertRaises(NameError): - df = download_Daymet(lon=london_long, lat=london_lat, start_yr=2012, end_yr=2013) ? --------- ^ + df = daymet_timeseries(lon=london_long, lat=london_lat, start_year=2012, end_year=2012) ? +++++++++++ ++ ++ ^ if __name__ == '__main__': unittest.main()
bfbdf34e2efd1d22ee6f15f4655334764106725c
locksmith/lightauth/common.py
locksmith/lightauth/common.py
from locksmith.common import apicall try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY, API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, endpoint=endpoint, key=key ) except urllib2.HTTPError as e: if e.code == 404: return None else: raise
from locksmith.common import apicall import urllib2 try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, key=key ) return True except urllib2.HTTPError as e: if e.code == 404: return None else: raise
Make client key checking actually work.
Make client key checking actually work.
Python
bsd-3-clause
sunlightlabs/django-locksmith,sunlightlabs/django-locksmith,sunlightlabs/django-locksmith
from locksmith.common import apicall + import urllib2 try: from django.conf import settings - SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY, + SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, - api=api, endpoint=endpoint, key=key + api=api, key=key ) + return True except urllib2.HTTPError as e: if e.code == 404: return None else: raise
Make client key checking actually work.
## Code Before: from locksmith.common import apicall try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY, API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, endpoint=endpoint, key=key ) except urllib2.HTTPError as e: if e.code == 404: return None else: raise ## Instruction: Make client key checking actually work. ## Code After: from locksmith.common import apicall import urllib2 try: from django.conf import settings SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, api=api, key=key ) return True except urllib2.HTTPError as e: if e.code == 404: return None else: raise
from locksmith.common import apicall + import urllib2 try: from django.conf import settings - SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY, ? - + SIGNING_KEY = settings.LOCKSMITH_SIGNING_KEY API_NAME = settings.LOCKSMITH_API_NAME ENDPOINT = settings.LOCKSMITH_HUB_URL.replace('analytics', 'accounts') + 'checkkey/' except: SIGNING_KEY = "" API_NAME = "" ENDPOINT = "" def check_key(key, signing_key=SIGNING_KEY, api=API_NAME, endpoint=ENDPOINT): try: apicall(endpoint, signing_key, - api=api, endpoint=endpoint, key=key + api=api, key=key ) + return True except urllib2.HTTPError as e: if e.code == 404: return None else: raise
9d7f2626294fbf25934e7dda4892b7ac13bd5555
fireplace/cards/tgt/warlock.py
fireplace/cards/tgt/warlock.py
from ..utils import * ## # Minions # Dreadsteed class AT_019: deathrattle = Summon(CONTROLLER, "AT_019") # Tiny Knight of Evil class AT_021: events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e")) # Wrathguard class AT_026: events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT)) # Wilfred Fizzlebang class AT_027: events = Draw(CONTROLLER).on( lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e") ) class AT_027e: cost = lambda self, i: 0 ## # Spells # Fist of Jaraxxus class AT_022: play = Hit(RANDOM_ENEMY_CHARACTER, 4) in_hand = Discard(SELF).on(play) # Demonfuse class AT_024: play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1) # Dark Bargain class AT_025: play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2)
from ..utils import * ## # Minions # Dreadsteed class AT_019: deathrattle = Summon(CONTROLLER, "AT_019") # Tiny Knight of Evil class AT_021: events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e")) # Void Crusher class AT_023: inspire = Destroy(RANDOM_ENEMY_MINION | RANDOM_FRIENDLY_MINION) # Wrathguard class AT_026: events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT)) # Wilfred Fizzlebang class AT_027: events = Draw(CONTROLLER).on( lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e") ) class AT_027e: cost = lambda self, i: 0 ## # Spells # Fist of Jaraxxus class AT_022: play = Hit(RANDOM_ENEMY_CHARACTER, 4) in_hand = Discard(SELF).on(play) # Demonfuse class AT_024: play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1) # Dark Bargain class AT_025: play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2)
Implement more TGT Warlock cards
Implement more TGT Warlock cards
Python
agpl-3.0
liujimj/fireplace,beheh/fireplace,Ragowit/fireplace,Ragowit/fireplace,amw2104/fireplace,amw2104/fireplace,smallnamespace/fireplace,smallnamespace/fireplace,oftc-ftw/fireplace,liujimj/fireplace,oftc-ftw/fireplace,Meerkov/fireplace,jleclanche/fireplace,Meerkov/fireplace,NightKev/fireplace
from ..utils import * ## # Minions # Dreadsteed class AT_019: deathrattle = Summon(CONTROLLER, "AT_019") # Tiny Knight of Evil class AT_021: events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e")) + # Void Crusher + class AT_023: + inspire = Destroy(RANDOM_ENEMY_MINION | RANDOM_FRIENDLY_MINION) + + # Wrathguard class AT_026: events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT)) + # Wilfred Fizzlebang class AT_027: events = Draw(CONTROLLER).on( lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e") ) class AT_027e: cost = lambda self, i: 0 ## # Spells # Fist of Jaraxxus class AT_022: play = Hit(RANDOM_ENEMY_CHARACTER, 4) in_hand = Discard(SELF).on(play) # Demonfuse class AT_024: play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1) # Dark Bargain class AT_025: play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2)
Implement more TGT Warlock cards
## Code Before: from ..utils import * ## # Minions # Dreadsteed class AT_019: deathrattle = Summon(CONTROLLER, "AT_019") # Tiny Knight of Evil class AT_021: events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e")) # Wrathguard class AT_026: events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT)) # Wilfred Fizzlebang class AT_027: events = Draw(CONTROLLER).on( lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e") ) class AT_027e: cost = lambda self, i: 0 ## # Spells # Fist of Jaraxxus class AT_022: play = Hit(RANDOM_ENEMY_CHARACTER, 4) in_hand = Discard(SELF).on(play) # Demonfuse class AT_024: play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1) # Dark Bargain class AT_025: play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2) ## Instruction: Implement more TGT Warlock cards ## Code After: from ..utils import * ## # Minions # Dreadsteed class AT_019: deathrattle = Summon(CONTROLLER, "AT_019") # Tiny Knight of Evil class AT_021: events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e")) # Void Crusher class AT_023: inspire = Destroy(RANDOM_ENEMY_MINION | RANDOM_FRIENDLY_MINION) # Wrathguard class AT_026: events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT)) # Wilfred Fizzlebang class AT_027: events = Draw(CONTROLLER).on( lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e") ) class AT_027e: cost = lambda self, i: 0 ## # Spells # Fist of Jaraxxus class AT_022: play = Hit(RANDOM_ENEMY_CHARACTER, 4) in_hand = Discard(SELF).on(play) # Demonfuse class AT_024: play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1) # Dark Bargain class AT_025: play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2)
from ..utils import * ## # Minions # Dreadsteed class AT_019: deathrattle = Summon(CONTROLLER, "AT_019") # Tiny Knight of Evil class AT_021: events = Discard(FRIENDLY).on(Buff(SELF, "AT_021e")) + # Void Crusher + class AT_023: + inspire = Destroy(RANDOM_ENEMY_MINION | RANDOM_FRIENDLY_MINION) + + # Wrathguard class AT_026: events = Damage(SELF).on(Hit(FRIENDLY_HERO, Damage.Args.AMOUNT)) + # Wilfred Fizzlebang class AT_027: events = Draw(CONTROLLER).on( lambda self, target, card, source: source is self.controller.hero.power and Buff(card, "AT_027e") ) class AT_027e: cost = lambda self, i: 0 ## # Spells # Fist of Jaraxxus class AT_022: play = Hit(RANDOM_ENEMY_CHARACTER, 4) in_hand = Discard(SELF).on(play) # Demonfuse class AT_024: play = Buff(TARGET, "AT_024e"), GainMana(OPPONENT, 1) # Dark Bargain class AT_025: play = Destroy(RANDOM(ENEMY_MINIONS) * 2), Discard(RANDOM(CONTROLLER_HAND) * 2)
ac0267d318939e4e7a62342b5dc6a09c3264ea74
flocker/node/_deploy.py
flocker/node/_deploy.py
class Deployment(object): """ """ _gear_client = None def start_container(self, application): """ Launch the supplied application as a `gear` unit. """ def stop_container(self, application): """ Stop and disable the application. """
from .gear import GearClient class Deployment(object): """ """ def __init__(self, gear_client=None): """ :param IGearClient gear_client: The gear client API to use in deployment operations. Default ``GearClient``. """ if gear_client is None: gear_client = GearClient(hostname=b'127.0.0.1') self._gear_client = gear_client def start_container(self, application): """ Launch the supplied application as a `gear` unit. """ def stop_container(self, application): """ Stop and disable the application. """
Allow a fake gear client to be supplied
Allow a fake gear client to be supplied
Python
apache-2.0
wallnerryan/flocker-profiles,hackday-profilers/flocker,lukemarsden/flocker,Azulinho/flocker,w4ngyi/flocker,hackday-profilers/flocker,LaynePeng/flocker,lukemarsden/flocker,AndyHuu/flocker,beni55/flocker,mbrukman/flocker,1d4Nf6/flocker,w4ngyi/flocker,beni55/flocker,adamtheturtle/flocker,hackday-profilers/flocker,achanda/flocker,moypray/flocker,achanda/flocker,runcom/flocker,LaynePeng/flocker,achanda/flocker,agonzalezro/flocker,1d4Nf6/flocker,LaynePeng/flocker,jml/flocker,beni55/flocker,jml/flocker,Azulinho/flocker,runcom/flocker,adamtheturtle/flocker,moypray/flocker,lukemarsden/flocker,agonzalezro/flocker,wallnerryan/flocker-profiles,adamtheturtle/flocker,agonzalezro/flocker,AndyHuu/flocker,w4ngyi/flocker,moypray/flocker,Azulinho/flocker,1d4Nf6/flocker,wallnerryan/flocker-profiles,mbrukman/flocker,mbrukman/flocker,jml/flocker,runcom/flocker,AndyHuu/flocker
+ + from .gear import GearClient class Deployment(object): """ """ + def __init__(self, gear_client=None): + """ + :param IGearClient gear_client: The gear client API to use in deployment + operations. Default ``GearClient``. + """ - _gear_client = None + if gear_client is None: + gear_client = GearClient(hostname=b'127.0.0.1') + self._gear_client = gear_client def start_container(self, application): """ Launch the supplied application as a `gear` unit. """ def stop_container(self, application): """ Stop and disable the application. """
Allow a fake gear client to be supplied
## Code Before: class Deployment(object): """ """ _gear_client = None def start_container(self, application): """ Launch the supplied application as a `gear` unit. """ def stop_container(self, application): """ Stop and disable the application. """ ## Instruction: Allow a fake gear client to be supplied ## Code After: from .gear import GearClient class Deployment(object): """ """ def __init__(self, gear_client=None): """ :param IGearClient gear_client: The gear client API to use in deployment operations. Default ``GearClient``. """ if gear_client is None: gear_client = GearClient(hostname=b'127.0.0.1') self._gear_client = gear_client def start_container(self, application): """ Launch the supplied application as a `gear` unit. """ def stop_container(self, application): """ Stop and disable the application. """
+ + from .gear import GearClient class Deployment(object): """ """ + def __init__(self, gear_client=None): + """ + :param IGearClient gear_client: The gear client API to use in deployment + operations. Default ``GearClient``. + """ - _gear_client = None ? ^ ^ + if gear_client is None: ? ^^^^^^^ ^^ + + gear_client = GearClient(hostname=b'127.0.0.1') + self._gear_client = gear_client def start_container(self, application): """ Launch the supplied application as a `gear` unit. """ def stop_container(self, application): """ Stop and disable the application. """
0b88b8e2cf1f841535a679bea249fba19cd2ba1d
maas/client/viscera/tests/test_sshkeys.py
maas/client/viscera/tests/test_sshkeys.py
"""Test for `maas.client.viscera.sshkeys`.""" from .. import sshkeys from ...testing import ( make_string_without_spaces, TestCase, ) from ..testing import bind def make_origin(): return bind(sshkeys.SSHKeys, sshkeys.SSHKey) class TestSSHKeys(TestCase): def test__sshkeys_create(self): """ SSHKeys.create() returns a new SSHKey. """ SSHKeys = make_origin().SSHKeys key = make_string_without_spaces() SSHKeys._handler.create.return_value = { "id": 1, "key": key, "keysource": "", } SSHKeys.create(key=key) SSHKeys._handler.create.assert_called_once_with( key=key )
"""Test for `maas.client.viscera.sshkeys`.""" import random from .. import sshkeys from ...testing import ( make_string_without_spaces, TestCase, ) from ..testing import bind from testtools.matchers import Equals def make_origin(): return bind(sshkeys.SSHKeys, sshkeys.SSHKey) class TestSSHKeys(TestCase): def test__sshkeys_create(self): """ SSHKeys.create() returns a new SSHKey. """ SSHKeys = make_origin().SSHKeys key = make_string_without_spaces() SSHKeys._handler.create.return_value = { "id": 1, "key": key, "keysource": "", } SSHKeys.create(key=key) SSHKeys._handler.create.assert_called_once_with( key=key ) def test__sshkeys_read(self): """ SSHKeys.read() returns a list of SSH keys. """ SSHKeys = make_origin().SSHKeys keys = [ { "id": random.randint(0, 100), "key": make_string_without_spaces(), "keysource": "", } for _ in range(3) ] SSHKeys._handler.read.return_value = keys ssh_keys = SSHKeys.read() self.assertThat(len(ssh_keys), Equals(3)) class TestSSHKey(TestCase): def test__sshkey_read(self): """ SSHKeys.read() returns a single SSH key. """ SSHKey = make_origin().SSHKey key_id = random.randint(0, 100) key_dict = { "id": key_id, "key": make_string_without_spaces(), "keysource": "", } SSHKey._handler.read.return_value = key_dict self.assertThat(SSHKey.read(id=key_id), Equals(SSHKey(key_dict)))
Add tests for .read methods
Add tests for .read methods
Python
agpl-3.0
alburnum/alburnum-maas-client,blakerouse/python-libmaas
"""Test for `maas.client.viscera.sshkeys`.""" + + import random from .. import sshkeys from ...testing import ( make_string_without_spaces, TestCase, ) from ..testing import bind + + from testtools.matchers import Equals def make_origin(): return bind(sshkeys.SSHKeys, sshkeys.SSHKey) class TestSSHKeys(TestCase): def test__sshkeys_create(self): """ SSHKeys.create() returns a new SSHKey. """ SSHKeys = make_origin().SSHKeys key = make_string_without_spaces() SSHKeys._handler.create.return_value = { "id": 1, "key": key, "keysource": "", } SSHKeys.create(key=key) SSHKeys._handler.create.assert_called_once_with( key=key ) + def test__sshkeys_read(self): + """ SSHKeys.read() returns a list of SSH keys. """ + SSHKeys = make_origin().SSHKeys + keys = [ + { + "id": random.randint(0, 100), + "key": make_string_without_spaces(), + "keysource": "", + } + for _ in range(3) + ] + SSHKeys._handler.read.return_value = keys + ssh_keys = SSHKeys.read() + self.assertThat(len(ssh_keys), Equals(3)) + + + class TestSSHKey(TestCase): + + def test__sshkey_read(self): + """ SSHKeys.read() returns a single SSH key. """ + SSHKey = make_origin().SSHKey + key_id = random.randint(0, 100) + key_dict = { + "id": key_id, + "key": make_string_without_spaces(), + "keysource": "", + } + SSHKey._handler.read.return_value = key_dict + self.assertThat(SSHKey.read(id=key_id), Equals(SSHKey(key_dict))) +
Add tests for .read methods
## Code Before: """Test for `maas.client.viscera.sshkeys`.""" from .. import sshkeys from ...testing import ( make_string_without_spaces, TestCase, ) from ..testing import bind def make_origin(): return bind(sshkeys.SSHKeys, sshkeys.SSHKey) class TestSSHKeys(TestCase): def test__sshkeys_create(self): """ SSHKeys.create() returns a new SSHKey. """ SSHKeys = make_origin().SSHKeys key = make_string_without_spaces() SSHKeys._handler.create.return_value = { "id": 1, "key": key, "keysource": "", } SSHKeys.create(key=key) SSHKeys._handler.create.assert_called_once_with( key=key ) ## Instruction: Add tests for .read methods ## Code After: """Test for `maas.client.viscera.sshkeys`.""" import random from .. import sshkeys from ...testing import ( make_string_without_spaces, TestCase, ) from ..testing import bind from testtools.matchers import Equals def make_origin(): return bind(sshkeys.SSHKeys, sshkeys.SSHKey) class TestSSHKeys(TestCase): def test__sshkeys_create(self): """ SSHKeys.create() returns a new SSHKey. """ SSHKeys = make_origin().SSHKeys key = make_string_without_spaces() SSHKeys._handler.create.return_value = { "id": 1, "key": key, "keysource": "", } SSHKeys.create(key=key) SSHKeys._handler.create.assert_called_once_with( key=key ) def test__sshkeys_read(self): """ SSHKeys.read() returns a list of SSH keys. """ SSHKeys = make_origin().SSHKeys keys = [ { "id": random.randint(0, 100), "key": make_string_without_spaces(), "keysource": "", } for _ in range(3) ] SSHKeys._handler.read.return_value = keys ssh_keys = SSHKeys.read() self.assertThat(len(ssh_keys), Equals(3)) class TestSSHKey(TestCase): def test__sshkey_read(self): """ SSHKeys.read() returns a single SSH key. """ SSHKey = make_origin().SSHKey key_id = random.randint(0, 100) key_dict = { "id": key_id, "key": make_string_without_spaces(), "keysource": "", } SSHKey._handler.read.return_value = key_dict self.assertThat(SSHKey.read(id=key_id), Equals(SSHKey(key_dict)))
"""Test for `maas.client.viscera.sshkeys`.""" + + import random from .. import sshkeys from ...testing import ( make_string_without_spaces, TestCase, ) from ..testing import bind + + from testtools.matchers import Equals def make_origin(): return bind(sshkeys.SSHKeys, sshkeys.SSHKey) class TestSSHKeys(TestCase): def test__sshkeys_create(self): """ SSHKeys.create() returns a new SSHKey. """ SSHKeys = make_origin().SSHKeys key = make_string_without_spaces() SSHKeys._handler.create.return_value = { "id": 1, "key": key, "keysource": "", } SSHKeys.create(key=key) SSHKeys._handler.create.assert_called_once_with( key=key ) + + def test__sshkeys_read(self): + """ SSHKeys.read() returns a list of SSH keys. """ + SSHKeys = make_origin().SSHKeys + keys = [ + { + "id": random.randint(0, 100), + "key": make_string_without_spaces(), + "keysource": "", + } + for _ in range(3) + ] + SSHKeys._handler.read.return_value = keys + ssh_keys = SSHKeys.read() + self.assertThat(len(ssh_keys), Equals(3)) + + + class TestSSHKey(TestCase): + + def test__sshkey_read(self): + """ SSHKeys.read() returns a single SSH key. """ + SSHKey = make_origin().SSHKey + key_id = random.randint(0, 100) + key_dict = { + "id": key_id, + "key": make_string_without_spaces(), + "keysource": "", + } + SSHKey._handler.read.return_value = key_dict + self.assertThat(SSHKey.read(id=key_id), Equals(SSHKey(key_dict)))
7a20ee42aae2d2a6f5766ab4ec1ee4ef33fe14c8
madam_rest/__init__.py
madam_rest/__init__.py
from flask import Flask from madam import Madam app = Flask(__name__) app.from_object('config') asset_manager = Madam() asset_storage = app.config['ASSET_STORAGE'] from madam_rest import views
import madam from flask import Flask app = Flask(__name__) app.from_object('config') asset_manager = madam.Madam() asset_storage = madam.core.ShelveStorage(app.config['ASSET_STORAGE_PATH']) from madam_rest import views
Create shelve asset storage by default.
Create shelve asset storage by default.
Python
agpl-3.0
eseifert/madam-rest
+ import madam from flask import Flask - from madam import Madam app = Flask(__name__) app.from_object('config') - asset_manager = Madam() + asset_manager = madam.Madam() - asset_storage = app.config['ASSET_STORAGE'] + asset_storage = madam.core.ShelveStorage(app.config['ASSET_STORAGE_PATH']) from madam_rest import views
Create shelve asset storage by default.
## Code Before: from flask import Flask from madam import Madam app = Flask(__name__) app.from_object('config') asset_manager = Madam() asset_storage = app.config['ASSET_STORAGE'] from madam_rest import views ## Instruction: Create shelve asset storage by default. ## Code After: import madam from flask import Flask app = Flask(__name__) app.from_object('config') asset_manager = madam.Madam() asset_storage = madam.core.ShelveStorage(app.config['ASSET_STORAGE_PATH']) from madam_rest import views
+ import madam from flask import Flask - from madam import Madam app = Flask(__name__) app.from_object('config') - asset_manager = Madam() + asset_manager = madam.Madam() ? ++++++ - asset_storage = app.config['ASSET_STORAGE'] + asset_storage = madam.core.ShelveStorage(app.config['ASSET_STORAGE_PATH']) from madam_rest import views
b6b9c6f3f8faaade428d044f93acd25edade075d
tools/pdtools/pdtools/__main__.py
tools/pdtools/pdtools/__main__.py
import os import click from . import chute from . import device from . import routers from . import store PDSERVER_URL = os.environ.get("PDSERVER_URL", "https://paradrop.org") @click.group() @click.pass_context def root(ctx): """ Paradrop command line utility. Environment Variables PDSERVER_URL ParaDrop controller URL [default: https://paradrop.org] """ # Options can be parsed from PDTOOLS_* environment variables. ctx.auto_envvar_prefix = 'PDTOOLS' # Respond to both -h and --help for all commands. ctx.help_option_names = ['-h', '--help'] ctx.obj = { 'pdserver_url': PDSERVER_URL } root.add_command(chute.chute) root.add_command(device.device) root.add_command(routers.routers) root.add_command(store.store) def main(): """ Entry point for the pdtools Python package. """ root() if __name__ == "__main__": main()
import os import click from . import chute from . import device from . import routers from . import store PDSERVER_URL = os.environ.get("PDSERVER_URL", "https://paradrop.org") CONTEXT_SETTINGS = dict( # Options can be parsed from PDTOOLS_* environment variables. auto_envvar_prefix = 'PDTOOLS', # Respond to both -h and --help for all commands. help_option_names = ['-h', '--help'], obj = { 'pdserver_url': PDSERVER_URL } ) @click.group(context_settings=CONTEXT_SETTINGS) def root(ctx): """ Paradrop command line utility. Environment Variables PDSERVER_URL ParaDrop controller URL [default: https://paradrop.org] """ pass root.add_command(chute.chute) root.add_command(device.device) root.add_command(routers.routers) root.add_command(store.store) def main(): """ Entry point for the pdtools Python package. """ root() if __name__ == "__main__": main()
Enable '-h' help option from the pdtools root level.
Enable '-h' help option from the pdtools root level.
Python
apache-2.0
ParadropLabs/Paradrop,ParadropLabs/Paradrop,ParadropLabs/Paradrop
import os import click from . import chute from . import device from . import routers from . import store PDSERVER_URL = os.environ.get("PDSERVER_URL", "https://paradrop.org") + CONTEXT_SETTINGS = dict( + # Options can be parsed from PDTOOLS_* environment variables. + auto_envvar_prefix = 'PDTOOLS', - @click.group() - @click.pass_context + # Respond to both -h and --help for all commands. + help_option_names = ['-h', '--help'], + + obj = { + 'pdserver_url': PDSERVER_URL + } + ) + + + @click.group(context_settings=CONTEXT_SETTINGS) def root(ctx): """ Paradrop command line utility. Environment Variables PDSERVER_URL ParaDrop controller URL [default: https://paradrop.org] """ + pass - # Options can be parsed from PDTOOLS_* environment variables. - ctx.auto_envvar_prefix = 'PDTOOLS' - - # Respond to both -h and --help for all commands. - ctx.help_option_names = ['-h', '--help'] - - ctx.obj = { - 'pdserver_url': PDSERVER_URL - } root.add_command(chute.chute) root.add_command(device.device) root.add_command(routers.routers) root.add_command(store.store) def main(): """ Entry point for the pdtools Python package. """ root() if __name__ == "__main__": main()
Enable '-h' help option from the pdtools root level.
## Code Before: import os import click from . import chute from . import device from . import routers from . import store PDSERVER_URL = os.environ.get("PDSERVER_URL", "https://paradrop.org") @click.group() @click.pass_context def root(ctx): """ Paradrop command line utility. Environment Variables PDSERVER_URL ParaDrop controller URL [default: https://paradrop.org] """ # Options can be parsed from PDTOOLS_* environment variables. ctx.auto_envvar_prefix = 'PDTOOLS' # Respond to both -h and --help for all commands. ctx.help_option_names = ['-h', '--help'] ctx.obj = { 'pdserver_url': PDSERVER_URL } root.add_command(chute.chute) root.add_command(device.device) root.add_command(routers.routers) root.add_command(store.store) def main(): """ Entry point for the pdtools Python package. """ root() if __name__ == "__main__": main() ## Instruction: Enable '-h' help option from the pdtools root level. ## Code After: import os import click from . import chute from . import device from . import routers from . import store PDSERVER_URL = os.environ.get("PDSERVER_URL", "https://paradrop.org") CONTEXT_SETTINGS = dict( # Options can be parsed from PDTOOLS_* environment variables. auto_envvar_prefix = 'PDTOOLS', # Respond to both -h and --help for all commands. help_option_names = ['-h', '--help'], obj = { 'pdserver_url': PDSERVER_URL } ) @click.group(context_settings=CONTEXT_SETTINGS) def root(ctx): """ Paradrop command line utility. Environment Variables PDSERVER_URL ParaDrop controller URL [default: https://paradrop.org] """ pass root.add_command(chute.chute) root.add_command(device.device) root.add_command(routers.routers) root.add_command(store.store) def main(): """ Entry point for the pdtools Python package. """ root() if __name__ == "__main__": main()
import os import click from . import chute from . import device from . import routers from . import store PDSERVER_URL = os.environ.get("PDSERVER_URL", "https://paradrop.org") + CONTEXT_SETTINGS = dict( + # Options can be parsed from PDTOOLS_* environment variables. + auto_envvar_prefix = 'PDTOOLS', - @click.group() - @click.pass_context + # Respond to both -h and --help for all commands. + help_option_names = ['-h', '--help'], + + obj = { + 'pdserver_url': PDSERVER_URL + } + ) + + + @click.group(context_settings=CONTEXT_SETTINGS) def root(ctx): """ Paradrop command line utility. Environment Variables PDSERVER_URL ParaDrop controller URL [default: https://paradrop.org] """ + pass - # Options can be parsed from PDTOOLS_* environment variables. - ctx.auto_envvar_prefix = 'PDTOOLS' - - # Respond to both -h and --help for all commands. - ctx.help_option_names = ['-h', '--help'] - - ctx.obj = { - 'pdserver_url': PDSERVER_URL - } root.add_command(chute.chute) root.add_command(device.device) root.add_command(routers.routers) root.add_command(store.store) def main(): """ Entry point for the pdtools Python package. """ root() if __name__ == "__main__": main()
2050385a5f5fdcffe333ae17463d6469af0b5cd8
mopidy/__init__.py
mopidy/__init__.py
from __future__ import unicode_literals import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'Mopidy requires Python >= 2.7, < 3, but found %s' % '.'.join(map(str, sys.version_info[:3]))) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'Mopidy requires Pykka >= 1.1, < 2, but found %s' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
from __future__ import unicode_literals import platform import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
Update Python and Pykka version check error messages
Update Python and Pykka version check error messages
Python
apache-2.0
jmarsik/mopidy,adamcik/mopidy,priestd09/mopidy,woutervanwijk/mopidy,glogiotatidis/mopidy,tkem/mopidy,bencevans/mopidy,hkariti/mopidy,jcass77/mopidy,pacificIT/mopidy,vrs01/mopidy,ali/mopidy,bencevans/mopidy,mokieyue/mopidy,rawdlite/mopidy,swak/mopidy,tkem/mopidy,rawdlite/mopidy,jcass77/mopidy,woutervanwijk/mopidy,swak/mopidy,swak/mopidy,SuperStarPL/mopidy,SuperStarPL/mopidy,dbrgn/mopidy,mopidy/mopidy,bencevans/mopidy,jcass77/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,jodal/mopidy,mopidy/mopidy,ali/mopidy,tkem/mopidy,pacificIT/mopidy,quartz55/mopidy,dbrgn/mopidy,ali/mopidy,rawdlite/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,rawdlite/mopidy,priestd09/mopidy,jodal/mopidy,priestd09/mopidy,dbrgn/mopidy,hkariti/mopidy,jmarsik/mopidy,mopidy/mopidy,ZenithDK/mopidy,jmarsik/mopidy,dbrgn/mopidy,quartz55/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,bacontext/mopidy,hkariti/mopidy,kingosticks/mopidy,tkem/mopidy,jodal/mopidy,jmarsik/mopidy,diandiankan/mopidy,diandiankan/mopidy,ZenithDK/mopidy,vrs01/mopidy,ZenithDK/mopidy,vrs01/mopidy,bacontext/mopidy,ali/mopidy,bencevans/mopidy,hkariti/mopidy,bacontext/mopidy,swak/mopidy,quartz55/mopidy,mokieyue/mopidy,diandiankan/mopidy,adamcik/mopidy,glogiotatidis/mopidy,kingosticks/mopidy,adamcik/mopidy,quartz55/mopidy,bacontext/mopidy,vrs01/mopidy,mokieyue/mopidy,diandiankan/mopidy
from __future__ import unicode_literals + import platform import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( - 'Mopidy requires Python >= 2.7, < 3, but found %s' % + 'ERROR: Mopidy requires Python 2.7, but found %s.' % - '.'.join(map(str, sys.version_info[:3]))) + platform.python_version()) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( - 'Mopidy requires Pykka >= 1.1, < 2, but found %s' % pykka.__version__) + 'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' % + pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
Update Python and Pykka version check error messages
## Code Before: from __future__ import unicode_literals import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'Mopidy requires Python >= 2.7, < 3, but found %s' % '.'.join(map(str, sys.version_info[:3]))) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'Mopidy requires Pykka >= 1.1, < 2, but found %s' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4' ## Instruction: Update Python and Pykka version check error messages ## Code After: from __future__ import unicode_literals import platform import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( 'ERROR: Mopidy requires Python 2.7, but found %s.' % platform.python_version()) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( 'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' % pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
from __future__ import unicode_literals + import platform import sys import warnings from distutils.version import StrictVersion as SV import pykka if not (2, 7) <= sys.version_info < (3,): sys.exit( - 'Mopidy requires Python >= 2.7, < 3, but found %s' % ? --- ----- + 'ERROR: Mopidy requires Python 2.7, but found %s.' % ? +++++++ + - '.'.join(map(str, sys.version_info[:3]))) + platform.python_version()) if (isinstance(pykka.__version__, basestring) and not SV('1.1') <= SV(pykka.__version__) < SV('2.0')): sys.exit( - 'Mopidy requires Pykka >= 1.1, < 2, but found %s' % pykka.__version__) ? ------------------- + 'ERROR: Mopidy requires Pykka >= 1.1, < 2, but found %s.' % ? +++++++ + + pykka.__version__) warnings.filterwarnings('ignore', 'could not open display') __version__ = '0.19.4'
5c1fad9e6a75ee43d3a3b7bce6c9249cf601b4b9
tendrl/commons/objects/cluster_tendrl_context/__init__.py
tendrl/commons/objects/cluster_tendrl_context/__init__.py
import json import logging import os import socket import uuid from tendrl.commons.etcdobj import EtcdObj from tendrl.commons.utils import cmd_utils from tendrl.commons import objects LOG = logging.getLogger(__name__) class ClusterTendrlContext(objects.BaseObject): def __init__( self, integration_id=None, cluster_id=None, cluster_name=None, sds_name=None, sds_version=None, *args, **kwargs): super(ClusterTendrlContext, self).__init__(*args, **kwargs) self.value = 'clusters/%s/TendrlContext' # integration_id is the Tendrl generated cluster UUID self.integration_id = integration_id self.cluster_id=cluster_id self.cluster_name=cluster_name self.sds_name=sds_name self.sds_version=sds_version self._etcd_cls = _ClusterTendrlContextEtcd class _ClusterTendrlContextEtcd(EtcdObj): """A table of the cluster tendrl context, lazily updated """ __name__ = 'clusters/%s/TendrlContext' _tendrl_cls = ClusterTendrlContext def render(self): self.__name__ = self.__name__ % NS.node_context.node_id return super(_ClusterTendrlContextEtcd, self).render()
import json import logging import os import socket import uuid from tendrl.commons.etcdobj import EtcdObj from tendrl.commons.utils import cmd_utils from tendrl.commons import objects LOG = logging.getLogger(__name__) class ClusterTendrlContext(objects.BaseObject): def __init__( self, integration_id=None, cluster_id=None, cluster_name=None, sds_name=None, sds_version=None, *args, **kwargs): super(ClusterTendrlContext, self).__init__(*args, **kwargs) self.value = 'clusters/%s/TendrlContext' # integration_id is the Tendrl generated cluster UUID self.integration_id = integration_id self.cluster_id=cluster_id self.cluster_name=cluster_name self.sds_name=sds_name self.sds_version=sds_version self._etcd_cls = _ClusterTendrlContextEtcd class _ClusterTendrlContextEtcd(EtcdObj): """A table of the cluster tendrl context, lazily updated """ __name__ = 'clusters/%s/TendrlContext' _tendrl_cls = ClusterTendrlContext def render(self): self.__name__ = self.__name__ % NS.tendrl_context.integration_id return super(_ClusterTendrlContextEtcd, self).render()
Write cluster_tendrl_context to proper location
Write cluster_tendrl_context to proper location Currently it is written to clusters/<node-id>/TendrlContext This is fixed in this PR tendrl-bug-id: Tendrl/commons#302 Signed-off-by: nnDarshan <d2c6d450ab98b078f2f1942c995e6d92dd504bc8@gmail.com>
Python
lgpl-2.1
r0h4n/commons,Tendrl/commons,rishubhjain/commons
import json import logging import os import socket import uuid from tendrl.commons.etcdobj import EtcdObj from tendrl.commons.utils import cmd_utils from tendrl.commons import objects LOG = logging.getLogger(__name__) class ClusterTendrlContext(objects.BaseObject): def __init__( self, integration_id=None, cluster_id=None, cluster_name=None, sds_name=None, sds_version=None, *args, **kwargs): super(ClusterTendrlContext, self).__init__(*args, **kwargs) self.value = 'clusters/%s/TendrlContext' # integration_id is the Tendrl generated cluster UUID self.integration_id = integration_id self.cluster_id=cluster_id self.cluster_name=cluster_name self.sds_name=sds_name self.sds_version=sds_version self._etcd_cls = _ClusterTendrlContextEtcd class _ClusterTendrlContextEtcd(EtcdObj): """A table of the cluster tendrl context, lazily updated """ __name__ = 'clusters/%s/TendrlContext' _tendrl_cls = ClusterTendrlContext def render(self): - self.__name__ = self.__name__ % NS.node_context.node_id + self.__name__ = self.__name__ % NS.tendrl_context.integration_id return super(_ClusterTendrlContextEtcd, self).render()
Write cluster_tendrl_context to proper location
## Code Before: import json
import logging
import os
import socket
import uuid

from tendrl.commons.etcdobj import EtcdObj
from tendrl.commons.utils import cmd_utils

from tendrl.commons import objects

LOG = logging.getLogger(__name__)


class ClusterTendrlContext(objects.BaseObject):

    def __init__(
        self,
        integration_id=None,
        cluster_id=None,
        cluster_name=None,
        sds_name=None,
        sds_version=None,
        *args,
        **kwargs):
        super(ClusterTendrlContext, self).__init__(*args, **kwargs)

        self.value = 'clusters/%s/TendrlContext'

        # integration_id is the Tendrl generated cluster UUID
        self.integration_id = integration_id
        self.cluster_id=cluster_id
        self.cluster_name=cluster_name
        self.sds_name=sds_name
        self.sds_version=sds_version

        self._etcd_cls = _ClusterTendrlContextEtcd


class _ClusterTendrlContextEtcd(EtcdObj):
    """A table of the cluster tendrl context, lazily updated

    """
    __name__ = 'clusters/%s/TendrlContext'
    _tendrl_cls = ClusterTendrlContext

    def render(self):
        self.__name__ = self.__name__ % NS.node_context.node_id
        return super(_ClusterTendrlContextEtcd, self).render()
 ## Instruction: Write cluster_tendrl_context to proper location ## Code After: import json
import logging
import os
import socket
import uuid

from tendrl.commons.etcdobj import EtcdObj
from tendrl.commons.utils import cmd_utils

from tendrl.commons import objects

LOG = logging.getLogger(__name__)


class ClusterTendrlContext(objects.BaseObject):

    def __init__(
        self,
        integration_id=None,
        cluster_id=None,
        cluster_name=None,
        sds_name=None,
        sds_version=None,
        *args,
        **kwargs):
        super(ClusterTendrlContext, self).__init__(*args, **kwargs)

        self.value = 'clusters/%s/TendrlContext'

        # integration_id is the Tendrl generated cluster UUID
        self.integration_id = integration_id
        self.cluster_id=cluster_id
        self.cluster_name=cluster_name
        self.sds_name=sds_name
        self.sds_version=sds_version

        self._etcd_cls = _ClusterTendrlContextEtcd


class _ClusterTendrlContextEtcd(EtcdObj):
    """A table of the cluster tendrl context, lazily updated

    """
    __name__ = 'clusters/%s/TendrlContext'
    _tendrl_cls = ClusterTendrlContext

    def render(self):
        self.__name__ = self.__name__ % NS.tendrl_context.integration_id
        return super(_ClusterTendrlContextEtcd, self).render()
import json import logging import os import socket import uuid from tendrl.commons.etcdobj import EtcdObj from tendrl.commons.utils import cmd_utils from tendrl.commons import objects LOG = logging.getLogger(__name__) class ClusterTendrlContext(objects.BaseObject): def __init__( self, integration_id=None, cluster_id=None, cluster_name=None, sds_name=None, sds_version=None, *args, **kwargs): super(ClusterTendrlContext, self).__init__(*args, **kwargs) self.value = 'clusters/%s/TendrlContext' # integration_id is the Tendrl generated cluster UUID self.integration_id = integration_id self.cluster_id=cluster_id self.cluster_name=cluster_name self.sds_name=sds_name self.sds_version=sds_version self._etcd_cls = _ClusterTendrlContextEtcd class _ClusterTendrlContextEtcd(EtcdObj): """A table of the cluster tendrl context, lazily updated """ __name__ = 'clusters/%s/TendrlContext' _tendrl_cls = ClusterTendrlContext def render(self): - self.__name__ = self.__name__ % NS.node_context.node_id ? - ^ ^^ + self.__name__ = self.__name__ % NS.tendrl_context.integration_id ? ++ ^^ + +++++++ ^ return super(_ClusterTendrlContextEtcd, self).render()
c987ed375da13f53928157f14528bed0c148eeac
tasks.py
tasks.py
import asyncio import threading class Tasks: loop = asyncio.new_event_loop() @classmethod def _run(cls): try: cls.loop.run_forever() finally: cls.loop.close() @classmethod def do(cls, func, *args, **kwargs): cls.loop.call_soon(lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def later(cls, func, *args, after=None, **kwargs): cls.loop.call_later(after, lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def periodic(cls, func, *args, interval=None, **kwargs): @asyncio.coroutine def f(): while True: yield from asyncio.sleep(interval) func(*args, **kwargs) cls.loop.create_task(f()) cls.loop._write_to_self() threading.Thread(name="tasks", target=Tasks._run, daemon=True).start()
import asyncio import threading class Tasks: loop = asyncio.new_event_loop() @classmethod def _run(cls): asyncio.set_event_loop(cls.loop) try: cls.loop.run_forever() finally: cls.loop.close() @classmethod def do(cls, func, *args, **kwargs): cls.loop.call_soon(lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def later(cls, func, *args, after=None, **kwargs): cls.loop.call_later(after, lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def periodic(cls, func, *args, interval=None, **kwargs): @asyncio.coroutine def f(): while True: yield from asyncio.sleep(interval) func(*args, **kwargs) cls.loop.create_task(f()) cls.loop._write_to_self() threading.Thread(name="tasks", target=Tasks._run, daemon=True).start()
Set implicit loop for Python <3.6
Set implicit loop for Python <3.6
Python
apache-2.0
Charcoal-SE/SmokeDetector,Charcoal-SE/SmokeDetector
import asyncio import threading class Tasks: loop = asyncio.new_event_loop() @classmethod def _run(cls): + asyncio.set_event_loop(cls.loop) + try: cls.loop.run_forever() finally: cls.loop.close() @classmethod def do(cls, func, *args, **kwargs): cls.loop.call_soon(lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def later(cls, func, *args, after=None, **kwargs): cls.loop.call_later(after, lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def periodic(cls, func, *args, interval=None, **kwargs): @asyncio.coroutine def f(): while True: yield from asyncio.sleep(interval) func(*args, **kwargs) cls.loop.create_task(f()) cls.loop._write_to_self() threading.Thread(name="tasks", target=Tasks._run, daemon=True).start()
Set implicit loop for Python <3.6
## Code Before: import asyncio import threading class Tasks: loop = asyncio.new_event_loop() @classmethod def _run(cls): try: cls.loop.run_forever() finally: cls.loop.close() @classmethod def do(cls, func, *args, **kwargs): cls.loop.call_soon(lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def later(cls, func, *args, after=None, **kwargs): cls.loop.call_later(after, lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def periodic(cls, func, *args, interval=None, **kwargs): @asyncio.coroutine def f(): while True: yield from asyncio.sleep(interval) func(*args, **kwargs) cls.loop.create_task(f()) cls.loop._write_to_self() threading.Thread(name="tasks", target=Tasks._run, daemon=True).start() ## Instruction: Set implicit loop for Python <3.6 ## Code After: import asyncio import threading class Tasks: loop = asyncio.new_event_loop() @classmethod def _run(cls): asyncio.set_event_loop(cls.loop) try: cls.loop.run_forever() finally: cls.loop.close() @classmethod def do(cls, func, *args, **kwargs): cls.loop.call_soon(lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def later(cls, func, *args, after=None, **kwargs): cls.loop.call_later(after, lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def periodic(cls, func, *args, interval=None, **kwargs): @asyncio.coroutine def f(): while True: yield from asyncio.sleep(interval) func(*args, **kwargs) cls.loop.create_task(f()) cls.loop._write_to_self() threading.Thread(name="tasks", target=Tasks._run, daemon=True).start()
import asyncio import threading class Tasks: loop = asyncio.new_event_loop() @classmethod def _run(cls): + asyncio.set_event_loop(cls.loop) + try: cls.loop.run_forever() finally: cls.loop.close() @classmethod def do(cls, func, *args, **kwargs): cls.loop.call_soon(lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def later(cls, func, *args, after=None, **kwargs): cls.loop.call_later(after, lambda: func(*args, **kwargs)) cls.loop._write_to_self() @classmethod def periodic(cls, func, *args, interval=None, **kwargs): @asyncio.coroutine def f(): while True: yield from asyncio.sleep(interval) func(*args, **kwargs) cls.loop.create_task(f()) cls.loop._write_to_self() threading.Thread(name="tasks", target=Tasks._run, daemon=True).start()
5fb365333711f7e999f71d53061ae14c386e575c
src/waldur_core/core/api_groups_mapping.py
src/waldur_core/core/api_groups_mapping.py
API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], }
API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], 'accounting': ['/api/invoices/', '/api/invoice-items/',], }
Add accounting group to apidocs
Add accounting group to apidocs
Python
mit
opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/waldur-mastermind
API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], + 'accounting': ['/api/invoices/', '/api/invoice-items/',], }
Add accounting group to apidocs
## Code Before: API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], } ## Instruction: Add accounting group to apidocs ## Code After: API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', 
'/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], 'accounting': ['/api/invoices/', '/api/invoice-items/',], }
API_GROUPS = { 'authentication': ['/api-auth/', '/api/auth-valimo/',], 'user': ['/api/users/', '/api/user-invitations/', '/api/user-counters/',], 'organization': [ '/api/customers/', '/api/customer-permissions-log/', '/api/customer-permissions-reviews/', '/api/customer-permissions/', ], 'marketplace': [ '/api/marketplace-bookings/', '/api/marketplace-cart-items/', '/api/marketplace-categories/', '/api/marketplace-category-component-usages/', '/api/marketplace-checklists-categories/', '/api/marketplace-checklists/', '/api/marketplace-component-usages/', '/api/marketplace-offering-files/', '/api/marketplace-offerings/', '/api/marketplace-order-items/', '/api/marketplace-orders/', '/api/marketplace-plans/', '/api/marketplace-plugins/', '/api/marketplace-public-api/', '/api/marketplace-resource-offerings/', '/api/marketplace-resources/', '/api/marketplace-screenshots/', '/api/marketplace-service-providers/', ], 'reporting': [ '/api/support-feedback-average-report/', '/api/support-feedback-report/', ], + 'accounting': ['/api/invoices/', '/api/invoice-items/',], }
d7001ccab0879e17308bf2dc945b5fd3b726be27
statblock/dice.py
statblock/dice.py
from random import random class Die: """ Abstracts the random dice throw. Roll will produce the result. The die can be further parametrized by a multiplicator and/or a modifier, like 2 * Die(8) +4. """ def __init__(self, number, multiplicator=1, modifier=0): self.number = number self.multiplicator = multiplicator self.modifier = modifier def roll(self): return self.multiplicator * random.choice(range(1, self.number + 1)) + self.modifier def __rmul__(self, other): return Die(self.number, multiplicator=other, modifier=self.modifier) def __add__(self, other): return Die(self.number, multiplicator=self.multiplicator, modifier=other) def __call__(self): return self.roll() def __eq__(self, other): return (other.number == self.number and other.multiplicator == self.multiplicator and other.modifier == self.modifier) @classmethod def parse(cls, text): return cls.__new__() def __repr__(self): return "%sd%s+%s" % (self.multiplicator, self.number, self.modifier) d4 = Die(4) d6 = Die(6) d8 = Die(8) d10 = Die(10) d12 = Die(12) d20 = Die(20) d100 = Die(100)
from random import random class Die: """ Abstracts the random dice throw. Roll will produce the result. The die can be further parametrized by a multiplicator and/or a modifier, like 2 * Die(8) +4. """ def __init__(self, number, multiplicator=1, modifier=0): self.number = number self.multiplicator = multiplicator self.modifier = modifier def roll(self): return self.multiplicator * random.choice(range(1, self.number + 1)) + self.modifier def __rmul__(self, other): return Die(self.number, multiplicator=other, modifier=self.modifier) def __add__(self, other): return Die(self.number, multiplicator=self.multiplicator, modifier=other) def __call__(self): return self.roll() def __eq__(self, other): return (other.number == self.number and other.multiplicator == self.multiplicator and other.modifier == self.modifier) @classmethod def parse(cls, text): return cls.__new__() def __repr__(self): base = "%sd%s" % (self.multiplicator, self.number) if self.modifier > 0: return base + ("+%s" % self.modifier) return base d4 = Die(4) d6 = Die(6) d8 = Die(8) d10 = Die(10) d12 = Die(12) d20 = Die(20) d100 = Die(100)
Write the critical multiplier or the range when the damage gets converted into a String
Write the critical multiplier or the range when the damage gets converted into a String
Python
mit
bkittelmann/statblock
from random import random class Die: """ Abstracts the random dice throw. Roll will produce the result. The die can be further parametrized by a multiplicator and/or a modifier, like 2 * Die(8) +4. """ def __init__(self, number, multiplicator=1, modifier=0): self.number = number self.multiplicator = multiplicator self.modifier = modifier def roll(self): return self.multiplicator * random.choice(range(1, self.number + 1)) + self.modifier def __rmul__(self, other): return Die(self.number, multiplicator=other, modifier=self.modifier) def __add__(self, other): return Die(self.number, multiplicator=self.multiplicator, modifier=other) def __call__(self): return self.roll() def __eq__(self, other): return (other.number == self.number and other.multiplicator == self.multiplicator and other.modifier == self.modifier) @classmethod def parse(cls, text): return cls.__new__() def __repr__(self): - return "%sd%s+%s" % (self.multiplicator, self.number, self.modifier) + base = "%sd%s" % (self.multiplicator, self.number) + if self.modifier > 0: + return base + ("+%s" % self.modifier) + return base d4 = Die(4) d6 = Die(6) d8 = Die(8) d10 = Die(10) d12 = Die(12) d20 = Die(20) d100 = Die(100)
Write the critical multiplier or the range when the damage gets converted into a String
## Code Before: from random import random class Die: """ Abstracts the random dice throw. Roll will produce the result. The die can be further parametrized by a multiplicator and/or a modifier, like 2 * Die(8) +4. """ def __init__(self, number, multiplicator=1, modifier=0): self.number = number self.multiplicator = multiplicator self.modifier = modifier def roll(self): return self.multiplicator * random.choice(range(1, self.number + 1)) + self.modifier def __rmul__(self, other): return Die(self.number, multiplicator=other, modifier=self.modifier) def __add__(self, other): return Die(self.number, multiplicator=self.multiplicator, modifier=other) def __call__(self): return self.roll() def __eq__(self, other): return (other.number == self.number and other.multiplicator == self.multiplicator and other.modifier == self.modifier) @classmethod def parse(cls, text): return cls.__new__() def __repr__(self): return "%sd%s+%s" % (self.multiplicator, self.number, self.modifier) d4 = Die(4) d6 = Die(6) d8 = Die(8) d10 = Die(10) d12 = Die(12) d20 = Die(20) d100 = Die(100) ## Instruction: Write the critical multiplier or the range when the damage gets converted into a String ## Code After: from random import random class Die: """ Abstracts the random dice throw. Roll will produce the result. The die can be further parametrized by a multiplicator and/or a modifier, like 2 * Die(8) +4. 
""" def __init__(self, number, multiplicator=1, modifier=0): self.number = number self.multiplicator = multiplicator self.modifier = modifier def roll(self): return self.multiplicator * random.choice(range(1, self.number + 1)) + self.modifier def __rmul__(self, other): return Die(self.number, multiplicator=other, modifier=self.modifier) def __add__(self, other): return Die(self.number, multiplicator=self.multiplicator, modifier=other) def __call__(self): return self.roll() def __eq__(self, other): return (other.number == self.number and other.multiplicator == self.multiplicator and other.modifier == self.modifier) @classmethod def parse(cls, text): return cls.__new__() def __repr__(self): base = "%sd%s" % (self.multiplicator, self.number) if self.modifier > 0: return base + ("+%s" % self.modifier) return base d4 = Die(4) d6 = Die(6) d8 = Die(8) d10 = Die(10) d12 = Die(12) d20 = Die(20) d100 = Die(100)
from random import random class Die: """ Abstracts the random dice throw. Roll will produce the result. The die can be further parametrized by a multiplicator and/or a modifier, like 2 * Die(8) +4. """ def __init__(self, number, multiplicator=1, modifier=0): self.number = number self.multiplicator = multiplicator self.modifier = modifier def roll(self): return self.multiplicator * random.choice(range(1, self.number + 1)) + self.modifier def __rmul__(self, other): return Die(self.number, multiplicator=other, modifier=self.modifier) def __add__(self, other): return Die(self.number, multiplicator=self.multiplicator, modifier=other) def __call__(self): return self.roll() def __eq__(self, other): return (other.number == self.number and other.multiplicator == self.multiplicator and other.modifier == self.modifier) @classmethod def parse(cls, text): return cls.__new__() def __repr__(self): - return "%sd%s+%s" % (self.multiplicator, self.number, self.modifier) ? ^ ^^^^ --- --------------- + base = "%sd%s" % (self.multiplicator, self.number) ? ^^^ ^^ + if self.modifier > 0: + return base + ("+%s" % self.modifier) + return base d4 = Die(4) d6 = Die(6) d8 = Die(8) d10 = Die(10) d12 = Die(12) d20 = Die(20) d100 = Die(100)
306c56883939be640512f3d835b8d3f6b93b4ad7
judge/signals.py
judge/signals.py
from django.core.cache.utils import make_template_fragment_key from django.db.models.signals import post_save from django.dispatch import receiver from django.core.cache import cache from .models import Problem, Contest, Submission, Organization from .caching import update_submission @receiver(post_save, sender=Problem) def problem_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('problem_html', (instance.id,))) cache.delete(make_template_fragment_key('submission_problem', (instance.id,))) cache.delete(make_template_fragment_key('problem_list_group', (instance.group_id,))) @receiver(post_save, sender=Contest) def contest_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('contest_html', (instance.id,))) @receiver(post_save, sender=Submission) def submission_update(sender, instance, **kwargs): update_submission(instance.id) @receiver(post_save, sender=Organization) def organization_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('organization_html', (instance.id,)))
from django.core.cache.utils import make_template_fragment_key from django.db.models.signals import post_save from django.dispatch import receiver from django.core.cache import cache from .models import Problem, Contest, Submission, Organization, Profile from .caching import update_submission @receiver(post_save, sender=Problem) def problem_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('problem_html', (instance.id,))) cache.delete(make_template_fragment_key('submission_problem', (instance.id,))) cache.delete(make_template_fragment_key('problem_list_group', (instance.group_id,))) @receiver(post_save, sender=Profile) def problem_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('user_on_rank', (instance.id,))) cache.delete(make_template_fragment_key('submission_user', (instance.id,))) @receiver(post_save, sender=Contest) def contest_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('contest_html', (instance.id,))) @receiver(post_save, sender=Submission) def submission_update(sender, instance, **kwargs): update_submission(instance.id) @receiver(post_save, sender=Organization) def organization_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('organization_html', (instance.id,)))
Clear cache when user changes info.
Clear cache when user changes info.
Python
agpl-3.0
Minkov/site,monouno/site,DMOJ/site,DMOJ/site,Phoenix1369/site,DMOJ/site,Phoenix1369/site,monouno/site,monouno/site,Phoenix1369/site,Minkov/site,Minkov/site,Phoenix1369/site,Minkov/site,monouno/site,monouno/site,DMOJ/site
from django.core.cache.utils import make_template_fragment_key from django.db.models.signals import post_save from django.dispatch import receiver from django.core.cache import cache - from .models import Problem, Contest, Submission, Organization + from .models import Problem, Contest, Submission, Organization, Profile from .caching import update_submission @receiver(post_save, sender=Problem) def problem_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('problem_html', (instance.id,))) cache.delete(make_template_fragment_key('submission_problem', (instance.id,))) cache.delete(make_template_fragment_key('problem_list_group', (instance.group_id,))) + + + @receiver(post_save, sender=Profile) + def problem_update(sender, instance, **kwargs): + cache.delete(make_template_fragment_key('user_on_rank', (instance.id,))) + cache.delete(make_template_fragment_key('submission_user', (instance.id,))) @receiver(post_save, sender=Contest) def contest_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('contest_html', (instance.id,))) @receiver(post_save, sender=Submission) def submission_update(sender, instance, **kwargs): update_submission(instance.id) @receiver(post_save, sender=Organization) def organization_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('organization_html', (instance.id,)))
Clear cache when user changes info.
## Code Before: from django.core.cache.utils import make_template_fragment_key from django.db.models.signals import post_save from django.dispatch import receiver from django.core.cache import cache from .models import Problem, Contest, Submission, Organization from .caching import update_submission @receiver(post_save, sender=Problem) def problem_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('problem_html', (instance.id,))) cache.delete(make_template_fragment_key('submission_problem', (instance.id,))) cache.delete(make_template_fragment_key('problem_list_group', (instance.group_id,))) @receiver(post_save, sender=Contest) def contest_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('contest_html', (instance.id,))) @receiver(post_save, sender=Submission) def submission_update(sender, instance, **kwargs): update_submission(instance.id) @receiver(post_save, sender=Organization) def organization_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('organization_html', (instance.id,))) ## Instruction: Clear cache when user changes info. 
## Code After: from django.core.cache.utils import make_template_fragment_key from django.db.models.signals import post_save from django.dispatch import receiver from django.core.cache import cache from .models import Problem, Contest, Submission, Organization, Profile from .caching import update_submission @receiver(post_save, sender=Problem) def problem_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('problem_html', (instance.id,))) cache.delete(make_template_fragment_key('submission_problem', (instance.id,))) cache.delete(make_template_fragment_key('problem_list_group', (instance.group_id,))) @receiver(post_save, sender=Profile) def problem_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('user_on_rank', (instance.id,))) cache.delete(make_template_fragment_key('submission_user', (instance.id,))) @receiver(post_save, sender=Contest) def contest_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('contest_html', (instance.id,))) @receiver(post_save, sender=Submission) def submission_update(sender, instance, **kwargs): update_submission(instance.id) @receiver(post_save, sender=Organization) def organization_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('organization_html', (instance.id,)))
from django.core.cache.utils import make_template_fragment_key from django.db.models.signals import post_save from django.dispatch import receiver from django.core.cache import cache - from .models import Problem, Contest, Submission, Organization + from .models import Problem, Contest, Submission, Organization, Profile ? +++++++++ from .caching import update_submission @receiver(post_save, sender=Problem) def problem_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('problem_html', (instance.id,))) cache.delete(make_template_fragment_key('submission_problem', (instance.id,))) cache.delete(make_template_fragment_key('problem_list_group', (instance.group_id,))) + + + @receiver(post_save, sender=Profile) + def problem_update(sender, instance, **kwargs): + cache.delete(make_template_fragment_key('user_on_rank', (instance.id,))) + cache.delete(make_template_fragment_key('submission_user', (instance.id,))) @receiver(post_save, sender=Contest) def contest_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('contest_html', (instance.id,))) @receiver(post_save, sender=Submission) def submission_update(sender, instance, **kwargs): update_submission(instance.id) @receiver(post_save, sender=Organization) def organization_update(sender, instance, **kwargs): cache.delete(make_template_fragment_key('organization_html', (instance.id,)))
39bd25ffa9a90fb4dbbd63321eeee4acd84b8781
tests/test_movingfiles.py
tests/test_movingfiles.py
from functional_runner import run_tvnamer, verify_out_data def test_simple_realtive_move(): """ """ conf = """ {"move_files_enable": true, "move_files_desination": "test/", "batch": true} """ out_data = run_tvnamer( with_files = ['scrubs.s01e01.avi'], with_config = conf, with_input = "") expected_files = ['test/Scrubs - [01x01] - My First Day.avi'] verify_out_data(out_data, expected_files)
from functional_runner import run_tvnamer, verify_out_data def test_simple_realtive_move(): """Move file to simple relative static dir """ conf = """ {"move_files_enable": true, "move_files_destination": "test/", "batch": true} """ out_data = run_tvnamer( with_files = ['scrubs.s01e01.avi'], with_config = conf, with_input = "") expected_files = ['test/Scrubs - [01x01] - My First Day.avi'] verify_out_data(out_data, expected_files) def test_dynamic_destination(): """Move file to simple relative static dir """ conf = """ {"move_files_enable": true, "move_files_destination": "tv/%(seriesname)s/season %(seasonnumber)d/", "batch": true} """ out_data = run_tvnamer( with_files = ['scrubs.s01e01.avi'], with_config = conf, with_input = "") expected_files = ['tv/Scrubs/season 1/Scrubs - [01x01] - My First Day.avi'] verify_out_data(out_data, expected_files)
Add more complex move_file test
Add more complex move_file test
Python
unlicense
lahwaacz/tvnamer,m42e/tvnamer,dbr/tvnamer
from functional_runner import run_tvnamer, verify_out_data def test_simple_realtive_move(): - """ + """Move file to simple relative static dir """ conf = """ {"move_files_enable": true, - "move_files_desination": "test/", + "move_files_destination": "test/", "batch": true} """ out_data = run_tvnamer( with_files = ['scrubs.s01e01.avi'], with_config = conf, with_input = "") expected_files = ['test/Scrubs - [01x01] - My First Day.avi'] verify_out_data(out_data, expected_files) + def test_dynamic_destination(): + """Move file to simple relative static dir + """ + + conf = """ + {"move_files_enable": true, + "move_files_destination": "tv/%(seriesname)s/season %(seasonnumber)d/", + "batch": true} + """ + + out_data = run_tvnamer( + with_files = ['scrubs.s01e01.avi'], + with_config = conf, + with_input = "") + + expected_files = ['tv/Scrubs/season 1/Scrubs - [01x01] - My First Day.avi'] + + verify_out_data(out_data, expected_files) +
Add more complex move_file test
## Code Before: from functional_runner import run_tvnamer, verify_out_data def test_simple_realtive_move(): """ """ conf = """ {"move_files_enable": true, "move_files_desination": "test/", "batch": true} """ out_data = run_tvnamer( with_files = ['scrubs.s01e01.avi'], with_config = conf, with_input = "") expected_files = ['test/Scrubs - [01x01] - My First Day.avi'] verify_out_data(out_data, expected_files) ## Instruction: Add more complex move_file test ## Code After: from functional_runner import run_tvnamer, verify_out_data def test_simple_realtive_move(): """Move file to simple relative static dir """ conf = """ {"move_files_enable": true, "move_files_destination": "test/", "batch": true} """ out_data = run_tvnamer( with_files = ['scrubs.s01e01.avi'], with_config = conf, with_input = "") expected_files = ['test/Scrubs - [01x01] - My First Day.avi'] verify_out_data(out_data, expected_files) def test_dynamic_destination(): """Move file to simple relative static dir """ conf = """ {"move_files_enable": true, "move_files_destination": "tv/%(seriesname)s/season %(seasonnumber)d/", "batch": true} """ out_data = run_tvnamer( with_files = ['scrubs.s01e01.avi'], with_config = conf, with_input = "") expected_files = ['tv/Scrubs/season 1/Scrubs - [01x01] - My First Day.avi'] verify_out_data(out_data, expected_files)
from functional_runner import run_tvnamer, verify_out_data def test_simple_realtive_move(): - """ + """Move file to simple relative static dir """ conf = """ {"move_files_enable": true, - "move_files_desination": "test/", + "move_files_destination": "test/", ? + "batch": true} """ out_data = run_tvnamer( with_files = ['scrubs.s01e01.avi'], with_config = conf, with_input = "") expected_files = ['test/Scrubs - [01x01] - My First Day.avi'] verify_out_data(out_data, expected_files) + + def test_dynamic_destination(): + """Move file to simple relative static dir + """ + + conf = """ + {"move_files_enable": true, + "move_files_destination": "tv/%(seriesname)s/season %(seasonnumber)d/", + "batch": true} + """ + + out_data = run_tvnamer( + with_files = ['scrubs.s01e01.avi'], + with_config = conf, + with_input = "") + + expected_files = ['tv/Scrubs/season 1/Scrubs - [01x01] - My First Day.avi'] + + verify_out_data(out_data, expected_files)
56446567f764625e88d8efdbfa2849e0a579d5c4
indra/tests/test_rest_api.py
indra/tests/test_rest_api.py
import requests from nose.plugins.attrib import attr @attr('webservice') def test_rest_api_responsive(): stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}' url = 'http://ec2-54-88-146-250.compute-1.amazonaws.com:8080/' + \ 'assemblers/cyjs' res = requests.post(url, stmt_str) assert res.status_code == 200
import requests from nose.plugins.attrib import attr @attr('webservice') def test_rest_api_responsive(): stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}' url = 'http://indra-api-72031e2dfde08e09.elb.us-east-1.amazonaws.com:8000/' + \ 'assemblers/cyjs' res = requests.post(url, stmt_str) assert res.status_code == 200
Update REST API address in test
Update REST API address in test
Python
bsd-2-clause
sorgerlab/belpy,sorgerlab/indra,sorgerlab/belpy,bgyori/indra,pvtodorov/indra,bgyori/indra,sorgerlab/indra,johnbachman/belpy,pvtodorov/indra,pvtodorov/indra,sorgerlab/indra,pvtodorov/indra,johnbachman/belpy,bgyori/indra,sorgerlab/belpy,johnbachman/belpy,johnbachman/indra,johnbachman/indra,johnbachman/indra
import requests from nose.plugins.attrib import attr @attr('webservice') def test_rest_api_responsive(): stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}' - url = 'http://ec2-54-88-146-250.compute-1.amazonaws.com:8080/' + \ + url = 'http://indra-api-72031e2dfde08e09.elb.us-east-1.amazonaws.com:8000/' + \ 'assemblers/cyjs' res = requests.post(url, stmt_str) assert res.status_code == 200
Update REST API address in test
## Code Before: import requests from nose.plugins.attrib import attr @attr('webservice') def test_rest_api_responsive(): stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}' url = 'http://ec2-54-88-146-250.compute-1.amazonaws.com:8080/' + \ 'assemblers/cyjs' res = requests.post(url, stmt_str) assert res.status_code == 200 ## Instruction: Update REST API address in test ## Code After: import requests from nose.plugins.attrib import attr @attr('webservice') def test_rest_api_responsive(): stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}' url = 'http://indra-api-72031e2dfde08e09.elb.us-east-1.amazonaws.com:8000/' + \ 'assemblers/cyjs' res = requests.post(url, stmt_str) assert res.status_code == 200
import requests from nose.plugins.attrib import attr @attr('webservice') def test_rest_api_responsive(): stmt_str = '{"statements": [{"sbo": "http://identifiers.org/sbo/SBO:0000526", "type": "Complex", "id": "acc6d47c-f622-41a4-8ae9-d7b0f3d24a2f", "members": [{"db_refs": {"TEXT": "MEK", "FPLX": "MEK"}, "name": "MEK"}, {"db_refs": {"TEXT": "ERK", "NCIT": "C26360", "FPLX": "ERK"}, "name": "ERK"}], "evidence": [{"text": "MEK binds ERK", "source_api": "trips"}]}]}' - url = 'http://ec2-54-88-146-250.compute-1.amazonaws.com:8080/' + \ + url = 'http://indra-api-72031e2dfde08e09.elb.us-east-1.amazonaws.com:8000/' + \ 'assemblers/cyjs' res = requests.post(url, stmt_str) assert res.status_code == 200
d407f1bcd95daf4f4bd8dfe8ae3b4b9e68061cb5
cref/sequence/fragment.py
cref/sequence/fragment.py
def fragment(sequence, size=5): """ Fragment a string sequence using a sliding window given by size :param sequence: String containing the sequence :param size: Size of the window :return: a fragment of the sequence with the given size """ for i in range(len(sequence) - size + 1): yield sequence[i: i + size]
def fragment(sequence, size=5): """ Fragment a string sequence using a sliding window given by size :param sequence: String containing the sequence :param size: Size of the window :return: a fragment of the sequence with the given size """ if size > 0: for i in range(len(sequence) - size + 1): yield sequence[i: i + size]
Handle sliding window with size 0
Handle sliding window with size 0
Python
mit
mchelem/cref2,mchelem/cref2,mchelem/cref2
def fragment(sequence, size=5): """ Fragment a string sequence using a sliding window given by size :param sequence: String containing the sequence :param size: Size of the window :return: a fragment of the sequence with the given size """ + if size > 0: - for i in range(len(sequence) - size + 1): + for i in range(len(sequence) - size + 1): - yield sequence[i: i + size] + yield sequence[i: i + size] -
Handle sliding window with size 0
## Code Before: def fragment(sequence, size=5): """ Fragment a string sequence using a sliding window given by size :param sequence: String containing the sequence :param size: Size of the window :return: a fragment of the sequence with the given size """ for i in range(len(sequence) - size + 1): yield sequence[i: i + size] ## Instruction: Handle sliding window with size 0 ## Code After: def fragment(sequence, size=5): """ Fragment a string sequence using a sliding window given by size :param sequence: String containing the sequence :param size: Size of the window :return: a fragment of the sequence with the given size """ if size > 0: for i in range(len(sequence) - size + 1): yield sequence[i: i + size]
def fragment(sequence, size=5): """ Fragment a string sequence using a sliding window given by size :param sequence: String containing the sequence :param size: Size of the window :return: a fragment of the sequence with the given size """ + if size > 0: - for i in range(len(sequence) - size + 1): + for i in range(len(sequence) - size + 1): ? ++++ - yield sequence[i: i + size] + yield sequence[i: i + size] ? ++++ -
c6d949cbb32e095e5859aa22d11aa1566f5bc63f
website/util/mimetype.py
website/util/mimetype.py
import os import mimetypes HERE = os.path.dirname(os.path.abspath(__file__)) MIMEMAP = os.path.join(HERE, 'mime.types') def get_mimetype(path, data=None): mimetypes.init([MIMEMAP]) mimetype, _ = mimetypes.guess_type(path) if mimetype is None and data is not None: try: import magic mimetype = magic.from_buffer(data, mime=True) except ImportError: return mimetype return mimetype
import os import mimetypes HERE = os.path.dirname(os.path.abspath(__file__)) MIMEMAP = os.path.join(HERE, 'mime.types') def get_mimetype(path, file_contents=None): mimetypes.init([MIMEMAP]) mimetype, _ = mimetypes.guess_type(path) if mimetype is None and file_contents is not None: try: import magic mimetype = magic.from_buffer(file_contents, mime=True) except ImportError: return mimetype return mimetype
Make better name for argument.
Make better name for argument.
Python
apache-2.0
mfraezz/osf.io,saradbowman/osf.io,danielneis/osf.io,reinaH/osf.io,amyshi188/osf.io,GageGaskins/osf.io,wearpants/osf.io,KAsante95/osf.io,billyhunt/osf.io,petermalcolm/osf.io,danielneis/osf.io,cldershem/osf.io,samanehsan/osf.io,abought/osf.io,GaryKriebel/osf.io,CenterForOpenScience/osf.io,erinspace/osf.io,dplorimer/osf,adlius/osf.io,brandonPurvis/osf.io,himanshuo/osf.io,alexschiller/osf.io,erinspace/osf.io,baylee-d/osf.io,lyndsysimon/osf.io,RomanZWang/osf.io,billyhunt/osf.io,caseyrygt/osf.io,wearpants/osf.io,zkraime/osf.io,acshi/osf.io,fabianvf/osf.io,haoyuchen1992/osf.io,acshi/osf.io,icereval/osf.io,samchrisinger/osf.io,mluo613/osf.io,TomHeatwole/osf.io,jnayak1/osf.io,wearpants/osf.io,doublebits/osf.io,HarryRybacki/osf.io,jolene-esposito/osf.io,jeffreyliu3230/osf.io,mluo613/osf.io,mfraezz/osf.io,revanthkolli/osf.io,petermalcolm/osf.io,cldershem/osf.io,jnayak1/osf.io,emetsger/osf.io,sbt9uc/osf.io,Johnetordoff/osf.io,caneruguz/osf.io,acshi/osf.io,zachjanicki/osf.io,emetsger/osf.io,lyndsysimon/osf.io,dplorimer/osf,binoculars/osf.io,felliott/osf.io,lamdnhan/osf.io,jnayak1/osf.io,SSJohns/osf.io,saradbowman/osf.io,mluke93/osf.io,aaxelb/osf.io,kch8qx/osf.io,arpitar/osf.io,monikagrabowska/osf.io,Ghalko/osf.io,Nesiehr/osf.io,cwisecarver/osf.io,rdhyee/osf.io,cosenal/osf.io,barbour-em/osf.io,haoyuchen1992/osf.io,mluke93/osf.io,felliott/osf.io,hmoco/osf.io,jnayak1/osf.io,himanshuo/osf.io,HalcyonChimera/osf.io,ZobairAlijan/osf.io,sloria/osf.io,TomHeatwole/osf.io,zamattiac/osf.io,laurenrevere/osf.io,caneruguz/osf.io,danielneis/osf.io,billyhunt/osf.io,arpitar/osf.io,samchrisinger/osf.io,mfraezz/osf.io,adlius/osf.io,ZobairAlijan/osf.io,GaryKriebel/osf.io,njantrania/osf.io,DanielSBrown/osf.io,monikagrabowska/osf.io,Ghalko/osf.io,arpitar/osf.io,ckc6cz/osf.io,caseyrygt/osf.io,jinluyuan/osf.io,rdhyee/osf.io,pattisdr/osf.io,jmcarp/osf.io,reinaH/osf.io,DanielSBrown/osf.io,jolene-esposito/osf.io,samanehsan/osf.io,GageGaskins/osf.io,samanehsan/osf.io,brianjgeiger/osf.io,brianjgeiger/osf.io,
samchrisinger/osf.io,caseyrollins/osf.io,jeffreyliu3230/osf.io,crcresearch/osf.io,RomanZWang/osf.io,TomHeatwole/osf.io,ticklemepierce/osf.io,petermalcolm/osf.io,amyshi188/osf.io,GageGaskins/osf.io,chrisseto/osf.io,lamdnhan/osf.io,RomanZWang/osf.io,emetsger/osf.io,adlius/osf.io,pattisdr/osf.io,aaxelb/osf.io,fabianvf/osf.io,asanfilippo7/osf.io,abought/osf.io,samchrisinger/osf.io,njantrania/osf.io,binoculars/osf.io,jeffreyliu3230/osf.io,mattclark/osf.io,cslzchen/osf.io,zkraime/osf.io,Nesiehr/osf.io,reinaH/osf.io,Nesiehr/osf.io,wearpants/osf.io,GageGaskins/osf.io,caseyrollins/osf.io,himanshuo/osf.io,jolene-esposito/osf.io,icereval/osf.io,kwierman/osf.io,binoculars/osf.io,Ghalko/osf.io,RomanZWang/osf.io,barbour-em/osf.io,KAsante95/osf.io,cwisecarver/osf.io,chennan47/osf.io,ZobairAlijan/osf.io,monikagrabowska/osf.io,reinaH/osf.io,leb2dg/osf.io,samanehsan/osf.io,jmcarp/osf.io,felliott/osf.io,zamattiac/osf.io,HarryRybacki/osf.io,kwierman/osf.io,caseyrollins/osf.io,sbt9uc/osf.io,amyshi188/osf.io,arpitar/osf.io,Nesiehr/osf.io,himanshuo/osf.io,leb2dg/osf.io,lyndsysimon/osf.io,zachjanicki/osf.io,brianjgeiger/osf.io,alexschiller/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,haoyuchen1992/osf.io,AndrewSallans/osf.io,kwierman/osf.io,mfraezz/osf.io,cosenal/osf.io,mattclark/osf.io,AndrewSallans/osf.io,acshi/osf.io,zkraime/osf.io,hmoco/osf.io,TomBaxter/osf.io,zachjanicki/osf.io,fabianvf/osf.io,mattclark/osf.io,jeffreyliu3230/osf.io,mluo613/osf.io,erinspace/osf.io,SSJohns/osf.io,alexschiller/osf.io,abought/osf.io,cwisecarver/osf.io,hmoco/osf.io,bdyetton/prettychart,chennan47/osf.io,kch8qx/osf.io,felliott/osf.io,sloria/osf.io,aaxelb/osf.io,cosenal/osf.io,kushG/osf.io,chennan47/osf.io,cslzchen/osf.io,jmcarp/osf.io,sloria/osf.io,doublebits/osf.io,rdhyee/osf.io,leb2dg/osf.io,CenterForOpenScience/osf.io,KAsante95/osf.io,DanielSBrown/osf.io,baylee-d/osf.io,chrisseto/osf.io,barbour-em/osf.io,sbt9uc/osf.io,njantrania/osf.io,kch8qx/osf.io,Ghalko/osf.io,monikagrabowska/osf.io,GaryKriebel/osf.io,r
evanthkolli/osf.io,SSJohns/osf.io,leb2dg/osf.io,jinluyuan/osf.io,brandonPurvis/osf.io,revanthkolli/osf.io,Johnetordoff/osf.io,ZobairAlijan/osf.io,cosenal/osf.io,lamdnhan/osf.io,icereval/osf.io,jinluyuan/osf.io,crcresearch/osf.io,njantrania/osf.io,MerlinZhang/osf.io,jinluyuan/osf.io,zamattiac/osf.io,caseyrygt/osf.io,bdyetton/prettychart,adlius/osf.io,laurenrevere/osf.io,billyhunt/osf.io,KAsante95/osf.io,kch8qx/osf.io,TomBaxter/osf.io,mluo613/osf.io,kushG/osf.io,brandonPurvis/osf.io,kushG/osf.io,cldershem/osf.io,rdhyee/osf.io,mluo613/osf.io,jolene-esposito/osf.io,doublebits/osf.io,kwierman/osf.io,monikagrabowska/osf.io,barbour-em/osf.io,abought/osf.io,billyhunt/osf.io,asanfilippo7/osf.io,alexschiller/osf.io,revanthkolli/osf.io,cwisecarver/osf.io,DanielSBrown/osf.io,petermalcolm/osf.io,caseyrygt/osf.io,GaryKriebel/osf.io,aaxelb/osf.io,crcresearch/osf.io,fabianvf/osf.io,ckc6cz/osf.io,jmcarp/osf.io,lamdnhan/osf.io,MerlinZhang/osf.io,pattisdr/osf.io,doublebits/osf.io,hmoco/osf.io,brianjgeiger/osf.io,MerlinZhang/osf.io,dplorimer/osf,amyshi188/osf.io,mluke93/osf.io,zkraime/osf.io,sbt9uc/osf.io,cldershem/osf.io,HalcyonChimera/osf.io,danielneis/osf.io,laurenrevere/osf.io,baylee-d/osf.io,ticklemepierce/osf.io,kushG/osf.io,HarryRybacki/osf.io,haoyuchen1992/osf.io,kch8qx/osf.io,doublebits/osf.io,CenterForOpenScience/osf.io,bdyetton/prettychart,dplorimer/osf,HalcyonChimera/osf.io,brandonPurvis/osf.io,caneruguz/osf.io,SSJohns/osf.io,lyndsysimon/osf.io,HarryRybacki/osf.io,MerlinZhang/osf.io,caneruguz/osf.io,TomHeatwole/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,ckc6cz/osf.io,GageGaskins/osf.io,Johnetordoff/osf.io,asanfilippo7/osf.io,alexschiller/osf.io,asanfilippo7/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,brandonPurvis/osf.io,ckc6cz/osf.io,ticklemepierce/osf.io,bdyetton/prettychart,ticklemepierce/osf.io,zamattiac/osf.io,RomanZWang/osf.io,TomBaxter/osf.io,KAsante95/osf.io,cslzchen/osf.io,mluke93/osf.io,zachjanicki/osf.io,acshi/osf.io,cslzchen/osf.io
import os import mimetypes HERE = os.path.dirname(os.path.abspath(__file__)) MIMEMAP = os.path.join(HERE, 'mime.types') - def get_mimetype(path, data=None): + def get_mimetype(path, file_contents=None): mimetypes.init([MIMEMAP]) mimetype, _ = mimetypes.guess_type(path) - if mimetype is None and data is not None: + if mimetype is None and file_contents is not None: try: import magic - mimetype = magic.from_buffer(data, mime=True) + mimetype = magic.from_buffer(file_contents, mime=True) except ImportError: return mimetype return mimetype
Make better name for argument.
## Code Before: import os import mimetypes HERE = os.path.dirname(os.path.abspath(__file__)) MIMEMAP = os.path.join(HERE, 'mime.types') def get_mimetype(path, data=None): mimetypes.init([MIMEMAP]) mimetype, _ = mimetypes.guess_type(path) if mimetype is None and data is not None: try: import magic mimetype = magic.from_buffer(data, mime=True) except ImportError: return mimetype return mimetype ## Instruction: Make better name for argument. ## Code After: import os import mimetypes HERE = os.path.dirname(os.path.abspath(__file__)) MIMEMAP = os.path.join(HERE, 'mime.types') def get_mimetype(path, file_contents=None): mimetypes.init([MIMEMAP]) mimetype, _ = mimetypes.guess_type(path) if mimetype is None and file_contents is not None: try: import magic mimetype = magic.from_buffer(file_contents, mime=True) except ImportError: return mimetype return mimetype
import os import mimetypes HERE = os.path.dirname(os.path.abspath(__file__)) MIMEMAP = os.path.join(HERE, 'mime.types') - def get_mimetype(path, data=None): ? ^^ ^ + def get_mimetype(path, file_contents=None): ? ^^^^^^^^ ^^^^ mimetypes.init([MIMEMAP]) mimetype, _ = mimetypes.guess_type(path) - if mimetype is None and data is not None: ? ^^ ^ + if mimetype is None and file_contents is not None: ? ^^^^^^^^ ^^^^ try: import magic - mimetype = magic.from_buffer(data, mime=True) ? ^^ ^ + mimetype = magic.from_buffer(file_contents, mime=True) ? ^^^^^^^^ ^^^^ except ImportError: return mimetype return mimetype
bd193b0fdb7fec412aed24ad8f4c6353372d634f
polling_stations/apps/data_collection/management/commands/import_westberks.py
polling_stations/apps/data_collection/management/commands/import_westberks.py
from data_collection.management.commands import BaseShpImporter, import_polling_station_shapefiles class Command(BaseShpImporter): """ Imports the Polling Station data from Wokingham Council """ council_id = 'E06000037' districts_name = 'polling_districts' stations_name = 'polling_places.shp' def district_record_to_dict(self, record): return { 'internal_council_id': record[0], 'name': record[2], } def station_record_to_dict(self, record): return { 'internal_council_id': record[4], 'postcode' : record[5].split(',')[-1], 'address' : "\n".join(record[5].split(',')[:-1]), } def import_polling_stations(self): import_polling_station_shapefiles(self)
from data_collection.management.commands import BaseShpShpImporter class Command(BaseShpShpImporter): """ Imports the Polling Station data from Wokingham Council """ council_id = 'E06000037' districts_name = 'polling_districts' stations_name = 'polling_places.shp' def district_record_to_dict(self, record): return { 'internal_council_id': record[0], 'name': record[2], } def station_record_to_dict(self, record): return { 'internal_council_id': record[4], 'postcode' : record[5].split(',')[-1], 'address' : "\n".join(record[5].split(',')[:-1]), }
Refactor West Berks to use new BaseShpShpImporter
Refactor West Berks to use new BaseShpShpImporter
Python
bsd-3-clause
chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,andylolz/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations
- from data_collection.management.commands import BaseShpImporter, import_polling_station_shapefiles + from data_collection.management.commands import BaseShpShpImporter - class Command(BaseShpImporter): + class Command(BaseShpShpImporter): """ Imports the Polling Station data from Wokingham Council """ council_id = 'E06000037' districts_name = 'polling_districts' stations_name = 'polling_places.shp' - + - def district_record_to_dict(self, record): + def district_record_to_dict(self, record): return { 'internal_council_id': record[0], 'name': record[2], } - def station_record_to_dict(self, record): return { 'internal_council_id': record[4], 'postcode' : record[5].split(',')[-1], 'address' : "\n".join(record[5].split(',')[:-1]), } - - def import_polling_stations(self): - import_polling_station_shapefiles(self)
Refactor West Berks to use new BaseShpShpImporter
## Code Before: from data_collection.management.commands import BaseShpImporter, import_polling_station_shapefiles class Command(BaseShpImporter): """ Imports the Polling Station data from Wokingham Council """ council_id = 'E06000037' districts_name = 'polling_districts' stations_name = 'polling_places.shp' def district_record_to_dict(self, record): return { 'internal_council_id': record[0], 'name': record[2], } def station_record_to_dict(self, record): return { 'internal_council_id': record[4], 'postcode' : record[5].split(',')[-1], 'address' : "\n".join(record[5].split(',')[:-1]), } def import_polling_stations(self): import_polling_station_shapefiles(self) ## Instruction: Refactor West Berks to use new BaseShpShpImporter ## Code After: from data_collection.management.commands import BaseShpShpImporter class Command(BaseShpShpImporter): """ Imports the Polling Station data from Wokingham Council """ council_id = 'E06000037' districts_name = 'polling_districts' stations_name = 'polling_places.shp' def district_record_to_dict(self, record): return { 'internal_council_id': record[0], 'name': record[2], } def station_record_to_dict(self, record): return { 'internal_council_id': record[4], 'postcode' : record[5].split(',')[-1], 'address' : "\n".join(record[5].split(',')[:-1]), }
- from data_collection.management.commands import BaseShpImporter, import_polling_station_shapefiles ? ----------------------------------- + from data_collection.management.commands import BaseShpShpImporter ? +++ - class Command(BaseShpImporter): + class Command(BaseShpShpImporter): ? +++ """ Imports the Polling Station data from Wokingham Council """ council_id = 'E06000037' districts_name = 'polling_districts' stations_name = 'polling_places.shp' - + - def district_record_to_dict(self, record): + def district_record_to_dict(self, record): ? + return { 'internal_council_id': record[0], 'name': record[2], } - def station_record_to_dict(self, record): return { 'internal_council_id': record[4], 'postcode' : record[5].split(',')[-1], 'address' : "\n".join(record[5].split(',')[:-1]), } - - def import_polling_stations(self): - import_polling_station_shapefiles(self)
ee54f0ca3317b6f2119f15d42a7dd8d42d4f8059
standup/test_settings.py
standup/test_settings.py
from standup.settings import * DATABASE_URL = 'sqlite://'
from standup.settings import * # This looks wrong, but actually, it's an in-memory db uri # and it causes our tests to run super fast! DATABASE_URL = 'sqlite://'
Add comment of vital importance
Add comment of vital importance This bumps me up another shade of green! Yay!
Python
bsd-3-clause
safwanrahman/standup,rehandalal/standup,willkg/standup,rlr/standup,rehandalal/standup,rlr/standup,mozilla/standup,rlr/standup,safwanrahman/standup,mozilla/standup,willkg/standup,willkg/standup,safwanrahman/standup,willkg/standup,safwanrahman/standup,mozilla/standup,rehandalal/standup,mozilla/standup
from standup.settings import * + # This looks wrong, but actually, it's an in-memory db uri + # and it causes our tests to run super fast! DATABASE_URL = 'sqlite://'
Add comment of vital importance
## Code Before: from standup.settings import * DATABASE_URL = 'sqlite://' ## Instruction: Add comment of vital importance ## Code After: from standup.settings import * # This looks wrong, but actually, it's an in-memory db uri # and it causes our tests to run super fast! DATABASE_URL = 'sqlite://'
from standup.settings import * + # This looks wrong, but actually, it's an in-memory db uri + # and it causes our tests to run super fast! DATABASE_URL = 'sqlite://'
dda9904a756e309047bebcbfecd2120383a257cc
django_countries/settings.py
django_countries/settings.py
from django.conf import settings def _build_flag_url(): if hasattr(settings, 'COUNTRIES_FLAG_URL'): url = settings.COUNTRIES_FLAG_URL else: url = 'flags/%(code)s.gif' prefix = getattr(settings, 'STATIC_URL', '') or settings.MEDIA_URL if not prefix.endswith('/'): prefix = '%s/' % prefix return '%s%s' % (prefix, url) FLAG_URL = _build_flag_url()
from django.conf import settings def _build_flag_url(): if hasattr(settings, 'COUNTRIES_FLAG_URL'): url = settings.COUNTRIES_FLAG_URL else: url = 'flags/%(code)s.gif' prefix = getattr(settings, 'STATIC_URL', '') or \ getattr(settings, 'STATICFILES_URL', '') or \ settings.MEDIA_URL if not prefix.endswith('/'): prefix = '%s/' % prefix return '%s%s' % (prefix, url) FLAG_URL = _build_flag_url()
Add django 1.3 staticfiles compatibility
Add django 1.3 staticfiles compatibility
Python
mit
degenhard/django-countries
from django.conf import settings def _build_flag_url(): if hasattr(settings, 'COUNTRIES_FLAG_URL'): url = settings.COUNTRIES_FLAG_URL else: url = 'flags/%(code)s.gif' + - prefix = getattr(settings, 'STATIC_URL', '') or settings.MEDIA_URL + prefix = getattr(settings, 'STATIC_URL', '') or \ + getattr(settings, 'STATICFILES_URL', '') or \ + settings.MEDIA_URL + if not prefix.endswith('/'): prefix = '%s/' % prefix return '%s%s' % (prefix, url) FLAG_URL = _build_flag_url()
Add django 1.3 staticfiles compatibility
## Code Before: from django.conf import settings def _build_flag_url(): if hasattr(settings, 'COUNTRIES_FLAG_URL'): url = settings.COUNTRIES_FLAG_URL else: url = 'flags/%(code)s.gif' prefix = getattr(settings, 'STATIC_URL', '') or settings.MEDIA_URL if not prefix.endswith('/'): prefix = '%s/' % prefix return '%s%s' % (prefix, url) FLAG_URL = _build_flag_url() ## Instruction: Add django 1.3 staticfiles compatibility ## Code After: from django.conf import settings def _build_flag_url(): if hasattr(settings, 'COUNTRIES_FLAG_URL'): url = settings.COUNTRIES_FLAG_URL else: url = 'flags/%(code)s.gif' prefix = getattr(settings, 'STATIC_URL', '') or \ getattr(settings, 'STATICFILES_URL', '') or \ settings.MEDIA_URL if not prefix.endswith('/'): prefix = '%s/' % prefix return '%s%s' % (prefix, url) FLAG_URL = _build_flag_url()
from django.conf import settings def _build_flag_url(): if hasattr(settings, 'COUNTRIES_FLAG_URL'): url = settings.COUNTRIES_FLAG_URL else: url = 'flags/%(code)s.gif' + - prefix = getattr(settings, 'STATIC_URL', '') or settings.MEDIA_URL ? ^^^^^^^^^^^^^^^^^^ + prefix = getattr(settings, 'STATIC_URL', '') or \ ? ^ + getattr(settings, 'STATICFILES_URL', '') or \ + settings.MEDIA_URL + if not prefix.endswith('/'): prefix = '%s/' % prefix return '%s%s' % (prefix, url) FLAG_URL = _build_flag_url()
25325ee55852eb65e58c13c46660701b1cdd803f
music/migrations/0020_auto_20151028_0925.py
music/migrations/0020_auto_20151028_0925.py
from __future__ import unicode_literals from django.db import models, migrations def set_total_duration_as_duration(apps, schema_editor): Music = apps.get_model("music", "Music") for music in Music.objects.all(): music.total_duration = music.duration music.save() class Migration(migrations.Migration): dependencies = [ ('music', '0019_auto_20151006_1416'), ] operations = [ migrations.AddField( model_name='music', name='total_duration', field=models.PositiveIntegerField(editable=False, null=True), preserve_default=False, ), migrations.RunPython(set_total_duration_as_duration), migrations.AlterField( model_name='music', name='total_duration', field=models.PositiveIntegerField(editable=False), ), migrations.AlterField( model_name='music', name='duration', field=models.PositiveIntegerField(null=True), preserve_default=True, ), ]
from __future__ import unicode_literals from django.db import models, migrations def set_total_duration_as_duration(apps, schema_editor): Music = apps.get_model("music", "Music") for music in Music.objects.all(): music.total_duration = music.duration music.save() class Migration(migrations.Migration): dependencies = [ ('music', '0019_auto_20151006_1416'), ] operations = [ migrations.AddField( model_name='music', name='total_duration', field=models.PositiveIntegerField(editable=False, null=True), preserve_default=False, ), migrations.RunPython(set_total_duration_as_duration), migrations.AlterField( model_name='music', name='total_duration', field=models.PositiveIntegerField(editable=False), ), migrations.AlterField( model_name='music', name='duration', field=models.PositiveIntegerField(null=True), preserve_default=True, ), migrations.RemoveField( model_name='music', name='timer_end', ), ]
Delete timer_end in same migration as total_duration
Delete timer_end in same migration as total_duration
Python
mit
Amoki/Amoki-Music,Amoki/Amoki-Music,Amoki/Amoki-Music
from __future__ import unicode_literals from django.db import models, migrations def set_total_duration_as_duration(apps, schema_editor): Music = apps.get_model("music", "Music") for music in Music.objects.all(): music.total_duration = music.duration music.save() class Migration(migrations.Migration): dependencies = [ ('music', '0019_auto_20151006_1416'), ] operations = [ migrations.AddField( model_name='music', name='total_duration', field=models.PositiveIntegerField(editable=False, null=True), preserve_default=False, ), migrations.RunPython(set_total_duration_as_duration), migrations.AlterField( model_name='music', name='total_duration', field=models.PositiveIntegerField(editable=False), ), migrations.AlterField( model_name='music', name='duration', field=models.PositiveIntegerField(null=True), preserve_default=True, ), + migrations.RemoveField( + model_name='music', + name='timer_end', + ), ]
Delete timer_end in same migration as total_duration
## Code Before: from __future__ import unicode_literals from django.db import models, migrations def set_total_duration_as_duration(apps, schema_editor): Music = apps.get_model("music", "Music") for music in Music.objects.all(): music.total_duration = music.duration music.save() class Migration(migrations.Migration): dependencies = [ ('music', '0019_auto_20151006_1416'), ] operations = [ migrations.AddField( model_name='music', name='total_duration', field=models.PositiveIntegerField(editable=False, null=True), preserve_default=False, ), migrations.RunPython(set_total_duration_as_duration), migrations.AlterField( model_name='music', name='total_duration', field=models.PositiveIntegerField(editable=False), ), migrations.AlterField( model_name='music', name='duration', field=models.PositiveIntegerField(null=True), preserve_default=True, ), ] ## Instruction: Delete timer_end in same migration as total_duration ## Code After: from __future__ import unicode_literals from django.db import models, migrations def set_total_duration_as_duration(apps, schema_editor): Music = apps.get_model("music", "Music") for music in Music.objects.all(): music.total_duration = music.duration music.save() class Migration(migrations.Migration): dependencies = [ ('music', '0019_auto_20151006_1416'), ] operations = [ migrations.AddField( model_name='music', name='total_duration', field=models.PositiveIntegerField(editable=False, null=True), preserve_default=False, ), migrations.RunPython(set_total_duration_as_duration), migrations.AlterField( model_name='music', name='total_duration', field=models.PositiveIntegerField(editable=False), ), migrations.AlterField( model_name='music', name='duration', field=models.PositiveIntegerField(null=True), preserve_default=True, ), migrations.RemoveField( model_name='music', name='timer_end', ), ]
from __future__ import unicode_literals from django.db import models, migrations def set_total_duration_as_duration(apps, schema_editor): Music = apps.get_model("music", "Music") for music in Music.objects.all(): music.total_duration = music.duration music.save() class Migration(migrations.Migration): dependencies = [ ('music', '0019_auto_20151006_1416'), ] operations = [ migrations.AddField( model_name='music', name='total_duration', field=models.PositiveIntegerField(editable=False, null=True), preserve_default=False, ), migrations.RunPython(set_total_duration_as_duration), migrations.AlterField( model_name='music', name='total_duration', field=models.PositiveIntegerField(editable=False), ), migrations.AlterField( model_name='music', name='duration', field=models.PositiveIntegerField(null=True), preserve_default=True, ), + migrations.RemoveField( + model_name='music', + name='timer_end', + ), ]
973ff308f16fe033b5da60a28cb0d6448062a8f9
examples/basic_datalogger.py
examples/basic_datalogger.py
from pymoku import Moku from pymoku.instruments import * import time, logging logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s') logging.getLogger('pymoku').setLevel(logging.INFO) # Use Moku.get_by_serial() or get_by_name() if you don't know the IP m = Moku.get_by_name('example') i = Oscilloscope() m.attach_instrument(i) try: i.set_samplerate(10) i.set_xmode(OSC_ROLL) i.commit() i.datalogger_stop() i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin') while True: time.sleep(1) trems, treme = i.datalogger_remaining() samples = i.datalogger_samples() print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)) if i.datalogger_completed(): break e = i.datalogger_error() if e: print("Error occured: %s" % e) i.datalogger_stop() i.datalogger_upload() except Exception as e: print(e) finally: m.close()
from pymoku import Moku from pymoku.instruments import * import time, logging logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s') logging.getLogger('pymoku').setLevel(logging.INFO) # Use Moku.get_by_serial() or get_by_name() if you don't know the IP m = Moku('192.168.69.122')#.get_by_name('example') i = Oscilloscope() m.attach_instrument(i) try: i.set_samplerate(10) i.set_xmode(OSC_ROLL) i.commit() i.datalogger_stop() i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin') while True: time.sleep(1) trems, treme = i.datalogger_remaining() samples = i.datalogger_samples() print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)) if i.datalogger_completed(): break e = i.datalogger_error() if e: print("Error occured: %s" % e) i.datalogger_stop() i.datalogger_upload() except Exception as e: print(e) finally: m.close()
Make sure the schema gets pulled in as a module
HG-1494: Make sure the schema gets pulled in as a module
Python
mit
liquidinstruments/pymoku
from pymoku import Moku from pymoku.instruments import * import time, logging logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s') logging.getLogger('pymoku').setLevel(logging.INFO) # Use Moku.get_by_serial() or get_by_name() if you don't know the IP - m = Moku.get_by_name('example') + m = Moku('192.168.69.122')#.get_by_name('example') i = Oscilloscope() m.attach_instrument(i) try: i.set_samplerate(10) i.set_xmode(OSC_ROLL) i.commit() i.datalogger_stop() i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin') while True: time.sleep(1) trems, treme = i.datalogger_remaining() samples = i.datalogger_samples() print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)) if i.datalogger_completed(): break e = i.datalogger_error() if e: print("Error occured: %s" % e) i.datalogger_stop() i.datalogger_upload() except Exception as e: print(e) finally: m.close()
Make sure the schema gets pulled in as a module
## Code Before: from pymoku import Moku from pymoku.instruments import * import time, logging logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s') logging.getLogger('pymoku').setLevel(logging.INFO) # Use Moku.get_by_serial() or get_by_name() if you don't know the IP m = Moku.get_by_name('example') i = Oscilloscope() m.attach_instrument(i) try: i.set_samplerate(10) i.set_xmode(OSC_ROLL) i.commit() i.datalogger_stop() i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin') while True: time.sleep(1) trems, treme = i.datalogger_remaining() samples = i.datalogger_samples() print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)) if i.datalogger_completed(): break e = i.datalogger_error() if e: print("Error occured: %s" % e) i.datalogger_stop() i.datalogger_upload() except Exception as e: print(e) finally: m.close() ## Instruction: Make sure the schema gets pulled in as a module ## Code After: from pymoku import Moku from pymoku.instruments import * import time, logging logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s') logging.getLogger('pymoku').setLevel(logging.INFO) # Use Moku.get_by_serial() or get_by_name() if you don't know the IP m = Moku('192.168.69.122')#.get_by_name('example') i = Oscilloscope() m.attach_instrument(i) try: i.set_samplerate(10) i.set_xmode(OSC_ROLL) i.commit() i.datalogger_stop() i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin') while True: time.sleep(1) trems, treme = i.datalogger_remaining() samples = i.datalogger_samples() print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)) if i.datalogger_completed(): break e = i.datalogger_error() if e: print("Error occured: %s" % e) i.datalogger_stop() i.datalogger_upload() except Exception as e: print(e) finally: m.close()
from pymoku import Moku from pymoku.instruments import * import time, logging logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s') logging.getLogger('pymoku').setLevel(logging.INFO) # Use Moku.get_by_serial() or get_by_name() if you don't know the IP - m = Moku.get_by_name('example') + m = Moku('192.168.69.122')#.get_by_name('example') ? +++++++++++++++++++ i = Oscilloscope() m.attach_instrument(i) try: i.set_samplerate(10) i.set_xmode(OSC_ROLL) i.commit() i.datalogger_stop() i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin') while True: time.sleep(1) trems, treme = i.datalogger_remaining() samples = i.datalogger_samples() print("Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)) if i.datalogger_completed(): break e = i.datalogger_error() if e: print("Error occured: %s" % e) i.datalogger_stop() i.datalogger_upload() except Exception as e: print(e) finally: m.close()
d335fce6cea07df872d8cd7d70c6c3fea348e521
tests/__init__.py
tests/__init__.py
import os.path import unittest def get_tests(): start_dir = os.path.dirname(__file__) return unittest.TestLoader().discover(start_dir, pattern="*.py")
import os.path import unittest def get_tests(): return full_suite() def full_suite(): from .resource import ResourceTestCase from .serializer import ResourceTestCase as SerializerTestCase from .utils import UtilsTestCase resourcesuite = unittest.TestLoader().loadTestsFromTestCase(ResourceTestCase) serializersuite = unittest.TestLoader().loadTestsFromTestCase(SerializerTestCase) utilssuite = unittest.TestLoader().loadTestsFromTestCase(UtilsTestCase) return unittest.TestSuite([resourcesuite, serializersuite, utilssuite])
Update get_tests to be backwards compatible with Python 2.6, since the library is compatible it seems worth this extra effort to test against it.
Update get_tests to be backwards compatible with Python 2.6, since the library is compatible it seems worth this extra effort to test against it.
Python
bsd-2-clause
jannon/slumber,IAlwaysBeCoding/More,zongxiao/slumber,infoxchange/slumber,futurice/slumber,IAlwaysBeCoding/slumber,samgiles/slumber,s-block/slumber,ministryofjustice/slumber
import os.path import unittest def get_tests(): + return full_suite() - start_dir = os.path.dirname(__file__) - return unittest.TestLoader().discover(start_dir, pattern="*.py") + def full_suite(): + from .resource import ResourceTestCase + from .serializer import ResourceTestCase as SerializerTestCase + from .utils import UtilsTestCase + + resourcesuite = unittest.TestLoader().loadTestsFromTestCase(ResourceTestCase) + serializersuite = unittest.TestLoader().loadTestsFromTestCase(SerializerTestCase) + utilssuite = unittest.TestLoader().loadTestsFromTestCase(UtilsTestCase) + + return unittest.TestSuite([resourcesuite, serializersuite, utilssuite]) + +
Update get_tests to be backwards compatible with Python 2.6, since the library is compatible it seems worth this extra effort to test against it.
## Code Before: import os.path import unittest def get_tests(): start_dir = os.path.dirname(__file__) return unittest.TestLoader().discover(start_dir, pattern="*.py") ## Instruction: Update get_tests to be backwards compatible with Python 2.6, since the library is compatible it seems worth this extra effort to test against it. ## Code After: import os.path import unittest def get_tests(): return full_suite() def full_suite(): from .resource import ResourceTestCase from .serializer import ResourceTestCase as SerializerTestCase from .utils import UtilsTestCase resourcesuite = unittest.TestLoader().loadTestsFromTestCase(ResourceTestCase) serializersuite = unittest.TestLoader().loadTestsFromTestCase(SerializerTestCase) utilssuite = unittest.TestLoader().loadTestsFromTestCase(UtilsTestCase) return unittest.TestSuite([resourcesuite, serializersuite, utilssuite])
import os.path import unittest def get_tests(): - start_dir = os.path.dirname(__file__) - return unittest.TestLoader().discover(start_dir, pattern="*.py") + return full_suite() + + def full_suite(): + from .resource import ResourceTestCase + from .serializer import ResourceTestCase as SerializerTestCase + from .utils import UtilsTestCase + + resourcesuite = unittest.TestLoader().loadTestsFromTestCase(ResourceTestCase) + serializersuite = unittest.TestLoader().loadTestsFromTestCase(SerializerTestCase) + utilssuite = unittest.TestLoader().loadTestsFromTestCase(UtilsTestCase) + + return unittest.TestSuite([resourcesuite, serializersuite, utilssuite]) +
91d6021fb0db6052570f1a0305a141e1af13b6e3
localeurl/models.py
localeurl/models.py
from django.conf import settings from django.core import urlresolvers from django.utils import translation from localeurl import utils def reverse(*args, **kwargs): reverse_kwargs = kwargs.get('kwargs', {}) locale = utils.supported_language(reverse_kwargs.pop('locale', translation.get_language())) url = django_reverse(*args, **kwargs) _, path = utils.strip_script_prefix(url) return utils.locale_url(path, locale) django_reverse = None def patch_reverse(): """ Monkey-patches the urlresolvers.reverse function. Will not patch twice. """ global django_reverse if urlresolvers.reverse is not reverse: django_reverse = urlresolvers.reverse urlresolvers.reverse = reverse if settings.USE_I18N: patch_reverse()
from django.conf import settings from django.core import urlresolvers from django.utils import translation from localeurl import utils def reverse(*args, **kwargs): reverse_kwargs = kwargs.get('kwargs', {}) if reverse_kwargs!=None: locale = utils.supported_language(reverse_kwargs.pop('locale', translation.get_language())) else: locale = translation.get_language() url = django_reverse(*args, **kwargs) _, path = utils.strip_script_prefix(url) return utils.locale_url(path, locale) django_reverse = None def patch_reverse(): """ Monkey-patches the urlresolvers.reverse function. Will not patch twice. """ global django_reverse if urlresolvers.reverse is not reverse: django_reverse = urlresolvers.reverse urlresolvers.reverse = reverse if settings.USE_I18N: patch_reverse()
Handle situation when kwargs is None
Handle situation when kwargs is None
Python
mit
carljm/django-localeurl,gonnado/django-localeurl,extertioner/django-localeurl
from django.conf import settings from django.core import urlresolvers from django.utils import translation from localeurl import utils def reverse(*args, **kwargs): reverse_kwargs = kwargs.get('kwargs', {}) + if reverse_kwargs!=None: - locale = utils.supported_language(reverse_kwargs.pop('locale', + locale = utils.supported_language(reverse_kwargs.pop('locale', - translation.get_language())) + translation.get_language())) + else: + locale = translation.get_language() url = django_reverse(*args, **kwargs) _, path = utils.strip_script_prefix(url) return utils.locale_url(path, locale) django_reverse = None def patch_reverse(): """ Monkey-patches the urlresolvers.reverse function. Will not patch twice. """ global django_reverse if urlresolvers.reverse is not reverse: django_reverse = urlresolvers.reverse urlresolvers.reverse = reverse if settings.USE_I18N: patch_reverse()
Handle situation when kwargs is None
## Code Before: from django.conf import settings from django.core import urlresolvers from django.utils import translation from localeurl import utils def reverse(*args, **kwargs): reverse_kwargs = kwargs.get('kwargs', {}) locale = utils.supported_language(reverse_kwargs.pop('locale', translation.get_language())) url = django_reverse(*args, **kwargs) _, path = utils.strip_script_prefix(url) return utils.locale_url(path, locale) django_reverse = None def patch_reverse(): """ Monkey-patches the urlresolvers.reverse function. Will not patch twice. """ global django_reverse if urlresolvers.reverse is not reverse: django_reverse = urlresolvers.reverse urlresolvers.reverse = reverse if settings.USE_I18N: patch_reverse() ## Instruction: Handle situation when kwargs is None ## Code After: from django.conf import settings from django.core import urlresolvers from django.utils import translation from localeurl import utils def reverse(*args, **kwargs): reverse_kwargs = kwargs.get('kwargs', {}) if reverse_kwargs!=None: locale = utils.supported_language(reverse_kwargs.pop('locale', translation.get_language())) else: locale = translation.get_language() url = django_reverse(*args, **kwargs) _, path = utils.strip_script_prefix(url) return utils.locale_url(path, locale) django_reverse = None def patch_reverse(): """ Monkey-patches the urlresolvers.reverse function. Will not patch twice. """ global django_reverse if urlresolvers.reverse is not reverse: django_reverse = urlresolvers.reverse urlresolvers.reverse = reverse if settings.USE_I18N: patch_reverse()
from django.conf import settings from django.core import urlresolvers from django.utils import translation from localeurl import utils def reverse(*args, **kwargs): reverse_kwargs = kwargs.get('kwargs', {}) + if reverse_kwargs!=None: - locale = utils.supported_language(reverse_kwargs.pop('locale', + locale = utils.supported_language(reverse_kwargs.pop('locale', ? ++++ - translation.get_language())) + translation.get_language())) ? ++++ + else: + locale = translation.get_language() url = django_reverse(*args, **kwargs) _, path = utils.strip_script_prefix(url) return utils.locale_url(path, locale) django_reverse = None def patch_reverse(): """ Monkey-patches the urlresolvers.reverse function. Will not patch twice. """ global django_reverse if urlresolvers.reverse is not reverse: django_reverse = urlresolvers.reverse urlresolvers.reverse = reverse if settings.USE_I18N: patch_reverse()
e1569a514345a8c78d415011387d06aed5e6daa4
webshack/cli.py
webshack/cli.py
import sys from docopt import docopt from termcolor import colored from webshack.install_package import install_package_hierarchy import webshack.package_db as pdb from pathlib import Path VERSION="0.0.1" class CLIOutput: def __init__(self): self.shift_width = 0 def log(self, package): if package is None: self.end_package() else: self.begin_package(package) def begin_package(self, package): self.shift_width = 50 - len(package) sys.stdout.write("Installing {pkg}...".format(pkg=colored(package, 'blue'))) sys.stdout.flush() def end_package(self): sys.stdout.write(' '*self.shift_width) sys.stdout.write('[{}]\n'.format(colored('DONE', 'green', attrs=['bold']))) sys.stdout.flush() def main(): options = docopt(__doc__, version=VERSION) db = pdb.standard_package_db() components = Path('components') if options['get']: output = CLIOutput() for package in options['<package>']: install_package_hierarchy(package, db, components, log_output=output.log)
import sys from docopt import docopt from termcolor import colored from webshack.install_package import install_package_hierarchy import webshack.package_db as pdb from pathlib import Path VERSION="0.0.1" class CLIOutput: def __init__(self): self.shift_width = 0 def log(self, package): if package is None: self.end_package() else: self.begin_package(package) def begin_package(self, package): self.shift_width = 50 - len(package) sys.stdout.write("Installing {pkg}...".format(pkg=colored(package, 'blue'))) sys.stdout.flush() def end_package(self): sys.stdout.write(' '*self.shift_width) sys.stdout.write('[{}]\n'.format(colored('DONE', 'green', attrs=['bold']))) sys.stdout.flush() def main(): options = docopt(__doc__, version=VERSION) db = pdb.standard_package_db() components = Path('components') if options['get']: output = CLIOutput() for package in options['<package>']: install_package_hierarchy(package, db, components, log_output=output.log) elif options['list']: for package in sorted(db): print(package)
Add a subcommand for listing packages
Add a subcommand for listing packages
Python
mit
prophile/webshack
import sys from docopt import docopt from termcolor import colored from webshack.install_package import install_package_hierarchy import webshack.package_db as pdb from pathlib import Path VERSION="0.0.1" class CLIOutput: def __init__(self): self.shift_width = 0 def log(self, package): if package is None: self.end_package() else: self.begin_package(package) def begin_package(self, package): self.shift_width = 50 - len(package) sys.stdout.write("Installing {pkg}...".format(pkg=colored(package, 'blue'))) sys.stdout.flush() def end_package(self): sys.stdout.write(' '*self.shift_width) sys.stdout.write('[{}]\n'.format(colored('DONE', 'green', attrs=['bold']))) sys.stdout.flush() def main(): options = docopt(__doc__, version=VERSION) db = pdb.standard_package_db() components = Path('components') if options['get']: output = CLIOutput() for package in options['<package>']: install_package_hierarchy(package, db, components, log_output=output.log) + elif options['list']: + for package in sorted(db): + print(package)
Add a subcommand for listing packages
## Code Before: import sys from docopt import docopt from termcolor import colored from webshack.install_package import install_package_hierarchy import webshack.package_db as pdb from pathlib import Path VERSION="0.0.1" class CLIOutput: def __init__(self): self.shift_width = 0 def log(self, package): if package is None: self.end_package() else: self.begin_package(package) def begin_package(self, package): self.shift_width = 50 - len(package) sys.stdout.write("Installing {pkg}...".format(pkg=colored(package, 'blue'))) sys.stdout.flush() def end_package(self): sys.stdout.write(' '*self.shift_width) sys.stdout.write('[{}]\n'.format(colored('DONE', 'green', attrs=['bold']))) sys.stdout.flush() def main(): options = docopt(__doc__, version=VERSION) db = pdb.standard_package_db() components = Path('components') if options['get']: output = CLIOutput() for package in options['<package>']: install_package_hierarchy(package, db, components, log_output=output.log) ## Instruction: Add a subcommand for listing packages ## Code After: import sys from docopt import docopt from termcolor import colored from webshack.install_package import install_package_hierarchy import webshack.package_db as pdb from pathlib import Path VERSION="0.0.1" class CLIOutput: def __init__(self): self.shift_width = 0 def log(self, package): if package is None: self.end_package() else: self.begin_package(package) def begin_package(self, package): self.shift_width = 50 - len(package) sys.stdout.write("Installing {pkg}...".format(pkg=colored(package, 'blue'))) sys.stdout.flush() def end_package(self): sys.stdout.write(' '*self.shift_width) sys.stdout.write('[{}]\n'.format(colored('DONE', 'green', attrs=['bold']))) sys.stdout.flush() def main(): options = docopt(__doc__, version=VERSION) db = pdb.standard_package_db() components = Path('components') if options['get']: output = CLIOutput() for package in options['<package>']: install_package_hierarchy(package, db, components, log_output=output.log) elif 
options['list']: for package in sorted(db): print(package)
import sys from docopt import docopt from termcolor import colored from webshack.install_package import install_package_hierarchy import webshack.package_db as pdb from pathlib import Path VERSION="0.0.1" class CLIOutput: def __init__(self): self.shift_width = 0 def log(self, package): if package is None: self.end_package() else: self.begin_package(package) def begin_package(self, package): self.shift_width = 50 - len(package) sys.stdout.write("Installing {pkg}...".format(pkg=colored(package, 'blue'))) sys.stdout.flush() def end_package(self): sys.stdout.write(' '*self.shift_width) sys.stdout.write('[{}]\n'.format(colored('DONE', 'green', attrs=['bold']))) sys.stdout.flush() def main(): options = docopt(__doc__, version=VERSION) db = pdb.standard_package_db() components = Path('components') if options['get']: output = CLIOutput() for package in options['<package>']: install_package_hierarchy(package, db, components, log_output=output.log) + elif options['list']: + for package in sorted(db): + print(package)
bda9bc3574b14ead6f51e1fb0f6864e07ccefd88
Orange/classification/random_forest.py
Orange/classification/random_forest.py
from sklearn.ensemble import RandomForestClassifier as RandomForest from sklearn.preprocessing import Imputer from numpy import isnan import Orange.data import Orange.classification def replace_nan(X, imp_model): # Default scikit Imputer # Use Orange imputer when implemented if isnan(X).sum(): X = imp_model.transform(X) return X # TODO: implement sending a single decision tree class RandomForestLearner(Orange.classification.SklFitter): def __init__(self, n_estimators=10, max_features="auto", random_state=None, max_depth=3, max_leaf_nodes=5): self.params = vars() def fit(self, X, Y, W): self.imputer = Imputer() self.imputer.fit(X) X = replace_nan(X, self.imputer) rf_model = RandomForest(**self.params) rf_model.fit(X, Y.ravel()) return RandomForestClassifier(rf_model, self.imputer) class RandomForestClassifier(Orange.classification.SklModel): def __init__(self, clf, imp): self.clf = clf self.imputer = imp def predict(self, X): X = replace_nan(X, imp_model=self.imputer) value = self.clf.predict(X) prob = self.clf.predict_proba(X) return value, prob
import numbers from sklearn.ensemble import RandomForestClassifier as RandomForest from sklearn.preprocessing import Imputer from numpy import isnan import Orange.data import Orange.classification def replace_nan(X, imp_model): # Default scikit Imputer # Use Orange imputer when implemented if isnan(X).sum(): X = imp_model.transform(X) return X class RandomForestLearner(Orange.classification.SklFitter): def __init__(self, n_estimators=10, max_features="auto", random_state=None, max_depth=3, max_leaf_nodes=5): self.params = vars() def fit(self, X, Y, W): self.imputer = Imputer() self.imputer.fit(X) X = replace_nan(X, self.imputer) params = dict(self.params) max_features = params["max_features"] if isinstance(max_features, numbers.Integral) and \ X.shape[1] < max_features: params["max_features"] = X.shape[1] rf_model = RandomForest(**params) rf_model.fit(X, Y.ravel()) return RandomForestClassifier(rf_model, self.imputer) class RandomForestClassifier(Orange.classification.SklModel): def __init__(self, clf, imp): self.clf = clf self.imputer = imp def predict(self, X): X = replace_nan(X, imp_model=self.imputer) value = self.clf.predict(X) prob = self.clf.predict_proba(X) return value, prob
Fix an error when number of predictor columns is less than max_features.
Fix an error when number of predictor columns is less than max_features.
Python
bsd-2-clause
marinkaz/orange3,marinkaz/orange3,kwikadi/orange3,marinkaz/orange3,qPCR4vir/orange3,kwikadi/orange3,cheral/orange3,kwikadi/orange3,marinkaz/orange3,cheral/orange3,qPCR4vir/orange3,qPCR4vir/orange3,qPCR4vir/orange3,kwikadi/orange3,cheral/orange3,marinkaz/orange3,qusp/orange3,cheral/orange3,qusp/orange3,kwikadi/orange3,qusp/orange3,kwikadi/orange3,qusp/orange3,cheral/orange3,qPCR4vir/orange3,cheral/orange3,qPCR4vir/orange3,marinkaz/orange3
+ import numbers + from sklearn.ensemble import RandomForestClassifier as RandomForest from sklearn.preprocessing import Imputer from numpy import isnan + import Orange.data import Orange.classification + def replace_nan(X, imp_model): - # Default scikit Imputer + # Default scikit Imputer - # Use Orange imputer when implemented + # Use Orange imputer when implemented - if isnan(X).sum(): + if isnan(X).sum(): - X = imp_model.transform(X) + X = imp_model.transform(X) - return X + return X - # TODO: implement sending a single decision tree + class RandomForestLearner(Orange.classification.SklFitter): def __init__(self, n_estimators=10, max_features="auto", random_state=None, max_depth=3, max_leaf_nodes=5): self.params = vars() def fit(self, X, Y, W): self.imputer = Imputer() self.imputer.fit(X) X = replace_nan(X, self.imputer) + + params = dict(self.params) + max_features = params["max_features"] + if isinstance(max_features, numbers.Integral) and \ + X.shape[1] < max_features: + params["max_features"] = X.shape[1] + - rf_model = RandomForest(**self.params) + rf_model = RandomForest(**params) rf_model.fit(X, Y.ravel()) return RandomForestClassifier(rf_model, self.imputer) class RandomForestClassifier(Orange.classification.SklModel): def __init__(self, clf, imp): self.clf = clf self.imputer = imp def predict(self, X): X = replace_nan(X, imp_model=self.imputer) value = self.clf.predict(X) prob = self.clf.predict_proba(X) return value, prob
Fix an error when number of predictor columns is less than max_features.
## Code Before: from sklearn.ensemble import RandomForestClassifier as RandomForest from sklearn.preprocessing import Imputer from numpy import isnan import Orange.data import Orange.classification def replace_nan(X, imp_model): # Default scikit Imputer # Use Orange imputer when implemented if isnan(X).sum(): X = imp_model.transform(X) return X # TODO: implement sending a single decision tree class RandomForestLearner(Orange.classification.SklFitter): def __init__(self, n_estimators=10, max_features="auto", random_state=None, max_depth=3, max_leaf_nodes=5): self.params = vars() def fit(self, X, Y, W): self.imputer = Imputer() self.imputer.fit(X) X = replace_nan(X, self.imputer) rf_model = RandomForest(**self.params) rf_model.fit(X, Y.ravel()) return RandomForestClassifier(rf_model, self.imputer) class RandomForestClassifier(Orange.classification.SklModel): def __init__(self, clf, imp): self.clf = clf self.imputer = imp def predict(self, X): X = replace_nan(X, imp_model=self.imputer) value = self.clf.predict(X) prob = self.clf.predict_proba(X) return value, prob ## Instruction: Fix an error when number of predictor columns is less than max_features. 
## Code After: import numbers from sklearn.ensemble import RandomForestClassifier as RandomForest from sklearn.preprocessing import Imputer from numpy import isnan import Orange.data import Orange.classification def replace_nan(X, imp_model): # Default scikit Imputer # Use Orange imputer when implemented if isnan(X).sum(): X = imp_model.transform(X) return X class RandomForestLearner(Orange.classification.SklFitter): def __init__(self, n_estimators=10, max_features="auto", random_state=None, max_depth=3, max_leaf_nodes=5): self.params = vars() def fit(self, X, Y, W): self.imputer = Imputer() self.imputer.fit(X) X = replace_nan(X, self.imputer) params = dict(self.params) max_features = params["max_features"] if isinstance(max_features, numbers.Integral) and \ X.shape[1] < max_features: params["max_features"] = X.shape[1] rf_model = RandomForest(**params) rf_model.fit(X, Y.ravel()) return RandomForestClassifier(rf_model, self.imputer) class RandomForestClassifier(Orange.classification.SklModel): def __init__(self, clf, imp): self.clf = clf self.imputer = imp def predict(self, X): X = replace_nan(X, imp_model=self.imputer) value = self.clf.predict(X) prob = self.clf.predict_proba(X) return value, prob
+ import numbers + from sklearn.ensemble import RandomForestClassifier as RandomForest from sklearn.preprocessing import Imputer from numpy import isnan + import Orange.data import Orange.classification + def replace_nan(X, imp_model): - # Default scikit Imputer ? ---- + # Default scikit Imputer - # Use Orange imputer when implemented ? ---- + # Use Orange imputer when implemented - if isnan(X).sum(): ? ---- + if isnan(X).sum(): - X = imp_model.transform(X) ? -------- + X = imp_model.transform(X) - return X ? ---- + return X - # TODO: implement sending a single decision tree + class RandomForestLearner(Orange.classification.SklFitter): def __init__(self, n_estimators=10, max_features="auto", random_state=None, max_depth=3, max_leaf_nodes=5): self.params = vars() def fit(self, X, Y, W): self.imputer = Imputer() self.imputer.fit(X) X = replace_nan(X, self.imputer) + + params = dict(self.params) + max_features = params["max_features"] + if isinstance(max_features, numbers.Integral) and \ + X.shape[1] < max_features: + params["max_features"] = X.shape[1] + - rf_model = RandomForest(**self.params) ? ----- + rf_model = RandomForest(**params) rf_model.fit(X, Y.ravel()) return RandomForestClassifier(rf_model, self.imputer) class RandomForestClassifier(Orange.classification.SklModel): def __init__(self, clf, imp): self.clf = clf self.imputer = imp def predict(self, X): X = replace_nan(X, imp_model=self.imputer) value = self.clf.predict(X) prob = self.clf.predict_proba(X) return value, prob
c8a7a53f09f72d9dbe44b1bcb5b85c8ee5ba2c2c
services/migrations/0012_unit_data_source.py
services/migrations/0012_unit_data_source.py
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('services', '0011_unit_extensions'), ] operations = [ migrations.AddField( model_name='unit', name='data_source', field=models.CharField(null=True, max_length=20), ), ]
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('services', '0011_unit_extensions'), ] operations = [ migrations.AddField( model_name='unit', name='data_source', field=models.CharField(null=True, max_length=20, default='tprek'), preserve_default=False ), ]
Add default to data_source migration.
Add default to data_source migration.
Python
agpl-3.0
City-of-Helsinki/smbackend,City-of-Helsinki/smbackend
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('services', '0011_unit_extensions'), ] operations = [ migrations.AddField( model_name='unit', name='data_source', - field=models.CharField(null=True, max_length=20), + field=models.CharField(null=True, max_length=20, default='tprek'), + preserve_default=False ), ]
Add default to data_source migration.
## Code Before: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('services', '0011_unit_extensions'), ] operations = [ migrations.AddField( model_name='unit', name='data_source', field=models.CharField(null=True, max_length=20), ), ] ## Instruction: Add default to data_source migration. ## Code After: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('services', '0011_unit_extensions'), ] operations = [ migrations.AddField( model_name='unit', name='data_source', field=models.CharField(null=True, max_length=20, default='tprek'), preserve_default=False ), ]
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('services', '0011_unit_extensions'), ] operations = [ migrations.AddField( model_name='unit', name='data_source', - field=models.CharField(null=True, max_length=20), + field=models.CharField(null=True, max_length=20, default='tprek'), ? +++++++++++++++++ + preserve_default=False ), ]
485bfd97d1b305ad0944192d4ea8c77a479936ad
util/log.py
util/log.py
import sys from colors import Colors class Log: @classmethod def print_msg(cls, title, msg, color, new_line = True): Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line) @classmethod def msg(cls, msg, new_line = True): Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line) @classmethod def info(cls, msg, new_line = True): Log.print_msg("Info", msg, Colors.CYAN_FG, new_line) @classmethod def warn(cls, msg, new_line = True): Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line) @classmethod def err(cls, msg, new_line = True): Log.print_msg("Error", msg, Colors.RED_FG, new_line) @classmethod def raw(cls, msg, new_line = True): if new_line and msg[-1:] != "\n": msg += "\n" sys.stdout.write("{0}".format(msg))
import sys from colors import Colors class Log: @classmethod def print_msg(cls, title, msg, color, new_line = True): Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line) @classmethod def msg(cls, msg, new_line = True): Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line) @classmethod def info(cls, msg, new_line = True): Log.print_msg("Info", msg, Colors.CYAN_FG, new_line) @classmethod def warn(cls, msg, new_line = True): Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line) @classmethod def note(cls, msg, new_line = True): Log.print_msg("Note", msg, Colors.YELLOW_FG, new_line) @classmethod def err(cls, msg, new_line = True): Log.print_msg("Error", msg, Colors.RED_FG, new_line) @classmethod def fatal(cls, msg, new_line = True): Log.print_msg("Fatal", msg, Colors.RED_FG, new_line) exit(1) @classmethod def raw(cls, msg, new_line = True): if new_line and msg[-1:] != "\n": msg += "\n" sys.stdout.write("{0}".format(msg))
Add note and fatal to Log
Add note and fatal to Log
Python
mit
JBarberU/strawberry_py
import sys from colors import Colors class Log: @classmethod def print_msg(cls, title, msg, color, new_line = True): Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line) @classmethod def msg(cls, msg, new_line = True): Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line) @classmethod def info(cls, msg, new_line = True): Log.print_msg("Info", msg, Colors.CYAN_FG, new_line) @classmethod def warn(cls, msg, new_line = True): Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line) @classmethod + def note(cls, msg, new_line = True): + Log.print_msg("Note", msg, Colors.YELLOW_FG, new_line) + + @classmethod def err(cls, msg, new_line = True): Log.print_msg("Error", msg, Colors.RED_FG, new_line) + + @classmethod + def fatal(cls, msg, new_line = True): + Log.print_msg("Fatal", msg, Colors.RED_FG, new_line) + exit(1) @classmethod def raw(cls, msg, new_line = True): if new_line and msg[-1:] != "\n": msg += "\n" sys.stdout.write("{0}".format(msg))
Add note and fatal to Log
## Code Before: import sys from colors import Colors class Log: @classmethod def print_msg(cls, title, msg, color, new_line = True): Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line) @classmethod def msg(cls, msg, new_line = True): Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line) @classmethod def info(cls, msg, new_line = True): Log.print_msg("Info", msg, Colors.CYAN_FG, new_line) @classmethod def warn(cls, msg, new_line = True): Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line) @classmethod def err(cls, msg, new_line = True): Log.print_msg("Error", msg, Colors.RED_FG, new_line) @classmethod def raw(cls, msg, new_line = True): if new_line and msg[-1:] != "\n": msg += "\n" sys.stdout.write("{0}".format(msg)) ## Instruction: Add note and fatal to Log ## Code After: import sys from colors import Colors class Log: @classmethod def print_msg(cls, title, msg, color, new_line = True): Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line) @classmethod def msg(cls, msg, new_line = True): Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line) @classmethod def info(cls, msg, new_line = True): Log.print_msg("Info", msg, Colors.CYAN_FG, new_line) @classmethod def warn(cls, msg, new_line = True): Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line) @classmethod def note(cls, msg, new_line = True): Log.print_msg("Note", msg, Colors.YELLOW_FG, new_line) @classmethod def err(cls, msg, new_line = True): Log.print_msg("Error", msg, Colors.RED_FG, new_line) @classmethod def fatal(cls, msg, new_line = True): Log.print_msg("Fatal", msg, Colors.RED_FG, new_line) exit(1) @classmethod def raw(cls, msg, new_line = True): if new_line and msg[-1:] != "\n": msg += "\n" sys.stdout.write("{0}".format(msg))
import sys from colors import Colors class Log: @classmethod def print_msg(cls, title, msg, color, new_line = True): Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line) @classmethod def msg(cls, msg, new_line = True): Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line) @classmethod def info(cls, msg, new_line = True): Log.print_msg("Info", msg, Colors.CYAN_FG, new_line) @classmethod def warn(cls, msg, new_line = True): Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line) @classmethod + def note(cls, msg, new_line = True): + Log.print_msg("Note", msg, Colors.YELLOW_FG, new_line) + + @classmethod def err(cls, msg, new_line = True): Log.print_msg("Error", msg, Colors.RED_FG, new_line) + + @classmethod + def fatal(cls, msg, new_line = True): + Log.print_msg("Fatal", msg, Colors.RED_FG, new_line) + exit(1) @classmethod def raw(cls, msg, new_line = True): if new_line and msg[-1:] != "\n": msg += "\n" sys.stdout.write("{0}".format(msg))
0983361e6fba5812416d8fb5b695f6b3034bc927
registration/management/commands/cleanupregistration.py
registration/management/commands/cleanupregistration.py
from django.core.management.base import NoArgsCommand from ...models import RegistrationProfile class Command(NoArgsCommand): help = "Delete expired user registrations from the database" def handle_noargs(self, **options): RegistrationProfile.objects.delete_expired_users()
from django.core.management.base import BaseCommand from ...models import RegistrationProfile class Command(BaseCommand): help = "Delete expired user registrations from the database" def handle(self, *args, **options): RegistrationProfile.objects.delete_expired_users()
Fix deprecated class NoArgsCommand class.
Fix deprecated class NoArgsCommand class. Solve the warning RemovedInDjango110Warning: NoArgsCommand class is deprecated and will be removed in Django 1.10. Use BaseCommand instead, which takes no arguments by default.
Python
bsd-3-clause
sergafts/django-registration,timgraham/django-registration,sergafts/django-registration,pando85/django-registration,pando85/django-registration,allo-/django-registration,allo-/django-registration,timgraham/django-registration
- from django.core.management.base import NoArgsCommand + from django.core.management.base import BaseCommand from ...models import RegistrationProfile - class Command(NoArgsCommand): + class Command(BaseCommand): help = "Delete expired user registrations from the database" - def handle_noargs(self, **options): + def handle(self, *args, **options): RegistrationProfile.objects.delete_expired_users()
Fix deprecated class NoArgsCommand class.
## Code Before: from django.core.management.base import NoArgsCommand from ...models import RegistrationProfile class Command(NoArgsCommand): help = "Delete expired user registrations from the database" def handle_noargs(self, **options): RegistrationProfile.objects.delete_expired_users() ## Instruction: Fix deprecated class NoArgsCommand class. ## Code After: from django.core.management.base import BaseCommand from ...models import RegistrationProfile class Command(BaseCommand): help = "Delete expired user registrations from the database" def handle(self, *args, **options): RegistrationProfile.objects.delete_expired_users()
- from django.core.management.base import NoArgsCommand ? ^^^^^ + from django.core.management.base import BaseCommand ? ^^ + from ...models import RegistrationProfile - class Command(NoArgsCommand): ? ^^^^^ + class Command(BaseCommand): ? ^^ + help = "Delete expired user registrations from the database" - def handle_noargs(self, **options): ? ------- + def handle(self, *args, **options): ? +++++++ RegistrationProfile.objects.delete_expired_users()
8cc88e1f6e09e91f2ffc5bbf43b58b2d129a12c9
bnc.py
bnc.py
import nltk.corpus.reader.bnc
import nltk.corpus.reader.bnc import time start_time = time.perf_counter() BNC_data = nltk.corpus.reader.bnc.BNCCorpusReader(root='/home/ubuntu/ug-d/bncbaby/', fileids=r'aca/\w*\.xml', # r'aca/\w*\.xml', # r'[a-z]{3}/\w*\.xml') lazy=False) # found here: https://github.com/nltk/nltk/issues/781 talk about how much more efficient it is time_taken = time.perf_counter() - start_time print('\n|| Successfully loaded the British National Corpus in {:.1f}'.format(time_taken), 'seconds. ||\n')
Load BNC into memory and time process.
Load BNC into memory and time process.
Python
mit
albertomh/ug-dissertation
import nltk.corpus.reader.bnc + import time + + start_time = time.perf_counter() + BNC_data = nltk.corpus.reader.bnc.BNCCorpusReader(root='/home/ubuntu/ug-d/bncbaby/', + fileids=r'aca/\w*\.xml', # r'aca/\w*\.xml', # r'[a-z]{3}/\w*\.xml') + lazy=False) # found here: https://github.com/nltk/nltk/issues/781 talk about how much more efficient it is + time_taken = time.perf_counter() - start_time + print('\n|| Successfully loaded the British National Corpus in {:.1f}'.format(time_taken), 'seconds. ||\n') +
Load BNC into memory and time process.
## Code Before: import nltk.corpus.reader.bnc ## Instruction: Load BNC into memory and time process. ## Code After: import nltk.corpus.reader.bnc import time start_time = time.perf_counter() BNC_data = nltk.corpus.reader.bnc.BNCCorpusReader(root='/home/ubuntu/ug-d/bncbaby/', fileids=r'aca/\w*\.xml', # r'aca/\w*\.xml', # r'[a-z]{3}/\w*\.xml') lazy=False) # found here: https://github.com/nltk/nltk/issues/781 talk about how much more efficient it is time_taken = time.perf_counter() - start_time print('\n|| Successfully loaded the British National Corpus in {:.1f}'.format(time_taken), 'seconds. ||\n')
import nltk.corpus.reader.bnc + import time + + + start_time = time.perf_counter() + BNC_data = nltk.corpus.reader.bnc.BNCCorpusReader(root='/home/ubuntu/ug-d/bncbaby/', + fileids=r'aca/\w*\.xml', # r'aca/\w*\.xml', # r'[a-z]{3}/\w*\.xml') + lazy=False) # found here: https://github.com/nltk/nltk/issues/781 talk about how much more efficient it is + time_taken = time.perf_counter() - start_time + print('\n|| Successfully loaded the British National Corpus in {:.1f}'.format(time_taken), 'seconds. ||\n')
b973a1686f269044e670704b56c07ca79336c29c
mythril/laser/ethereum/strategy/basic.py
mythril/laser/ethereum/strategy/basic.py
class DepthFirstSearchStrategy: def __init__(self, content, max_depth): self.content = content self.max_depth = max_depth def __iter__(self): return self def __next__(self): try: global_state = self.content.pop(0) if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration()
class DepthFirstSearchStrategy: """ Implements a depth first search strategy I.E. Follow one path to a leaf, and then continue to the next one """ def __init__(self, work_list, max_depth): self.work_list = work_list self.max_depth = max_depth def __iter__(self): return self def __next__(self): """ Picks the next state to execute """ try: # This strategies assumes that new states are appended at the end of the work_list # By taking the last element we effectively pick the "newest" states, which amounts to dfs global_state = self.work_list.pop() if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration()
Add documentation and fix pop
Add documentation and fix pop
Python
mit
b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril
+ + class DepthFirstSearchStrategy: - + """ + Implements a depth first search strategy + I.E. Follow one path to a leaf, and then continue to the next one + """ - def __init__(self, content, max_depth): + def __init__(self, work_list, max_depth): - self.content = content + self.work_list = work_list self.max_depth = max_depth def __iter__(self): return self def __next__(self): + """ Picks the next state to execute """ try: + # This strategies assumes that new states are appended at the end of the work_list + # By taking the last element we effectively pick the "newest" states, which amounts to dfs - global_state = self.content.pop(0) + global_state = self.work_list.pop() if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration() +
Add documentation and fix pop
## Code Before: class DepthFirstSearchStrategy: def __init__(self, content, max_depth): self.content = content self.max_depth = max_depth def __iter__(self): return self def __next__(self): try: global_state = self.content.pop(0) if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration() ## Instruction: Add documentation and fix pop ## Code After: class DepthFirstSearchStrategy: """ Implements a depth first search strategy I.E. Follow one path to a leaf, and then continue to the next one """ def __init__(self, work_list, max_depth): self.work_list = work_list self.max_depth = max_depth def __iter__(self): return self def __next__(self): """ Picks the next state to execute """ try: # This strategies assumes that new states are appended at the end of the work_list # By taking the last element we effectively pick the "newest" states, which amounts to dfs global_state = self.work_list.pop() if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration()
+ + class DepthFirstSearchStrategy: - + """ + Implements a depth first search strategy + I.E. Follow one path to a leaf, and then continue to the next one + """ - def __init__(self, content, max_depth): ? ^ ^^^^ + def __init__(self, work_list, max_depth): ? ^ ^^^^^^ - self.content = content + self.work_list = work_list self.max_depth = max_depth def __iter__(self): return self def __next__(self): + """ Picks the next state to execute """ try: + # This strategies assumes that new states are appended at the end of the work_list + # By taking the last element we effectively pick the "newest" states, which amounts to dfs - global_state = self.content.pop(0) ? ^ ^^^^ - + global_state = self.work_list.pop() ? ^ ^^^^^^ if global_state.mstate.depth >= self.max_depth: return self.__next__() return global_state except IndexError: raise StopIteration() +
5da928fd9b08aeb0028b71535413159da18393b4
comics/sets/forms.py
comics/sets/forms.py
import datetime from django import forms from django.template.defaultfilters import slugify from comics.core.models import Comic from comics.sets.models import Set class NewSetForm(forms.ModelForm): class Meta: model = Set fields = ('name',) def save(self, commit=True): set = super(NewSetForm, self).save(commit=False) set.name = slugify(set.name) set.last_modified = datetime.datetime.now() set.last_loaded = datetime.datetime.now() if commit: set.save() return set class EditSetForm(forms.ModelForm): comics = forms.ModelMultipleChoiceField( Comic.objects.all(), required=False, widget=forms.CheckboxSelectMultiple) add_new_comics = forms.BooleanField( label='Automatically add new comics to the set', required=False) hide_empty_comics = forms.BooleanField( label='Hide comics without matching releases from view', required=False) class Meta: model = Set fields = ('comics', 'add_new_comics', 'hide_empty_comics') def save(self, commit=True): comics_set = super(EditSetForm, self).save(commit=False) comics_set.last_modified = datetime.datetime.now() if commit: comics_set.save() self.save_m2m() return comics_set
import datetime from django import forms from django.template.defaultfilters import slugify from comics.core.models import Comic from comics.sets.models import Set class NewSetForm(forms.ModelForm): class Meta: model = Set fields = ('name',) def save(self, commit=True): set = super(NewSetForm, self).save(commit=False) set.name = slugify(set.name) set.last_modified = datetime.datetime.now() set.last_loaded = datetime.datetime.now() if commit: set.save() return set class EditSetForm(forms.ModelForm): comics = forms.ModelMultipleChoiceField( Comic.objects.filter(active=True), required=False, widget=forms.CheckboxSelectMultiple) add_new_comics = forms.BooleanField( label='Automatically add new comics to the set', required=False) hide_empty_comics = forms.BooleanField( label='Hide comics without matching releases from view', required=False) class Meta: model = Set fields = ('comics', 'add_new_comics', 'hide_empty_comics') def save(self, commit=True): comics_set = super(EditSetForm, self).save(commit=False) comics_set.last_modified = datetime.datetime.now() if commit: comics_set.save() self.save_m2m() return comics_set
Exclude inactive comics from sets editing, effectively throwing them out of the set when saved
Exclude inactive comics from sets editing, effectively throwing them out of the set when saved
Python
agpl-3.0
datagutten/comics,klette/comics,jodal/comics,datagutten/comics,jodal/comics,klette/comics,jodal/comics,datagutten/comics,jodal/comics,klette/comics,datagutten/comics
import datetime from django import forms from django.template.defaultfilters import slugify from comics.core.models import Comic from comics.sets.models import Set class NewSetForm(forms.ModelForm): class Meta: model = Set fields = ('name',) def save(self, commit=True): set = super(NewSetForm, self).save(commit=False) set.name = slugify(set.name) set.last_modified = datetime.datetime.now() set.last_loaded = datetime.datetime.now() if commit: set.save() return set class EditSetForm(forms.ModelForm): comics = forms.ModelMultipleChoiceField( - Comic.objects.all(), + Comic.objects.filter(active=True), required=False, widget=forms.CheckboxSelectMultiple) add_new_comics = forms.BooleanField( label='Automatically add new comics to the set', required=False) hide_empty_comics = forms.BooleanField( label='Hide comics without matching releases from view', required=False) class Meta: model = Set fields = ('comics', 'add_new_comics', 'hide_empty_comics') def save(self, commit=True): comics_set = super(EditSetForm, self).save(commit=False) comics_set.last_modified = datetime.datetime.now() if commit: comics_set.save() self.save_m2m() return comics_set
Exclude inactive comics from sets editing, effectively throwing them out of the set when saved
## Code Before: import datetime from django import forms from django.template.defaultfilters import slugify from comics.core.models import Comic from comics.sets.models import Set class NewSetForm(forms.ModelForm): class Meta: model = Set fields = ('name',) def save(self, commit=True): set = super(NewSetForm, self).save(commit=False) set.name = slugify(set.name) set.last_modified = datetime.datetime.now() set.last_loaded = datetime.datetime.now() if commit: set.save() return set class EditSetForm(forms.ModelForm): comics = forms.ModelMultipleChoiceField( Comic.objects.all(), required=False, widget=forms.CheckboxSelectMultiple) add_new_comics = forms.BooleanField( label='Automatically add new comics to the set', required=False) hide_empty_comics = forms.BooleanField( label='Hide comics without matching releases from view', required=False) class Meta: model = Set fields = ('comics', 'add_new_comics', 'hide_empty_comics') def save(self, commit=True): comics_set = super(EditSetForm, self).save(commit=False) comics_set.last_modified = datetime.datetime.now() if commit: comics_set.save() self.save_m2m() return comics_set ## Instruction: Exclude inactive comics from sets editing, effectively throwing them out of the set when saved ## Code After: import datetime from django import forms from django.template.defaultfilters import slugify from comics.core.models import Comic from comics.sets.models import Set class NewSetForm(forms.ModelForm): class Meta: model = Set fields = ('name',) def save(self, commit=True): set = super(NewSetForm, self).save(commit=False) set.name = slugify(set.name) set.last_modified = datetime.datetime.now() set.last_loaded = datetime.datetime.now() if commit: set.save() return set class EditSetForm(forms.ModelForm): comics = forms.ModelMultipleChoiceField( Comic.objects.filter(active=True), required=False, widget=forms.CheckboxSelectMultiple) add_new_comics = forms.BooleanField( label='Automatically add new comics to the set', required=False) 
hide_empty_comics = forms.BooleanField( label='Hide comics without matching releases from view', required=False) class Meta: model = Set fields = ('comics', 'add_new_comics', 'hide_empty_comics') def save(self, commit=True): comics_set = super(EditSetForm, self).save(commit=False) comics_set.last_modified = datetime.datetime.now() if commit: comics_set.save() self.save_m2m() return comics_set
import datetime from django import forms from django.template.defaultfilters import slugify from comics.core.models import Comic from comics.sets.models import Set class NewSetForm(forms.ModelForm): class Meta: model = Set fields = ('name',) def save(self, commit=True): set = super(NewSetForm, self).save(commit=False) set.name = slugify(set.name) set.last_modified = datetime.datetime.now() set.last_loaded = datetime.datetime.now() if commit: set.save() return set class EditSetForm(forms.ModelForm): comics = forms.ModelMultipleChoiceField( - Comic.objects.all(), + Comic.objects.filter(active=True), required=False, widget=forms.CheckboxSelectMultiple) add_new_comics = forms.BooleanField( label='Automatically add new comics to the set', required=False) hide_empty_comics = forms.BooleanField( label='Hide comics without matching releases from view', required=False) class Meta: model = Set fields = ('comics', 'add_new_comics', 'hide_empty_comics') def save(self, commit=True): comics_set = super(EditSetForm, self).save(commit=False) comics_set.last_modified = datetime.datetime.now() if commit: comics_set.save() self.save_m2m() return comics_set
1028e1a6e15af5a20caedd5598cebc49c5486f64
scripts/iepy_runner.py
scripts/iepy_runner.py
import pprint from docopt import docopt from iepy.core import BootstrappedIEPipeline from iepy import db from iepy.human_validation import TerminalInterviewer from iepy.utils import load_facts_from_csv, save_labeled_evidence_to_csv if __name__ == '__main__': opts = docopt(__doc__, version=0.1) connection = db.connect(opts['<dbname>']) seed_facts = load_facts_from_csv(opts['<seeds_file>']) p = BootstrappedIEPipeline(connection, seed_facts) STOP = 'STOP' p.start() # blocking keep_looping = True while keep_looping: qs = list(p.questions_available()) if not qs: keep_looping = False term = TerminalInterviewer(qs, p.add_answer, [(STOP, 'Stop execution ASAP')]) result = term() if result == STOP: keep_looping = False p.force_process() facts = p.known_facts() # profit save_labeled_evidence_to_csv(facts.items(), "facts.csv")
import pprint from docopt import docopt from iepy.core import BootstrappedIEPipeline from iepy import db from iepy.human_validation import TerminalInterviewer from iepy.utils import load_facts_from_csv, save_labeled_evidence_to_csv if __name__ == '__main__': opts = docopt(__doc__, version=0.1) connection = db.connect(opts['<dbname>']) seed_facts = load_facts_from_csv(opts['<seeds_file>']) output_file = load_facts_from_csv(opts['<output_file>']) p = BootstrappedIEPipeline(connection, seed_facts) STOP = 'STOP' p.start() # blocking keep_looping = True while keep_looping: qs = list(p.questions_available()) if not qs: keep_looping = False term = TerminalInterviewer(qs, p.add_answer, [(STOP, 'Stop execution ASAP')]) result = term() if result == STOP: keep_looping = False p.force_process() facts = p.known_facts() # profit save_labeled_evidence_to_csv(facts.items(), output_file)
Add <output_file> parameter to the IEPY runner script.
Add <output_file> parameter to the IEPY runner script.
Python
bsd-3-clause
mrshu/iepy,machinalis/iepy,machinalis/iepy,mrshu/iepy,machinalis/iepy,mrshu/iepy
import pprint from docopt import docopt from iepy.core import BootstrappedIEPipeline from iepy import db from iepy.human_validation import TerminalInterviewer from iepy.utils import load_facts_from_csv, save_labeled_evidence_to_csv if __name__ == '__main__': opts = docopt(__doc__, version=0.1) connection = db.connect(opts['<dbname>']) seed_facts = load_facts_from_csv(opts['<seeds_file>']) + output_file = load_facts_from_csv(opts['<output_file>']) p = BootstrappedIEPipeline(connection, seed_facts) STOP = 'STOP' p.start() # blocking keep_looping = True while keep_looping: qs = list(p.questions_available()) if not qs: keep_looping = False term = TerminalInterviewer(qs, p.add_answer, [(STOP, 'Stop execution ASAP')]) result = term() if result == STOP: keep_looping = False p.force_process() facts = p.known_facts() # profit - save_labeled_evidence_to_csv(facts.items(), "facts.csv") + save_labeled_evidence_to_csv(facts.items(), output_file)
Add <output_file> parameter to the IEPY runner script.
## Code Before: import pprint from docopt import docopt from iepy.core import BootstrappedIEPipeline from iepy import db from iepy.human_validation import TerminalInterviewer from iepy.utils import load_facts_from_csv, save_labeled_evidence_to_csv if __name__ == '__main__': opts = docopt(__doc__, version=0.1) connection = db.connect(opts['<dbname>']) seed_facts = load_facts_from_csv(opts['<seeds_file>']) p = BootstrappedIEPipeline(connection, seed_facts) STOP = 'STOP' p.start() # blocking keep_looping = True while keep_looping: qs = list(p.questions_available()) if not qs: keep_looping = False term = TerminalInterviewer(qs, p.add_answer, [(STOP, 'Stop execution ASAP')]) result = term() if result == STOP: keep_looping = False p.force_process() facts = p.known_facts() # profit save_labeled_evidence_to_csv(facts.items(), "facts.csv") ## Instruction: Add <output_file> parameter to the IEPY runner script. ## Code After: import pprint from docopt import docopt from iepy.core import BootstrappedIEPipeline from iepy import db from iepy.human_validation import TerminalInterviewer from iepy.utils import load_facts_from_csv, save_labeled_evidence_to_csv if __name__ == '__main__': opts = docopt(__doc__, version=0.1) connection = db.connect(opts['<dbname>']) seed_facts = load_facts_from_csv(opts['<seeds_file>']) output_file = load_facts_from_csv(opts['<output_file>']) p = BootstrappedIEPipeline(connection, seed_facts) STOP = 'STOP' p.start() # blocking keep_looping = True while keep_looping: qs = list(p.questions_available()) if not qs: keep_looping = False term = TerminalInterviewer(qs, p.add_answer, [(STOP, 'Stop execution ASAP')]) result = term() if result == STOP: keep_looping = False p.force_process() facts = p.known_facts() # profit save_labeled_evidence_to_csv(facts.items(), output_file)
import pprint from docopt import docopt from iepy.core import BootstrappedIEPipeline from iepy import db from iepy.human_validation import TerminalInterviewer from iepy.utils import load_facts_from_csv, save_labeled_evidence_to_csv if __name__ == '__main__': opts = docopt(__doc__, version=0.1) connection = db.connect(opts['<dbname>']) seed_facts = load_facts_from_csv(opts['<seeds_file>']) + output_file = load_facts_from_csv(opts['<output_file>']) p = BootstrappedIEPipeline(connection, seed_facts) STOP = 'STOP' p.start() # blocking keep_looping = True while keep_looping: qs = list(p.questions_available()) if not qs: keep_looping = False term = TerminalInterviewer(qs, p.add_answer, [(STOP, 'Stop execution ASAP')]) result = term() if result == STOP: keep_looping = False p.force_process() facts = p.known_facts() # profit - save_labeled_evidence_to_csv(facts.items(), "facts.csv") ? ^ ^^^^^^^^^ + save_labeled_evidence_to_csv(facts.items(), output_file) ? ^^^^^^^ ^^^
74fa1bf956952df4cddd7420610475725a473831
userkit/__init__.py
userkit/__init__.py
from requestor import Requestor from users import UserManager from invites import InviteManager from emails import EmailManager from session import Session from widget import WidgetManager class UserKit(object): _rq = None api_version = 1.0 api_base_url = None api_key = None users = None invites = None emails = None widget = None def __init__(self, api_key, api_base_url=None, _requestor=None): if api_key is None: raise TypeError('api_key cannot be blank.') if api_base_url is None: api_base_url = 'https://api.userkit.io/v1' else: api_base_url += '/v1' self.api_key = api_key self.api_base_url = api_base_url # make the encapsulated objects self._rq = _requestor or Requestor(self.api_key, self.api_base_url) self.users = UserManager(self._rq) self.invites = InviteManager(self._rq) self.emails = EmailManager(self._rq) self.widget = WidgetManager(self._rq) @classmethod def version(cls): return cls.api_version
from requestor import Requestor from users import UserManager from invites import InviteManager from emails import EmailManager from session import Session from widget import WidgetManager from logs import LogsManager class UserKit(object): _rq = None api_version = 1.0 api_base_url = None api_key = None users = None invites = None emails = None widget = None def __init__(self, api_key, api_base_url=None, _requestor=None): if api_key is None: raise TypeError('api_key cannot be blank.') if api_base_url is None: api_base_url = 'https://api.userkit.io/v1' else: api_base_url += '/v1' self.api_key = api_key self.api_base_url = api_base_url # make the encapsulated objects self._rq = _requestor or Requestor(self.api_key, self.api_base_url) self.users = UserManager(self._rq) self.invites = InviteManager(self._rq) self.emails = EmailManager(self._rq) self.widget = WidgetManager(self._rq) self.logs = LogsManager(self._rq) @classmethod def version(cls): return cls.api_version
Add LogsManager to UserKit constructor
Add LogsManager to UserKit constructor
Python
mit
workpail/userkit-python
from requestor import Requestor from users import UserManager from invites import InviteManager from emails import EmailManager from session import Session from widget import WidgetManager + from logs import LogsManager class UserKit(object): _rq = None api_version = 1.0 api_base_url = None api_key = None users = None invites = None emails = None widget = None def __init__(self, api_key, api_base_url=None, _requestor=None): if api_key is None: raise TypeError('api_key cannot be blank.') if api_base_url is None: api_base_url = 'https://api.userkit.io/v1' else: api_base_url += '/v1' self.api_key = api_key self.api_base_url = api_base_url # make the encapsulated objects self._rq = _requestor or Requestor(self.api_key, self.api_base_url) self.users = UserManager(self._rq) self.invites = InviteManager(self._rq) self.emails = EmailManager(self._rq) self.widget = WidgetManager(self._rq) + self.logs = LogsManager(self._rq) @classmethod def version(cls): return cls.api_version
Add LogsManager to UserKit constructor
## Code Before: from requestor import Requestor from users import UserManager from invites import InviteManager from emails import EmailManager from session import Session from widget import WidgetManager class UserKit(object): _rq = None api_version = 1.0 api_base_url = None api_key = None users = None invites = None emails = None widget = None def __init__(self, api_key, api_base_url=None, _requestor=None): if api_key is None: raise TypeError('api_key cannot be blank.') if api_base_url is None: api_base_url = 'https://api.userkit.io/v1' else: api_base_url += '/v1' self.api_key = api_key self.api_base_url = api_base_url # make the encapsulated objects self._rq = _requestor or Requestor(self.api_key, self.api_base_url) self.users = UserManager(self._rq) self.invites = InviteManager(self._rq) self.emails = EmailManager(self._rq) self.widget = WidgetManager(self._rq) @classmethod def version(cls): return cls.api_version ## Instruction: Add LogsManager to UserKit constructor ## Code After: from requestor import Requestor from users import UserManager from invites import InviteManager from emails import EmailManager from session import Session from widget import WidgetManager from logs import LogsManager class UserKit(object): _rq = None api_version = 1.0 api_base_url = None api_key = None users = None invites = None emails = None widget = None def __init__(self, api_key, api_base_url=None, _requestor=None): if api_key is None: raise TypeError('api_key cannot be blank.') if api_base_url is None: api_base_url = 'https://api.userkit.io/v1' else: api_base_url += '/v1' self.api_key = api_key self.api_base_url = api_base_url # make the encapsulated objects self._rq = _requestor or Requestor(self.api_key, self.api_base_url) self.users = UserManager(self._rq) self.invites = InviteManager(self._rq) self.emails = EmailManager(self._rq) self.widget = WidgetManager(self._rq) self.logs = LogsManager(self._rq) @classmethod def version(cls): return cls.api_version
from requestor import Requestor from users import UserManager from invites import InviteManager from emails import EmailManager from session import Session from widget import WidgetManager + from logs import LogsManager class UserKit(object): _rq = None api_version = 1.0 api_base_url = None api_key = None users = None invites = None emails = None widget = None def __init__(self, api_key, api_base_url=None, _requestor=None): if api_key is None: raise TypeError('api_key cannot be blank.') if api_base_url is None: api_base_url = 'https://api.userkit.io/v1' else: api_base_url += '/v1' self.api_key = api_key self.api_base_url = api_base_url # make the encapsulated objects self._rq = _requestor or Requestor(self.api_key, self.api_base_url) self.users = UserManager(self._rq) self.invites = InviteManager(self._rq) self.emails = EmailManager(self._rq) self.widget = WidgetManager(self._rq) + self.logs = LogsManager(self._rq) @classmethod def version(cls): return cls.api_version
5f1fa23dd8e0850a9f0e6a054ec6738e5a174ff7
database/tables.py
database/tables.py
from sqlalchemy import MetaData, Table, Column, Integer, String METADATA = MetaData() Table("quote", METADATA, Column("qid", Integer, primary_key=True), Column("text", String, nullable=False))
from sqlalchemy import MetaData, Table, Column, Integer, String METADATA = MetaData() Table("quote", METADATA, Column("qid", Integer, primary_key=True), Column("text", String, nullable=False)) Table("moderator", METADATA, Column("stream", String, primary_key=True), Column("name", String, primary_key=True))
Add a table for caching moderators
Add a table for caching moderators
Python
mit
pyrige/pump19
from sqlalchemy import MetaData, Table, Column, Integer, String METADATA = MetaData() Table("quote", METADATA, Column("qid", Integer, primary_key=True), Column("text", String, nullable=False)) + Table("moderator", METADATA, + Column("stream", String, primary_key=True), + Column("name", String, primary_key=True)) +
Add a table for caching moderators
## Code Before: from sqlalchemy import MetaData, Table, Column, Integer, String METADATA = MetaData() Table("quote", METADATA, Column("qid", Integer, primary_key=True), Column("text", String, nullable=False)) ## Instruction: Add a table for caching moderators ## Code After: from sqlalchemy import MetaData, Table, Column, Integer, String METADATA = MetaData() Table("quote", METADATA, Column("qid", Integer, primary_key=True), Column("text", String, nullable=False)) Table("moderator", METADATA, Column("stream", String, primary_key=True), Column("name", String, primary_key=True))
from sqlalchemy import MetaData, Table, Column, Integer, String METADATA = MetaData() Table("quote", METADATA, Column("qid", Integer, primary_key=True), Column("text", String, nullable=False)) + + Table("moderator", METADATA, + Column("stream", String, primary_key=True), + Column("name", String, primary_key=True))
526b1028925a59957e805b29fc624dae318661ef
finances/models.py
finances/models.py
import os import hashlib import datetime import peewee database = peewee.Proxy() class BaseModel(peewee.Model): class Meta: database = database class User(BaseModel): id = peewee.IntegerField(primary_key=True) name = peewee.CharField(unique=True) password = peewee.CharField() salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace')) join_date = peewee.DateTimeField(default=datetime.datetime.now) class AuthError(Exception): pass class RegisterError(Exception): pass @classmethod def auth(cls, name, password): user = User.get(name=name) pass_with_salt = password + user.salt pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest() if not pass_hash == user.password: raise cls.AuthError('Wrong password!') return user @classmethod def register(cls, name, password): try: User.get(name=name) raise cls.RegisterError('User with that name does exist') except User.DoesNotExist: pass user = User(name=name) pass_with_salt = password + user.salt user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest() user.save()
import os import hashlib import datetime import peewee database = peewee.Proxy() class BaseModel(peewee.Model): class Meta: database = database class User(BaseModel): id = peewee.IntegerField(primary_key=True) name = peewee.CharField(unique=True) password = peewee.CharField() salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace')) join_date = peewee.DateTimeField(default=datetime.datetime.now) class AuthError(Exception): pass class RegisterError(Exception): pass @classmethod def auth(cls, name, password): user = User.get(name=name) pass_with_salt = password + user.salt pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest() if not pass_hash == user.password: raise cls.AuthError('Wrong password!') return user @classmethod def register(cls, name, password): try: User.get(name=name) raise cls.RegisterError('User with that name does exist') except User.DoesNotExist: pass user = User(name=name) pass_with_salt = password + user.salt user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest() user.save() def __repr__(self): return '<User %r>' % self.username
Add __repr__ for User model
Add __repr__ for User model
Python
mit
Afonasev/YourFinances
import os import hashlib import datetime import peewee database = peewee.Proxy() class BaseModel(peewee.Model): class Meta: database = database class User(BaseModel): id = peewee.IntegerField(primary_key=True) name = peewee.CharField(unique=True) password = peewee.CharField() salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace')) join_date = peewee.DateTimeField(default=datetime.datetime.now) class AuthError(Exception): pass class RegisterError(Exception): pass @classmethod def auth(cls, name, password): user = User.get(name=name) pass_with_salt = password + user.salt pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest() if not pass_hash == user.password: raise cls.AuthError('Wrong password!') return user @classmethod def register(cls, name, password): try: User.get(name=name) raise cls.RegisterError('User with that name does exist') except User.DoesNotExist: pass user = User(name=name) pass_with_salt = password + user.salt user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest() user.save() + def __repr__(self): + return '<User %r>' % self.username +
Add __repr__ for User model
## Code Before: import os import hashlib import datetime import peewee database = peewee.Proxy() class BaseModel(peewee.Model): class Meta: database = database class User(BaseModel): id = peewee.IntegerField(primary_key=True) name = peewee.CharField(unique=True) password = peewee.CharField() salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace')) join_date = peewee.DateTimeField(default=datetime.datetime.now) class AuthError(Exception): pass class RegisterError(Exception): pass @classmethod def auth(cls, name, password): user = User.get(name=name) pass_with_salt = password + user.salt pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest() if not pass_hash == user.password: raise cls.AuthError('Wrong password!') return user @classmethod def register(cls, name, password): try: User.get(name=name) raise cls.RegisterError('User with that name does exist') except User.DoesNotExist: pass user = User(name=name) pass_with_salt = password + user.salt user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest() user.save() ## Instruction: Add __repr__ for User model ## Code After: import os import hashlib import datetime import peewee database = peewee.Proxy() class BaseModel(peewee.Model): class Meta: database = database class User(BaseModel): id = peewee.IntegerField(primary_key=True) name = peewee.CharField(unique=True) password = peewee.CharField() salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace')) join_date = peewee.DateTimeField(default=datetime.datetime.now) class AuthError(Exception): pass class RegisterError(Exception): pass @classmethod def auth(cls, name, password): user = User.get(name=name) pass_with_salt = password + user.salt pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest() if not pass_hash == user.password: raise cls.AuthError('Wrong password!') return user @classmethod def register(cls, name, password): try: User.get(name=name) raise cls.RegisterError('User with that 
name does exist') except User.DoesNotExist: pass user = User(name=name) pass_with_salt = password + user.salt user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest() user.save() def __repr__(self): return '<User %r>' % self.username
import os import hashlib import datetime import peewee database = peewee.Proxy() class BaseModel(peewee.Model): class Meta: database = database class User(BaseModel): id = peewee.IntegerField(primary_key=True) name = peewee.CharField(unique=True) password = peewee.CharField() salt = peewee.CharField(default=os.urandom(10).decode('cp1251', errors='replace')) join_date = peewee.DateTimeField(default=datetime.datetime.now) class AuthError(Exception): pass class RegisterError(Exception): pass @classmethod def auth(cls, name, password): user = User.get(name=name) pass_with_salt = password + user.salt pass_hash = hashlib.sha224(pass_with_salt.encode()).hexdigest() if not pass_hash == user.password: raise cls.AuthError('Wrong password!') return user @classmethod def register(cls, name, password): try: User.get(name=name) raise cls.RegisterError('User with that name does exist') except User.DoesNotExist: pass user = User(name=name) pass_with_salt = password + user.salt user.password = hashlib.sha224(pass_with_salt.encode()).hexdigest() user.save() + + def __repr__(self): + return '<User %r>' % self.username
d2b4ec50442a00df85ef525cc82aca971b72eb86
erpnext/patches/v11_0/rename_field_max_days_allowed.py
erpnext/patches/v11_0/rename_field_max_days_allowed.py
import frappe from frappe.model.utils.rename_field import rename_field def execute(): frappe.reload_doc("hr", "doctype", "leave_type") frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""") rename_field("Leave Type", "max_days_allowed", "max_continuous_days_allowed")
import frappe def execute(): frappe.db.sql(""" UPDATE `tabLeave Type` SET max_days_allowed = '0' WHERE trim(coalesce(max_days_allowed, '')) = '' """) frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""")
Set null values to '0' before changing column type
[fix] Set null values to '0' before changing column type
Python
agpl-3.0
gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext,gsnbng/erpnext
import frappe - from frappe.model.utils.rename_field import rename_field def execute(): - frappe.reload_doc("hr", "doctype", "leave_type") + frappe.db.sql(""" + UPDATE `tabLeave Type` + SET max_days_allowed = '0' + WHERE trim(coalesce(max_days_allowed, '')) = '' + """) frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""") - rename_field("Leave Type", "max_days_allowed", "max_continuous_days_allowed") +
Set null values to '0' before changing column type
## Code Before: import frappe from frappe.model.utils.rename_field import rename_field def execute(): frappe.reload_doc("hr", "doctype", "leave_type") frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""") rename_field("Leave Type", "max_days_allowed", "max_continuous_days_allowed") ## Instruction: Set null values to '0' before changing column type ## Code After: import frappe def execute(): frappe.db.sql(""" UPDATE `tabLeave Type` SET max_days_allowed = '0' WHERE trim(coalesce(max_days_allowed, '')) = '' """) frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""")
import frappe - from frappe.model.utils.rename_field import rename_field def execute(): - frappe.reload_doc("hr", "doctype", "leave_type") + frappe.db.sql(""" + UPDATE `tabLeave Type` + SET max_days_allowed = '0' + WHERE trim(coalesce(max_days_allowed, '')) = '' + """) frappe.db.sql_ddl("""ALTER table `tabLeave Type` modify max_days_allowed int(8) NOT NULL""") - rename_field("Leave Type", "max_days_allowed", "max_continuous_days_allowed") +
6be3a40010b7256cb5b8fadfe4ef40b6c5691a06
jungle/session.py
jungle/session.py
import boto3 def create_session(profile_name): if not profile_name: return boto3 else: return boto3.Session(profile_name=profile_name)
import sys import boto3 import botocore import click def create_session(profile_name): if profile_name is None: return boto3 else: try: session = boto3.Session(profile_name=profile_name) return session except botocore.exceptions.ProfileNotFound as e: click.echo("Invalid profile name: {0}".format(profile_name, e), err=True) sys.exit(2)
Add error message when wrong AWS Profile Name is given
Add error message when wrong AWS Profile Name is given
Python
mit
achiku/jungle
+ import sys + import boto3 + import botocore + import click def create_session(profile_name): - if not profile_name: + if profile_name is None: return boto3 else: + try: - return boto3.Session(profile_name=profile_name) + session = boto3.Session(profile_name=profile_name) + return session + except botocore.exceptions.ProfileNotFound as e: + click.echo("Invalid profile name: {0}".format(profile_name, e), err=True) + sys.exit(2)
Add error message when wrong AWS Profile Name is given
## Code Before: import boto3 def create_session(profile_name): if not profile_name: return boto3 else: return boto3.Session(profile_name=profile_name) ## Instruction: Add error message when wrong AWS Profile Name is given ## Code After: import sys import boto3 import botocore import click def create_session(profile_name): if profile_name is None: return boto3 else: try: session = boto3.Session(profile_name=profile_name) return session except botocore.exceptions.ProfileNotFound as e: click.echo("Invalid profile name: {0}".format(profile_name, e), err=True) sys.exit(2)
+ import sys + import boto3 + import botocore + import click def create_session(profile_name): - if not profile_name: ? ---- + if profile_name is None: ? ++++++++ return boto3 else: + try: - return boto3.Session(profile_name=profile_name) ? ^ ^^^ + session = boto3.Session(profile_name=profile_name) ? ^^^^^ ^^^^ ++ + return session + except botocore.exceptions.ProfileNotFound as e: + click.echo("Invalid profile name: {0}".format(profile_name, e), err=True) + sys.exit(2)
6f4beaa772e9b8b9b1de6f6a92c0b7fd00bdd5af
mltsp/science_features/lomb_scargle_fast.py
mltsp/science_features/lomb_scargle_fast.py
import numpy as np import gatspy def lomb_scargle_fast_period(t, m, e): """Fits a simple sinuosidal model y(t) = A sin(2*pi*w*t + phi) + c and returns the estimated period 1/w. Much faster than fitting the full multi-frequency model used by `science_features.lomb_scargle`. """ opt_args = {'period_range': (2*t.max() / len(t), t.max()), 'quiet': True} model = gatspy.periodic.LombScargleFast(fit_period=True, optimizer_kwds=opt_args) model.fit(t, m, e) return model.best_period
import numpy as np import gatspy def lomb_scargle_fast_period(t, m, e): """Fits a simple sinuosidal model y(t) = A sin(2*pi*w*t + phi) + c and returns the estimated period 1/w. Much faster than fitting the full multi-frequency model used by `science_features.lomb_scargle`. """ dt = t.max() - t.min() opt_args = {'period_range': (2 * dt / len(t), dt), 'quiet': True} model = gatspy.periodic.LombScargleFast(fit_period=True, optimizer_kwds=opt_args) model.fit(t, m, e) return model.best_period
Use more sensible choice of period_range for `period_fast` feature
Use more sensible choice of period_range for `period_fast` feature Periods searched should depend only on the range of times, rather than the max time.
Python
bsd-3-clause
acrellin/mltsp,mltsp/mltsp,mltsp/mltsp,acrellin/mltsp,bnaul/mltsp,acrellin/mltsp,bnaul/mltsp,mltsp/mltsp,mltsp/mltsp,acrellin/mltsp,bnaul/mltsp,mltsp/mltsp,acrellin/mltsp,bnaul/mltsp,bnaul/mltsp,acrellin/mltsp,mltsp/mltsp,bnaul/mltsp
import numpy as np import gatspy def lomb_scargle_fast_period(t, m, e): """Fits a simple sinuosidal model y(t) = A sin(2*pi*w*t + phi) + c and returns the estimated period 1/w. Much faster than fitting the full multi-frequency model used by `science_features.lomb_scargle`. """ + dt = t.max() - t.min() - opt_args = {'period_range': (2*t.max() / len(t), t.max()), 'quiet': True} + opt_args = {'period_range': (2 * dt / len(t), dt), 'quiet': True} model = gatspy.periodic.LombScargleFast(fit_period=True, optimizer_kwds=opt_args) model.fit(t, m, e) return model.best_period
Use more sensible choice of period_range for `period_fast` feature
## Code Before: import numpy as np import gatspy def lomb_scargle_fast_period(t, m, e): """Fits a simple sinuosidal model y(t) = A sin(2*pi*w*t + phi) + c and returns the estimated period 1/w. Much faster than fitting the full multi-frequency model used by `science_features.lomb_scargle`. """ opt_args = {'period_range': (2*t.max() / len(t), t.max()), 'quiet': True} model = gatspy.periodic.LombScargleFast(fit_period=True, optimizer_kwds=opt_args) model.fit(t, m, e) return model.best_period ## Instruction: Use more sensible choice of period_range for `period_fast` feature ## Code After: import numpy as np import gatspy def lomb_scargle_fast_period(t, m, e): """Fits a simple sinuosidal model y(t) = A sin(2*pi*w*t + phi) + c and returns the estimated period 1/w. Much faster than fitting the full multi-frequency model used by `science_features.lomb_scargle`. """ dt = t.max() - t.min() opt_args = {'period_range': (2 * dt / len(t), dt), 'quiet': True} model = gatspy.periodic.LombScargleFast(fit_period=True, optimizer_kwds=opt_args) model.fit(t, m, e) return model.best_period
import numpy as np import gatspy def lomb_scargle_fast_period(t, m, e): """Fits a simple sinuosidal model y(t) = A sin(2*pi*w*t + phi) + c and returns the estimated period 1/w. Much faster than fitting the full multi-frequency model used by `science_features.lomb_scargle`. """ + dt = t.max() - t.min() - opt_args = {'period_range': (2*t.max() / len(t), t.max()), 'quiet': True} ? ------ ------ + opt_args = {'period_range': (2 * dt / len(t), dt), 'quiet': True} ? + ++ + model = gatspy.periodic.LombScargleFast(fit_period=True, optimizer_kwds=opt_args) model.fit(t, m, e) return model.best_period
9056746db7406e6640607210bea9e00a12c63926
ci/fix_paths.py
ci/fix_paths.py
import distutils.sysconfig from glob import glob import os from os.path import join as pjoin, basename from shutil import copy from sys import platform def main(): """ Copy HDF5 DLLs into installed h5py package """ # This is the function Tox also uses to locate site-packages (Apr 2019) sitepackagesdir = distutils.sysconfig.get_python_lib(plat_specific=True) print("site packages dir:", sitepackagesdir) hdf5_path = os.environ.get("HDF5_DIR") print("HDF5_DIR", hdf5_path) # HDF5_DIR is not set when we're testing wheels; these should already have # the necessary libraries bundled in. if platform.startswith('win') and hdf5_path is not None: for f in glob(pjoin(hdf5_path, 'lib/*.dll')): copy(f, pjoin(sitepackagesdir, 'h5py', basename(f))) print("Copied", f) zlib_root = os.environ.get("ZLIB_ROOT") if zlib_root: f = pjoin(zlib_root, 'bin_release', 'zlib.dll') copy(f, pjoin(sitepackagesdir, 'h5py', 'zlib.dll')) print("Copied", f) print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py'))) if __name__ == '__main__': main()
import distutils.sysconfig from glob import glob import os from os.path import join as pjoin, basename from shutil import copy from sys import platform def main(): """ Copy HDF5 DLLs into installed h5py package """ # This is the function Tox also uses to locate site-packages (Apr 2019) sitepackagesdir = distutils.sysconfig.get_python_lib(plat_specific=True) print("site packages dir:", sitepackagesdir) hdf5_path = os.environ.get("HDF5_DIR") print("HDF5_DIR", hdf5_path) # HDF5_DIR is not set when we're testing wheels; these should already have # the necessary libraries bundled in. if platform.startswith('win') and hdf5_path is not None: for f in glob(pjoin(hdf5_path, 'lib/*.dll')): copy(f, pjoin(sitepackagesdir, 'h5py', basename(f))) print("Copied", f) zlib_root = os.environ.get("ZLIB_ROOT") if zlib_root: f = pjoin(zlib_root, 'bin_release', 'zlib.dll') copy(f, pjoin(sitepackagesdir, 'h5py', 'zlib.dll')) print("Copied", f) print("In installed h5py:", sorted(os.listdir(pjoin(sitepackagesdir, 'h5py')))) if __name__ == '__main__': main()
Sort list of files inside h5py
Sort list of files inside h5py
Python
bsd-3-clause
h5py/h5py,h5py/h5py,h5py/h5py
import distutils.sysconfig from glob import glob import os from os.path import join as pjoin, basename from shutil import copy from sys import platform def main(): """ Copy HDF5 DLLs into installed h5py package """ # This is the function Tox also uses to locate site-packages (Apr 2019) sitepackagesdir = distutils.sysconfig.get_python_lib(plat_specific=True) print("site packages dir:", sitepackagesdir) hdf5_path = os.environ.get("HDF5_DIR") print("HDF5_DIR", hdf5_path) # HDF5_DIR is not set when we're testing wheels; these should already have # the necessary libraries bundled in. if platform.startswith('win') and hdf5_path is not None: for f in glob(pjoin(hdf5_path, 'lib/*.dll')): copy(f, pjoin(sitepackagesdir, 'h5py', basename(f))) print("Copied", f) zlib_root = os.environ.get("ZLIB_ROOT") if zlib_root: f = pjoin(zlib_root, 'bin_release', 'zlib.dll') copy(f, pjoin(sitepackagesdir, 'h5py', 'zlib.dll')) print("Copied", f) - print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py'))) + print("In installed h5py:", sorted(os.listdir(pjoin(sitepackagesdir, 'h5py')))) if __name__ == '__main__': main()
Sort list of files inside h5py
## Code Before: import distutils.sysconfig from glob import glob import os from os.path import join as pjoin, basename from shutil import copy from sys import platform def main(): """ Copy HDF5 DLLs into installed h5py package """ # This is the function Tox also uses to locate site-packages (Apr 2019) sitepackagesdir = distutils.sysconfig.get_python_lib(plat_specific=True) print("site packages dir:", sitepackagesdir) hdf5_path = os.environ.get("HDF5_DIR") print("HDF5_DIR", hdf5_path) # HDF5_DIR is not set when we're testing wheels; these should already have # the necessary libraries bundled in. if platform.startswith('win') and hdf5_path is not None: for f in glob(pjoin(hdf5_path, 'lib/*.dll')): copy(f, pjoin(sitepackagesdir, 'h5py', basename(f))) print("Copied", f) zlib_root = os.environ.get("ZLIB_ROOT") if zlib_root: f = pjoin(zlib_root, 'bin_release', 'zlib.dll') copy(f, pjoin(sitepackagesdir, 'h5py', 'zlib.dll')) print("Copied", f) print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py'))) if __name__ == '__main__': main() ## Instruction: Sort list of files inside h5py ## Code After: import distutils.sysconfig from glob import glob import os from os.path import join as pjoin, basename from shutil import copy from sys import platform def main(): """ Copy HDF5 DLLs into installed h5py package """ # This is the function Tox also uses to locate site-packages (Apr 2019) sitepackagesdir = distutils.sysconfig.get_python_lib(plat_specific=True) print("site packages dir:", sitepackagesdir) hdf5_path = os.environ.get("HDF5_DIR") print("HDF5_DIR", hdf5_path) # HDF5_DIR is not set when we're testing wheels; these should already have # the necessary libraries bundled in. 
if platform.startswith('win') and hdf5_path is not None: for f in glob(pjoin(hdf5_path, 'lib/*.dll')): copy(f, pjoin(sitepackagesdir, 'h5py', basename(f))) print("Copied", f) zlib_root = os.environ.get("ZLIB_ROOT") if zlib_root: f = pjoin(zlib_root, 'bin_release', 'zlib.dll') copy(f, pjoin(sitepackagesdir, 'h5py', 'zlib.dll')) print("Copied", f) print("In installed h5py:", sorted(os.listdir(pjoin(sitepackagesdir, 'h5py')))) if __name__ == '__main__': main()
import distutils.sysconfig from glob import glob import os from os.path import join as pjoin, basename from shutil import copy from sys import platform def main(): """ Copy HDF5 DLLs into installed h5py package """ # This is the function Tox also uses to locate site-packages (Apr 2019) sitepackagesdir = distutils.sysconfig.get_python_lib(plat_specific=True) print("site packages dir:", sitepackagesdir) hdf5_path = os.environ.get("HDF5_DIR") print("HDF5_DIR", hdf5_path) # HDF5_DIR is not set when we're testing wheels; these should already have # the necessary libraries bundled in. if platform.startswith('win') and hdf5_path is not None: for f in glob(pjoin(hdf5_path, 'lib/*.dll')): copy(f, pjoin(sitepackagesdir, 'h5py', basename(f))) print("Copied", f) zlib_root = os.environ.get("ZLIB_ROOT") if zlib_root: f = pjoin(zlib_root, 'bin_release', 'zlib.dll') copy(f, pjoin(sitepackagesdir, 'h5py', 'zlib.dll')) print("Copied", f) - print("In installed h5py:", os.listdir(pjoin(sitepackagesdir, 'h5py'))) + print("In installed h5py:", sorted(os.listdir(pjoin(sitepackagesdir, 'h5py')))) ? ++++ +++++++ + if __name__ == '__main__': main()
50072e2e2fa2f650dd1899b14aaaecb2dfe909ef
tests/test_plugins.py
tests/test_plugins.py
import os from sigal.gallery import Gallery from sigal import init_plugins CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] init_plugins(settings) gal = Gallery(settings) gal.build() out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file
import blinker import os from sigal.gallery import Gallery from sigal import init_plugins, signals CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] try: init_plugins(settings) gal = Gallery(settings) gal.build() finally: # Reset plugins for name in dir(signals): if not name.startswith('_'): try: sig = getattr(signals, name) if isinstance(sig, blinker.Signal): sig.receivers.clear() except Exception: pass out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file
Clear signals after testing plugins
Clear signals after testing plugins
Python
mit
xouillet/sigal,t-animal/sigal,xouillet/sigal,saimn/sigal,xouillet/sigal,jasuarez/sigal,t-animal/sigal,t-animal/sigal,jasuarez/sigal,saimn/sigal,jasuarez/sigal,saimn/sigal
+ import blinker import os from sigal.gallery import Gallery - from sigal import init_plugins + from sigal import init_plugins, signals CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] + try: - init_plugins(settings) + init_plugins(settings) - gal = Gallery(settings) + gal = Gallery(settings) - gal.build() + gal.build() + finally: + # Reset plugins + for name in dir(signals): + if not name.startswith('_'): + try: + sig = getattr(signals, name) + if isinstance(sig, blinker.Signal): + sig.receivers.clear() + except Exception: + pass out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file
Clear signals after testing plugins
## Code Before: import os from sigal.gallery import Gallery from sigal import init_plugins CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] init_plugins(settings) gal = Gallery(settings) gal.build() out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file ## Instruction: Clear signals after testing plugins ## Code After: import blinker import os from sigal.gallery import Gallery from sigal import init_plugins, signals CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] try: init_plugins(settings) gal = Gallery(settings) gal.build() finally: # Reset plugins for name in dir(signals): if not name.startswith('_'): try: sig = getattr(signals, name) if isinstance(sig, blinker.Signal): sig.receivers.clear() except Exception: pass out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file
+ import blinker import os from sigal.gallery import Gallery - from sigal import init_plugins + from sigal import init_plugins, signals ? +++++++++ CURRENT_DIR = os.path.dirname(__file__) def test_plugins(settings, tmpdir): settings['destination'] = str(tmpdir) if "sigal.plugins.nomedia" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.nomedia"] if "sigal.plugins.media_page" not in settings["plugins"]: settings['plugins'] += ["sigal.plugins.media_page"] + try: - init_plugins(settings) + init_plugins(settings) ? ++++ - gal = Gallery(settings) + gal = Gallery(settings) ? ++++ - gal.build() + gal.build() ? ++++ + finally: + # Reset plugins + for name in dir(signals): + if not name.startswith('_'): + try: + sig = getattr(signals, name) + if isinstance(sig, blinker.Signal): + sig.receivers.clear() + except Exception: + pass out_html = os.path.join(settings['destination'], 'dir2', 'exo20101028-b-full.jpg.html') assert os.path.isfile(out_html) for path, dirs, files in os.walk(os.path.join(str(tmpdir), "nomedia")): assert "ignore" not in path for file in files: assert "ignore" not in file
f7dd603d4e24134affda6430736838ecaaab9938
jungle/cli.py
jungle/cli.py
import click from . import __version__ class JungleCLI(click.MultiCommand): """Jangle CLI main class""" def list_commands(self, ctx): """return available modules""" return ['ec2', 'elb', 'emr', 'asg'] def get_command(self, ctx, name): """get command""" mod = __import__('jungle.' + name, None, None, ['cli']) return mod.cli cli = JungleCLI(help="aws operation cli (v{})".format(__version__)) if __name__ == '__main__': cli()
import click from . import __version__ class JungleCLI(click.MultiCommand): """Jangle CLI main class""" def list_commands(self, ctx): """return available modules""" return ['ec2', 'elb', 'emr', 'asg'] def get_command(self, ctx, name): """get command""" try: mod = __import__('jungle.' + name, None, None, ['cli']) return mod.cli except ImportError: pass cli = JungleCLI(help="aws operation cli (v{0})".format(__version__)) if __name__ == '__main__': cli()
Fix unintended ImportError for wrong subcommnad
Fix unintended ImportError for wrong subcommnad
Python
mit
achiku/jungle
import click from . import __version__ class JungleCLI(click.MultiCommand): """Jangle CLI main class""" def list_commands(self, ctx): """return available modules""" return ['ec2', 'elb', 'emr', 'asg'] def get_command(self, ctx, name): """get command""" + try: - mod = __import__('jungle.' + name, None, None, ['cli']) + mod = __import__('jungle.' + name, None, None, ['cli']) - return mod.cli + return mod.cli + except ImportError: + pass - cli = JungleCLI(help="aws operation cli (v{})".format(__version__)) + cli = JungleCLI(help="aws operation cli (v{0})".format(__version__)) if __name__ == '__main__': cli()
Fix unintended ImportError for wrong subcommnad
## Code Before: import click from . import __version__ class JungleCLI(click.MultiCommand): """Jangle CLI main class""" def list_commands(self, ctx): """return available modules""" return ['ec2', 'elb', 'emr', 'asg'] def get_command(self, ctx, name): """get command""" mod = __import__('jungle.' + name, None, None, ['cli']) return mod.cli cli = JungleCLI(help="aws operation cli (v{})".format(__version__)) if __name__ == '__main__': cli() ## Instruction: Fix unintended ImportError for wrong subcommnad ## Code After: import click from . import __version__ class JungleCLI(click.MultiCommand): """Jangle CLI main class""" def list_commands(self, ctx): """return available modules""" return ['ec2', 'elb', 'emr', 'asg'] def get_command(self, ctx, name): """get command""" try: mod = __import__('jungle.' + name, None, None, ['cli']) return mod.cli except ImportError: pass cli = JungleCLI(help="aws operation cli (v{0})".format(__version__)) if __name__ == '__main__': cli()
import click from . import __version__ class JungleCLI(click.MultiCommand): """Jangle CLI main class""" def list_commands(self, ctx): """return available modules""" return ['ec2', 'elb', 'emr', 'asg'] def get_command(self, ctx, name): """get command""" + try: - mod = __import__('jungle.' + name, None, None, ['cli']) + mod = __import__('jungle.' + name, None, None, ['cli']) ? ++++ - return mod.cli + return mod.cli ? ++++ + except ImportError: + pass - cli = JungleCLI(help="aws operation cli (v{})".format(__version__)) + cli = JungleCLI(help="aws operation cli (v{0})".format(__version__)) ? + if __name__ == '__main__': cli()
313aafc11f76888614e2a0523e9e858e71765eaa
tests/test_wc.py
tests/test_wc.py
"""Subversion ra library tests.""" from bzrlib.tests import TestCase import ra class VersionTest(TestCase): def test_version_length(self): self.assertEquals(4, len(ra.version()))
"""Subversion ra library tests.""" from bzrlib.tests import TestCase import wc class VersionTest(TestCase): def test_version_length(self): self.assertEquals(4, len(wc.version())) class WorkingCopyTests(TestCase): def test_get_adm_dir(self): self.assertEquals(".svn", wc.get_adm_dir()) def test_is_normal_prop(self): self.assertTrue(wc.is_normal_prop("svn:ignore")) def test_is_entry_prop(self): self.assertTrue(wc.is_entry_prop("svn:entry:foo")) def test_is_wc_prop(self): self.assertTrue(wc.is_wc_prop("svn:wc:foo")) def test_get_default_ignores(self): self.assertIsInstance(wc.get_default_ignores({}), list)
Add some more tests for wc module.
Add some more tests for wc module.
Python
lgpl-2.1
jelmer/subvertpy,jelmer/subvertpy
"""Subversion ra library tests.""" from bzrlib.tests import TestCase - import ra + import wc class VersionTest(TestCase): def test_version_length(self): - self.assertEquals(4, len(ra.version())) + self.assertEquals(4, len(wc.version())) + class WorkingCopyTests(TestCase): + def test_get_adm_dir(self): + self.assertEquals(".svn", wc.get_adm_dir()) + def test_is_normal_prop(self): + self.assertTrue(wc.is_normal_prop("svn:ignore")) + + def test_is_entry_prop(self): + self.assertTrue(wc.is_entry_prop("svn:entry:foo")) + + def test_is_wc_prop(self): + self.assertTrue(wc.is_wc_prop("svn:wc:foo")) + + def test_get_default_ignores(self): + self.assertIsInstance(wc.get_default_ignores({}), list) +
Add some more tests for wc module.
## Code Before: """Subversion ra library tests.""" from bzrlib.tests import TestCase import ra class VersionTest(TestCase): def test_version_length(self): self.assertEquals(4, len(ra.version())) ## Instruction: Add some more tests for wc module. ## Code After: """Subversion ra library tests.""" from bzrlib.tests import TestCase import wc class VersionTest(TestCase): def test_version_length(self): self.assertEquals(4, len(wc.version())) class WorkingCopyTests(TestCase): def test_get_adm_dir(self): self.assertEquals(".svn", wc.get_adm_dir()) def test_is_normal_prop(self): self.assertTrue(wc.is_normal_prop("svn:ignore")) def test_is_entry_prop(self): self.assertTrue(wc.is_entry_prop("svn:entry:foo")) def test_is_wc_prop(self): self.assertTrue(wc.is_wc_prop("svn:wc:foo")) def test_get_default_ignores(self): self.assertIsInstance(wc.get_default_ignores({}), list)
"""Subversion ra library tests.""" from bzrlib.tests import TestCase - import ra ? ^^ + import wc ? ^^ class VersionTest(TestCase): def test_version_length(self): - self.assertEquals(4, len(ra.version())) ? ^^ + self.assertEquals(4, len(wc.version())) ? ^^ + class WorkingCopyTests(TestCase): + def test_get_adm_dir(self): + self.assertEquals(".svn", wc.get_adm_dir()) + + def test_is_normal_prop(self): + self.assertTrue(wc.is_normal_prop("svn:ignore")) + + def test_is_entry_prop(self): + self.assertTrue(wc.is_entry_prop("svn:entry:foo")) + + def test_is_wc_prop(self): + self.assertTrue(wc.is_wc_prop("svn:wc:foo")) + + def test_get_default_ignores(self): + self.assertIsInstance(wc.get_default_ignores({}), list)
be1e23f068fbc34587caa0a796e259e42ed6f7c6
utils.py
utils.py
import re import textwrap import html2text text_maker = html2text.HTML2Text() text_maker.body_width = 0 def strip_html_tags(text): return re.sub('<[^<]+?>', '', text) def html_to_md(string, strip_html=True, markdown=False): if strip_html: string = strip_html_tags(string) if markdown: string = text_maker.handle(string) return string def get_formatted_book_data(book_data): template = textwrap.dedent("""\ *Title:* {0} by {1} *Rating:* {2} by {3} users *Description:* {4} *Link*: [click me]({5}) Tip: {6}""") title = book_data['title'] authors = book_data['authors'] average_rating = book_data['average_rating'] ratings_count = book_data['ratings_count'] description = html_to_md(book_data.get('description', '')) url = book_data['url'] tip = 'Use author name also for better search results' template = template.format(title, authors, average_rating, ratings_count, description, url, tip) return template
import re import textwrap import html2text text_maker = html2text.HTML2Text() text_maker.body_width = 0 def strip_html_tags(text): return re.sub('<[^<]+?>', '', text) def html_to_md(string, strip_html=True, markdown=False): if not string: return 'No Description Found' if strip_html: string = strip_html_tags(string) if markdown: string = text_maker.handle(string) return string def get_formatted_book_data(book_data): template = textwrap.dedent("""\ *Title:* {0} by {1} *Rating:* {2} by {3} users *Description:* {4} *Link*: [click me]({5}) Tip: {6}""") title = book_data['title'] authors = book_data['authors'] average_rating = book_data['average_rating'] ratings_count = book_data['ratings_count'] description = html_to_md(book_data.get('description', '')) url = book_data['url'] tip = 'Use author name also for better search results' template = template.format(title, authors, average_rating, ratings_count, description, url, tip) return template
Handle Nonetype values in `html_to_md`
Handle Nonetype values in `html_to_md`
Python
mit
avinassh/Laozi,avinassh/Laozi
import re import textwrap import html2text text_maker = html2text.HTML2Text() text_maker.body_width = 0 def strip_html_tags(text): return re.sub('<[^<]+?>', '', text) def html_to_md(string, strip_html=True, markdown=False): + if not string: + return 'No Description Found' if strip_html: string = strip_html_tags(string) if markdown: string = text_maker.handle(string) return string def get_formatted_book_data(book_data): template = textwrap.dedent("""\ *Title:* {0} by {1} *Rating:* {2} by {3} users *Description:* {4} *Link*: [click me]({5}) Tip: {6}""") title = book_data['title'] authors = book_data['authors'] average_rating = book_data['average_rating'] ratings_count = book_data['ratings_count'] description = html_to_md(book_data.get('description', '')) url = book_data['url'] tip = 'Use author name also for better search results' template = template.format(title, authors, average_rating, ratings_count, description, url, tip) return template
Handle Nonetype values in `html_to_md`
## Code Before: import re import textwrap import html2text text_maker = html2text.HTML2Text() text_maker.body_width = 0 def strip_html_tags(text): return re.sub('<[^<]+?>', '', text) def html_to_md(string, strip_html=True, markdown=False): if strip_html: string = strip_html_tags(string) if markdown: string = text_maker.handle(string) return string def get_formatted_book_data(book_data): template = textwrap.dedent("""\ *Title:* {0} by {1} *Rating:* {2} by {3} users *Description:* {4} *Link*: [click me]({5}) Tip: {6}""") title = book_data['title'] authors = book_data['authors'] average_rating = book_data['average_rating'] ratings_count = book_data['ratings_count'] description = html_to_md(book_data.get('description', '')) url = book_data['url'] tip = 'Use author name also for better search results' template = template.format(title, authors, average_rating, ratings_count, description, url, tip) return template ## Instruction: Handle Nonetype values in `html_to_md` ## Code After: import re import textwrap import html2text text_maker = html2text.HTML2Text() text_maker.body_width = 0 def strip_html_tags(text): return re.sub('<[^<]+?>', '', text) def html_to_md(string, strip_html=True, markdown=False): if not string: return 'No Description Found' if strip_html: string = strip_html_tags(string) if markdown: string = text_maker.handle(string) return string def get_formatted_book_data(book_data): template = textwrap.dedent("""\ *Title:* {0} by {1} *Rating:* {2} by {3} users *Description:* {4} *Link*: [click me]({5}) Tip: {6}""") title = book_data['title'] authors = book_data['authors'] average_rating = book_data['average_rating'] ratings_count = book_data['ratings_count'] description = html_to_md(book_data.get('description', '')) url = book_data['url'] tip = 'Use author name also for better search results' template = template.format(title, authors, average_rating, ratings_count, description, url, tip) return template
import re import textwrap import html2text text_maker = html2text.HTML2Text() text_maker.body_width = 0 def strip_html_tags(text): return re.sub('<[^<]+?>', '', text) def html_to_md(string, strip_html=True, markdown=False): + if not string: + return 'No Description Found' if strip_html: string = strip_html_tags(string) if markdown: string = text_maker.handle(string) return string def get_formatted_book_data(book_data): template = textwrap.dedent("""\ *Title:* {0} by {1} *Rating:* {2} by {3} users *Description:* {4} *Link*: [click me]({5}) Tip: {6}""") title = book_data['title'] authors = book_data['authors'] average_rating = book_data['average_rating'] ratings_count = book_data['ratings_count'] description = html_to_md(book_data.get('description', '')) url = book_data['url'] tip = 'Use author name also for better search results' template = template.format(title, authors, average_rating, ratings_count, description, url, tip) return template
fd4dc4bdd32283b67577630c38624d3df705efd3
mathphys/functions.py
mathphys/functions.py
"""Useful functions.""" import numpy as _np def polyfit(x, y, monomials, algorithm='lstsq'): """Implement Custom polyfit.""" X = _np.zeros((len(x), len(monomials))) N = _np.zeros((len(x), len(monomials))) for i in range(X.shape[1]): X[:, i] = x N[:, i] = monomials[i] XN = X ** N y_ = _np.zeros((len(y), 1)) y_[:, 0] = y XNt = _np.transpose(XN) b = _np.dot(XNt, y_) X = _np.dot(XNt, XN) if algorithm is 'lstsq': r = _np.linalg.lstsq(X, b) coeffs = r[0][:, 0] else: r = _np.linalg.solve(X, b) coeffs = r[:, 0] # finds maximum diff and its base value y_fitted = _np.dot(XN, coeffs) y_diff = abs(y_fitted - y_[:, 0]) max_error = max(y_diff) idx = [i for i, value in enumerate(y_diff) if value == max_error] base_value = y_[idx[0], 0] return (coeffs, (max_error, base_value))
"""Useful functions.""" import numpy as _np def polyfit(x, y, monomials): """Implement Custom polyfit.""" coef = _np.polynomial.polynomial.polyfit(x, y, deg=monomials) # finds maximum diff and its base value y_fitted = _np.polynomial.polynomial.polyval(x, coef) y_diff = abs(y_fitted - y) idx = _np.argmax(y_diff) coeffs = coef[monomials] return (coeffs, (y_diff[idx], y[idx]))
Change implementaton of polyfit method.
API: Change implementaton of polyfit method. Use new numpy.polynomial.polynomial.polyfit instead of implementing leastsquares by hand. This method is supposed to be more robust to numerical errors. With this change, the keyword argument algorithm was removed.
Python
mit
lnls-fac/mathphys
"""Useful functions.""" import numpy as _np - def polyfit(x, y, monomials, algorithm='lstsq'): + def polyfit(x, y, monomials): """Implement Custom polyfit.""" + coef = _np.polynomial.polynomial.polyfit(x, y, deg=monomials) - X = _np.zeros((len(x), len(monomials))) - N = _np.zeros((len(x), len(monomials))) - for i in range(X.shape[1]): - X[:, i] = x - N[:, i] = monomials[i] - XN = X ** N - y_ = _np.zeros((len(y), 1)) - y_[:, 0] = y - XNt = _np.transpose(XN) - b = _np.dot(XNt, y_) - X = _np.dot(XNt, XN) - - if algorithm is 'lstsq': - r = _np.linalg.lstsq(X, b) - coeffs = r[0][:, 0] - else: - r = _np.linalg.solve(X, b) - coeffs = r[:, 0] # finds maximum diff and its base value - y_fitted = _np.dot(XN, coeffs) + y_fitted = _np.polynomial.polynomial.polyval(x, coef) - y_diff = abs(y_fitted - y_[:, 0]) + y_diff = abs(y_fitted - y) + idx = _np.argmax(y_diff) - max_error = max(y_diff) - idx = [i for i, value in enumerate(y_diff) if value == max_error] - base_value = y_[idx[0], 0] - return (coeffs, (max_error, base_value)) + coeffs = coef[monomials] + return (coeffs, (y_diff[idx], y[idx]))
Change implementaton of polyfit method.
## Code Before: """Useful functions.""" import numpy as _np def polyfit(x, y, monomials, algorithm='lstsq'): """Implement Custom polyfit.""" X = _np.zeros((len(x), len(monomials))) N = _np.zeros((len(x), len(monomials))) for i in range(X.shape[1]): X[:, i] = x N[:, i] = monomials[i] XN = X ** N y_ = _np.zeros((len(y), 1)) y_[:, 0] = y XNt = _np.transpose(XN) b = _np.dot(XNt, y_) X = _np.dot(XNt, XN) if algorithm is 'lstsq': r = _np.linalg.lstsq(X, b) coeffs = r[0][:, 0] else: r = _np.linalg.solve(X, b) coeffs = r[:, 0] # finds maximum diff and its base value y_fitted = _np.dot(XN, coeffs) y_diff = abs(y_fitted - y_[:, 0]) max_error = max(y_diff) idx = [i for i, value in enumerate(y_diff) if value == max_error] base_value = y_[idx[0], 0] return (coeffs, (max_error, base_value)) ## Instruction: Change implementaton of polyfit method. ## Code After: """Useful functions.""" import numpy as _np def polyfit(x, y, monomials): """Implement Custom polyfit.""" coef = _np.polynomial.polynomial.polyfit(x, y, deg=monomials) # finds maximum diff and its base value y_fitted = _np.polynomial.polynomial.polyval(x, coef) y_diff = abs(y_fitted - y) idx = _np.argmax(y_diff) coeffs = coef[monomials] return (coeffs, (y_diff[idx], y[idx]))
"""Useful functions.""" import numpy as _np - def polyfit(x, y, monomials, algorithm='lstsq'): ? ------------------- + def polyfit(x, y, monomials): """Implement Custom polyfit.""" + coef = _np.polynomial.polynomial.polyfit(x, y, deg=monomials) - X = _np.zeros((len(x), len(monomials))) - N = _np.zeros((len(x), len(monomials))) - for i in range(X.shape[1]): - X[:, i] = x - N[:, i] = monomials[i] - XN = X ** N - y_ = _np.zeros((len(y), 1)) - y_[:, 0] = y - XNt = _np.transpose(XN) - b = _np.dot(XNt, y_) - X = _np.dot(XNt, XN) - - if algorithm is 'lstsq': - r = _np.linalg.lstsq(X, b) - coeffs = r[0][:, 0] - else: - r = _np.linalg.solve(X, b) - coeffs = r[:, 0] # finds maximum diff and its base value - y_fitted = _np.dot(XN, coeffs) + y_fitted = _np.polynomial.polynomial.polyval(x, coef) - y_diff = abs(y_fitted - y_[:, 0]) ? ------- + y_diff = abs(y_fitted - y) + idx = _np.argmax(y_diff) - max_error = max(y_diff) - idx = [i for i, value in enumerate(y_diff) if value == max_error] - base_value = y_[idx[0], 0] - return (coeffs, (max_error, base_value)) + coeffs = coef[monomials] + return (coeffs, (y_diff[idx], y[idx]))
c08013dc2fc32582e8636d84be3e2f68dafe11a0
controller.py
controller.py
"""NM-Controller accounts are used to provide secure access to the XMLRPC API. They are normal Unix accounts with a shell that tunnels XMLRPC requests to the API server.""" import accounts import logger import tools class Controller(accounts.Account): SHELL = '/usr/bin/forward_api_calls' # tunneling shell TYPE = 'controller' @staticmethod def create(name, vref = None): add_shell(Delegate.SHELL) logger.log_call('/usr/sbin/useradd', '-p', '*', '-s', Delegate.SHELL, name) @staticmethod def destroy(name): logger.log_call('/usr/sbin/userdel', '-r', name) def add_shell(shell): """Add <shell> to /etc/shells if it's not already there.""" etc_shells = open('/etc/shells') valid_shells = etc_shells.read().split() etc_shells.close() if shell not in valid_shells: etc_shells = open('/etc/shells', 'a') print >>etc_shells, shell etc_shells.close()
"""NM-Controller accounts are used to provide secure access to the XMLRPC API. They are normal Unix accounts with a shell that tunnels XMLRPC requests to the API server.""" import accounts import logger import tools class Controller(accounts.Account): SHELL = '/usr/bin/forward_api_calls' # tunneling shell TYPE = 'controller' @staticmethod def create(name, vref = None): add_shell(Controller.SHELL) logger.log_call('/usr/sbin/useradd', '-p', '*', '-s', Controller.SHELL, name) @staticmethod def destroy(name): logger.log_call('/usr/sbin/userdel', '-r', name) def add_shell(shell): """Add <shell> to /etc/shells if it's not already there.""" etc_shells = open('/etc/shells') valid_shells = etc_shells.read().split() etc_shells.close() if shell not in valid_shells: etc_shells = open('/etc/shells', 'a') print >>etc_shells, shell etc_shells.close()
Change to Controller from Delegate shell
Change to Controller from Delegate shell
Python
bsd-3-clause
dreibh/planetlab-lxc-nodemanager,planetlab/NodeManager,planetlab/NodeManager,planetlab/NodeManager,dreibh/planetlab-lxc-nodemanager,planetlab/NodeManager,dreibh/planetlab-lxc-nodemanager
"""NM-Controller accounts are used to provide secure access to the XMLRPC API. They are normal Unix accounts with a shell that tunnels XMLRPC requests to the API server.""" import accounts import logger import tools class Controller(accounts.Account): SHELL = '/usr/bin/forward_api_calls' # tunneling shell TYPE = 'controller' @staticmethod def create(name, vref = None): - add_shell(Delegate.SHELL) + add_shell(Controller.SHELL) - logger.log_call('/usr/sbin/useradd', '-p', '*', '-s', Delegate.SHELL, name) + logger.log_call('/usr/sbin/useradd', '-p', '*', '-s', Controller.SHELL, name) @staticmethod def destroy(name): logger.log_call('/usr/sbin/userdel', '-r', name) def add_shell(shell): """Add <shell> to /etc/shells if it's not already there.""" etc_shells = open('/etc/shells') valid_shells = etc_shells.read().split() etc_shells.close() if shell not in valid_shells: etc_shells = open('/etc/shells', 'a') print >>etc_shells, shell etc_shells.close()
Change to Controller from Delegate shell
## Code Before: """NM-Controller accounts are used to provide secure access to the XMLRPC API. They are normal Unix accounts with a shell that tunnels XMLRPC requests to the API server.""" import accounts import logger import tools class Controller(accounts.Account): SHELL = '/usr/bin/forward_api_calls' # tunneling shell TYPE = 'controller' @staticmethod def create(name, vref = None): add_shell(Delegate.SHELL) logger.log_call('/usr/sbin/useradd', '-p', '*', '-s', Delegate.SHELL, name) @staticmethod def destroy(name): logger.log_call('/usr/sbin/userdel', '-r', name) def add_shell(shell): """Add <shell> to /etc/shells if it's not already there.""" etc_shells = open('/etc/shells') valid_shells = etc_shells.read().split() etc_shells.close() if shell not in valid_shells: etc_shells = open('/etc/shells', 'a') print >>etc_shells, shell etc_shells.close() ## Instruction: Change to Controller from Delegate shell ## Code After: """NM-Controller accounts are used to provide secure access to the XMLRPC API. They are normal Unix accounts with a shell that tunnels XMLRPC requests to the API server.""" import accounts import logger import tools class Controller(accounts.Account): SHELL = '/usr/bin/forward_api_calls' # tunneling shell TYPE = 'controller' @staticmethod def create(name, vref = None): add_shell(Controller.SHELL) logger.log_call('/usr/sbin/useradd', '-p', '*', '-s', Controller.SHELL, name) @staticmethod def destroy(name): logger.log_call('/usr/sbin/userdel', '-r', name) def add_shell(shell): """Add <shell> to /etc/shells if it's not already there.""" etc_shells = open('/etc/shells') valid_shells = etc_shells.read().split() etc_shells.close() if shell not in valid_shells: etc_shells = open('/etc/shells', 'a') print >>etc_shells, shell etc_shells.close()
"""NM-Controller accounts are used to provide secure access to the XMLRPC API. They are normal Unix accounts with a shell that tunnels XMLRPC requests to the API server.""" import accounts import logger import tools class Controller(accounts.Account): SHELL = '/usr/bin/forward_api_calls' # tunneling shell TYPE = 'controller' @staticmethod def create(name, vref = None): - add_shell(Delegate.SHELL) ? ^^ ^^^^ + add_shell(Controller.SHELL) ? ^^^^^^^ ^ - logger.log_call('/usr/sbin/useradd', '-p', '*', '-s', Delegate.SHELL, name) ? ^^ ^^^^ + logger.log_call('/usr/sbin/useradd', '-p', '*', '-s', Controller.SHELL, name) ? ^^^^^^^ ^ @staticmethod def destroy(name): logger.log_call('/usr/sbin/userdel', '-r', name) def add_shell(shell): """Add <shell> to /etc/shells if it's not already there.""" etc_shells = open('/etc/shells') valid_shells = etc_shells.read().split() etc_shells.close() if shell not in valid_shells: etc_shells = open('/etc/shells', 'a') print >>etc_shells, shell etc_shells.close()
65c712464813ba41b564aa3e0116e60805f6681e
storyboard/api/v1/system_info.py
storyboard/api/v1/system_info.py
from oslo_config import cfg from pbr.version import VersionInfo from pecan import rest from pecan.secure import secure from storyboard.api.auth import authorization_checks as checks from storyboard.api.v1 import wmodels import wsmeext.pecan as wsme_pecan CONF = cfg.CONF class SystemInfoController(rest.RestController): """REST controller for sysinfo endpoint. Provides Get methods for System information. """ @secure(checks.guest) @wsme_pecan.wsexpose(wmodels.SystemInfo) def get(self): """Retrieve the Storyboard system information. """ sb_ver = VersionInfo('storyboard') return wmodels.SystemInfo(version=sb_ver.version_string())
from oslo_config import cfg from pbr.version import VersionInfo from pecan import rest from pecan.secure import secure from storyboard.api.auth import authorization_checks as checks from storyboard.api.v1 import wmodels import wsmeext.pecan as wsme_pecan CONF = cfg.CONF class SystemInfoController(rest.RestController): """REST controller for sysinfo endpoint. Provides Get methods for System information. """ @secure(checks.guest) @wsme_pecan.wsexpose(wmodels.SystemInfo) def get(self): """Retrieve the Storyboard system information. Example:: curl https://my.example.org/api/v1/systeminfo """ sb_ver = VersionInfo('storyboard') return wmodels.SystemInfo(version=sb_ver.version_string())
Add example commands for the Systeminfo api
Add example commands for the Systeminfo api Currently the api documentation does not include example commands. It would be very friendly for our users to have some example commands to follow and use the api. This patch adds examples to the Systeminfo section of the api documentation. Change-Id: Ic3d56d207db696100754a5a1fd5764f2f3f0a7f3
Python
apache-2.0
ColdrickSotK/storyboard,ColdrickSotK/storyboard,ColdrickSotK/storyboard
from oslo_config import cfg from pbr.version import VersionInfo from pecan import rest from pecan.secure import secure from storyboard.api.auth import authorization_checks as checks from storyboard.api.v1 import wmodels import wsmeext.pecan as wsme_pecan CONF = cfg.CONF class SystemInfoController(rest.RestController): """REST controller for sysinfo endpoint. Provides Get methods for System information. """ @secure(checks.guest) @wsme_pecan.wsexpose(wmodels.SystemInfo) def get(self): """Retrieve the Storyboard system information. + + Example:: + + curl https://my.example.org/api/v1/systeminfo + """ sb_ver = VersionInfo('storyboard') return wmodels.SystemInfo(version=sb_ver.version_string())
Add example commands for the Systeminfo api
## Code Before: from oslo_config import cfg from pbr.version import VersionInfo from pecan import rest from pecan.secure import secure from storyboard.api.auth import authorization_checks as checks from storyboard.api.v1 import wmodels import wsmeext.pecan as wsme_pecan CONF = cfg.CONF class SystemInfoController(rest.RestController): """REST controller for sysinfo endpoint. Provides Get methods for System information. """ @secure(checks.guest) @wsme_pecan.wsexpose(wmodels.SystemInfo) def get(self): """Retrieve the Storyboard system information. """ sb_ver = VersionInfo('storyboard') return wmodels.SystemInfo(version=sb_ver.version_string()) ## Instruction: Add example commands for the Systeminfo api ## Code After: from oslo_config import cfg from pbr.version import VersionInfo from pecan import rest from pecan.secure import secure from storyboard.api.auth import authorization_checks as checks from storyboard.api.v1 import wmodels import wsmeext.pecan as wsme_pecan CONF = cfg.CONF class SystemInfoController(rest.RestController): """REST controller for sysinfo endpoint. Provides Get methods for System information. """ @secure(checks.guest) @wsme_pecan.wsexpose(wmodels.SystemInfo) def get(self): """Retrieve the Storyboard system information. Example:: curl https://my.example.org/api/v1/systeminfo """ sb_ver = VersionInfo('storyboard') return wmodels.SystemInfo(version=sb_ver.version_string())
from oslo_config import cfg from pbr.version import VersionInfo from pecan import rest from pecan.secure import secure from storyboard.api.auth import authorization_checks as checks from storyboard.api.v1 import wmodels import wsmeext.pecan as wsme_pecan CONF = cfg.CONF class SystemInfoController(rest.RestController): """REST controller for sysinfo endpoint. Provides Get methods for System information. """ @secure(checks.guest) @wsme_pecan.wsexpose(wmodels.SystemInfo) def get(self): """Retrieve the Storyboard system information. + + Example:: + + curl https://my.example.org/api/v1/systeminfo + """ sb_ver = VersionInfo('storyboard') return wmodels.SystemInfo(version=sb_ver.version_string())
3b0608e11da620f1e12aeb270dbaf2f255a35cec
Cura/Qt/Bindings/ControllerProxy.py
Cura/Qt/Bindings/ControllerProxy.py
from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl from Cura.Application import Application from Cura.Scene.SceneNode import SceneNode class ControllerProxy(QObject): def __init__(self, parent = None): super().__init__(parent) self._controller = Application.getInstance().getController() @pyqtSlot(str) def setActiveView(self, view): self._controller.setActiveView(view) @pyqtSlot(str) def setActiveTool(self, tool): self._controller.setActiveTool(tool) @pyqtSlot(QUrl) def addMesh(self, file_name): if not file_name.isValid(): return mesh = SceneNode(self._controller.getScene().getRoot()) app = Application.getInstance() mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local')))
from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl from Cura.Application import Application from Cura.Scene.SceneNode import SceneNode from Cura.Scene.BoxRenderer import BoxRenderer class ControllerProxy(QObject): def __init__(self, parent = None): super().__init__(parent) self._controller = Application.getInstance().getController() @pyqtSlot(str) def setActiveView(self, view): self._controller.setActiveView(view) @pyqtSlot(str) def setActiveTool(self, tool): self._controller.setActiveTool(tool) @pyqtSlot(QUrl) def addMesh(self, file_name): if not file_name.isValid(): return mesh = SceneNode(self._controller.getScene().getRoot()) app = Application.getInstance() mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local'))) box = BoxRenderer(mesh.getBoundingBox(), self._controller.getScene().getRoot())
Add a (temporary) bounding box around an added mesh
Add a (temporary) bounding box around an added mesh
Python
agpl-3.0
onitake/Uranium,onitake/Uranium
from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl from Cura.Application import Application from Cura.Scene.SceneNode import SceneNode + from Cura.Scene.BoxRenderer import BoxRenderer class ControllerProxy(QObject): def __init__(self, parent = None): super().__init__(parent) self._controller = Application.getInstance().getController() @pyqtSlot(str) def setActiveView(self, view): self._controller.setActiveView(view) @pyqtSlot(str) def setActiveTool(self, tool): self._controller.setActiveTool(tool) @pyqtSlot(QUrl) def addMesh(self, file_name): if not file_name.isValid(): return mesh = SceneNode(self._controller.getScene().getRoot()) app = Application.getInstance() mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local'))) + box = BoxRenderer(mesh.getBoundingBox(), self._controller.getScene().getRoot())
Add a (temporary) bounding box around an added mesh
## Code Before: from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl from Cura.Application import Application from Cura.Scene.SceneNode import SceneNode class ControllerProxy(QObject): def __init__(self, parent = None): super().__init__(parent) self._controller = Application.getInstance().getController() @pyqtSlot(str) def setActiveView(self, view): self._controller.setActiveView(view) @pyqtSlot(str) def setActiveTool(self, tool): self._controller.setActiveTool(tool) @pyqtSlot(QUrl) def addMesh(self, file_name): if not file_name.isValid(): return mesh = SceneNode(self._controller.getScene().getRoot()) app = Application.getInstance() mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local'))) ## Instruction: Add a (temporary) bounding box around an added mesh ## Code After: from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl from Cura.Application import Application from Cura.Scene.SceneNode import SceneNode from Cura.Scene.BoxRenderer import BoxRenderer class ControllerProxy(QObject): def __init__(self, parent = None): super().__init__(parent) self._controller = Application.getInstance().getController() @pyqtSlot(str) def setActiveView(self, view): self._controller.setActiveView(view) @pyqtSlot(str) def setActiveTool(self, tool): self._controller.setActiveTool(tool) @pyqtSlot(QUrl) def addMesh(self, file_name): if not file_name.isValid(): return mesh = SceneNode(self._controller.getScene().getRoot()) app = Application.getInstance() mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local'))) box = BoxRenderer(mesh.getBoundingBox(), self._controller.getScene().getRoot())
from PyQt5.QtCore import QObject, QCoreApplication, pyqtSlot, QUrl from Cura.Application import Application from Cura.Scene.SceneNode import SceneNode + from Cura.Scene.BoxRenderer import BoxRenderer class ControllerProxy(QObject): def __init__(self, parent = None): super().__init__(parent) self._controller = Application.getInstance().getController() @pyqtSlot(str) def setActiveView(self, view): self._controller.setActiveView(view) @pyqtSlot(str) def setActiveTool(self, tool): self._controller.setActiveTool(tool) @pyqtSlot(QUrl) def addMesh(self, file_name): if not file_name.isValid(): return mesh = SceneNode(self._controller.getScene().getRoot()) app = Application.getInstance() mesh.setMeshData(app.getMeshFileHandler().read(file_name.toLocalFile(), app.getStorageDevice('local'))) + box = BoxRenderer(mesh.getBoundingBox(), self._controller.getScene().getRoot())
fd061738d025b5371c1415a1f5466bcf5f6476b7
py2deb/config/__init__.py
py2deb/config/__init__.py
import os config_dir = os.path.dirname(os.path.abspath(__file__)) # Destination of built packages. PKG_REPO = '/tmp/'
import os config_dir = os.path.dirname(os.path.abspath(__file__)) # Destination of built packages. if os.getuid() == 0: PKG_REPO = '/var/repos/deb-repo/repository/pl-py2deb' else: PKG_REPO = '/tmp'
Make it work out of the box on the build-server and locally
Make it work out of the box on the build-server and locally
Python
mit
paylogic/py2deb,paylogic/py2deb
import os config_dir = os.path.dirname(os.path.abspath(__file__)) # Destination of built packages. + if os.getuid() == 0: + PKG_REPO = '/var/repos/deb-repo/repository/pl-py2deb' + else: - PKG_REPO = '/tmp/' + PKG_REPO = '/tmp'
Make it work out of the box on the build-server and locally
## Code Before: import os config_dir = os.path.dirname(os.path.abspath(__file__)) # Destination of built packages. PKG_REPO = '/tmp/' ## Instruction: Make it work out of the box on the build-server and locally ## Code After: import os config_dir = os.path.dirname(os.path.abspath(__file__)) # Destination of built packages. if os.getuid() == 0: PKG_REPO = '/var/repos/deb-repo/repository/pl-py2deb' else: PKG_REPO = '/tmp'
import os config_dir = os.path.dirname(os.path.abspath(__file__)) # Destination of built packages. + if os.getuid() == 0: + PKG_REPO = '/var/repos/deb-repo/repository/pl-py2deb' + else: - PKG_REPO = '/tmp/' ? - + PKG_REPO = '/tmp' ? ++++
ebbc68da19755097b2131d60bc9757ecb4dc6d4c
bundles/auth/models/token.py
bundles/auth/models/token.py
import hashlib import random import string from ext.aboard.model import * def set_value(token): """Randomly create and return a value.""" value = str(token.user) + "_" + str(token.timestamp) len_rand = random.randint(20, 40) to_pick = string.digits + string.ascii_letters + \ "_-+^$" for i in range(len_rand): value += random.choice(to_pick) print("Private value", value) # Hash the value hashed = hashlib.sha512(value.encode()) value = hashed.hexdigest() print("Public value", value) return value class Token(Model): """A token model.""" id = None user = Integer() timestamp = Integer() value = String(pkey=True, default=set_value)
import hashlib import random import string from ext.aboard.model import * class Token(Model): """A token model.""" id = None user = Integer() timestamp = Integer() value = String(pkey=True) def __init__(self, user=None, timestamp=None): value = None if user and timestamp: value = Token.get_token_value(user, timestamp) Model.__init__(self, user=user, timestamp=timestamp, value=value) @staticmethod def get_token_value(user, timestamp): """Randomly create and return a token value.""" value = str(user) + "_" + str(timestamp) len_rand = random.randint(20, 40) to_pick = string.digits + string.ascii_letters + \ "_-+^$" for i in range(len_rand): value += random.choice(to_pick) print("Private value", value) # Hash the value hashed = hashlib.sha512(value.encode()) value = hashed.hexdigest() print("Public value", value) return value
Use the Model constructor to generate a default value
[user] Use the Model constructor to generate a default value
Python
bsd-3-clause
v-legoff/pa-poc2,v-legoff/pa-poc2
import hashlib import random import string from ext.aboard.model import * - - def set_value(token): - """Randomly create and return a value.""" - value = str(token.user) + "_" + str(token.timestamp) - len_rand = random.randint(20, 40) - to_pick = string.digits + string.ascii_letters + \ - "_-+^$" - for i in range(len_rand): - value += random.choice(to_pick) - - print("Private value", value) - # Hash the value - hashed = hashlib.sha512(value.encode()) - value = hashed.hexdigest() - print("Public value", value) - return value class Token(Model): """A token model.""" id = None user = Integer() timestamp = Integer() - value = String(pkey=True, default=set_value) + value = String(pkey=True) + + def __init__(self, user=None, timestamp=None): + value = None + if user and timestamp: + value = Token.get_token_value(user, timestamp) + + Model.__init__(self, user=user, timestamp=timestamp, value=value) + + @staticmethod + def get_token_value(user, timestamp): + """Randomly create and return a token value.""" + value = str(user) + "_" + str(timestamp) + len_rand = random.randint(20, 40) + to_pick = string.digits + string.ascii_letters + \ + "_-+^$" + for i in range(len_rand): + value += random.choice(to_pick) + + print("Private value", value) + + # Hash the value + hashed = hashlib.sha512(value.encode()) + value = hashed.hexdigest() + print("Public value", value) + return value
Use the Model constructor to generate a default value
## Code Before: import hashlib import random import string from ext.aboard.model import * def set_value(token): """Randomly create and return a value.""" value = str(token.user) + "_" + str(token.timestamp) len_rand = random.randint(20, 40) to_pick = string.digits + string.ascii_letters + \ "_-+^$" for i in range(len_rand): value += random.choice(to_pick) print("Private value", value) # Hash the value hashed = hashlib.sha512(value.encode()) value = hashed.hexdigest() print("Public value", value) return value class Token(Model): """A token model.""" id = None user = Integer() timestamp = Integer() value = String(pkey=True, default=set_value) ## Instruction: Use the Model constructor to generate a default value ## Code After: import hashlib import random import string from ext.aboard.model import * class Token(Model): """A token model.""" id = None user = Integer() timestamp = Integer() value = String(pkey=True) def __init__(self, user=None, timestamp=None): value = None if user and timestamp: value = Token.get_token_value(user, timestamp) Model.__init__(self, user=user, timestamp=timestamp, value=value) @staticmethod def get_token_value(user, timestamp): """Randomly create and return a token value.""" value = str(user) + "_" + str(timestamp) len_rand = random.randint(20, 40) to_pick = string.digits + string.ascii_letters + \ "_-+^$" for i in range(len_rand): value += random.choice(to_pick) print("Private value", value) # Hash the value hashed = hashlib.sha512(value.encode()) value = hashed.hexdigest() print("Public value", value) return value
import hashlib import random import string from ext.aboard.model import * - - def set_value(token): - """Randomly create and return a value.""" - value = str(token.user) + "_" + str(token.timestamp) - len_rand = random.randint(20, 40) - to_pick = string.digits + string.ascii_letters + \ - "_-+^$" - for i in range(len_rand): - value += random.choice(to_pick) - - print("Private value", value) - # Hash the value - hashed = hashlib.sha512(value.encode()) - value = hashed.hexdigest() - print("Public value", value) - return value class Token(Model): """A token model.""" id = None user = Integer() timestamp = Integer() - value = String(pkey=True, default=set_value) ? ------------------- + value = String(pkey=True) + + def __init__(self, user=None, timestamp=None): + value = None + if user and timestamp: + value = Token.get_token_value(user, timestamp) + + Model.__init__(self, user=user, timestamp=timestamp, value=value) + + @staticmethod + def get_token_value(user, timestamp): + """Randomly create and return a token value.""" + value = str(user) + "_" + str(timestamp) + len_rand = random.randint(20, 40) + to_pick = string.digits + string.ascii_letters + \ + "_-+^$" + for i in range(len_rand): + value += random.choice(to_pick) + + print("Private value", value) + + # Hash the value + hashed = hashlib.sha512(value.encode()) + value = hashed.hexdigest() + print("Public value", value) + return value
bb229be50e37bb710c32541cec7b159da9508335
tests/functional/subcommands/test_subcommands.py
tests/functional/subcommands/test_subcommands.py
import subprocess def test_subcommand(): """ Test that a command from the example project is registered. """ output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT) assert b'testcommand' in output def test_subcommand_group(): """ Test that a command group is registered. """ output = subprocess.check_output(['textx', 'testgroup'], stderr=subprocess.STDOUT) assert b'groupcommand1' in output assert b'groupcommand2' in output
import sys import pytest import subprocess if (3, 6) <= sys.version_info < (3, 8): pytest.skip("Temporary workaround for Travis problems", allow_module_level=True) def test_subcommand(): """ Test that a command from the example project is registered. """ output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT) assert b'testcommand' in output def test_subcommand_group(): """ Test that a command group is registered. """ output = subprocess.check_output(['textx', 'testgroup'], stderr=subprocess.STDOUT) assert b'groupcommand1' in output assert b'groupcommand2' in output
Add workaround for Travis CI problems
Add workaround for Travis CI problems
Python
mit
igordejanovic/textX,igordejanovic/textX,igordejanovic/textX
+ import sys + import pytest import subprocess + + + if (3, 6) <= sys.version_info < (3, 8): + pytest.skip("Temporary workaround for Travis problems", allow_module_level=True) def test_subcommand(): """ Test that a command from the example project is registered. """ output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT) assert b'testcommand' in output def test_subcommand_group(): """ Test that a command group is registered. """ output = subprocess.check_output(['textx', 'testgroup'], stderr=subprocess.STDOUT) assert b'groupcommand1' in output assert b'groupcommand2' in output
Add workaround for Travis CI problems
## Code Before: import subprocess def test_subcommand(): """ Test that a command from the example project is registered. """ output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT) assert b'testcommand' in output def test_subcommand_group(): """ Test that a command group is registered. """ output = subprocess.check_output(['textx', 'testgroup'], stderr=subprocess.STDOUT) assert b'groupcommand1' in output assert b'groupcommand2' in output ## Instruction: Add workaround for Travis CI problems ## Code After: import sys import pytest import subprocess if (3, 6) <= sys.version_info < (3, 8): pytest.skip("Temporary workaround for Travis problems", allow_module_level=True) def test_subcommand(): """ Test that a command from the example project is registered. """ output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT) assert b'testcommand' in output def test_subcommand_group(): """ Test that a command group is registered. """ output = subprocess.check_output(['textx', 'testgroup'], stderr=subprocess.STDOUT) assert b'groupcommand1' in output assert b'groupcommand2' in output
+ import sys + import pytest import subprocess + + + if (3, 6) <= sys.version_info < (3, 8): + pytest.skip("Temporary workaround for Travis problems", allow_module_level=True) def test_subcommand(): """ Test that a command from the example project is registered. """ output = subprocess.check_output(['textx'], stderr=subprocess.STDOUT) assert b'testcommand' in output def test_subcommand_group(): """ Test that a command group is registered. """ output = subprocess.check_output(['textx', 'testgroup'], stderr=subprocess.STDOUT) assert b'groupcommand1' in output assert b'groupcommand2' in output
5d5b59bde655fbeb2d07bd5539c2ff9b29879d1d
pythontutorials/books/AutomateTheBoringStuff/Ch14/P2_writeCSV.py
pythontutorials/books/AutomateTheBoringStuff/Ch14/P2_writeCSV.py
import csv # Writer Objects outputFile = open("output.csv", "w", newline='') outputWriter = csv.writer(outputFile) print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow([1, 2, 3.141592, 4])) outputFile.close() # Delimiter and lineterminator Keyword Arguments csvFile = open("example.tsv", 'w', newline='') csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) csvFile.close()
def main(): import csv # Writer Objects outputFile = open("output.csv", "w", newline='') outputWriter = csv.writer(outputFile) print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow([1, 2, 3.141592, 4])) outputFile.close() # Delimiter and lineterminator Keyword Arguments csvFile = open("example.tsv", 'w', newline='') csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) csvFile.close() if __name__ == '__main__': main()
Update P1_writeCSV.py added docstring and wrapped in main function
Update P1_writeCSV.py added docstring and wrapped in main function
Python
mit
JoseALermaIII/python-tutorials,JoseALermaIII/python-tutorials
- import csv + def main(): + import csv - # Writer Objects - outputFile = open("output.csv", "w", newline='') - outputWriter = csv.writer(outputFile) - print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) - print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) - print(outputWriter.writerow([1, 2, 3.141592, 4])) - outputFile.close() + # Writer Objects + outputFile = open("output.csv", "w", newline='') + outputWriter = csv.writer(outputFile) - # Delimiter and lineterminator Keyword Arguments - csvFile = open("example.tsv", 'w', newline='') - csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') - print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) - print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) + print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) - print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) - csvFile.close() + print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) + print(outputWriter.writerow([1, 2, 3.141592, 4])) + outputFile.close() + # Delimiter and lineterminator Keyword Arguments + csvFile = open("example.tsv", 'w', newline='') + csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') + print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) + print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) + print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) + csvFile.close() + + + if __name__ == '__main__': + main() +
Update P1_writeCSV.py added docstring and wrapped in main function
## Code Before: import csv # Writer Objects outputFile = open("output.csv", "w", newline='') outputWriter = csv.writer(outputFile) print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow([1, 2, 3.141592, 4])) outputFile.close() # Delimiter and lineterminator Keyword Arguments csvFile = open("example.tsv", 'w', newline='') csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) csvFile.close() ## Instruction: Update P1_writeCSV.py added docstring and wrapped in main function ## Code After: def main(): import csv # Writer Objects outputFile = open("output.csv", "w", newline='') outputWriter = csv.writer(outputFile) print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) print(outputWriter.writerow([1, 2, 3.141592, 4])) outputFile.close() # Delimiter and lineterminator Keyword Arguments csvFile = open("example.tsv", 'w', newline='') csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) csvFile.close() if __name__ == '__main__': main()
- import csv + def main(): + import csv - # Writer Objects - outputFile = open("output.csv", "w", newline='') - outputWriter = csv.writer(outputFile) - print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) - print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) - print(outputWriter.writerow([1, 2, 3.141592, 4])) - outputFile.close() + # Writer Objects + outputFile = open("output.csv", "w", newline='') + outputWriter = csv.writer(outputFile) + print(outputWriter.writerow(['spam', 'eggs', 'bacon', 'ham'])) + print(outputWriter.writerow(['Hello, world!', 'eggs', 'bacon', 'ham'])) + print(outputWriter.writerow([1, 2, 3.141592, 4])) + outputFile.close() + - # Delimiter and lineterminator Keyword Arguments + # Delimiter and lineterminator Keyword Arguments ? ++++ - csvFile = open("example.tsv", 'w', newline='') + csvFile = open("example.tsv", 'w', newline='') ? ++++ - csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') + csvWriter = csv.writer(csvFile, delimiter='\t', lineterminator='\n\n') ? ++++ - print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) + print(csvWriter.writerow(['apples', 'oranges', 'grapes'])) ? ++++ - print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) + print(csvWriter.writerow(['eggs', 'bacon', 'ham'])) ? ++++ - print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) + print(csvWriter.writerow(['spam', 'spam', 'spam', 'spam', 'spam', 'spam'])) ? ++++ - csvFile.close() + csvFile.close() ? ++++ + + + if __name__ == '__main__': + main()
01b9d47976dd68a3fe9ae2656e423a03edb5187d
examples/source.py
examples/source.py
"""Test client that connects and sends infinite data.""" import sys from tulip import * def dprint(*args): print('source:', *args, file=sys.stderr) class Client(Protocol): data = b'x'*16*1024 def connection_made(self, tr): dprint('connecting to', tr.get_extra_info('addr')) self.tr = tr self.lost = False self.loop = get_event_loop() self.waiter = Future() self.write_some_data() def write_some_data(self): dprint('writing', len(self.data), 'bytes') self.tr.write(self.data) if not self.lost: self.loop.call_soon(self.write_some_data) def connection_lost(self, exc): dprint('lost connection', repr(exc)) self.lost = True self.waiter.set_result(None) @coroutine def start(loop): tr, pr = yield from loop.create_connection(Client, 'localhost', 1111) dprint('tr =', tr) dprint('pr =', pr) res = yield from pr.waiter return res def main(): loop = get_event_loop() loop.run_until_complete(start(loop)) if __name__ == '__main__': main()
"""Test client that connects and sends infinite data.""" import sys from tulip import * def dprint(*args): print('source:', *args, file=sys.stderr) class Client(Protocol): data = b'x'*16*1024 def connection_made(self, tr): dprint('connecting to', tr.get_extra_info('addr')) self.tr = tr self.lost = False self.loop = get_event_loop() self.waiter = Future() self.write_some_data() def write_some_data(self): if self.lost: dprint('lost already') return dprint('writing', len(self.data), 'bytes') self.tr.write(self.data) self.loop.call_soon(self.write_some_data) def connection_lost(self, exc): dprint('lost connection', repr(exc)) self.lost = True self.waiter.set_result(None) @coroutine def start(loop): tr, pr = yield from loop.create_connection(Client, 'localhost', 1111) dprint('tr =', tr) dprint('pr =', pr) res = yield from pr.waiter return res def main(): loop = get_event_loop() loop.run_until_complete(start(loop)) if __name__ == '__main__': main()
Fix logic around lost connection.
Fix logic around lost connection.
Python
apache-2.0
leetreveil/tulip,overcastcloud/trollius,leetreveil/tulip,overcastcloud/trollius,leetreveil/tulip,overcastcloud/trollius
"""Test client that connects and sends infinite data.""" import sys from tulip import * def dprint(*args): print('source:', *args, file=sys.stderr) class Client(Protocol): data = b'x'*16*1024 def connection_made(self, tr): dprint('connecting to', tr.get_extra_info('addr')) self.tr = tr self.lost = False self.loop = get_event_loop() self.waiter = Future() self.write_some_data() def write_some_data(self): + if self.lost: + dprint('lost already') + return dprint('writing', len(self.data), 'bytes') self.tr.write(self.data) - if not self.lost: - self.loop.call_soon(self.write_some_data) + self.loop.call_soon(self.write_some_data) def connection_lost(self, exc): dprint('lost connection', repr(exc)) self.lost = True self.waiter.set_result(None) @coroutine def start(loop): tr, pr = yield from loop.create_connection(Client, 'localhost', 1111) dprint('tr =', tr) dprint('pr =', pr) res = yield from pr.waiter return res def main(): loop = get_event_loop() loop.run_until_complete(start(loop)) if __name__ == '__main__': main()
Fix logic around lost connection.
## Code Before: """Test client that connects and sends infinite data.""" import sys from tulip import * def dprint(*args): print('source:', *args, file=sys.stderr) class Client(Protocol): data = b'x'*16*1024 def connection_made(self, tr): dprint('connecting to', tr.get_extra_info('addr')) self.tr = tr self.lost = False self.loop = get_event_loop() self.waiter = Future() self.write_some_data() def write_some_data(self): dprint('writing', len(self.data), 'bytes') self.tr.write(self.data) if not self.lost: self.loop.call_soon(self.write_some_data) def connection_lost(self, exc): dprint('lost connection', repr(exc)) self.lost = True self.waiter.set_result(None) @coroutine def start(loop): tr, pr = yield from loop.create_connection(Client, 'localhost', 1111) dprint('tr =', tr) dprint('pr =', pr) res = yield from pr.waiter return res def main(): loop = get_event_loop() loop.run_until_complete(start(loop)) if __name__ == '__main__': main() ## Instruction: Fix logic around lost connection. ## Code After: """Test client that connects and sends infinite data.""" import sys from tulip import * def dprint(*args): print('source:', *args, file=sys.stderr) class Client(Protocol): data = b'x'*16*1024 def connection_made(self, tr): dprint('connecting to', tr.get_extra_info('addr')) self.tr = tr self.lost = False self.loop = get_event_loop() self.waiter = Future() self.write_some_data() def write_some_data(self): if self.lost: dprint('lost already') return dprint('writing', len(self.data), 'bytes') self.tr.write(self.data) self.loop.call_soon(self.write_some_data) def connection_lost(self, exc): dprint('lost connection', repr(exc)) self.lost = True self.waiter.set_result(None) @coroutine def start(loop): tr, pr = yield from loop.create_connection(Client, 'localhost', 1111) dprint('tr =', tr) dprint('pr =', pr) res = yield from pr.waiter return res def main(): loop = get_event_loop() loop.run_until_complete(start(loop)) if __name__ == '__main__': main()
"""Test client that connects and sends infinite data.""" import sys from tulip import * def dprint(*args): print('source:', *args, file=sys.stderr) class Client(Protocol): data = b'x'*16*1024 def connection_made(self, tr): dprint('connecting to', tr.get_extra_info('addr')) self.tr = tr self.lost = False self.loop = get_event_loop() self.waiter = Future() self.write_some_data() def write_some_data(self): + if self.lost: + dprint('lost already') + return dprint('writing', len(self.data), 'bytes') self.tr.write(self.data) - if not self.lost: - self.loop.call_soon(self.write_some_data) ? ---- + self.loop.call_soon(self.write_some_data) def connection_lost(self, exc): dprint('lost connection', repr(exc)) self.lost = True self.waiter.set_result(None) @coroutine def start(loop): tr, pr = yield from loop.create_connection(Client, 'localhost', 1111) dprint('tr =', tr) dprint('pr =', pr) res = yield from pr.waiter return res def main(): loop = get_event_loop() loop.run_until_complete(start(loop)) if __name__ == '__main__': main()
3395943d4c202709c2f1f110e19a2aa0dc741e63
UM/Qt/Bindings/DirectoryListModel.py
UM/Qt/Bindings/DirectoryListModel.py
from UM.Qt.ListModel import ListModel from UM.Application import Application from PyQt5.QtCore import Qt, pyqtProperty, pyqtSignal, QUrl import os import os.path class DirectoryListModel(ListModel): NameRole = Qt.UserRole + 1 UrlRole = Qt.UserRole + 2 def __init__(self): super().__init__() self.addRoleName(self.NameRole, 'name') self.addRoleName(self.UrlRole, 'url') self._directory = None directoryChanged = pyqtSignal() def getDirectory(self): return self._directory def setDirectory(self, path): if path != self._directory: if path.startswith('file://'): path = path[7:] self._directory = os.path.dirname(path) self.clear() extensions = Application.getInstance().getMeshFileHandler().getSupportedFileTypesRead() for entry in os.listdir(self._directory): if os.path.splitext(entry)[1] in extensions: self.appendItem({ 'name': os.path.basename(entry), 'url': QUrl.fromLocalFile(os.path.join(self._directory, entry)) }) self.sort(lambda e: e['name']) directory = pyqtProperty(str, fget = getDirectory, fset = setDirectory, notify = directoryChanged)
from UM.Qt.ListModel import ListModel from UM.Application import Application from PyQt5.QtCore import Qt, pyqtProperty, pyqtSignal, QUrl import os import os.path import platform class DirectoryListModel(ListModel): NameRole = Qt.UserRole + 1 UrlRole = Qt.UserRole + 2 def __init__(self): super().__init__() self.addRoleName(self.NameRole, 'name') self.addRoleName(self.UrlRole, 'url') self._directory = None directoryChanged = pyqtSignal() def getDirectory(self): return self._directory def setDirectory(self, path): if path != self._directory: if path.startswith('file://'): if platform.system() == "Windows" and path.startswith('file:///'): path = path[8:] else: path = path[7:] self._directory = os.path.dirname(path) self.clear() extensions = Application.getInstance().getMeshFileHandler().getSupportedFileTypesRead() for entry in os.listdir(self._directory): if os.path.splitext(entry)[1] in extensions: self.appendItem({ 'name': os.path.basename(entry), 'url': QUrl.fromLocalFile(os.path.join(self._directory, entry)) }) self.sort(lambda e: e['name']) directory = pyqtProperty(str, fget = getDirectory, fset = setDirectory, notify = directoryChanged)
Fix directory listing on windows.
Fix directory listing on windows.
Python
agpl-3.0
onitake/Uranium,onitake/Uranium
from UM.Qt.ListModel import ListModel from UM.Application import Application from PyQt5.QtCore import Qt, pyqtProperty, pyqtSignal, QUrl import os import os.path + import platform class DirectoryListModel(ListModel): NameRole = Qt.UserRole + 1 UrlRole = Qt.UserRole + 2 def __init__(self): super().__init__() self.addRoleName(self.NameRole, 'name') self.addRoleName(self.UrlRole, 'url') self._directory = None directoryChanged = pyqtSignal() def getDirectory(self): return self._directory def setDirectory(self, path): if path != self._directory: if path.startswith('file://'): + if platform.system() == "Windows" and path.startswith('file:///'): + path = path[8:] + else: - path = path[7:] + path = path[7:] self._directory = os.path.dirname(path) self.clear() extensions = Application.getInstance().getMeshFileHandler().getSupportedFileTypesRead() for entry in os.listdir(self._directory): if os.path.splitext(entry)[1] in extensions: self.appendItem({ 'name': os.path.basename(entry), 'url': QUrl.fromLocalFile(os.path.join(self._directory, entry)) }) self.sort(lambda e: e['name']) directory = pyqtProperty(str, fget = getDirectory, fset = setDirectory, notify = directoryChanged)
Fix directory listing on windows.
## Code Before: from UM.Qt.ListModel import ListModel from UM.Application import Application from PyQt5.QtCore import Qt, pyqtProperty, pyqtSignal, QUrl import os import os.path class DirectoryListModel(ListModel): NameRole = Qt.UserRole + 1 UrlRole = Qt.UserRole + 2 def __init__(self): super().__init__() self.addRoleName(self.NameRole, 'name') self.addRoleName(self.UrlRole, 'url') self._directory = None directoryChanged = pyqtSignal() def getDirectory(self): return self._directory def setDirectory(self, path): if path != self._directory: if path.startswith('file://'): path = path[7:] self._directory = os.path.dirname(path) self.clear() extensions = Application.getInstance().getMeshFileHandler().getSupportedFileTypesRead() for entry in os.listdir(self._directory): if os.path.splitext(entry)[1] in extensions: self.appendItem({ 'name': os.path.basename(entry), 'url': QUrl.fromLocalFile(os.path.join(self._directory, entry)) }) self.sort(lambda e: e['name']) directory = pyqtProperty(str, fget = getDirectory, fset = setDirectory, notify = directoryChanged) ## Instruction: Fix directory listing on windows. 
## Code After: from UM.Qt.ListModel import ListModel from UM.Application import Application from PyQt5.QtCore import Qt, pyqtProperty, pyqtSignal, QUrl import os import os.path import platform class DirectoryListModel(ListModel): NameRole = Qt.UserRole + 1 UrlRole = Qt.UserRole + 2 def __init__(self): super().__init__() self.addRoleName(self.NameRole, 'name') self.addRoleName(self.UrlRole, 'url') self._directory = None directoryChanged = pyqtSignal() def getDirectory(self): return self._directory def setDirectory(self, path): if path != self._directory: if path.startswith('file://'): if platform.system() == "Windows" and path.startswith('file:///'): path = path[8:] else: path = path[7:] self._directory = os.path.dirname(path) self.clear() extensions = Application.getInstance().getMeshFileHandler().getSupportedFileTypesRead() for entry in os.listdir(self._directory): if os.path.splitext(entry)[1] in extensions: self.appendItem({ 'name': os.path.basename(entry), 'url': QUrl.fromLocalFile(os.path.join(self._directory, entry)) }) self.sort(lambda e: e['name']) directory = pyqtProperty(str, fget = getDirectory, fset = setDirectory, notify = directoryChanged)
from UM.Qt.ListModel import ListModel from UM.Application import Application from PyQt5.QtCore import Qt, pyqtProperty, pyqtSignal, QUrl import os import os.path + import platform class DirectoryListModel(ListModel): NameRole = Qt.UserRole + 1 UrlRole = Qt.UserRole + 2 def __init__(self): super().__init__() self.addRoleName(self.NameRole, 'name') self.addRoleName(self.UrlRole, 'url') self._directory = None directoryChanged = pyqtSignal() def getDirectory(self): return self._directory def setDirectory(self, path): if path != self._directory: if path.startswith('file://'): + if platform.system() == "Windows" and path.startswith('file:///'): + path = path[8:] + else: - path = path[7:] + path = path[7:] ? ++++ self._directory = os.path.dirname(path) self.clear() extensions = Application.getInstance().getMeshFileHandler().getSupportedFileTypesRead() for entry in os.listdir(self._directory): if os.path.splitext(entry)[1] in extensions: self.appendItem({ 'name': os.path.basename(entry), 'url': QUrl.fromLocalFile(os.path.join(self._directory, entry)) }) self.sort(lambda e: e['name']) directory = pyqtProperty(str, fget = getDirectory, fset = setDirectory, notify = directoryChanged)
d1aa094c9b0988c12100c8300aae4b390bb276f8
zonetruck/__main__.py
zonetruck/__main__.py
import yaml from zonetruck.WorkManager import WorkManager from zonetruck.ZoneUpdater import ZoneUpdater from zonetruck.ZoneFilter import ZoneFilter from zonetruck.zone_xfer import zone_xfer def main(argv): config = yaml.safe_load(open(argv[1], 'r')) zone_filter = ZoneFilter(config['filter_rules']).filter zone_updaters = [ZoneUpdater(**o).task for o in config['outputs']] subsequent_tasks = [[zone_filter], zone_updaters] work_manager = WorkManager() for source in config['sources']: for zone in source['zones']: work_manager.submit_work(100, zone_xfer, (source['masters'], zone), subsequent_tasks) work_manager.start() work_manager.join() if __name__ == '__main__': import sys main(sys.argv)
import yaml from zonetruck.WorkManager import WorkManager from zonetruck.ZoneUpdater import ZoneUpdater from zonetruck.ZoneFilter import ZoneFilter from zonetruck.zone_xfer import zone_xfer import sys def main(argv=None): argv = argv or sys.argv config = yaml.safe_load(open(argv[1], 'r')) zone_filter = ZoneFilter(config['filter_rules']).filter zone_updaters = [ZoneUpdater(**o).task for o in config['outputs']] subsequent_tasks = [[zone_filter], zone_updaters] work_manager = WorkManager() for source in config['sources']: for zone in source['zones']: work_manager.submit_work(100, zone_xfer, (source['masters'], zone), subsequent_tasks) work_manager.start() work_manager.join() if __name__ == '__main__': main()
Make argv arguments optional, fixes pip installed script
Make argv arguments optional, fixes pip installed script
Python
mit
pv2b/zonetruck
import yaml from zonetruck.WorkManager import WorkManager from zonetruck.ZoneUpdater import ZoneUpdater from zonetruck.ZoneFilter import ZoneFilter from zonetruck.zone_xfer import zone_xfer + import sys - def main(argv): + def main(argv=None): + argv = argv or sys.argv config = yaml.safe_load(open(argv[1], 'r')) zone_filter = ZoneFilter(config['filter_rules']).filter zone_updaters = [ZoneUpdater(**o).task for o in config['outputs']] subsequent_tasks = [[zone_filter], zone_updaters] work_manager = WorkManager() for source in config['sources']: for zone in source['zones']: work_manager.submit_work(100, zone_xfer, (source['masters'], zone), subsequent_tasks) work_manager.start() work_manager.join() if __name__ == '__main__': + main() - import sys - main(sys.argv)
Make argv arguments optional, fixes pip installed script
## Code Before: import yaml from zonetruck.WorkManager import WorkManager from zonetruck.ZoneUpdater import ZoneUpdater from zonetruck.ZoneFilter import ZoneFilter from zonetruck.zone_xfer import zone_xfer def main(argv): config = yaml.safe_load(open(argv[1], 'r')) zone_filter = ZoneFilter(config['filter_rules']).filter zone_updaters = [ZoneUpdater(**o).task for o in config['outputs']] subsequent_tasks = [[zone_filter], zone_updaters] work_manager = WorkManager() for source in config['sources']: for zone in source['zones']: work_manager.submit_work(100, zone_xfer, (source['masters'], zone), subsequent_tasks) work_manager.start() work_manager.join() if __name__ == '__main__': import sys main(sys.argv) ## Instruction: Make argv arguments optional, fixes pip installed script ## Code After: import yaml from zonetruck.WorkManager import WorkManager from zonetruck.ZoneUpdater import ZoneUpdater from zonetruck.ZoneFilter import ZoneFilter from zonetruck.zone_xfer import zone_xfer import sys def main(argv=None): argv = argv or sys.argv config = yaml.safe_load(open(argv[1], 'r')) zone_filter = ZoneFilter(config['filter_rules']).filter zone_updaters = [ZoneUpdater(**o).task for o in config['outputs']] subsequent_tasks = [[zone_filter], zone_updaters] work_manager = WorkManager() for source in config['sources']: for zone in source['zones']: work_manager.submit_work(100, zone_xfer, (source['masters'], zone), subsequent_tasks) work_manager.start() work_manager.join() if __name__ == '__main__': main()
import yaml from zonetruck.WorkManager import WorkManager from zonetruck.ZoneUpdater import ZoneUpdater from zonetruck.ZoneFilter import ZoneFilter from zonetruck.zone_xfer import zone_xfer + import sys - def main(argv): + def main(argv=None): ? +++++ + argv = argv or sys.argv config = yaml.safe_load(open(argv[1], 'r')) zone_filter = ZoneFilter(config['filter_rules']).filter zone_updaters = [ZoneUpdater(**o).task for o in config['outputs']] subsequent_tasks = [[zone_filter], zone_updaters] work_manager = WorkManager() for source in config['sources']: for zone in source['zones']: work_manager.submit_work(100, zone_xfer, (source['masters'], zone), subsequent_tasks) work_manager.start() work_manager.join() if __name__ == '__main__': + main() - import sys - main(sys.argv)
0eafac86c679689c77e371150c173c351d0aa926
appex_dump.py
appex_dump.py
import appex def main(): if appex.is_running_extension(): for func in (appex.get_attachments, appex.get_file_path, appex.get_file_paths, appex.get_image, appex.get_images, appex.get_text, appex.get_url, appex.get_urls, appex.get_vcard, appex.get_vcards): print('{:<11} : {}'.format(func.func_name.partition('_')[2], func())) if __name__ == '__main__': main()
import appex, inspect def main(): if appex.is_running_extension(): for name_func in inspect.getmembers(appex): name, func = name_func if name.startswith('get_'): # find all appex.get_xxx() methods print('{:<11} : {}'.format(name.partition('_')[2], func())) if __name__ == '__main__': main()
Use inspect to remove hardcoding of method names
Use inspect to remove hardcoding of method names
Python
apache-2.0
cclauss/Ten-lines-or-less
- import appex + import appex, inspect def main(): if appex.is_running_extension(): + for name_func in inspect.getmembers(appex): + name, func = name_func + if name.startswith('get_'): # find all appex.get_xxx() methods - for func in (appex.get_attachments, appex.get_file_path, - appex.get_file_paths, appex.get_image, appex.get_images, - appex.get_text, appex.get_url, appex.get_urls, - appex.get_vcard, appex.get_vcards): - print('{:<11} : {}'.format(func.func_name.partition('_')[2], func())) + print('{:<11} : {}'.format(name.partition('_')[2], func())) if __name__ == '__main__': main()
Use inspect to remove hardcoding of method names
## Code Before: import appex def main(): if appex.is_running_extension(): for func in (appex.get_attachments, appex.get_file_path, appex.get_file_paths, appex.get_image, appex.get_images, appex.get_text, appex.get_url, appex.get_urls, appex.get_vcard, appex.get_vcards): print('{:<11} : {}'.format(func.func_name.partition('_')[2], func())) if __name__ == '__main__': main() ## Instruction: Use inspect to remove hardcoding of method names ## Code After: import appex, inspect def main(): if appex.is_running_extension(): for name_func in inspect.getmembers(appex): name, func = name_func if name.startswith('get_'): # find all appex.get_xxx() methods print('{:<11} : {}'.format(name.partition('_')[2], func())) if __name__ == '__main__': main()
- import appex + import appex, inspect def main(): if appex.is_running_extension(): + for name_func in inspect.getmembers(appex): + name, func = name_func + if name.startswith('get_'): # find all appex.get_xxx() methods - for func in (appex.get_attachments, appex.get_file_path, - appex.get_file_paths, appex.get_image, appex.get_images, - appex.get_text, appex.get_url, appex.get_urls, - appex.get_vcard, appex.get_vcards): - print('{:<11} : {}'.format(func.func_name.partition('_')[2], func())) ? ---------- + print('{:<11} : {}'.format(name.partition('_')[2], func())) ? ++++ if __name__ == '__main__': main()
3f3818e4a21ffc4e1b8d4426093fc093396b5a5b
pandas_finance.py
pandas_finance.py
import datetime import scraperwiki import numpy import pandas.io.data as web def get_stock(stock, start, end, service): """ Return data frame of finance data for stock. Takes start and end datetimes, and service name of 'google' or 'yahoo'. """ return web.DataReader(stock, service, start, end) def parse_finance_frame(stock, start, end, service='google'): """ Return rows of dicts from a finance data frame for scraperwiki.sqlite. service can also be 'yahoo', start and end are datetimes. """ frame = get_stock(stock, start, end, service) rows = [] for idx in range(len(frame)): current_row_as_dict = frame.ix[idx].to_dict() # have to convert dates because these are Pandas timestamps and # dumptruck doesn't support them current_row_as_dict['Date'] = frame.index[idx].to_datetime() current_row_as_dict['Stock'] = stock # horrible hack because data values are numpy.float64 and dumptruck # doesn't support them for key in current_row_as_dict: if isinstance(current_row_as_dict[key], numpy.float64): current_row_as_dict[key] = float(current_row_as_dict[key]) rows.append(current_row_as_dict) return rows def main(): """ Dump stock data into scraperwiki.sqlite using pandas.io.data. """ # arbitrary start chosen start = datetime.datetime(2014, 3, 1) end = datetime.datetime.today() stock_list = ['TWTR', 'FB'] rows = [] for stock in stock_list: rows.extend(parse_finance_frame(stock, start, end)) scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date']) if __name__ == '__main__': main()
import datetime import sqlite3 import pandas.io.data as web import pandas.io.sql as sql def get_stock(stock, start, end): """ Return data frame of Yahoo Finance data for stock. Takes start and end datetimes. """ return web.DataReader(stock, 'yahoo', start, end) def scrape_stock(stock, start, end): sqlite_db.execute("drop table if exists {};".format(stock)) frame = (get_stock(stock, start, end)) # make Date not an index so it appears in table frame = frame.reset_index() # force Date datetime to string frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat()) sql.write_frame(frame, stock, sqlite_db) def main(): global sqlite_db sqlite_db = sqlite3.connect("scraperwiki.sqlite") start = datetime.datetime(2014, 3, 1) end = datetime.datetime.today() for ticker in ['TWTR', 'FB']: scrape_stock(ticker, start, end) if __name__ == '__main__': main()
Use pandas native saving by forcing date to not be index, and be string
Use pandas native saving by forcing date to not be index, and be string
Python
agpl-3.0
scraperwiki/stock-tool,scraperwiki/stock-tool
import datetime + import sqlite3 - - import scraperwiki - import numpy import pandas.io.data as web + import pandas.io.sql as sql - def get_stock(stock, start, end, service): + def get_stock(stock, start, end): """ - Return data frame of finance data for stock. + Return data frame of Yahoo Finance data for stock. - Takes start and end datetimes, and service name of 'google' or 'yahoo'. + Takes start and end datetimes. """ - return web.DataReader(stock, service, start, end) + return web.DataReader(stock, 'yahoo', start, end) + def scrape_stock(stock, start, end): + sqlite_db.execute("drop table if exists {};".format(stock)) - - def parse_finance_frame(stock, start, end, service='google'): - """ - Return rows of dicts from a finance data frame for scraperwiki.sqlite. - - service can also be 'yahoo', start and end are datetimes. - """ - frame = get_stock(stock, start, end, service) + frame = (get_stock(stock, start, end)) + # make Date not an index so it appears in table + frame = frame.reset_index() + # force Date datetime to string + frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat()) + sql.write_frame(frame, stock, sqlite_db) - rows = [] - for idx in range(len(frame)): - current_row_as_dict = frame.ix[idx].to_dict() - # have to convert dates because these are Pandas timestamps and - # dumptruck doesn't support them - current_row_as_dict['Date'] = frame.index[idx].to_datetime() - current_row_as_dict['Stock'] = stock - # horrible hack because data values are numpy.float64 and dumptruck - # doesn't support them - for key in current_row_as_dict: - if isinstance(current_row_as_dict[key], numpy.float64): - current_row_as_dict[key] = float(current_row_as_dict[key]) - rows.append(current_row_as_dict) - return rows - def main(): + global sqlite_db + sqlite_db = sqlite3.connect("scraperwiki.sqlite") - """ - Dump stock data into scraperwiki.sqlite using pandas.io.data. 
- """ - # arbitrary start chosen start = datetime.datetime(2014, 3, 1) end = datetime.datetime.today() + for ticker in ['TWTR', 'FB']: + scrape_stock(ticker, start, end) + - - stock_list = ['TWTR', 'FB'] - rows = [] - for stock in stock_list: - rows.extend(parse_finance_frame(stock, start, end)) - scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date']) - if __name__ == '__main__': main()
Use pandas native saving by forcing date to not be index, and be string
## Code Before: import datetime import scraperwiki import numpy import pandas.io.data as web def get_stock(stock, start, end, service): """ Return data frame of finance data for stock. Takes start and end datetimes, and service name of 'google' or 'yahoo'. """ return web.DataReader(stock, service, start, end) def parse_finance_frame(stock, start, end, service='google'): """ Return rows of dicts from a finance data frame for scraperwiki.sqlite. service can also be 'yahoo', start and end are datetimes. """ frame = get_stock(stock, start, end, service) rows = [] for idx in range(len(frame)): current_row_as_dict = frame.ix[idx].to_dict() # have to convert dates because these are Pandas timestamps and # dumptruck doesn't support them current_row_as_dict['Date'] = frame.index[idx].to_datetime() current_row_as_dict['Stock'] = stock # horrible hack because data values are numpy.float64 and dumptruck # doesn't support them for key in current_row_as_dict: if isinstance(current_row_as_dict[key], numpy.float64): current_row_as_dict[key] = float(current_row_as_dict[key]) rows.append(current_row_as_dict) return rows def main(): """ Dump stock data into scraperwiki.sqlite using pandas.io.data. """ # arbitrary start chosen start = datetime.datetime(2014, 3, 1) end = datetime.datetime.today() stock_list = ['TWTR', 'FB'] rows = [] for stock in stock_list: rows.extend(parse_finance_frame(stock, start, end)) scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date']) if __name__ == '__main__': main() ## Instruction: Use pandas native saving by forcing date to not be index, and be string ## Code After: import datetime import sqlite3 import pandas.io.data as web import pandas.io.sql as sql def get_stock(stock, start, end): """ Return data frame of Yahoo Finance data for stock. Takes start and end datetimes. 
""" return web.DataReader(stock, 'yahoo', start, end) def scrape_stock(stock, start, end): sqlite_db.execute("drop table if exists {};".format(stock)) frame = (get_stock(stock, start, end)) # make Date not an index so it appears in table frame = frame.reset_index() # force Date datetime to string frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat()) sql.write_frame(frame, stock, sqlite_db) def main(): global sqlite_db sqlite_db = sqlite3.connect("scraperwiki.sqlite") start = datetime.datetime(2014, 3, 1) end = datetime.datetime.today() for ticker in ['TWTR', 'FB']: scrape_stock(ticker, start, end) if __name__ == '__main__': main()
import datetime + import sqlite3 - - import scraperwiki - import numpy import pandas.io.data as web + import pandas.io.sql as sql - def get_stock(stock, start, end, service): ? --------- + def get_stock(stock, start, end): """ - Return data frame of finance data for stock. ? ^ + Return data frame of Yahoo Finance data for stock. ? ^^^^^^^ - Takes start and end datetimes, and service name of 'google' or 'yahoo'. + Takes start and end datetimes. """ - return web.DataReader(stock, service, start, end) ? ^^^^^^^ + return web.DataReader(stock, 'yahoo', start, end) ? ^^^^^^^ + def scrape_stock(stock, start, end): + sqlite_db.execute("drop table if exists {};".format(stock)) - - def parse_finance_frame(stock, start, end, service='google'): - """ - Return rows of dicts from a finance data frame for scraperwiki.sqlite. - - service can also be 'yahoo', start and end are datetimes. - """ - frame = get_stock(stock, start, end, service) ? --------- + frame = (get_stock(stock, start, end)) ? + + + # make Date not an index so it appears in table + frame = frame.reset_index() + # force Date datetime to string + frame[['Date']] = frame[['Date']].applymap(lambda x: x.isoformat()) + sql.write_frame(frame, stock, sqlite_db) - rows = [] - for idx in range(len(frame)): - current_row_as_dict = frame.ix[idx].to_dict() - # have to convert dates because these are Pandas timestamps and - # dumptruck doesn't support them - current_row_as_dict['Date'] = frame.index[idx].to_datetime() - current_row_as_dict['Stock'] = stock - # horrible hack because data values are numpy.float64 and dumptruck - # doesn't support them - for key in current_row_as_dict: - if isinstance(current_row_as_dict[key], numpy.float64): - current_row_as_dict[key] = float(current_row_as_dict[key]) - rows.append(current_row_as_dict) - return rows - def main(): + global sqlite_db + sqlite_db = sqlite3.connect("scraperwiki.sqlite") - """ - Dump stock data into scraperwiki.sqlite using pandas.io.data. 
- """ - # arbitrary start chosen start = datetime.datetime(2014, 3, 1) end = datetime.datetime.today() + for ticker in ['TWTR', 'FB']: + scrape_stock(ticker, start, end) + - - stock_list = ['TWTR', 'FB'] - rows = [] - for stock in stock_list: - rows.extend(parse_finance_frame(stock, start, end)) - scraperwiki.sqlite.save(data=rows, unique_keys=['Stock', 'Date']) - if __name__ == '__main__': main()
da91f170c106c46a0d858e887220bc691066cdaa
tests/dtypes_test.py
tests/dtypes_test.py
from common import * def test_dtype(ds_local): ds = ds_local for name in ds.column_names: assert ds[name].values.dtype == ds.dtype(ds[name]) def test_dtypes(ds_local): ds = ds_local all_dtypes = [np.float64, np.float64, np.float64, np.float64, np.int64, np.int64, 'S25', np.object] np.testing.assert_array_equal(ds.dtypes(columns=None), all_dtypes) some_dtypes = [np.float64, np.int64, 'S25', np.object] np.testing.assert_array_equal(ds.dtypes(columns=['x', 'mi', 'name', 'obj']), some_dtypes)
from common import * def test_dtype(ds_local): ds = ds_local for name in ds.column_names: assert ds[name].values.dtype == ds.dtype(ds[name]) def test_dtypes(ds_local): ds = ds_local assert (ds.dtypes.values == [ds[name].dtype for name in ds.column_names]).all()
Update of the dtypes unit-test.
Update of the dtypes unit-test.
Python
mit
maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex
from common import * def test_dtype(ds_local): ds = ds_local for name in ds.column_names: - assert ds[name].values.dtype == ds.dtype(ds[name]) + assert ds[name].values.dtype == ds.dtype(ds[name]) def test_dtypes(ds_local): ds = ds_local + assert (ds.dtypes.values == [ds[name].dtype for name in ds.column_names]).all() - all_dtypes = [np.float64, np.float64, np.float64, np.float64, np.int64, np.int64, 'S25', np.object] - np.testing.assert_array_equal(ds.dtypes(columns=None), all_dtypes) - some_dtypes = [np.float64, np.int64, 'S25', np.object] - np.testing.assert_array_equal(ds.dtypes(columns=['x', 'mi', 'name', 'obj']), some_dtypes)
Update of the dtypes unit-test.
## Code Before: from common import * def test_dtype(ds_local): ds = ds_local for name in ds.column_names: assert ds[name].values.dtype == ds.dtype(ds[name]) def test_dtypes(ds_local): ds = ds_local all_dtypes = [np.float64, np.float64, np.float64, np.float64, np.int64, np.int64, 'S25', np.object] np.testing.assert_array_equal(ds.dtypes(columns=None), all_dtypes) some_dtypes = [np.float64, np.int64, 'S25', np.object] np.testing.assert_array_equal(ds.dtypes(columns=['x', 'mi', 'name', 'obj']), some_dtypes) ## Instruction: Update of the dtypes unit-test. ## Code After: from common import * def test_dtype(ds_local): ds = ds_local for name in ds.column_names: assert ds[name].values.dtype == ds.dtype(ds[name]) def test_dtypes(ds_local): ds = ds_local assert (ds.dtypes.values == [ds[name].dtype for name in ds.column_names]).all()
from common import * def test_dtype(ds_local): ds = ds_local for name in ds.column_names: - assert ds[name].values.dtype == ds.dtype(ds[name]) ? -- + assert ds[name].values.dtype == ds.dtype(ds[name]) def test_dtypes(ds_local): ds = ds_local + assert (ds.dtypes.values == [ds[name].dtype for name in ds.column_names]).all() - all_dtypes = [np.float64, np.float64, np.float64, np.float64, np.int64, np.int64, 'S25', np.object] - np.testing.assert_array_equal(ds.dtypes(columns=None), all_dtypes) - some_dtypes = [np.float64, np.int64, 'S25', np.object] - np.testing.assert_array_equal(ds.dtypes(columns=['x', 'mi', 'name', 'obj']), some_dtypes)
a79801b4f67f3372231b1df2c73e52a7cc6b35a3
perfiles_usuario/models.py
perfiles_usuario/models.py
from django.db import models from django.conf import settings from django.contrib.auth.models import Group from .utils import CAPTURISTA_GROUP class Capturista(models.Model): """ Extension of Django's User Model for Capturistas. We extend the Django User Model to identify Capturistas since they have relations with other models and close interaction with the API. Attributes: ---------- user : django.contrib.auth.models.User A mock user to use across all tests. activo : BooleanField Indicates whether the profile is active or not. """ user = models.OneToOneField(settings.AUTH_USER_MODEL) activo = models.BooleanField(default=True) def save(self, *args, **kwargs): """ Override the save method to add the capturista group. """ user_group = Group.objects.get_or_create(name=CAPTURISTA_GROUP)[0] self.user.groups.add(user_group) return super(Capturista, self).save(*args, **kwargs)
from django.db import models from django.conf import settings from django.contrib.auth.models import Group from .utils import CAPTURISTA_GROUP class Capturista(models.Model): """ Extension of Django's User Model for Capturistas. We extend the Django User Model to identify Capturistas since they have relations with other models and close interaction with the API. Attributes: ---------- user : django.contrib.auth.models.User The django User related to Capturista (i.e. contains the actual user information). activo : BooleanField Indicates whether the profile is active or not. """ user = models.OneToOneField(settings.AUTH_USER_MODEL) activo = models.BooleanField(default=True) def save(self, *args, **kwargs): """ Override the save method to add the capturista group. """ user_group = Group.objects.get_or_create(name=CAPTURISTA_GROUP)[0] self.user.groups.add(user_group) return super(Capturista, self).save(*args, **kwargs)
Fix user documentation in Capturista.
Fix user documentation in Capturista.
Python
mit
erikiado/jp2_online,erikiado/jp2_online,erikiado/jp2_online
from django.db import models from django.conf import settings from django.contrib.auth.models import Group from .utils import CAPTURISTA_GROUP class Capturista(models.Model): """ Extension of Django's User Model for Capturistas. We extend the Django User Model to identify Capturistas since they have relations with other models and close interaction with the API. Attributes: ---------- user : django.contrib.auth.models.User - A mock user to use across all tests. + The django User related to Capturista (i.e. contains the actual user information). activo : BooleanField Indicates whether the profile is active or not. """ user = models.OneToOneField(settings.AUTH_USER_MODEL) activo = models.BooleanField(default=True) def save(self, *args, **kwargs): """ Override the save method to add the capturista group. """ user_group = Group.objects.get_or_create(name=CAPTURISTA_GROUP)[0] self.user.groups.add(user_group) return super(Capturista, self).save(*args, **kwargs)
Fix user documentation in Capturista.
## Code Before: from django.db import models from django.conf import settings from django.contrib.auth.models import Group from .utils import CAPTURISTA_GROUP class Capturista(models.Model): """ Extension of Django's User Model for Capturistas. We extend the Django User Model to identify Capturistas since they have relations with other models and close interaction with the API. Attributes: ---------- user : django.contrib.auth.models.User A mock user to use across all tests. activo : BooleanField Indicates whether the profile is active or not. """ user = models.OneToOneField(settings.AUTH_USER_MODEL) activo = models.BooleanField(default=True) def save(self, *args, **kwargs): """ Override the save method to add the capturista group. """ user_group = Group.objects.get_or_create(name=CAPTURISTA_GROUP)[0] self.user.groups.add(user_group) return super(Capturista, self).save(*args, **kwargs) ## Instruction: Fix user documentation in Capturista. ## Code After: from django.db import models from django.conf import settings from django.contrib.auth.models import Group from .utils import CAPTURISTA_GROUP class Capturista(models.Model): """ Extension of Django's User Model for Capturistas. We extend the Django User Model to identify Capturistas since they have relations with other models and close interaction with the API. Attributes: ---------- user : django.contrib.auth.models.User The django User related to Capturista (i.e. contains the actual user information). activo : BooleanField Indicates whether the profile is active or not. """ user = models.OneToOneField(settings.AUTH_USER_MODEL) activo = models.BooleanField(default=True) def save(self, *args, **kwargs): """ Override the save method to add the capturista group. """ user_group = Group.objects.get_or_create(name=CAPTURISTA_GROUP)[0] self.user.groups.add(user_group) return super(Capturista, self).save(*args, **kwargs)
from django.db import models from django.conf import settings from django.contrib.auth.models import Group from .utils import CAPTURISTA_GROUP class Capturista(models.Model): """ Extension of Django's User Model for Capturistas. We extend the Django User Model to identify Capturistas since they have relations with other models and close interaction with the API. Attributes: ---------- user : django.contrib.auth.models.User - A mock user to use across all tests. + The django User related to Capturista (i.e. contains the actual user information). activo : BooleanField Indicates whether the profile is active or not. """ user = models.OneToOneField(settings.AUTH_USER_MODEL) activo = models.BooleanField(default=True) def save(self, *args, **kwargs): """ Override the save method to add the capturista group. """ user_group = Group.objects.get_or_create(name=CAPTURISTA_GROUP)[0] self.user.groups.add(user_group) return super(Capturista, self).save(*args, **kwargs)
a8c8b136f081e3a2c7f1fd1f833a85288a358e42
vumi_http_retry/workers/api/validate.py
vumi_http_retry/workers/api/validate.py
import json from functools import wraps from twisted.web import http from jsonschema import Draft4Validator from vumi_http_retry.workers.api.utils import response def validate(*validators): def validator(fn): @wraps(fn) def wrapper(api, req, *a, **kw): errors = [] for v in validators: errors.extend(v(req, *a, **kw) or []) if not errors: return fn(api, req, *a, **kw) else: return response(req, {'errors': errors}, code=http.BAD_REQUEST) return wrapper return validator def has_header(name): def validator(req): if not req.requestHeaders.hasHeader(name): return [{ 'type': 'header_missing', 'message': "Header '%s' is missing" % (name,) }] else: return [] return validator def body_schema(schema): json_validator = Draft4Validator(schema) def validator(req, body): return [{ 'type': 'invalid_body', 'message': e.message } for e in json_validator.iter_errors(body)] return validator
import json from functools import wraps from twisted.web import http from jsonschema import Draft4Validator from vumi_http_retry.workers.api.utils import response def validate(*validators): def validator(fn): @wraps(fn) def wrapper(api, req, *a, **kw): errors = [] for v in validators: errors.extend(v(req, *a, **kw) or []) if not errors: return fn(api, req, *a, **kw) else: return response(req, {'errors': errors}, code=http.BAD_REQUEST) return wrapper return validator def has_header(name): def validator(req, *a, **kw): if not req.requestHeaders.hasHeader(name): return [{ 'type': 'header_missing', 'message': "Header '%s' is missing" % (name,) }] else: return [] return validator def body_schema(schema): json_validator = Draft4Validator(schema) def validator(req, body, *a, **kw): return [{ 'type': 'invalid_body', 'message': e.message } for e in json_validator.iter_errors(body)] return validator
Change validators to allow additional arguments to be given to the functions they are wrapping
Change validators to allow additional arguments to be given to the functions they are wrapping
Python
bsd-3-clause
praekelt/vumi-http-retry-api,praekelt/vumi-http-retry-api
import json from functools import wraps from twisted.web import http from jsonschema import Draft4Validator from vumi_http_retry.workers.api.utils import response def validate(*validators): def validator(fn): @wraps(fn) def wrapper(api, req, *a, **kw): errors = [] for v in validators: errors.extend(v(req, *a, **kw) or []) if not errors: return fn(api, req, *a, **kw) else: return response(req, {'errors': errors}, code=http.BAD_REQUEST) return wrapper return validator def has_header(name): - def validator(req): + def validator(req, *a, **kw): if not req.requestHeaders.hasHeader(name): return [{ 'type': 'header_missing', 'message': "Header '%s' is missing" % (name,) }] else: return [] return validator def body_schema(schema): json_validator = Draft4Validator(schema) - def validator(req, body): + def validator(req, body, *a, **kw): return [{ 'type': 'invalid_body', 'message': e.message } for e in json_validator.iter_errors(body)] return validator
Change validators to allow additional arguments to be given to the functions they are wrapping
## Code Before: import json from functools import wraps from twisted.web import http from jsonschema import Draft4Validator from vumi_http_retry.workers.api.utils import response def validate(*validators): def validator(fn): @wraps(fn) def wrapper(api, req, *a, **kw): errors = [] for v in validators: errors.extend(v(req, *a, **kw) or []) if not errors: return fn(api, req, *a, **kw) else: return response(req, {'errors': errors}, code=http.BAD_REQUEST) return wrapper return validator def has_header(name): def validator(req): if not req.requestHeaders.hasHeader(name): return [{ 'type': 'header_missing', 'message': "Header '%s' is missing" % (name,) }] else: return [] return validator def body_schema(schema): json_validator = Draft4Validator(schema) def validator(req, body): return [{ 'type': 'invalid_body', 'message': e.message } for e in json_validator.iter_errors(body)] return validator ## Instruction: Change validators to allow additional arguments to be given to the functions they are wrapping ## Code After: import json from functools import wraps from twisted.web import http from jsonschema import Draft4Validator from vumi_http_retry.workers.api.utils import response def validate(*validators): def validator(fn): @wraps(fn) def wrapper(api, req, *a, **kw): errors = [] for v in validators: errors.extend(v(req, *a, **kw) or []) if not errors: return fn(api, req, *a, **kw) else: return response(req, {'errors': errors}, code=http.BAD_REQUEST) return wrapper return validator def has_header(name): def validator(req, *a, **kw): if not req.requestHeaders.hasHeader(name): return [{ 'type': 'header_missing', 'message': "Header '%s' is missing" % (name,) }] else: return [] return validator def body_schema(schema): json_validator = Draft4Validator(schema) def validator(req, body, *a, **kw): return [{ 'type': 'invalid_body', 'message': e.message } for e in json_validator.iter_errors(body)] return validator
import json from functools import wraps from twisted.web import http from jsonschema import Draft4Validator from vumi_http_retry.workers.api.utils import response def validate(*validators): def validator(fn): @wraps(fn) def wrapper(api, req, *a, **kw): errors = [] for v in validators: errors.extend(v(req, *a, **kw) or []) if not errors: return fn(api, req, *a, **kw) else: return response(req, {'errors': errors}, code=http.BAD_REQUEST) return wrapper return validator def has_header(name): - def validator(req): + def validator(req, *a, **kw): ? ++++++++++ if not req.requestHeaders.hasHeader(name): return [{ 'type': 'header_missing', 'message': "Header '%s' is missing" % (name,) }] else: return [] return validator def body_schema(schema): json_validator = Draft4Validator(schema) - def validator(req, body): + def validator(req, body, *a, **kw): ? ++++++++++ return [{ 'type': 'invalid_body', 'message': e.message } for e in json_validator.iter_errors(body)] return validator
b60fb0db2cc1ab3605f34e9b604e920279434c36
vterm_test.py
vterm_test.py
import urwid def main(): event_loop = urwid.SelectEventLoop() mainframe = urwid.Frame( urwid.Columns([ ('fixed', 3, urwid.SolidFill('|')), urwid.Pile([ ('weight', 70, urwid.TerminalWidget(None, event_loop)), ('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))), ]), ('fixed', 3, urwid.SolidFill('|')), ], box_columns=[1]), header=urwid.Columns([ ('fixed', 3, urwid.Text('.,:')), urwid.Divider('-'), ('fixed', 3, urwid.Text(':,.')), ]), footer=urwid.Columns([ ('fixed', 3, urwid.Text('`"*')), urwid.Divider('-'), ('fixed', 3, urwid.Text('*"\'')), ]), ) def quit(key): if key in ('q', 'Q'): raise urwid.ExitMainLoop() loop = urwid.MainLoop( mainframe, unhandled_input=quit, event_loop=event_loop ).run() if __name__ == '__main__': main()
import urwid def main(): event_loop = urwid.SelectEventLoop() mainframe = urwid.Frame( urwid.Columns([ ('fixed', 3, urwid.SolidFill('|')), urwid.Pile([ ('weight', 70, urwid.TerminalWidget(None, event_loop)), ('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))), ]), ('fixed', 3, urwid.SolidFill('|')), ], box_columns=[1]), header=urwid.Columns([ ('fixed', 3, urwid.Text('.,:')), urwid.Divider('-'), ('fixed', 3, urwid.Text(':,.')), ]), footer=urwid.Columns([ ('fixed', 3, urwid.Text('`"*')), urwid.Divider('-'), ('fixed', 3, urwid.Text('*"\'')), ]), ) def quit(key): if key in ('q', 'Q'): raise urwid.ExitMainLoop() loop = urwid.MainLoop( mainframe, handle_mouse=False, unhandled_input=quit, event_loop=event_loop ).run() if __name__ == '__main__': main()
Disable mouse handling in vterm example.
Disable mouse handling in vterm example.
Python
lgpl-2.1
westurner/urwid,wardi/urwid,zyga/urwid,douglas-larocca/urwid,harlowja/urwid,drestebon/urwid,hkoof/urwid,bk2204/urwid,urwid/urwid,drestebon/urwid,inducer/urwid,hkoof/urwid,hkoof/urwid,bk2204/urwid,rndusr/urwid,rndusr/urwid,zyga/urwid,westurner/urwid,inducer/urwid,rndusr/urwid,mountainstorm/urwid,foreni-packages/urwid,foreni-packages/urwid,ivanov/urwid,tonycpsu/urwid,harlowja/urwid,zyga/urwid,drestebon/urwid,tonycpsu/urwid,mountainstorm/urwid,urwid/urwid,foreni-packages/urwid,douglas-larocca/urwid,ivanov/urwid,douglas-larocca/urwid,Julian/urwid,westurner/urwid,harlowja/urwid,wardi/urwid,Julian/urwid,Julian/urwid,ivanov/urwid,mountainstorm/urwid,tonycpsu/urwid,urwid/urwid,inducer/urwid,wardi/urwid,bk2204/urwid
import urwid def main(): event_loop = urwid.SelectEventLoop() mainframe = urwid.Frame( urwid.Columns([ ('fixed', 3, urwid.SolidFill('|')), urwid.Pile([ ('weight', 70, urwid.TerminalWidget(None, event_loop)), ('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))), ]), ('fixed', 3, urwid.SolidFill('|')), ], box_columns=[1]), header=urwid.Columns([ ('fixed', 3, urwid.Text('.,:')), urwid.Divider('-'), ('fixed', 3, urwid.Text(':,.')), ]), footer=urwid.Columns([ ('fixed', 3, urwid.Text('`"*')), urwid.Divider('-'), ('fixed', 3, urwid.Text('*"\'')), ]), ) def quit(key): if key in ('q', 'Q'): raise urwid.ExitMainLoop() loop = urwid.MainLoop( mainframe, + handle_mouse=False, unhandled_input=quit, event_loop=event_loop ).run() if __name__ == '__main__': main()
Disable mouse handling in vterm example.
## Code Before: import urwid def main(): event_loop = urwid.SelectEventLoop() mainframe = urwid.Frame( urwid.Columns([ ('fixed', 3, urwid.SolidFill('|')), urwid.Pile([ ('weight', 70, urwid.TerminalWidget(None, event_loop)), ('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))), ]), ('fixed', 3, urwid.SolidFill('|')), ], box_columns=[1]), header=urwid.Columns([ ('fixed', 3, urwid.Text('.,:')), urwid.Divider('-'), ('fixed', 3, urwid.Text(':,.')), ]), footer=urwid.Columns([ ('fixed', 3, urwid.Text('`"*')), urwid.Divider('-'), ('fixed', 3, urwid.Text('*"\'')), ]), ) def quit(key): if key in ('q', 'Q'): raise urwid.ExitMainLoop() loop = urwid.MainLoop( mainframe, unhandled_input=quit, event_loop=event_loop ).run() if __name__ == '__main__': main() ## Instruction: Disable mouse handling in vterm example. ## Code After: import urwid def main(): event_loop = urwid.SelectEventLoop() mainframe = urwid.Frame( urwid.Columns([ ('fixed', 3, urwid.SolidFill('|')), urwid.Pile([ ('weight', 70, urwid.TerminalWidget(None, event_loop)), ('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))), ]), ('fixed', 3, urwid.SolidFill('|')), ], box_columns=[1]), header=urwid.Columns([ ('fixed', 3, urwid.Text('.,:')), urwid.Divider('-'), ('fixed', 3, urwid.Text(':,.')), ]), footer=urwid.Columns([ ('fixed', 3, urwid.Text('`"*')), urwid.Divider('-'), ('fixed', 3, urwid.Text('*"\'')), ]), ) def quit(key): if key in ('q', 'Q'): raise urwid.ExitMainLoop() loop = urwid.MainLoop( mainframe, handle_mouse=False, unhandled_input=quit, event_loop=event_loop ).run() if __name__ == '__main__': main()
import urwid def main(): event_loop = urwid.SelectEventLoop() mainframe = urwid.Frame( urwid.Columns([ ('fixed', 3, urwid.SolidFill('|')), urwid.Pile([ ('weight', 70, urwid.TerminalWidget(None, event_loop)), ('fixed', 1, urwid.Filler(urwid.Edit('focus test edit: '))), ]), ('fixed', 3, urwid.SolidFill('|')), ], box_columns=[1]), header=urwid.Columns([ ('fixed', 3, urwid.Text('.,:')), urwid.Divider('-'), ('fixed', 3, urwid.Text(':,.')), ]), footer=urwid.Columns([ ('fixed', 3, urwid.Text('`"*')), urwid.Divider('-'), ('fixed', 3, urwid.Text('*"\'')), ]), ) def quit(key): if key in ('q', 'Q'): raise urwid.ExitMainLoop() loop = urwid.MainLoop( mainframe, + handle_mouse=False, unhandled_input=quit, event_loop=event_loop ).run() if __name__ == '__main__': main()
78154a63e86774fb8952f42883f7788e94d0c8d2
lib/spack/spack/operating_systems/linux_distro.py
lib/spack/spack/operating_systems/linux_distro.py
import re from external.distro import linux_distribution from spack.architecture import OperatingSystem class LinuxDistro(OperatingSystem): """ This class will represent the autodetected operating system for a Linux System. Since there are many different flavors of Linux, this class will attempt to encompass them all through autodetection using the python module platform and the method platform.dist() """ def __init__(self): distname, version, _ = linux_distribution( full_distribution_name=False) distname, version = str(distname), str(version) # Grabs major version from tuple on redhat; on other platforms # grab the first legal identifier in the version field. On # debian you get things like 'wheezy/sid'; sid means unstable. # We just record 'wheezy' and don't get quite so detailed. version = re.split(r'[^\w-]', version)[0] super(LinuxDistro, self).__init__(distname, version)
import re from spack.architecture import OperatingSystem class LinuxDistro(OperatingSystem): """ This class will represent the autodetected operating system for a Linux System. Since there are many different flavors of Linux, this class will attempt to encompass them all through autodetection using the python module platform and the method platform.dist() """ def __init__(self): try: # This will throw an error if imported on a non-Linux platform. from external.distro import linux_distribution distname, version, _ = linux_distribution( full_distribution_name=False) distname, version = str(distname), str(version) except ImportError as e: distname, version = 'unknown', '' # Grabs major version from tuple on redhat; on other platforms # grab the first legal identifier in the version field. On # debian you get things like 'wheezy/sid'; sid means unstable. # We just record 'wheezy' and don't get quite so detailed. version = re.split(r'[^\w-]', version)[0] super(LinuxDistro, self).__init__(distname, version)
Fix bug in distribution detection on unsupported platforms.
Fix bug in distribution detection on unsupported platforms.
Python
lgpl-2.1
EmreAtes/spack,TheTimmy/spack,mfherbst/spack,tmerrick1/spack,iulian787/spack,skosukhin/spack,matthiasdiener/spack,mfherbst/spack,lgarren/spack,EmreAtes/spack,iulian787/spack,EmreAtes/spack,matthiasdiener/spack,mfherbst/spack,LLNL/spack,TheTimmy/spack,skosukhin/spack,skosukhin/spack,LLNL/spack,krafczyk/spack,krafczyk/spack,mfherbst/spack,matthiasdiener/spack,LLNL/spack,tmerrick1/spack,krafczyk/spack,TheTimmy/spack,iulian787/spack,krafczyk/spack,TheTimmy/spack,matthiasdiener/spack,lgarren/spack,iulian787/spack,tmerrick1/spack,EmreAtes/spack,mfherbst/spack,matthiasdiener/spack,lgarren/spack,tmerrick1/spack,iulian787/spack,lgarren/spack,lgarren/spack,krafczyk/spack,skosukhin/spack,LLNL/spack,EmreAtes/spack,skosukhin/spack,TheTimmy/spack,tmerrick1/spack,LLNL/spack
import re - from external.distro import linux_distribution from spack.architecture import OperatingSystem class LinuxDistro(OperatingSystem): """ This class will represent the autodetected operating system for a Linux System. Since there are many different flavors of Linux, this class will attempt to encompass them all through autodetection using the python module platform and the method platform.dist() """ def __init__(self): + try: + # This will throw an error if imported on a non-Linux platform. + from external.distro import linux_distribution - distname, version, _ = linux_distribution( + distname, version, _ = linux_distribution( - full_distribution_name=False) + full_distribution_name=False) - distname, version = str(distname), str(version) + distname, version = str(distname), str(version) + except ImportError as e: + distname, version = 'unknown', '' # Grabs major version from tuple on redhat; on other platforms # grab the first legal identifier in the version field. On # debian you get things like 'wheezy/sid'; sid means unstable. # We just record 'wheezy' and don't get quite so detailed. version = re.split(r'[^\w-]', version)[0] super(LinuxDistro, self).__init__(distname, version)
Fix bug in distribution detection on unsupported platforms.
## Code Before: import re from external.distro import linux_distribution from spack.architecture import OperatingSystem class LinuxDistro(OperatingSystem): """ This class will represent the autodetected operating system for a Linux System. Since there are many different flavors of Linux, this class will attempt to encompass them all through autodetection using the python module platform and the method platform.dist() """ def __init__(self): distname, version, _ = linux_distribution( full_distribution_name=False) distname, version = str(distname), str(version) # Grabs major version from tuple on redhat; on other platforms # grab the first legal identifier in the version field. On # debian you get things like 'wheezy/sid'; sid means unstable. # We just record 'wheezy' and don't get quite so detailed. version = re.split(r'[^\w-]', version)[0] super(LinuxDistro, self).__init__(distname, version) ## Instruction: Fix bug in distribution detection on unsupported platforms. ## Code After: import re from spack.architecture import OperatingSystem class LinuxDistro(OperatingSystem): """ This class will represent the autodetected operating system for a Linux System. Since there are many different flavors of Linux, this class will attempt to encompass them all through autodetection using the python module platform and the method platform.dist() """ def __init__(self): try: # This will throw an error if imported on a non-Linux platform. from external.distro import linux_distribution distname, version, _ = linux_distribution( full_distribution_name=False) distname, version = str(distname), str(version) except ImportError as e: distname, version = 'unknown', '' # Grabs major version from tuple on redhat; on other platforms # grab the first legal identifier in the version field. On # debian you get things like 'wheezy/sid'; sid means unstable. # We just record 'wheezy' and don't get quite so detailed. 
version = re.split(r'[^\w-]', version)[0] super(LinuxDistro, self).__init__(distname, version)
import re - from external.distro import linux_distribution from spack.architecture import OperatingSystem class LinuxDistro(OperatingSystem): """ This class will represent the autodetected operating system for a Linux System. Since there are many different flavors of Linux, this class will attempt to encompass them all through autodetection using the python module platform and the method platform.dist() """ def __init__(self): + try: + # This will throw an error if imported on a non-Linux platform. + from external.distro import linux_distribution - distname, version, _ = linux_distribution( + distname, version, _ = linux_distribution( ? ++++ - full_distribution_name=False) + full_distribution_name=False) ? ++++ - distname, version = str(distname), str(version) + distname, version = str(distname), str(version) ? ++++ + except ImportError as e: + distname, version = 'unknown', '' # Grabs major version from tuple on redhat; on other platforms # grab the first legal identifier in the version field. On # debian you get things like 'wheezy/sid'; sid means unstable. # We just record 'wheezy' and don't get quite so detailed. version = re.split(r'[^\w-]', version)[0] super(LinuxDistro, self).__init__(distname, version)
17fbd2f3fa24da128cb5cabef4a8c94b59b50b0c
sqrl/client/crypt.py
sqrl/client/crypt.py
import ed25519 import hmac from sqrl.utils import baseconv class Crypt: """ Crypt - Creating site specific key pair - Signing SRQL response - Providing public key """ def __init__(self, masterkey): self.masterkey = masterkey def _site_key_pair(self, domain): seed = self._site_seed(domain) sk = ed25519.SigningKey(seed) vk = sk.get_verifying_key() return sk, vk def _site_seed(self, domain): """ Generates a seed to based on the masterkey and the current site you authenicating with The seed is used to generate the key pair used for signing the request body """ key = self.masterkey local_hmac = hmac.new(key) local_hmac.update(domain) return local_hmac.hexdigest() def sign(self, value): signed = self.sk.sign(value) return baseconv.encode(signed) def getPublicKey(self, domain): self.sk, self.vk = self._site_key_pair(domain) key = self.vk.to_bytes() return baseconv.encode(key)
import ed25519 import hmac import baseconv class Crypt: """ Crypt - Creating site specific key pair - Signing SRQL response - Providing public key """ def __init__(self, masterkey): self.masterkey = masterkey def _site_key_pair(self, domain): seed = self._site_seed(domain) sk = ed25519.SigningKey(seed) vk = sk.get_verifying_key() return sk, vk def _site_seed(self, domain): """ Generates a seed to based on the masterkey and the current site you authenicating with The seed is used to generate the key pair used for signing the request body """ key = self.masterkey local_hmac = hmac.new(key) local_hmac.update(domain) return local_hmac.hexdigest() def sign(self, value): signed = self.sk.sign(value) return baseconv.encode(signed) def getPublicKey(self, domain): self.sk, self.vk = self._site_key_pair(domain) key = self.vk.to_bytes() return baseconv.encode(key)
Fix up imports after module has moved
Fix up imports after module has moved
Python
mit
vegarwe/sqrl,vegarwe/sqrl,vegarwe/sqrl,vegarwe/sqrl
import ed25519 import hmac - from sqrl.utils import baseconv + import baseconv class Crypt: """ Crypt - Creating site specific key pair - Signing SRQL response - Providing public key """ def __init__(self, masterkey): self.masterkey = masterkey def _site_key_pair(self, domain): seed = self._site_seed(domain) sk = ed25519.SigningKey(seed) vk = sk.get_verifying_key() return sk, vk def _site_seed(self, domain): """ Generates a seed to based on the masterkey and the current site you authenicating with The seed is used to generate the key pair used for signing the request body """ key = self.masterkey local_hmac = hmac.new(key) local_hmac.update(domain) return local_hmac.hexdigest() def sign(self, value): signed = self.sk.sign(value) return baseconv.encode(signed) def getPublicKey(self, domain): self.sk, self.vk = self._site_key_pair(domain) key = self.vk.to_bytes() return baseconv.encode(key)
Fix up imports after module has moved
## Code Before: import ed25519 import hmac from sqrl.utils import baseconv class Crypt: """ Crypt - Creating site specific key pair - Signing SRQL response - Providing public key """ def __init__(self, masterkey): self.masterkey = masterkey def _site_key_pair(self, domain): seed = self._site_seed(domain) sk = ed25519.SigningKey(seed) vk = sk.get_verifying_key() return sk, vk def _site_seed(self, domain): """ Generates a seed to based on the masterkey and the current site you authenicating with The seed is used to generate the key pair used for signing the request body """ key = self.masterkey local_hmac = hmac.new(key) local_hmac.update(domain) return local_hmac.hexdigest() def sign(self, value): signed = self.sk.sign(value) return baseconv.encode(signed) def getPublicKey(self, domain): self.sk, self.vk = self._site_key_pair(domain) key = self.vk.to_bytes() return baseconv.encode(key) ## Instruction: Fix up imports after module has moved ## Code After: import ed25519 import hmac import baseconv class Crypt: """ Crypt - Creating site specific key pair - Signing SRQL response - Providing public key """ def __init__(self, masterkey): self.masterkey = masterkey def _site_key_pair(self, domain): seed = self._site_seed(domain) sk = ed25519.SigningKey(seed) vk = sk.get_verifying_key() return sk, vk def _site_seed(self, domain): """ Generates a seed to based on the masterkey and the current site you authenicating with The seed is used to generate the key pair used for signing the request body """ key = self.masterkey local_hmac = hmac.new(key) local_hmac.update(domain) return local_hmac.hexdigest() def sign(self, value): signed = self.sk.sign(value) return baseconv.encode(signed) def getPublicKey(self, domain): self.sk, self.vk = self._site_key_pair(domain) key = self.vk.to_bytes() return baseconv.encode(key)
import ed25519 import hmac - from sqrl.utils import baseconv + import baseconv class Crypt: """ Crypt - Creating site specific key pair - Signing SRQL response - Providing public key """ def __init__(self, masterkey): self.masterkey = masterkey def _site_key_pair(self, domain): seed = self._site_seed(domain) sk = ed25519.SigningKey(seed) vk = sk.get_verifying_key() return sk, vk def _site_seed(self, domain): """ Generates a seed to based on the masterkey and the current site you authenicating with The seed is used to generate the key pair used for signing the request body """ key = self.masterkey local_hmac = hmac.new(key) local_hmac.update(domain) return local_hmac.hexdigest() def sign(self, value): signed = self.sk.sign(value) return baseconv.encode(signed) def getPublicKey(self, domain): self.sk, self.vk = self._site_key_pair(domain) key = self.vk.to_bytes() return baseconv.encode(key)
42869823b4af024906606c5caf50e5dc5de69a57
api/mcapi/user/projects.py
api/mcapi/user/projects.py
from ..mcapp import app from ..decorators import apikey, jsonp from flask import g import rethinkdb as r #from .. import dmutil from .. import args from ..utils import error_response @app.route('/v1.0/user/<user>/projects', methods=['GET']) @apikey @jsonp def get_all_projects(user): rr = r.table('projects').filter({'owner': user}) rr = args.add_all_arg_options(rr) items = list(rr.run(g.conn, time_format='raw')) return args.json_as_format_arg(items) @app.route('/v1.0/user/<user>/project/<project_id>/datafiles') @apikey @jsonp def get_all_datafiles_for_project(user, project_id): project = r.table('projects').get(project_id).run(g.conn) if project is None: return error_response(400) if project['owner'] != user: return error_response(400) return ""
from ..mcapp import app from ..decorators import apikey, jsonp from flask import g import rethinkdb as r #from .. import dmutil from .. import args from ..utils import error_response @app.route('/v1.0/user/<user>/projects', methods=['GET']) @apikey @jsonp def get_all_projects(user): rr = r.table('projects').filter({'owner': user}) rr = args.add_all_arg_options(rr) items = list(rr.run(g.conn, time_format='raw')) return args.json_as_format_arg(items) @app.route('/v1.0/user/<user>/projects/<project_id>/datafiles') @apikey @jsonp def get_all_datafiles_for_project(user, project_id): project = r.table('projects').get(project_id).run(g.conn) if project is None: return error_response(400) if project['owner'] != user: return error_response(400) return "" @app.route('/v1.0/user/<user>/projects/<project_id>/datadirs') @apikey @jsonp def get_datadirs_for_project(user, project_id): rr = r.table('project2datadir').filter({'project_id': project_id}) rr = rr.eq_join('project_id', r.table('projects')).zip() rr = rr.eq_join('datadir_id', r.table('datadirs')).zip() selection = list(rr.run(g.conn, time_format='raw')) if len(selection) > 0 and selection[0]['owner'] == user: return args.json_as_format_arg(selection) return args.json_as_format_arg([])
Add call to get datadirs for a project.
Add call to get datadirs for a project.
Python
mit
materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org,materials-commons/materialscommons.org
from ..mcapp import app from ..decorators import apikey, jsonp from flask import g import rethinkdb as r #from .. import dmutil from .. import args from ..utils import error_response @app.route('/v1.0/user/<user>/projects', methods=['GET']) @apikey @jsonp def get_all_projects(user): rr = r.table('projects').filter({'owner': user}) rr = args.add_all_arg_options(rr) items = list(rr.run(g.conn, time_format='raw')) return args.json_as_format_arg(items) - @app.route('/v1.0/user/<user>/project/<project_id>/datafiles') + @app.route('/v1.0/user/<user>/projects/<project_id>/datafiles') @apikey @jsonp def get_all_datafiles_for_project(user, project_id): project = r.table('projects').get(project_id).run(g.conn) if project is None: return error_response(400) if project['owner'] != user: return error_response(400) return "" + @app.route('/v1.0/user/<user>/projects/<project_id>/datadirs') + @apikey + @jsonp + def get_datadirs_for_project(user, project_id): + rr = r.table('project2datadir').filter({'project_id': project_id}) + rr = rr.eq_join('project_id', r.table('projects')).zip() + rr = rr.eq_join('datadir_id', r.table('datadirs')).zip() + selection = list(rr.run(g.conn, time_format='raw')) + if len(selection) > 0 and selection[0]['owner'] == user: + return args.json_as_format_arg(selection) + return args.json_as_format_arg([]) +
Add call to get datadirs for a project.
## Code Before: from ..mcapp import app from ..decorators import apikey, jsonp from flask import g import rethinkdb as r #from .. import dmutil from .. import args from ..utils import error_response @app.route('/v1.0/user/<user>/projects', methods=['GET']) @apikey @jsonp def get_all_projects(user): rr = r.table('projects').filter({'owner': user}) rr = args.add_all_arg_options(rr) items = list(rr.run(g.conn, time_format='raw')) return args.json_as_format_arg(items) @app.route('/v1.0/user/<user>/project/<project_id>/datafiles') @apikey @jsonp def get_all_datafiles_for_project(user, project_id): project = r.table('projects').get(project_id).run(g.conn) if project is None: return error_response(400) if project['owner'] != user: return error_response(400) return "" ## Instruction: Add call to get datadirs for a project. ## Code After: from ..mcapp import app from ..decorators import apikey, jsonp from flask import g import rethinkdb as r #from .. import dmutil from .. import args from ..utils import error_response @app.route('/v1.0/user/<user>/projects', methods=['GET']) @apikey @jsonp def get_all_projects(user): rr = r.table('projects').filter({'owner': user}) rr = args.add_all_arg_options(rr) items = list(rr.run(g.conn, time_format='raw')) return args.json_as_format_arg(items) @app.route('/v1.0/user/<user>/projects/<project_id>/datafiles') @apikey @jsonp def get_all_datafiles_for_project(user, project_id): project = r.table('projects').get(project_id).run(g.conn) if project is None: return error_response(400) if project['owner'] != user: return error_response(400) return "" @app.route('/v1.0/user/<user>/projects/<project_id>/datadirs') @apikey @jsonp def get_datadirs_for_project(user, project_id): rr = r.table('project2datadir').filter({'project_id': project_id}) rr = rr.eq_join('project_id', r.table('projects')).zip() rr = rr.eq_join('datadir_id', r.table('datadirs')).zip() selection = list(rr.run(g.conn, time_format='raw')) if len(selection) > 0 and 
selection[0]['owner'] == user: return args.json_as_format_arg(selection) return args.json_as_format_arg([])
from ..mcapp import app from ..decorators import apikey, jsonp from flask import g import rethinkdb as r #from .. import dmutil from .. import args from ..utils import error_response @app.route('/v1.0/user/<user>/projects', methods=['GET']) @apikey @jsonp def get_all_projects(user): rr = r.table('projects').filter({'owner': user}) rr = args.add_all_arg_options(rr) items = list(rr.run(g.conn, time_format='raw')) return args.json_as_format_arg(items) - @app.route('/v1.0/user/<user>/project/<project_id>/datafiles') + @app.route('/v1.0/user/<user>/projects/<project_id>/datafiles') ? + @apikey @jsonp def get_all_datafiles_for_project(user, project_id): project = r.table('projects').get(project_id).run(g.conn) if project is None: return error_response(400) if project['owner'] != user: return error_response(400) return "" + + @app.route('/v1.0/user/<user>/projects/<project_id>/datadirs') + @apikey + @jsonp + def get_datadirs_for_project(user, project_id): + rr = r.table('project2datadir').filter({'project_id': project_id}) + rr = rr.eq_join('project_id', r.table('projects')).zip() + rr = rr.eq_join('datadir_id', r.table('datadirs')).zip() + selection = list(rr.run(g.conn, time_format='raw')) + if len(selection) > 0 and selection[0]['owner'] == user: + return args.json_as_format_arg(selection) + return args.json_as_format_arg([])
d731ad50b863d32740bec857d46cc0c80e440185
tests/melopy_tests.py
tests/melopy_tests.py
from unittest import TestCase from nose.tools import * from melopy.melopy import * class MelopyTests(TestCase): def test_dummy(self): assert True
from unittest import TestCase from nose.tools import * from melopy import * class LibraryFunctionsTests(TestCase): def test_frequency_from_key(self): key = 49 assert frequency_from_key(key) == 440 def test_frequency_from_note(self): note = 'A4' assert frequency_from_note(note) == 440 def test_key_from_note(self): note = 'A4' assert key_from_note(note) == 49 def test_note_from_key(self): key = 49 assert note_from_key(key) == 'A4' def test_iterate(self): start = 'D4' pattern = [2, 2, 1, 2, 2, 2] should_be = ['D4', 'E4', 'F#4', 'G4', 'A4', 'B4', 'C#5'] assert iterate(start, pattern) == should_be def test_generate_major_scale(self): start = 'D4' should_be = ['D4', 'E4', 'F#4', 'G4', 'A4', 'B4', 'C#5'] assert generate_major_scale(start) == should_be def test_generate_minor_scale(self): start = 'C4' should_be = ['C4', 'D4', 'Eb4', 'F4', 'G4', 'Ab4', 'Bb4'] assert generate_minor_scale(start) == should_be def test_generate_major_triad(self): start = 'A4' should_be = ['A4', 'C#5', 'E5'] assert generate_major_triad(start) == should_be def test_generate_minor_triad(self): start = 'C5' should_be = ['C5', 'Eb5', 'G5'] assert generate_minor_triad(start) == should_be class MelopyTests(TestCase): def test_dummy(self): assert True
Add tests for the library methods. All except 2 pass right now.
Add tests for the library methods. All except 2 pass right now. The two that don't pass, fail because I have changed what their output should be. In the docs, it is shown that the output of `generate_minor_scale`, given 'C4', is: ['C4', 'D4', 'E4', 'F4', 'G4', 'A4', 'A#4'] This is incorrect. The actual minor scale is: ['C4', 'D4', 'Eb4', 'F4', 'G4', 'Ab4', 'Bb4'] The same kind of inconsistency is found in the `generate_minor_triad` output. This is not a proper minor triad: ['C5', 'D#5', 'G5'] because C -> D# is not a minor third interval, it is an augmented second interval. I know, for all practical purposes it will generate the same tone, but my musical OCD can't stand to see it this way lol!
Python
mit
jdan/Melopy,juliowaissman/Melopy
from unittest import TestCase from nose.tools import * - from melopy.melopy import * + from melopy import * + + class LibraryFunctionsTests(TestCase): + def test_frequency_from_key(self): + key = 49 + assert frequency_from_key(key) == 440 + + def test_frequency_from_note(self): + note = 'A4' + assert frequency_from_note(note) == 440 + + def test_key_from_note(self): + note = 'A4' + assert key_from_note(note) == 49 + + def test_note_from_key(self): + key = 49 + assert note_from_key(key) == 'A4' + + def test_iterate(self): + start = 'D4' + pattern = [2, 2, 1, 2, 2, 2] + should_be = ['D4', 'E4', 'F#4', 'G4', 'A4', 'B4', 'C#5'] + assert iterate(start, pattern) == should_be + + def test_generate_major_scale(self): + start = 'D4' + should_be = ['D4', 'E4', 'F#4', 'G4', 'A4', 'B4', 'C#5'] + assert generate_major_scale(start) == should_be + + def test_generate_minor_scale(self): + start = 'C4' + should_be = ['C4', 'D4', 'Eb4', 'F4', 'G4', 'Ab4', 'Bb4'] + assert generate_minor_scale(start) == should_be + + def test_generate_major_triad(self): + start = 'A4' + should_be = ['A4', 'C#5', 'E5'] + assert generate_major_triad(start) == should_be + + def test_generate_minor_triad(self): + start = 'C5' + should_be = ['C5', 'Eb5', 'G5'] + assert generate_minor_triad(start) == should_be + class MelopyTests(TestCase): def test_dummy(self): assert True
Add tests for the library methods. All except 2 pass right now.
## Code Before: from unittest import TestCase from nose.tools import * from melopy.melopy import * class MelopyTests(TestCase): def test_dummy(self): assert True ## Instruction: Add tests for the library methods. All except 2 pass right now. ## Code After: from unittest import TestCase from nose.tools import * from melopy import * class LibraryFunctionsTests(TestCase): def test_frequency_from_key(self): key = 49 assert frequency_from_key(key) == 440 def test_frequency_from_note(self): note = 'A4' assert frequency_from_note(note) == 440 def test_key_from_note(self): note = 'A4' assert key_from_note(note) == 49 def test_note_from_key(self): key = 49 assert note_from_key(key) == 'A4' def test_iterate(self): start = 'D4' pattern = [2, 2, 1, 2, 2, 2] should_be = ['D4', 'E4', 'F#4', 'G4', 'A4', 'B4', 'C#5'] assert iterate(start, pattern) == should_be def test_generate_major_scale(self): start = 'D4' should_be = ['D4', 'E4', 'F#4', 'G4', 'A4', 'B4', 'C#5'] assert generate_major_scale(start) == should_be def test_generate_minor_scale(self): start = 'C4' should_be = ['C4', 'D4', 'Eb4', 'F4', 'G4', 'Ab4', 'Bb4'] assert generate_minor_scale(start) == should_be def test_generate_major_triad(self): start = 'A4' should_be = ['A4', 'C#5', 'E5'] assert generate_major_triad(start) == should_be def test_generate_minor_triad(self): start = 'C5' should_be = ['C5', 'Eb5', 'G5'] assert generate_minor_triad(start) == should_be class MelopyTests(TestCase): def test_dummy(self): assert True
from unittest import TestCase from nose.tools import * - from melopy.melopy import * ? ------- + from melopy import * + + class LibraryFunctionsTests(TestCase): + def test_frequency_from_key(self): + key = 49 + assert frequency_from_key(key) == 440 + + def test_frequency_from_note(self): + note = 'A4' + assert frequency_from_note(note) == 440 + + def test_key_from_note(self): + note = 'A4' + assert key_from_note(note) == 49 + + def test_note_from_key(self): + key = 49 + assert note_from_key(key) == 'A4' + + def test_iterate(self): + start = 'D4' + pattern = [2, 2, 1, 2, 2, 2] + should_be = ['D4', 'E4', 'F#4', 'G4', 'A4', 'B4', 'C#5'] + assert iterate(start, pattern) == should_be + + def test_generate_major_scale(self): + start = 'D4' + should_be = ['D4', 'E4', 'F#4', 'G4', 'A4', 'B4', 'C#5'] + assert generate_major_scale(start) == should_be + + def test_generate_minor_scale(self): + start = 'C4' + should_be = ['C4', 'D4', 'Eb4', 'F4', 'G4', 'Ab4', 'Bb4'] + assert generate_minor_scale(start) == should_be + + def test_generate_major_triad(self): + start = 'A4' + should_be = ['A4', 'C#5', 'E5'] + assert generate_major_triad(start) == should_be + + def test_generate_minor_triad(self): + start = 'C5' + should_be = ['C5', 'Eb5', 'G5'] + assert generate_minor_triad(start) == should_be + class MelopyTests(TestCase): def test_dummy(self): assert True
df4967b5e71e32f70e97d52a320d9b32d70095b7
main.py
main.py
import sys from appscript import * from termcolor import colored, cprint def open(itunes): return itunes.activate() def close(itunes): return itunes.quit() def now_playing(itunes): track = itunes.current_track.get() return print('{} - {}\n{}'.format(colored(track.name(), attrs=['bold']), track.artist(), track.album())) def play(itunes): itunes.play() return now_playing(itunes) def stop(itunes): return itunes.stop() def main(): cmd, is_open, itunes = None if len(sys.argv) == 1 else sys.argv[1], \ app('System Events').processes[its.name == 'iTunes'].count(), \ app('iTunes') if not is_open == 1: open(itunes) cmds = { 'np': now_playing, 'play': play, 'show': open, 'stop': stop, 'close': close } cmd = cmds[cmd] if cmd in cmds else now_playing return cmd(itunes) if __name__ == '__main__': main()
import sys from appscript import * from termcolor import colored, cprint def open(itunes): return itunes.activate() def close(itunes): return itunes.quit() def is_playing(itunes): return itunes.player_state.get() == k.playing def now_playing(itunes): if not is_playing(itunes): return play(itunes) track = itunes.current_track.get() return print('{} - {}\n{}'.format(colored(track.name(), attrs=['bold']), track.artist(), track.album())) def play(itunes): if is_playing(itunes): return play_next(itunes) itunes.play() return now_playing(itunes) def stop(itunes): return itunes.stop() def main(): cmd, is_open, itunes = None if len(sys.argv) == 1 else sys.argv[1], \ app('System Events').processes[its.name == 'iTunes'].count(), \ app('iTunes') if not is_open == 1: open(itunes) cmds = { 'np': now_playing, 'play': play, 'show': open, 'stop': stop, 'close': close } cmd = cmds[cmd] if cmd in cmds else now_playing return cmd(itunes) if __name__ == '__main__': main()
Check if song is_playing before play
Check if song is_playing before play
Python
mit
kshvmdn/nowplaying
import sys from appscript import * from termcolor import colored, cprint def open(itunes): return itunes.activate() def close(itunes): return itunes.quit() + def is_playing(itunes): + return itunes.player_state.get() == k.playing + + def now_playing(itunes): + if not is_playing(itunes): + return play(itunes) track = itunes.current_track.get() return print('{} - {}\n{}'.format(colored(track.name(), attrs=['bold']), track.artist(), track.album())) def play(itunes): + if is_playing(itunes): + return play_next(itunes) itunes.play() return now_playing(itunes) def stop(itunes): return itunes.stop() def main(): cmd, is_open, itunes = None if len(sys.argv) == 1 else sys.argv[1], \ app('System Events').processes[its.name == 'iTunes'].count(), \ app('iTunes') if not is_open == 1: open(itunes) cmds = { 'np': now_playing, 'play': play, 'show': open, 'stop': stop, 'close': close } cmd = cmds[cmd] if cmd in cmds else now_playing return cmd(itunes) if __name__ == '__main__': main()
Check if song is_playing before play
## Code Before: import sys from appscript import * from termcolor import colored, cprint def open(itunes): return itunes.activate() def close(itunes): return itunes.quit() def now_playing(itunes): track = itunes.current_track.get() return print('{} - {}\n{}'.format(colored(track.name(), attrs=['bold']), track.artist(), track.album())) def play(itunes): itunes.play() return now_playing(itunes) def stop(itunes): return itunes.stop() def main(): cmd, is_open, itunes = None if len(sys.argv) == 1 else sys.argv[1], \ app('System Events').processes[its.name == 'iTunes'].count(), \ app('iTunes') if not is_open == 1: open(itunes) cmds = { 'np': now_playing, 'play': play, 'show': open, 'stop': stop, 'close': close } cmd = cmds[cmd] if cmd in cmds else now_playing return cmd(itunes) if __name__ == '__main__': main() ## Instruction: Check if song is_playing before play ## Code After: import sys from appscript import * from termcolor import colored, cprint def open(itunes): return itunes.activate() def close(itunes): return itunes.quit() def is_playing(itunes): return itunes.player_state.get() == k.playing def now_playing(itunes): if not is_playing(itunes): return play(itunes) track = itunes.current_track.get() return print('{} - {}\n{}'.format(colored(track.name(), attrs=['bold']), track.artist(), track.album())) def play(itunes): if is_playing(itunes): return play_next(itunes) itunes.play() return now_playing(itunes) def stop(itunes): return itunes.stop() def main(): cmd, is_open, itunes = None if len(sys.argv) == 1 else sys.argv[1], \ app('System Events').processes[its.name == 'iTunes'].count(), \ app('iTunes') if not is_open == 1: open(itunes) cmds = { 'np': now_playing, 'play': play, 'show': open, 'stop': stop, 'close': close } cmd = cmds[cmd] if cmd in cmds else now_playing return cmd(itunes) if __name__ == '__main__': main()
import sys from appscript import * from termcolor import colored, cprint def open(itunes): return itunes.activate() def close(itunes): return itunes.quit() + def is_playing(itunes): + return itunes.player_state.get() == k.playing + + def now_playing(itunes): + if not is_playing(itunes): + return play(itunes) track = itunes.current_track.get() return print('{} - {}\n{}'.format(colored(track.name(), attrs=['bold']), track.artist(), track.album())) def play(itunes): + if is_playing(itunes): + return play_next(itunes) itunes.play() return now_playing(itunes) def stop(itunes): return itunes.stop() def main(): cmd, is_open, itunes = None if len(sys.argv) == 1 else sys.argv[1], \ app('System Events').processes[its.name == 'iTunes'].count(), \ app('iTunes') if not is_open == 1: open(itunes) cmds = { 'np': now_playing, 'play': play, 'show': open, 'stop': stop, 'close': close } cmd = cmds[cmd] if cmd in cmds else now_playing return cmd(itunes) if __name__ == '__main__': main()
7c5061e4fbf0737ce07f13cb9102cdbbacf73115
pyethapp/tests/test_genesis.py
pyethapp/tests/test_genesis.py
import pytest from ethereum import blocks from ethereum.db import DB from ethereum.config import Env from pyethapp.utils import merge_dict from pyethapp.utils import update_config_from_genesis_json import pyethapp.config as konfig from pyethapp.profiles import PROFILES def check_genesis(profile): config = dict(eth=dict()) # Set config values based on profile selection merge_dict(config, PROFILES[profile]) # Load genesis config update_config_from_genesis_json(config, config['eth']['genesis']) konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}}) print config['eth'].keys() bc = config['eth']['block'] print bc.keys() env = Env(DB(), bc) genesis = blocks.genesis(env) print 'genesis.hash', genesis.hash.encode('hex') print 'expected', config['eth']['genesis_hash'] assert genesis.hash == config['eth']['genesis_hash'].decode('hex') @pytest.mark.xfail # FIXME def test_olympic(): check_genesis('olympic') def test_frontier(): check_genesis('frontier') if __name__ == '__main__': test_genesis()
from pprint import pprint import pytest from ethereum import blocks from ethereum.db import DB from ethereum.config import Env from pyethapp.utils import merge_dict from pyethapp.utils import update_config_from_genesis_json import pyethapp.config as konfig from pyethapp.profiles import PROFILES @pytest.mark.parametrize('profile', PROFILES.keys()) def test_profile(profile): config = dict(eth=dict()) konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}}) # Set config values based on profile selection merge_dict(config, PROFILES[profile]) # Load genesis config update_config_from_genesis_json(config, config['eth']['genesis']) bc = config['eth']['block'] pprint(bc) env = Env(DB(), bc) genesis = blocks.genesis(env) assert genesis.hash.encode('hex') == config['eth']['genesis_hash']
Fix & cleanup profile genesis tests
Fix & cleanup profile genesis tests
Python
mit
ethereum/pyethapp,gsalgado/pyethapp,gsalgado/pyethapp,changwu-tw/pyethapp,RomanZacharia/pyethapp,changwu-tw/pyethapp,RomanZacharia/pyethapp,ethereum/pyethapp
+ from pprint import pprint import pytest from ethereum import blocks from ethereum.db import DB from ethereum.config import Env from pyethapp.utils import merge_dict from pyethapp.utils import update_config_from_genesis_json import pyethapp.config as konfig from pyethapp.profiles import PROFILES - def check_genesis(profile): + @pytest.mark.parametrize('profile', PROFILES.keys()) + def test_profile(profile): config = dict(eth=dict()) + + konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}}) # Set config values based on profile selection merge_dict(config, PROFILES[profile]) # Load genesis config update_config_from_genesis_json(config, config['eth']['genesis']) - konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}}) - - print config['eth'].keys() bc = config['eth']['block'] - print bc.keys() + pprint(bc) env = Env(DB(), bc) genesis = blocks.genesis(env) - print 'genesis.hash', genesis.hash.encode('hex') - print 'expected', config['eth']['genesis_hash'] - assert genesis.hash == config['eth']['genesis_hash'].decode('hex') + assert genesis.hash.encode('hex') == config['eth']['genesis_hash'] - - @pytest.mark.xfail # FIXME - def test_olympic(): - check_genesis('olympic') - - - def test_frontier(): - check_genesis('frontier') - - - if __name__ == '__main__': - test_genesis() -
Fix & cleanup profile genesis tests
## Code Before: import pytest from ethereum import blocks from ethereum.db import DB from ethereum.config import Env from pyethapp.utils import merge_dict from pyethapp.utils import update_config_from_genesis_json import pyethapp.config as konfig from pyethapp.profiles import PROFILES def check_genesis(profile): config = dict(eth=dict()) # Set config values based on profile selection merge_dict(config, PROFILES[profile]) # Load genesis config update_config_from_genesis_json(config, config['eth']['genesis']) konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}}) print config['eth'].keys() bc = config['eth']['block'] print bc.keys() env = Env(DB(), bc) genesis = blocks.genesis(env) print 'genesis.hash', genesis.hash.encode('hex') print 'expected', config['eth']['genesis_hash'] assert genesis.hash == config['eth']['genesis_hash'].decode('hex') @pytest.mark.xfail # FIXME def test_olympic(): check_genesis('olympic') def test_frontier(): check_genesis('frontier') if __name__ == '__main__': test_genesis() ## Instruction: Fix & cleanup profile genesis tests ## Code After: from pprint import pprint import pytest from ethereum import blocks from ethereum.db import DB from ethereum.config import Env from pyethapp.utils import merge_dict from pyethapp.utils import update_config_from_genesis_json import pyethapp.config as konfig from pyethapp.profiles import PROFILES @pytest.mark.parametrize('profile', PROFILES.keys()) def test_profile(profile): config = dict(eth=dict()) konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}}) # Set config values based on profile selection merge_dict(config, PROFILES[profile]) # Load genesis config update_config_from_genesis_json(config, config['eth']['genesis']) bc = config['eth']['block'] pprint(bc) env = Env(DB(), bc) genesis = blocks.genesis(env) assert genesis.hash.encode('hex') == config['eth']['genesis_hash']
+ from pprint import pprint import pytest from ethereum import blocks from ethereum.db import DB from ethereum.config import Env from pyethapp.utils import merge_dict from pyethapp.utils import update_config_from_genesis_json import pyethapp.config as konfig from pyethapp.profiles import PROFILES - def check_genesis(profile): + @pytest.mark.parametrize('profile', PROFILES.keys()) + def test_profile(profile): config = dict(eth=dict()) + + konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}}) # Set config values based on profile selection merge_dict(config, PROFILES[profile]) # Load genesis config update_config_from_genesis_json(config, config['eth']['genesis']) - konfig.update_config_with_defaults(config, {'eth': {'block': blocks.default_config}}) - - print config['eth'].keys() bc = config['eth']['block'] - print bc.keys() + pprint(bc) env = Env(DB(), bc) genesis = blocks.genesis(env) - print 'genesis.hash', genesis.hash.encode('hex') - print 'expected', config['eth']['genesis_hash'] - assert genesis.hash == config['eth']['genesis_hash'].decode('hex') ? -------------- + assert genesis.hash.encode('hex') == config['eth']['genesis_hash'] ? ++++++++++++++ - - - @pytest.mark.xfail # FIXME - def test_olympic(): - check_genesis('olympic') - - - def test_frontier(): - check_genesis('frontier') - - - if __name__ == '__main__': - test_genesis()
fb8fb61303dd567038ca812a61e6702b8b3f4edc
tests/test_exceptions.py
tests/test_exceptions.py
from cookiecutter import exceptions def test_undefined_variable_to_str(): undefined_var_error = exceptions.UndefinedVariableInTemplate( 'Beautiful is better than ugly', exceptions.CookiecutterException('Errors should never pass silently'), {'cookiecutter': {'foo': 'bar'}} ) expected_str = ( "Beautiful is better than ugly. " "Error message: Errors should never pass silently. " "Context: {'cookiecutter': {'foo': 'bar'}}" ) assert str(undefined_var_error) == expected_str
from jinja2.exceptions import UndefinedError from cookiecutter import exceptions def test_undefined_variable_to_str(): undefined_var_error = exceptions.UndefinedVariableInTemplate( 'Beautiful is better than ugly', UndefinedError('Errors should never pass silently'), {'cookiecutter': {'foo': 'bar'}} ) expected_str = ( "Beautiful is better than ugly. " "Error message: Errors should never pass silently. " "Context: {'cookiecutter': {'foo': 'bar'}}" ) assert str(undefined_var_error) == expected_str
Create a jinja2 error in the test to ensure it has a message attribute
Create a jinja2 error in the test to ensure it has a message attribute
Python
bsd-3-clause
hackebrot/cookiecutter,dajose/cookiecutter,dajose/cookiecutter,willingc/cookiecutter,audreyr/cookiecutter,Springerle/cookiecutter,terryjbates/cookiecutter,hackebrot/cookiecutter,Springerle/cookiecutter,willingc/cookiecutter,pjbull/cookiecutter,stevepiercy/cookiecutter,michaeljoseph/cookiecutter,luzfcb/cookiecutter,audreyr/cookiecutter,michaeljoseph/cookiecutter,luzfcb/cookiecutter,pjbull/cookiecutter,terryjbates/cookiecutter,stevepiercy/cookiecutter
+ + from jinja2.exceptions import UndefinedError from cookiecutter import exceptions def test_undefined_variable_to_str(): undefined_var_error = exceptions.UndefinedVariableInTemplate( 'Beautiful is better than ugly', - exceptions.CookiecutterException('Errors should never pass silently'), + UndefinedError('Errors should never pass silently'), {'cookiecutter': {'foo': 'bar'}} ) expected_str = ( "Beautiful is better than ugly. " "Error message: Errors should never pass silently. " "Context: {'cookiecutter': {'foo': 'bar'}}" ) assert str(undefined_var_error) == expected_str
Create a jinja2 error in the test to ensure it has a message attribute
## Code Before: from cookiecutter import exceptions def test_undefined_variable_to_str(): undefined_var_error = exceptions.UndefinedVariableInTemplate( 'Beautiful is better than ugly', exceptions.CookiecutterException('Errors should never pass silently'), {'cookiecutter': {'foo': 'bar'}} ) expected_str = ( "Beautiful is better than ugly. " "Error message: Errors should never pass silently. " "Context: {'cookiecutter': {'foo': 'bar'}}" ) assert str(undefined_var_error) == expected_str ## Instruction: Create a jinja2 error in the test to ensure it has a message attribute ## Code After: from jinja2.exceptions import UndefinedError from cookiecutter import exceptions def test_undefined_variable_to_str(): undefined_var_error = exceptions.UndefinedVariableInTemplate( 'Beautiful is better than ugly', UndefinedError('Errors should never pass silently'), {'cookiecutter': {'foo': 'bar'}} ) expected_str = ( "Beautiful is better than ugly. " "Error message: Errors should never pass silently. " "Context: {'cookiecutter': {'foo': 'bar'}}" ) assert str(undefined_var_error) == expected_str
+ + from jinja2.exceptions import UndefinedError from cookiecutter import exceptions def test_undefined_variable_to_str(): undefined_var_error = exceptions.UndefinedVariableInTemplate( 'Beautiful is better than ugly', - exceptions.CookiecutterException('Errors should never pass silently'), + UndefinedError('Errors should never pass silently'), {'cookiecutter': {'foo': 'bar'}} ) expected_str = ( "Beautiful is better than ugly. " "Error message: Errors should never pass silently. " "Context: {'cookiecutter': {'foo': 'bar'}}" ) assert str(undefined_var_error) == expected_str
c02c3f4603c967c4e8df8314bfe0f4759cb0bca4
openprescribing/manage.py
openprescribing/manage.py
import os import sys import dotenv if __name__ == "__main__": # We can't do read_dotenv('../environment') because that assumes that when # manage.py we are in its current directory, which isn't the case for cron # jobs. env_path = os.path.join( os.path.dirname(os.path.realpath(__file__)), '..', 'environment' ) dotenv.read_dotenv(env_path, override=True) if len(sys.argv) > 1 and sys.argv[1] in ['test', 'pipeline_e2e_tests']: os.environ["DJANGO_SETTINGS_MODULE"] = "openprescribing.settings.test" from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
import os import sys import dotenv if __name__ == "__main__": # We can't do read_dotenv('../environment') because that assumes that when # manage.py we are in its current directory, which isn't the case for cron # jobs. env_path = os.path.join( os.path.dirname(os.path.realpath(__file__)), '..', 'environment' ) dotenv.read_dotenv(env_path, override=True) if len(sys.argv) > 1: if sys.argv[1] == 'test': os.environ["DJANGO_SETTINGS_MODULE"] = "openprescribing.settings.test" elif sys.argv[1] == 'pipeline_e2e_tests': os.environ["DJANGO_SETTINGS_MODULE"] = "openprescribing.settings.e2etest" from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
Set settings for e2e tests correctly
Set settings for e2e tests correctly
Python
mit
annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,ebmdatalab/openprescribing
import os import sys import dotenv if __name__ == "__main__": # We can't do read_dotenv('../environment') because that assumes that when # manage.py we are in its current directory, which isn't the case for cron # jobs. env_path = os.path.join( os.path.dirname(os.path.realpath(__file__)), '..', 'environment' ) dotenv.read_dotenv(env_path, override=True) - if len(sys.argv) > 1 and sys.argv[1] in ['test', 'pipeline_e2e_tests']: + if len(sys.argv) > 1: + if sys.argv[1] == 'test': - os.environ["DJANGO_SETTINGS_MODULE"] = "openprescribing.settings.test" + os.environ["DJANGO_SETTINGS_MODULE"] = "openprescribing.settings.test" + elif sys.argv[1] == 'pipeline_e2e_tests': + os.environ["DJANGO_SETTINGS_MODULE"] = "openprescribing.settings.e2etest" from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
Set settings for e2e tests correctly
## Code Before: import os import sys import dotenv if __name__ == "__main__": # We can't do read_dotenv('../environment') because that assumes that when # manage.py we are in its current directory, which isn't the case for cron # jobs. env_path = os.path.join( os.path.dirname(os.path.realpath(__file__)), '..', 'environment' ) dotenv.read_dotenv(env_path, override=True) if len(sys.argv) > 1 and sys.argv[1] in ['test', 'pipeline_e2e_tests']: os.environ["DJANGO_SETTINGS_MODULE"] = "openprescribing.settings.test" from django.core.management import execute_from_command_line execute_from_command_line(sys.argv) ## Instruction: Set settings for e2e tests correctly ## Code After: import os import sys import dotenv if __name__ == "__main__": # We can't do read_dotenv('../environment') because that assumes that when # manage.py we are in its current directory, which isn't the case for cron # jobs. env_path = os.path.join( os.path.dirname(os.path.realpath(__file__)), '..', 'environment' ) dotenv.read_dotenv(env_path, override=True) if len(sys.argv) > 1: if sys.argv[1] == 'test': os.environ["DJANGO_SETTINGS_MODULE"] = "openprescribing.settings.test" elif sys.argv[1] == 'pipeline_e2e_tests': os.environ["DJANGO_SETTINGS_MODULE"] = "openprescribing.settings.e2etest" from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
import os import sys import dotenv if __name__ == "__main__": # We can't do read_dotenv('../environment') because that assumes that when # manage.py we are in its current directory, which isn't the case for cron # jobs. env_path = os.path.join( os.path.dirname(os.path.realpath(__file__)), '..', 'environment' ) dotenv.read_dotenv(env_path, override=True) - if len(sys.argv) > 1 and sys.argv[1] in ['test', 'pipeline_e2e_tests']: + if len(sys.argv) > 1: + if sys.argv[1] == 'test': - os.environ["DJANGO_SETTINGS_MODULE"] = "openprescribing.settings.test" + os.environ["DJANGO_SETTINGS_MODULE"] = "openprescribing.settings.test" ? ++++ + elif sys.argv[1] == 'pipeline_e2e_tests': + os.environ["DJANGO_SETTINGS_MODULE"] = "openprescribing.settings.e2etest" from django.core.management import execute_from_command_line execute_from_command_line(sys.argv)
96871ef8de84653406749a2e503ef4b4fb800b2f
src/mist/io/tests/features/steps/shell.py
src/mist/io/tests/features/steps/shell.py
@when(u'I type the "{command}" shell command') def shell_command(context, command): shell_input = context.browser.find_by_css('#shell-input input') shell_input.type(command) shell_enter = context.browser.find_by_css('#shell-submit .ui-btn') shell_enter.click() @then(u'I should see the "{command}" result in shell output') def shell_output(context, command): shell_output = context.browser.find_by_css('#shell-response .ui-btn') for output in shell_output: if command in output.text: return assert False, u'Could not find the output of %s command' % command
@when(u'I type the "{command}" shell command') def shell_command(context, command): shell_input = context.browser.find_by_css('#shell-input input') shell_input.type(command) shell_enter = context.browser.find_by_css('#shell-submit .ui-btn') shell_enter.click() @then(u'I should see the "{command}" result in shell output') def shell_output(context, command): shell_output = context.browser.find_by_css('#shell-return h3') for output in shell_output: if command in output.text: return assert False, u'Could not find the output of %s command' % command
Fix Shell tests according to css changes
Fix Shell tests according to css changes
Python
agpl-3.0
kelonye/mist.io,Lao-liu/mist.io,DimensionDataCBUSydney/mist.io,Lao-liu/mist.io,johnnyWalnut/mist.io,DimensionDataCBUSydney/mist.io,zBMNForks/mist.io,munkiat/mist.io,Lao-liu/mist.io,afivos/mist.io,munkiat/mist.io,munkiat/mist.io,afivos/mist.io,johnnyWalnut/mist.io,Lao-liu/mist.io,munkiat/mist.io,DimensionDataCBUSydney/mist.io,DimensionDataCBUSydney/mist.io,zBMNForks/mist.io,zBMNForks/mist.io,johnnyWalnut/mist.io,afivos/mist.io,kelonye/mist.io,kelonye/mist.io
@when(u'I type the "{command}" shell command') def shell_command(context, command): shell_input = context.browser.find_by_css('#shell-input input') shell_input.type(command) shell_enter = context.browser.find_by_css('#shell-submit .ui-btn') shell_enter.click() @then(u'I should see the "{command}" result in shell output') def shell_output(context, command): - shell_output = context.browser.find_by_css('#shell-response .ui-btn') + shell_output = context.browser.find_by_css('#shell-return h3') for output in shell_output: if command in output.text: return assert False, u'Could not find the output of %s command' % command
Fix Shell tests according to css changes
## Code Before: @when(u'I type the "{command}" shell command') def shell_command(context, command): shell_input = context.browser.find_by_css('#shell-input input') shell_input.type(command) shell_enter = context.browser.find_by_css('#shell-submit .ui-btn') shell_enter.click() @then(u'I should see the "{command}" result in shell output') def shell_output(context, command): shell_output = context.browser.find_by_css('#shell-response .ui-btn') for output in shell_output: if command in output.text: return assert False, u'Could not find the output of %s command' % command ## Instruction: Fix Shell tests according to css changes ## Code After: @when(u'I type the "{command}" shell command') def shell_command(context, command): shell_input = context.browser.find_by_css('#shell-input input') shell_input.type(command) shell_enter = context.browser.find_by_css('#shell-submit .ui-btn') shell_enter.click() @then(u'I should see the "{command}" result in shell output') def shell_output(context, command): shell_output = context.browser.find_by_css('#shell-return h3') for output in shell_output: if command in output.text: return assert False, u'Could not find the output of %s command' % command
@when(u'I type the "{command}" shell command') def shell_command(context, command): shell_input = context.browser.find_by_css('#shell-input input') shell_input.type(command) shell_enter = context.browser.find_by_css('#shell-submit .ui-btn') shell_enter.click() @then(u'I should see the "{command}" result in shell output') def shell_output(context, command): - shell_output = context.browser.find_by_css('#shell-response .ui-btn') ? ^^^ -- ^^^^^^^ + shell_output = context.browser.find_by_css('#shell-return h3') ? ^^^ ^^ for output in shell_output: if command in output.text: return assert False, u'Could not find the output of %s command' % command
da3995150d6eacf7695c4606e83c24c82a17546d
autogenerate_config_docs/hooks.py
autogenerate_config_docs/hooks.py
def keystone_config(): from keystone.common import config config.configure() def glance_store_config(): try: import glance_store from oslo.config import cfg glance_store.backend.register_opts(cfg.CONF) except ImportError: # glance_store is not available before Juno pass HOOKS = {'keystone.common.config': keystone_config, 'glance.common.config': glance_store_config}
def keystone_config(): from keystone.common import config config.configure() def glance_store_config(): try: import glance_store from oslo.config import cfg glance_store.backend.register_opts(cfg.CONF) except ImportError: # glance_store is not available before Juno pass def nova_spice(): import nova.cmd.spicehtml5proxy # noqa HOOKS = {'keystone.common.config': keystone_config, 'glance.common.config': glance_store_config, 'nova.spice': nova_spice}
Add a hook for nova.cmd.spicehtml5proxy
Add a hook for nova.cmd.spicehtml5proxy The cmd/ folders are excluded from the autohelp imports to avoid ending up with a bunch of CLI options. nova/cmd/spicehtml5proxy.py holds real configuration options and needs to be imported. Change-Id: Ic0f8066332a45cae253ad3e03f4717f1887e16ee Partial-Bug: #1394595
Python
apache-2.0
openstack/openstack-doc-tools,savinash47/openstack-doc-tools,savinash47/openstack-doc-tools,savinash47/openstack-doc-tools,openstack/openstack-doc-tools
def keystone_config(): from keystone.common import config config.configure() def glance_store_config(): try: import glance_store from oslo.config import cfg glance_store.backend.register_opts(cfg.CONF) except ImportError: # glance_store is not available before Juno pass + def nova_spice(): + import nova.cmd.spicehtml5proxy # noqa + + HOOKS = {'keystone.common.config': keystone_config, - 'glance.common.config': glance_store_config} + 'glance.common.config': glance_store_config, + 'nova.spice': nova_spice}
Add a hook for nova.cmd.spicehtml5proxy
## Code Before: def keystone_config(): from keystone.common import config config.configure() def glance_store_config(): try: import glance_store from oslo.config import cfg glance_store.backend.register_opts(cfg.CONF) except ImportError: # glance_store is not available before Juno pass HOOKS = {'keystone.common.config': keystone_config, 'glance.common.config': glance_store_config} ## Instruction: Add a hook for nova.cmd.spicehtml5proxy ## Code After: def keystone_config(): from keystone.common import config config.configure() def glance_store_config(): try: import glance_store from oslo.config import cfg glance_store.backend.register_opts(cfg.CONF) except ImportError: # glance_store is not available before Juno pass def nova_spice(): import nova.cmd.spicehtml5proxy # noqa HOOKS = {'keystone.common.config': keystone_config, 'glance.common.config': glance_store_config, 'nova.spice': nova_spice}
def keystone_config(): from keystone.common import config config.configure() def glance_store_config(): try: import glance_store from oslo.config import cfg glance_store.backend.register_opts(cfg.CONF) except ImportError: # glance_store is not available before Juno pass + def nova_spice(): + import nova.cmd.spicehtml5proxy # noqa + + HOOKS = {'keystone.common.config': keystone_config, - 'glance.common.config': glance_store_config} ? ^ + 'glance.common.config': glance_store_config, ? ^ + 'nova.spice': nova_spice}
d0bf235af3742a17c722488fe3679d5b73a0d945
thinc/neural/_classes/softmax.py
thinc/neural/_classes/softmax.py
from .affine import Affine from ... import describe from ...describe import Dimension, Synapses, Biases from ...check import has_shape from ... import check @describe.attributes( W=Synapses("Weights matrix", lambda obj: (obj.nO, obj.nI), lambda W, ops: None) ) class Softmax(Affine): name = 'softmax' @check.arg(1, has_shape(('nB', 'nI'))) def predict(self, input__BI): output__BO = self.ops.affine(self.W, self.b, input__BI) output__BO = self.ops.softmax(output__BO, inplace=False) return output__BO @check.arg(1, has_shape(('nB', 'nI'))) def begin_update(self, input__BI, drop=0.): output__BO = self.predict(input__BI) @check.arg(0, has_shape(('nB', 'nO'))) def finish_update(grad__BO, sgd=None): self.d_W += self.ops.batch_outer(grad__BO, input__BI) self.d_b += grad__BO.sum(axis=0) grad__BI = self.ops.dot(grad__BO, self.W) if sgd is not None: sgd(self._mem.weights, self._mem.gradient, key=self.id) return grad__BI return output__BO, finish_update
from .affine import Affine from ... import describe from ...describe import Dimension, Synapses, Biases from ...check import has_shape from ... import check @describe.attributes( W=Synapses("Weights matrix", lambda obj: (obj.nO, obj.nI), lambda W, ops: None) ) class Softmax(Affine): name = 'softmax' @check.arg(1, has_shape(('nB', 'nI'))) def predict(self, input__BI): output__BO = self.ops.affine(self.W, self.b, input__BI) output__BO = self.ops.softmax(output__BO, inplace=False) return output__BO @check.arg(1, has_shape(('nB', 'nI'))) def begin_update(self, input__BI, drop=0.): output__BO = self.predict(input__BI) @check.arg(0, has_shape(('nB', 'nO'))) def finish_update(grad__BO, sgd=None): self.d_W += self.ops.gemm(grad__BO, input__BI, trans1=True) self.d_b += grad__BO.sum(axis=0) grad__BI = self.ops.gemm(grad__BO, self.W) if sgd is not None: sgd(self._mem.weights, self._mem.gradient, key=self.id) return grad__BI return output__BO, finish_update
Fix gemm calls in Softmax
Fix gemm calls in Softmax
Python
mit
spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc
from .affine import Affine from ... import describe from ...describe import Dimension, Synapses, Biases from ...check import has_shape from ... import check @describe.attributes( W=Synapses("Weights matrix", lambda obj: (obj.nO, obj.nI), lambda W, ops: None) ) class Softmax(Affine): name = 'softmax' @check.arg(1, has_shape(('nB', 'nI'))) def predict(self, input__BI): output__BO = self.ops.affine(self.W, self.b, input__BI) output__BO = self.ops.softmax(output__BO, inplace=False) return output__BO @check.arg(1, has_shape(('nB', 'nI'))) def begin_update(self, input__BI, drop=0.): output__BO = self.predict(input__BI) @check.arg(0, has_shape(('nB', 'nO'))) def finish_update(grad__BO, sgd=None): - self.d_W += self.ops.batch_outer(grad__BO, input__BI) + self.d_W += self.ops.gemm(grad__BO, input__BI, trans1=True) self.d_b += grad__BO.sum(axis=0) - grad__BI = self.ops.dot(grad__BO, self.W) + grad__BI = self.ops.gemm(grad__BO, self.W) if sgd is not None: sgd(self._mem.weights, self._mem.gradient, key=self.id) return grad__BI return output__BO, finish_update
Fix gemm calls in Softmax
## Code Before: from .affine import Affine from ... import describe from ...describe import Dimension, Synapses, Biases from ...check import has_shape from ... import check @describe.attributes( W=Synapses("Weights matrix", lambda obj: (obj.nO, obj.nI), lambda W, ops: None) ) class Softmax(Affine): name = 'softmax' @check.arg(1, has_shape(('nB', 'nI'))) def predict(self, input__BI): output__BO = self.ops.affine(self.W, self.b, input__BI) output__BO = self.ops.softmax(output__BO, inplace=False) return output__BO @check.arg(1, has_shape(('nB', 'nI'))) def begin_update(self, input__BI, drop=0.): output__BO = self.predict(input__BI) @check.arg(0, has_shape(('nB', 'nO'))) def finish_update(grad__BO, sgd=None): self.d_W += self.ops.batch_outer(grad__BO, input__BI) self.d_b += grad__BO.sum(axis=0) grad__BI = self.ops.dot(grad__BO, self.W) if sgd is not None: sgd(self._mem.weights, self._mem.gradient, key=self.id) return grad__BI return output__BO, finish_update ## Instruction: Fix gemm calls in Softmax ## Code After: from .affine import Affine from ... import describe from ...describe import Dimension, Synapses, Biases from ...check import has_shape from ... import check @describe.attributes( W=Synapses("Weights matrix", lambda obj: (obj.nO, obj.nI), lambda W, ops: None) ) class Softmax(Affine): name = 'softmax' @check.arg(1, has_shape(('nB', 'nI'))) def predict(self, input__BI): output__BO = self.ops.affine(self.W, self.b, input__BI) output__BO = self.ops.softmax(output__BO, inplace=False) return output__BO @check.arg(1, has_shape(('nB', 'nI'))) def begin_update(self, input__BI, drop=0.): output__BO = self.predict(input__BI) @check.arg(0, has_shape(('nB', 'nO'))) def finish_update(grad__BO, sgd=None): self.d_W += self.ops.gemm(grad__BO, input__BI, trans1=True) self.d_b += grad__BO.sum(axis=0) grad__BI = self.ops.gemm(grad__BO, self.W) if sgd is not None: sgd(self._mem.weights, self._mem.gradient, key=self.id) return grad__BI return output__BO, finish_update
from .affine import Affine from ... import describe from ...describe import Dimension, Synapses, Biases from ...check import has_shape from ... import check @describe.attributes( W=Synapses("Weights matrix", lambda obj: (obj.nO, obj.nI), lambda W, ops: None) ) class Softmax(Affine): name = 'softmax' @check.arg(1, has_shape(('nB', 'nI'))) def predict(self, input__BI): output__BO = self.ops.affine(self.W, self.b, input__BI) output__BO = self.ops.softmax(output__BO, inplace=False) return output__BO @check.arg(1, has_shape(('nB', 'nI'))) def begin_update(self, input__BI, drop=0.): output__BO = self.predict(input__BI) @check.arg(0, has_shape(('nB', 'nO'))) def finish_update(grad__BO, sgd=None): - self.d_W += self.ops.batch_outer(grad__BO, input__BI) ? ^^^^^^^^^ ^ + self.d_W += self.ops.gemm(grad__BO, input__BI, trans1=True) ? ^ ^^ +++++++++++++ self.d_b += grad__BO.sum(axis=0) - grad__BI = self.ops.dot(grad__BO, self.W) ? ^^^ + grad__BI = self.ops.gemm(grad__BO, self.W) ? ^^^^ if sgd is not None: sgd(self._mem.weights, self._mem.gradient, key=self.id) return grad__BI return output__BO, finish_update
624276b80b6d69b788b2f48691941cd89847237b
software/Pi/ui.py
software/Pi/ui.py
import RPi.GPIO as gpio import time ledPin = 16 #GPIO23 #Set up RPi GPIO def setup(): gpio.setmode(gpio.BOARD) gpio.setup(ledPin, gpio.OUT) def blink(n): for i in range(0, n): gpio.output(ledPin, True) time.sleep(0.5) gpio.output(ledPin, False) time.sleep(0.5)
import RPi.GPIO as gpio import time ledPin = 16 #GPIO23 #Set up RPi GPIO def setup(): gpio.setmode(gpio.BOARD) gpio.setwarnings(False) gpio.setup(ledPin, gpio.OUT) def blink(n): for i in range(0, n): gpio.output(ledPin, True) time.sleep(0.5) gpio.output(ledPin, False) time.sleep(0.5)
Disable warnings for GPIO channels...
Disable warnings for GPIO channels...
Python
mit
AdlerFarHorizons/eclipse-tracking,AdlerFarHorizons/eclipse-tracking,AdlerFarHorizons/eclipse-tracking,AdlerFarHorizons/eclipse-tracking
import RPi.GPIO as gpio import time ledPin = 16 #GPIO23 #Set up RPi GPIO def setup(): gpio.setmode(gpio.BOARD) + gpio.setwarnings(False) gpio.setup(ledPin, gpio.OUT) def blink(n): for i in range(0, n): gpio.output(ledPin, True) time.sleep(0.5) gpio.output(ledPin, False) time.sleep(0.5)
Disable warnings for GPIO channels...
## Code Before: import RPi.GPIO as gpio import time ledPin = 16 #GPIO23 #Set up RPi GPIO def setup(): gpio.setmode(gpio.BOARD) gpio.setup(ledPin, gpio.OUT) def blink(n): for i in range(0, n): gpio.output(ledPin, True) time.sleep(0.5) gpio.output(ledPin, False) time.sleep(0.5) ## Instruction: Disable warnings for GPIO channels... ## Code After: import RPi.GPIO as gpio import time ledPin = 16 #GPIO23 #Set up RPi GPIO def setup(): gpio.setmode(gpio.BOARD) gpio.setwarnings(False) gpio.setup(ledPin, gpio.OUT) def blink(n): for i in range(0, n): gpio.output(ledPin, True) time.sleep(0.5) gpio.output(ledPin, False) time.sleep(0.5)
import RPi.GPIO as gpio import time ledPin = 16 #GPIO23 #Set up RPi GPIO def setup(): gpio.setmode(gpio.BOARD) + gpio.setwarnings(False) gpio.setup(ledPin, gpio.OUT) def blink(n): for i in range(0, n): gpio.output(ledPin, True) time.sleep(0.5) gpio.output(ledPin, False) time.sleep(0.5)
faac7b98d3270267b731c97aa0318d532f75610c
dash_table/__init__.py
dash_table/__init__.py
from dash.dash_table import * # noqa: F401, F403, E402 import warnings warnings.warn( """ The dash_table package is deprecated. Please replace `import dash_table` with `from dash import dash_table`""", stacklevel=2, )
from dash.dash_table import * # noqa: F401, F403, E402 import warnings warnings.warn( """ The dash_table package is deprecated. Please replace `import dash_table` with `from dash import dash_table` Also, if you're using any of the table format helpers (e.g. Group), replace `from dash_table.Format import Group` with `from dash.dash_table.Format import Group`""", stacklevel=2, )
Add info on table format helpers to warning message
Add info on table format helpers to warning message
Python
mit
plotly/dash-table,plotly/dash-table,plotly/dash-table
from dash.dash_table import * # noqa: F401, F403, E402 import warnings warnings.warn( """ The dash_table package is deprecated. Please replace - `import dash_table` with `from dash import dash_table`""", + `import dash_table` with `from dash import dash_table` + + Also, if you're using any of the table format helpers (e.g. Group), replace + `from dash_table.Format import Group` with + `from dash.dash_table.Format import Group`""", stacklevel=2, )
Add info on table format helpers to warning message
## Code Before: from dash.dash_table import * # noqa: F401, F403, E402 import warnings warnings.warn( """ The dash_table package is deprecated. Please replace `import dash_table` with `from dash import dash_table`""", stacklevel=2, ) ## Instruction: Add info on table format helpers to warning message ## Code After: from dash.dash_table import * # noqa: F401, F403, E402 import warnings warnings.warn( """ The dash_table package is deprecated. Please replace `import dash_table` with `from dash import dash_table` Also, if you're using any of the table format helpers (e.g. Group), replace `from dash_table.Format import Group` with `from dash.dash_table.Format import Group`""", stacklevel=2, )
from dash.dash_table import * # noqa: F401, F403, E402 import warnings warnings.warn( """ The dash_table package is deprecated. Please replace - `import dash_table` with `from dash import dash_table`""", ? ---- + `import dash_table` with `from dash import dash_table` + + Also, if you're using any of the table format helpers (e.g. Group), replace + `from dash_table.Format import Group` with + `from dash.dash_table.Format import Group`""", stacklevel=2, )
1b44849f9fac68c6ce0732baded63681cbf58ccb
osmaxx-py/osmaxx/contrib/auth/tests/test_frontend_permissions.py
osmaxx-py/osmaxx/contrib/auth/tests/test_frontend_permissions.py
from django.test import TestCase from django.contrib.auth.models import User, Group from osmaxx.contrib.auth.frontend_permissions import user_in_osmaxx_group, FRONTEND_USER_GROUP class TestFrontendPermissions(TestCase): def test_superuser_can_access_frontend_even_if_not_in_osmaxx_group(self): an_admin = User.objects.create_superuser('A. D. Min', 'admin@example.com', 'password') self.assertTrue(user_in_osmaxx_group(an_admin)) def test_user_can_access_frontend_when_in_osmaxx_group(self): a_user = User.objects.create_user('U. Ser', 'user@example.com', 'password') a_user.groups.add(Group.objects.get(name=FRONTEND_USER_GROUP)) self.assertTrue(user_in_osmaxx_group(a_user))
from django.test import TestCase from django.contrib.auth.models import User, Group from osmaxx.contrib.auth.frontend_permissions import user_in_osmaxx_group, FRONTEND_USER_GROUP class TestFrontendPermissions(TestCase): def test_user_can_not_access_frontend_by_default(self): a_user = User.objects.create_user('U. Ser', 'user@example.com', 'password') self.assertFalse(user_in_osmaxx_group(a_user)) def test_superuser_can_access_frontend_even_if_not_in_osmaxx_group(self): an_admin = User.objects.create_superuser('A. D. Min', 'admin@example.com', 'password') self.assertTrue(user_in_osmaxx_group(an_admin)) def test_user_can_access_frontend_when_in_osmaxx_group(self): a_user = User.objects.create_user('U. Ser', 'user@example.com', 'password') a_user.groups.add(Group.objects.get(name=FRONTEND_USER_GROUP)) self.assertTrue(user_in_osmaxx_group(a_user))
Test that users can not access frontend by default
Test that users can not access frontend by default
Python
mit
geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend,geometalab/drf-utm-zone-info,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/drf-utm-zone-info,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/osmaxx
from django.test import TestCase from django.contrib.auth.models import User, Group from osmaxx.contrib.auth.frontend_permissions import user_in_osmaxx_group, FRONTEND_USER_GROUP class TestFrontendPermissions(TestCase): + def test_user_can_not_access_frontend_by_default(self): + a_user = User.objects.create_user('U. Ser', 'user@example.com', 'password') + self.assertFalse(user_in_osmaxx_group(a_user)) + def test_superuser_can_access_frontend_even_if_not_in_osmaxx_group(self): an_admin = User.objects.create_superuser('A. D. Min', 'admin@example.com', 'password') self.assertTrue(user_in_osmaxx_group(an_admin)) def test_user_can_access_frontend_when_in_osmaxx_group(self): a_user = User.objects.create_user('U. Ser', 'user@example.com', 'password') a_user.groups.add(Group.objects.get(name=FRONTEND_USER_GROUP)) self.assertTrue(user_in_osmaxx_group(a_user))
Test that users can not access frontend by default
## Code Before: from django.test import TestCase from django.contrib.auth.models import User, Group from osmaxx.contrib.auth.frontend_permissions import user_in_osmaxx_group, FRONTEND_USER_GROUP class TestFrontendPermissions(TestCase): def test_superuser_can_access_frontend_even_if_not_in_osmaxx_group(self): an_admin = User.objects.create_superuser('A. D. Min', 'admin@example.com', 'password') self.assertTrue(user_in_osmaxx_group(an_admin)) def test_user_can_access_frontend_when_in_osmaxx_group(self): a_user = User.objects.create_user('U. Ser', 'user@example.com', 'password') a_user.groups.add(Group.objects.get(name=FRONTEND_USER_GROUP)) self.assertTrue(user_in_osmaxx_group(a_user)) ## Instruction: Test that users can not access frontend by default ## Code After: from django.test import TestCase from django.contrib.auth.models import User, Group from osmaxx.contrib.auth.frontend_permissions import user_in_osmaxx_group, FRONTEND_USER_GROUP class TestFrontendPermissions(TestCase): def test_user_can_not_access_frontend_by_default(self): a_user = User.objects.create_user('U. Ser', 'user@example.com', 'password') self.assertFalse(user_in_osmaxx_group(a_user)) def test_superuser_can_access_frontend_even_if_not_in_osmaxx_group(self): an_admin = User.objects.create_superuser('A. D. Min', 'admin@example.com', 'password') self.assertTrue(user_in_osmaxx_group(an_admin)) def test_user_can_access_frontend_when_in_osmaxx_group(self): a_user = User.objects.create_user('U. Ser', 'user@example.com', 'password') a_user.groups.add(Group.objects.get(name=FRONTEND_USER_GROUP)) self.assertTrue(user_in_osmaxx_group(a_user))
from django.test import TestCase from django.contrib.auth.models import User, Group from osmaxx.contrib.auth.frontend_permissions import user_in_osmaxx_group, FRONTEND_USER_GROUP class TestFrontendPermissions(TestCase): + def test_user_can_not_access_frontend_by_default(self): + a_user = User.objects.create_user('U. Ser', 'user@example.com', 'password') + self.assertFalse(user_in_osmaxx_group(a_user)) + def test_superuser_can_access_frontend_even_if_not_in_osmaxx_group(self): an_admin = User.objects.create_superuser('A. D. Min', 'admin@example.com', 'password') self.assertTrue(user_in_osmaxx_group(an_admin)) def test_user_can_access_frontend_when_in_osmaxx_group(self): a_user = User.objects.create_user('U. Ser', 'user@example.com', 'password') a_user.groups.add(Group.objects.get(name=FRONTEND_USER_GROUP)) self.assertTrue(user_in_osmaxx_group(a_user))
7588bab65a098cbc0b5e2ba2c1b9a45b08adfc46
fsspec/__init__.py
fsspec/__init__.py
try: from importlib.metadata import entry_points except ImportError: # python < 3.8 try: from importlib_metadata import entry_points except ImportError: entry_points = None from . import caching from ._version import get_versions from .core import get_fs_token_paths, open, open_files, open_local from .mapping import FSMap, get_mapper from .registry import ( filesystem, get_filesystem_class, register_implementation, registry, ) from .spec import AbstractFileSystem __version__ = get_versions()["version"] del get_versions __all__ = [ "AbstractFileSystem", "FSMap", "filesystem", "register_implementation", "get_filesystem_class", "get_fs_token_paths", "get_mapper", "open", "open_files", "open_local", "registry", "caching", ] if entry_points is not None: try: entry_points = entry_points() except TypeError: pass # importlib-metadata < 0.8 else: for spec in entry_points.get("fsspec.specs", []): err_msg = f"Unable to load filesystem from {spec}" register_implementation( spec.name, spec.value.replace(":", "."), errtxt=err_msg )
try: from importlib.metadata import entry_points except ImportError: # python < 3.8 try: from importlib_metadata import entry_points except ImportError: entry_points = None from . import caching from ._version import get_versions from .core import get_fs_token_paths, open, open_files, open_local from .exceptions import FSBaseException, FSTimeoutError from .mapping import FSMap, get_mapper from .registry import ( filesystem, get_filesystem_class, register_implementation, registry, ) from .spec import AbstractFileSystem __version__ = get_versions()["version"] del get_versions __all__ = [ "AbstractFileSystem", "FSBaseException", "FSTimeoutError", "FSMap", "filesystem", "register_implementation", "get_filesystem_class", "get_fs_token_paths", "get_mapper", "open", "open_files", "open_local", "registry", "caching", ] if entry_points is not None: try: entry_points = entry_points() except TypeError: pass # importlib-metadata < 0.8 else: for spec in entry_points.get("fsspec.specs", []): err_msg = f"Unable to load filesystem from {spec}" register_implementation( spec.name, spec.value.replace(":", "."), errtxt=err_msg )
Make the exceptions valid on fsspec module level
Make the exceptions valid on fsspec module level
Python
bsd-3-clause
intake/filesystem_spec,fsspec/filesystem_spec,fsspec/filesystem_spec
try: from importlib.metadata import entry_points except ImportError: # python < 3.8 try: from importlib_metadata import entry_points except ImportError: entry_points = None from . import caching from ._version import get_versions from .core import get_fs_token_paths, open, open_files, open_local + from .exceptions import FSBaseException, FSTimeoutError from .mapping import FSMap, get_mapper from .registry import ( filesystem, get_filesystem_class, register_implementation, registry, ) from .spec import AbstractFileSystem __version__ = get_versions()["version"] del get_versions __all__ = [ "AbstractFileSystem", + "FSBaseException", + "FSTimeoutError", "FSMap", "filesystem", "register_implementation", "get_filesystem_class", "get_fs_token_paths", "get_mapper", "open", "open_files", "open_local", "registry", "caching", ] if entry_points is not None: try: entry_points = entry_points() except TypeError: pass # importlib-metadata < 0.8 else: for spec in entry_points.get("fsspec.specs", []): err_msg = f"Unable to load filesystem from {spec}" register_implementation( spec.name, spec.value.replace(":", "."), errtxt=err_msg )
Make the exceptions valid on fsspec module level
## Code Before: try: from importlib.metadata import entry_points except ImportError: # python < 3.8 try: from importlib_metadata import entry_points except ImportError: entry_points = None from . import caching from ._version import get_versions from .core import get_fs_token_paths, open, open_files, open_local from .mapping import FSMap, get_mapper from .registry import ( filesystem, get_filesystem_class, register_implementation, registry, ) from .spec import AbstractFileSystem __version__ = get_versions()["version"] del get_versions __all__ = [ "AbstractFileSystem", "FSMap", "filesystem", "register_implementation", "get_filesystem_class", "get_fs_token_paths", "get_mapper", "open", "open_files", "open_local", "registry", "caching", ] if entry_points is not None: try: entry_points = entry_points() except TypeError: pass # importlib-metadata < 0.8 else: for spec in entry_points.get("fsspec.specs", []): err_msg = f"Unable to load filesystem from {spec}" register_implementation( spec.name, spec.value.replace(":", "."), errtxt=err_msg ) ## Instruction: Make the exceptions valid on fsspec module level ## Code After: try: from importlib.metadata import entry_points except ImportError: # python < 3.8 try: from importlib_metadata import entry_points except ImportError: entry_points = None from . 
import caching from ._version import get_versions from .core import get_fs_token_paths, open, open_files, open_local from .exceptions import FSBaseException, FSTimeoutError from .mapping import FSMap, get_mapper from .registry import ( filesystem, get_filesystem_class, register_implementation, registry, ) from .spec import AbstractFileSystem __version__ = get_versions()["version"] del get_versions __all__ = [ "AbstractFileSystem", "FSBaseException", "FSTimeoutError", "FSMap", "filesystem", "register_implementation", "get_filesystem_class", "get_fs_token_paths", "get_mapper", "open", "open_files", "open_local", "registry", "caching", ] if entry_points is not None: try: entry_points = entry_points() except TypeError: pass # importlib-metadata < 0.8 else: for spec in entry_points.get("fsspec.specs", []): err_msg = f"Unable to load filesystem from {spec}" register_implementation( spec.name, spec.value.replace(":", "."), errtxt=err_msg )
try: from importlib.metadata import entry_points except ImportError: # python < 3.8 try: from importlib_metadata import entry_points except ImportError: entry_points = None from . import caching from ._version import get_versions from .core import get_fs_token_paths, open, open_files, open_local + from .exceptions import FSBaseException, FSTimeoutError from .mapping import FSMap, get_mapper from .registry import ( filesystem, get_filesystem_class, register_implementation, registry, ) from .spec import AbstractFileSystem __version__ = get_versions()["version"] del get_versions __all__ = [ "AbstractFileSystem", + "FSBaseException", + "FSTimeoutError", "FSMap", "filesystem", "register_implementation", "get_filesystem_class", "get_fs_token_paths", "get_mapper", "open", "open_files", "open_local", "registry", "caching", ] if entry_points is not None: try: entry_points = entry_points() except TypeError: pass # importlib-metadata < 0.8 else: for spec in entry_points.get("fsspec.specs", []): err_msg = f"Unable to load filesystem from {spec}" register_implementation( spec.name, spec.value.replace(":", "."), errtxt=err_msg )
ff14a65284603e27cff9628cd8eec0c4cfd8e81d
pale/arguments/url.py
pale/arguments/url.py
from __future__ import absolute_import import string import urlparse from pale.arguments.string import StringArgument from pale.errors import ArgumentError class URLArgument(StringArgument): def validate_url(self, original_string): """Returns the original string if it was valid, raises an argument error if it's not. """ # nipped from stack overflow: http://stackoverflow.com/questions/827557/how-do-you-validate-a-url-with-a-regular-expression-in-python # I preferred this to the thorough regex approach for simplicity and # readability pieces = urlparse.urlparse(original_string) try: assert all([pieces.scheme, pieces.netloc]) valid_chars = set(string.letters + string.digits + ":-_.") assert set(pieces.netloc) <= valid_chars assert pieces.scheme in ['http', 'https'] except AssertionError as e: raise ArgumentError(self.item_name, "The input you've provided is not a valid URL.") return original_string def validate(self, item, item_name): self.item_name = item_name item = super(URLArgument, self).validate(item, item_name) if item is not None: item = self.validate_url(item) return item
from __future__ import absolute_import import string import urlparse from pale.arguments.string import StringArgument from pale.errors import ArgumentError class URLArgument(StringArgument): path_only = False def validate_url(self, original_string): """Returns the original string if it was valid, raises an argument error if it's not. """ # nipped from stack overflow: http://stackoverflow.com/questions/827557/how-do-you-validate-a-url-with-a-regular-expression-in-python # I preferred this to the thorough regex approach for simplicity and # readability pieces = urlparse.urlparse(original_string) try: if self.path_only: assert not any([pieces.scheme, pieces.netloc]) assert pieces.path else: assert all([pieces.scheme, pieces.netloc]) valid_chars = set(string.letters + string.digits + ":-_.") assert set(pieces.netloc) <= valid_chars assert pieces.scheme in ['http', 'https'] except AssertionError as e: raise ArgumentError(self.item_name, "The input you've provided is not a valid URL.") return original_string def validate(self, item, item_name): self.item_name = item_name item = super(URLArgument, self).validate(item, item_name) if item is not None: item = self.validate_url(item) return item
Add path_only support to URLArgument
Add path_only support to URLArgument
Python
mit
Loudr/pale
from __future__ import absolute_import import string import urlparse from pale.arguments.string import StringArgument from pale.errors import ArgumentError + class URLArgument(StringArgument): + + path_only = False def validate_url(self, original_string): """Returns the original string if it was valid, raises an argument error if it's not. """ # nipped from stack overflow: http://stackoverflow.com/questions/827557/how-do-you-validate-a-url-with-a-regular-expression-in-python # I preferred this to the thorough regex approach for simplicity and # readability pieces = urlparse.urlparse(original_string) try: + if self.path_only: + assert not any([pieces.scheme, pieces.netloc]) + assert pieces.path + else: - assert all([pieces.scheme, pieces.netloc]) + assert all([pieces.scheme, pieces.netloc]) - valid_chars = set(string.letters + string.digits + ":-_.") + valid_chars = set(string.letters + string.digits + ":-_.") - assert set(pieces.netloc) <= valid_chars + assert set(pieces.netloc) <= valid_chars - assert pieces.scheme in ['http', 'https'] + assert pieces.scheme in ['http', 'https'] + except AssertionError as e: raise ArgumentError(self.item_name, "The input you've provided is not a valid URL.") return original_string def validate(self, item, item_name): self.item_name = item_name item = super(URLArgument, self).validate(item, item_name) if item is not None: item = self.validate_url(item) return item -
Add path_only support to URLArgument
## Code Before: from __future__ import absolute_import import string import urlparse from pale.arguments.string import StringArgument from pale.errors import ArgumentError class URLArgument(StringArgument): def validate_url(self, original_string): """Returns the original string if it was valid, raises an argument error if it's not. """ # nipped from stack overflow: http://stackoverflow.com/questions/827557/how-do-you-validate-a-url-with-a-regular-expression-in-python # I preferred this to the thorough regex approach for simplicity and # readability pieces = urlparse.urlparse(original_string) try: assert all([pieces.scheme, pieces.netloc]) valid_chars = set(string.letters + string.digits + ":-_.") assert set(pieces.netloc) <= valid_chars assert pieces.scheme in ['http', 'https'] except AssertionError as e: raise ArgumentError(self.item_name, "The input you've provided is not a valid URL.") return original_string def validate(self, item, item_name): self.item_name = item_name item = super(URLArgument, self).validate(item, item_name) if item is not None: item = self.validate_url(item) return item ## Instruction: Add path_only support to URLArgument ## Code After: from __future__ import absolute_import import string import urlparse from pale.arguments.string import StringArgument from pale.errors import ArgumentError class URLArgument(StringArgument): path_only = False def validate_url(self, original_string): """Returns the original string if it was valid, raises an argument error if it's not. 
""" # nipped from stack overflow: http://stackoverflow.com/questions/827557/how-do-you-validate-a-url-with-a-regular-expression-in-python # I preferred this to the thorough regex approach for simplicity and # readability pieces = urlparse.urlparse(original_string) try: if self.path_only: assert not any([pieces.scheme, pieces.netloc]) assert pieces.path else: assert all([pieces.scheme, pieces.netloc]) valid_chars = set(string.letters + string.digits + ":-_.") assert set(pieces.netloc) <= valid_chars assert pieces.scheme in ['http', 'https'] except AssertionError as e: raise ArgumentError(self.item_name, "The input you've provided is not a valid URL.") return original_string def validate(self, item, item_name): self.item_name = item_name item = super(URLArgument, self).validate(item, item_name) if item is not None: item = self.validate_url(item) return item
from __future__ import absolute_import import string import urlparse from pale.arguments.string import StringArgument from pale.errors import ArgumentError + class URLArgument(StringArgument): + + path_only = False def validate_url(self, original_string): """Returns the original string if it was valid, raises an argument error if it's not. """ # nipped from stack overflow: http://stackoverflow.com/questions/827557/how-do-you-validate-a-url-with-a-regular-expression-in-python # I preferred this to the thorough regex approach for simplicity and # readability pieces = urlparse.urlparse(original_string) try: + if self.path_only: + assert not any([pieces.scheme, pieces.netloc]) + assert pieces.path + else: - assert all([pieces.scheme, pieces.netloc]) + assert all([pieces.scheme, pieces.netloc]) ? ++++ - valid_chars = set(string.letters + string.digits + ":-_.") + valid_chars = set(string.letters + string.digits + ":-_.") ? ++++ - assert set(pieces.netloc) <= valid_chars + assert set(pieces.netloc) <= valid_chars ? ++++ - assert pieces.scheme in ['http', 'https'] + assert pieces.scheme in ['http', 'https'] ? ++++ + except AssertionError as e: raise ArgumentError(self.item_name, "The input you've provided is not a valid URL.") return original_string def validate(self, item, item_name): self.item_name = item_name item = super(URLArgument, self).validate(item, item_name) if item is not None: item = self.validate_url(item) return item -
1be9c51d4029c0fa32f7071072c171db42d21c83
doc-src/index.py
doc-src/index.py
import countershape from countershape import Page, Directory, PythonModule import countershape.grok this.layout = countershape.Layout("_layout.html") this.markdown = "rst" ns.docTitle = "Countershape Manual" ns.docMaintainer = "Aldo Cortesi" ns.docMaintainerEmail = "dev@nullcube.com" ns.copyright = "Copyright Nullcube 2007" ns.head = countershape.template.File(None, "_banner.html") ns.sidebar = countershape.widgets.SiblingPageIndex( '/index.html', exclude=['countershape'] ) ns.parse = countershape.grok.parse("../countershape") pages = [ Page("index.html", "Introduction"), Page("structure.html", "Document Structure"), Page("doc.html", "Documenting Code"), Page("api/apiref.html", "API Reference"), Directory("api"), PythonModule("../countershape", "Source"), Page("admin.html", "Administrivia") ] ns.imgBanner = countershape.html.IMG( src=countershape.model.UrlTo("countershape.png"), width="280", height="77", align="right" )
import countershape from countershape import Page, Directory, PythonModule import countershape.grok this.layout = countershape.Layout("_layout.html") this.markdown = "rst" ns.docTitle = "Countershape Manual" ns.docMaintainer = "Aldo Cortesi" ns.docMaintainerEmail = "dev@nullcube.com" ns.copyright = "Copyright Nullcube 2007" ns.head = countershape.template.File(None, "_banner.html") ns.sidebar = countershape.widgets.SiblingPageIndex( '/index.html', exclude=['countershape'] ) ns.parse = countershape.grok.parse("../countershape") pages = [ Page("index.html", "Introduction"), Page("structure/structure.html", "Document Structure"), Directory("structure"), Page("doc.html", "Documenting Code"), Page("api/apiref.html", "API Reference"), Directory("api"), PythonModule("../countershape", "Source"), Page("admin.html", "Administrivia") ] ns.imgBanner = countershape.html.IMG( src=countershape.model.UrlTo("countershape.png"), width="280", height="77", align="right" )
Move structure to a separate directory
Move structure to a separate directory
Python
mit
mhils/countershape,cortesi/countershape,samtaufa/countershape,mhils/countershape,cortesi/countershape,samtaufa/countershape
import countershape from countershape import Page, Directory, PythonModule import countershape.grok this.layout = countershape.Layout("_layout.html") this.markdown = "rst" ns.docTitle = "Countershape Manual" ns.docMaintainer = "Aldo Cortesi" ns.docMaintainerEmail = "dev@nullcube.com" ns.copyright = "Copyright Nullcube 2007" ns.head = countershape.template.File(None, "_banner.html") ns.sidebar = countershape.widgets.SiblingPageIndex( '/index.html', exclude=['countershape'] ) ns.parse = countershape.grok.parse("../countershape") pages = [ Page("index.html", "Introduction"), - Page("structure.html", "Document Structure"), + Page("structure/structure.html", "Document Structure"), + Directory("structure"), Page("doc.html", "Documenting Code"), Page("api/apiref.html", "API Reference"), Directory("api"), PythonModule("../countershape", "Source"), Page("admin.html", "Administrivia") ] ns.imgBanner = countershape.html.IMG( src=countershape.model.UrlTo("countershape.png"), width="280", height="77", align="right" )
Move structure to a separate directory
## Code Before: import countershape from countershape import Page, Directory, PythonModule import countershape.grok this.layout = countershape.Layout("_layout.html") this.markdown = "rst" ns.docTitle = "Countershape Manual" ns.docMaintainer = "Aldo Cortesi" ns.docMaintainerEmail = "dev@nullcube.com" ns.copyright = "Copyright Nullcube 2007" ns.head = countershape.template.File(None, "_banner.html") ns.sidebar = countershape.widgets.SiblingPageIndex( '/index.html', exclude=['countershape'] ) ns.parse = countershape.grok.parse("../countershape") pages = [ Page("index.html", "Introduction"), Page("structure.html", "Document Structure"), Page("doc.html", "Documenting Code"), Page("api/apiref.html", "API Reference"), Directory("api"), PythonModule("../countershape", "Source"), Page("admin.html", "Administrivia") ] ns.imgBanner = countershape.html.IMG( src=countershape.model.UrlTo("countershape.png"), width="280", height="77", align="right" ) ## Instruction: Move structure to a separate directory ## Code After: import countershape from countershape import Page, Directory, PythonModule import countershape.grok this.layout = countershape.Layout("_layout.html") this.markdown = "rst" ns.docTitle = "Countershape Manual" ns.docMaintainer = "Aldo Cortesi" ns.docMaintainerEmail = "dev@nullcube.com" ns.copyright = "Copyright Nullcube 2007" ns.head = countershape.template.File(None, "_banner.html") ns.sidebar = countershape.widgets.SiblingPageIndex( '/index.html', exclude=['countershape'] ) ns.parse = countershape.grok.parse("../countershape") pages = [ Page("index.html", "Introduction"), Page("structure/structure.html", "Document Structure"), Directory("structure"), Page("doc.html", "Documenting Code"), Page("api/apiref.html", "API Reference"), Directory("api"), PythonModule("../countershape", "Source"), Page("admin.html", "Administrivia") ] ns.imgBanner = countershape.html.IMG( src=countershape.model.UrlTo("countershape.png"), width="280", height="77", align="right" )
import countershape from countershape import Page, Directory, PythonModule import countershape.grok this.layout = countershape.Layout("_layout.html") this.markdown = "rst" ns.docTitle = "Countershape Manual" ns.docMaintainer = "Aldo Cortesi" ns.docMaintainerEmail = "dev@nullcube.com" ns.copyright = "Copyright Nullcube 2007" ns.head = countershape.template.File(None, "_banner.html") ns.sidebar = countershape.widgets.SiblingPageIndex( '/index.html', exclude=['countershape'] ) ns.parse = countershape.grok.parse("../countershape") pages = [ Page("index.html", "Introduction"), - Page("structure.html", "Document Structure"), + Page("structure/structure.html", "Document Structure"), ? ++++++++++ + Directory("structure"), Page("doc.html", "Documenting Code"), Page("api/apiref.html", "API Reference"), Directory("api"), PythonModule("../countershape", "Source"), Page("admin.html", "Administrivia") ] ns.imgBanner = countershape.html.IMG( src=countershape.model.UrlTo("countershape.png"), width="280", height="77", align="right" )
83938c9bf7aafc1f7a2a6b9594279600012ee7ef
setup.py
setup.py
import distutils.core import os.path from pypel import __version__ def read(filename): """Small tool function to read file content.""" return open(os.path.join(os.path.dirname(__file__), filename)).read() classifiers = ''' Development Status :: 3 - Alpha Environment :: Console Intended Audience :: End Users/Desktop License :: OSI Approved :: BSD License Operating System :: POSIX Programming Language :: Python Programming Language :: Python :: 2 Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 Programming Language :: Python :: 3.2 Programming Language :: Python :: 3.3 Topic :: Office/Business :: Financial '''.strip().splitlines() distutils.core.setup( name = 'pypel', version = __version__, license = 'BSD', description = 'simple tool to manage receipts', long_description = read('README.rst'), classifiers = classifiers, url = 'http://mornie.org/projects/pypel/', author = 'Daniele Tricoli', author_email = 'eriol@mornie.org', packages = ['pypel'], package_dir = dict(pypel='pypel'), scripts = ['bin/pypel'] )
import distutils.core import os.path from pypel import __version__ def read(filename): """Small tool function to read file content.""" return open(os.path.join(os.path.dirname(__file__), filename)).read() classifiers = ''' Development Status :: 3 - Alpha Environment :: Console Intended Audience :: End Users/Desktop License :: OSI Approved :: BSD License Operating System :: POSIX Programming Language :: Python Programming Language :: Python :: 2 Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 Programming Language :: Python :: 3.4 Topic :: Office/Business :: Financial '''.strip().splitlines() distutils.core.setup( name = 'pypel', version = __version__, license = 'BSD', description = 'simple tool to manage receipts', long_description = read('README.rst'), classifiers = classifiers, url = 'http://mornie.org/projects/pypel/', author = 'Daniele Tricoli', author_email = 'eriol@mornie.org', packages = ['pypel'], package_dir = dict(pypel='pypel'), scripts = ['bin/pypel'] )
Update Python supported versions classifiers
Update Python supported versions classifiers
Python
bsd-3-clause
eriol/pypel
import distutils.core import os.path from pypel import __version__ def read(filename): """Small tool function to read file content.""" return open(os.path.join(os.path.dirname(__file__), filename)).read() classifiers = ''' Development Status :: 3 - Alpha Environment :: Console Intended Audience :: End Users/Desktop License :: OSI Approved :: BSD License Operating System :: POSIX Programming Language :: Python Programming Language :: Python :: 2 Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 - Programming Language :: Python :: 3.2 + Programming Language :: Python :: 3.4 - Programming Language :: Python :: 3.3 Topic :: Office/Business :: Financial '''.strip().splitlines() distutils.core.setup( name = 'pypel', version = __version__, license = 'BSD', description = 'simple tool to manage receipts', long_description = read('README.rst'), classifiers = classifiers, url = 'http://mornie.org/projects/pypel/', author = 'Daniele Tricoli', author_email = 'eriol@mornie.org', packages = ['pypel'], package_dir = dict(pypel='pypel'), scripts = ['bin/pypel'] )
Update Python supported versions classifiers
## Code Before: import distutils.core import os.path from pypel import __version__ def read(filename): """Small tool function to read file content.""" return open(os.path.join(os.path.dirname(__file__), filename)).read() classifiers = ''' Development Status :: 3 - Alpha Environment :: Console Intended Audience :: End Users/Desktop License :: OSI Approved :: BSD License Operating System :: POSIX Programming Language :: Python Programming Language :: Python :: 2 Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 Programming Language :: Python :: 3.2 Programming Language :: Python :: 3.3 Topic :: Office/Business :: Financial '''.strip().splitlines() distutils.core.setup( name = 'pypel', version = __version__, license = 'BSD', description = 'simple tool to manage receipts', long_description = read('README.rst'), classifiers = classifiers, url = 'http://mornie.org/projects/pypel/', author = 'Daniele Tricoli', author_email = 'eriol@mornie.org', packages = ['pypel'], package_dir = dict(pypel='pypel'), scripts = ['bin/pypel'] ) ## Instruction: Update Python supported versions classifiers ## Code After: import distutils.core import os.path from pypel import __version__ def read(filename): """Small tool function to read file content.""" return open(os.path.join(os.path.dirname(__file__), filename)).read() classifiers = ''' Development Status :: 3 - Alpha Environment :: Console Intended Audience :: End Users/Desktop License :: OSI Approved :: BSD License Operating System :: POSIX Programming Language :: Python Programming Language :: Python :: 2 Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 Programming Language :: Python :: 3.4 Topic :: Office/Business :: Financial '''.strip().splitlines() distutils.core.setup( name = 'pypel', version = __version__, license = 'BSD', description = 'simple tool to manage receipts', long_description = read('README.rst'), classifiers = classifiers, url = 'http://mornie.org/projects/pypel/', 
author = 'Daniele Tricoli', author_email = 'eriol@mornie.org', packages = ['pypel'], package_dir = dict(pypel='pypel'), scripts = ['bin/pypel'] )
import distutils.core import os.path from pypel import __version__ def read(filename): """Small tool function to read file content.""" return open(os.path.join(os.path.dirname(__file__), filename)).read() classifiers = ''' Development Status :: 3 - Alpha Environment :: Console Intended Audience :: End Users/Desktop License :: OSI Approved :: BSD License Operating System :: POSIX Programming Language :: Python Programming Language :: Python :: 2 Programming Language :: Python :: 2.7 Programming Language :: Python :: 3 - Programming Language :: Python :: 3.2 ? ^ + Programming Language :: Python :: 3.4 ? ^ - Programming Language :: Python :: 3.3 Topic :: Office/Business :: Financial '''.strip().splitlines() distutils.core.setup( name = 'pypel', version = __version__, license = 'BSD', description = 'simple tool to manage receipts', long_description = read('README.rst'), classifiers = classifiers, url = 'http://mornie.org/projects/pypel/', author = 'Daniele Tricoli', author_email = 'eriol@mornie.org', packages = ['pypel'], package_dir = dict(pypel='pypel'), scripts = ['bin/pypel'] )
edbcfbf83ab79fff7de00c7a6310c9fceb17df91
accelerator/migrations/0099_update_program_model.py
accelerator/migrations/0099_update_program_model.py
from django.db import ( migrations, models, ) class Migration(migrations.Migration): dependencies = [ ('accelerator', '0098_update_startup_update_20220408_0441'), ] operations = [ migrations.AddField( model_name='program', name='hubspot_url', field=models.URLField(blank=True, null=True), ), migrations.AddField( model_name='program', name='program_image', field=models.ImageField(null=True, upload_to=''), ), ]
import sorl.thumbnail.fields from django.db import ( migrations, models, ) class Migration(migrations.Migration): dependencies = [ ('accelerator', '0098_update_startup_update_20220408_0441'), ] operations = [ migrations.AddField( model_name='program', name='hubspot_url', field=models.URLField(blank=True, null=True), ), migrations.AddField( model_name='program', name='program_image', field=sorl.thumbnail.fields.ImageField( null=True, upload_to='program_images'), ), ]
Fix image field import and migration
[AC-9452] Fix image field import and migration
Python
mit
masschallenge/django-accelerator,masschallenge/django-accelerator
+ import sorl.thumbnail.fields from django.db import ( migrations, models, ) class Migration(migrations.Migration): dependencies = [ ('accelerator', '0098_update_startup_update_20220408_0441'), ] operations = [ migrations.AddField( model_name='program', name='hubspot_url', field=models.URLField(blank=True, null=True), ), migrations.AddField( model_name='program', name='program_image', - field=models.ImageField(null=True, upload_to=''), + field=sorl.thumbnail.fields.ImageField( + null=True, + upload_to='program_images'), ), ]
Fix image field import and migration
## Code Before: from django.db import ( migrations, models, ) class Migration(migrations.Migration): dependencies = [ ('accelerator', '0098_update_startup_update_20220408_0441'), ] operations = [ migrations.AddField( model_name='program', name='hubspot_url', field=models.URLField(blank=True, null=True), ), migrations.AddField( model_name='program', name='program_image', field=models.ImageField(null=True, upload_to=''), ), ] ## Instruction: Fix image field import and migration ## Code After: import sorl.thumbnail.fields from django.db import ( migrations, models, ) class Migration(migrations.Migration): dependencies = [ ('accelerator', '0098_update_startup_update_20220408_0441'), ] operations = [ migrations.AddField( model_name='program', name='hubspot_url', field=models.URLField(blank=True, null=True), ), migrations.AddField( model_name='program', name='program_image', field=sorl.thumbnail.fields.ImageField( null=True, upload_to='program_images'), ), ]
+ import sorl.thumbnail.fields from django.db import ( migrations, models, ) class Migration(migrations.Migration): dependencies = [ ('accelerator', '0098_update_startup_update_20220408_0441'), ] operations = [ migrations.AddField( model_name='program', name='hubspot_url', field=models.URLField(blank=True, null=True), ), migrations.AddField( model_name='program', name='program_image', - field=models.ImageField(null=True, upload_to=''), + field=sorl.thumbnail.fields.ImageField( + null=True, + upload_to='program_images'), ), ]
5e6d62ce7a567282a88530a2db80b775c9c4406e
swf/core.py
swf/core.py
import boto.swf from . import settings SETTINGS = settings.get() class ConnectedSWFObject(object): """Authenticated object interface Provides the instance attributes: - `region`: name of the AWS region - `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object): """ __slots__ = [ 'region', 'connection' ] def __init__(self, *args, **kwargs): settings_ = {k: v for k, v in SETTINGS.iteritems()} settings_.update(kwargs) self.region = (settings_.pop('region', None) or boto.swf.layer1.Layer1.DefaultRegionName) self.connection = boto.swf.connect_to_region(self.region, **settings_) if self.connection is None: raise ValueError('invalid region: {}'.format(self.region))
import boto.swf from . import settings SETTINGS = settings.get() class ConnectedSWFObject(object): """Authenticated object interface Provides the instance attributes: - `region`: name of the AWS region - `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object): """ __slots__ = [ 'region', 'connection' ] def __init__(self, *args, **kwargs): settings_ = {key: SETTINGS.get(key, kwargs.get(key)) for key in ('aws_access_key_id', 'aws_secret_access_key')} self.region = (SETTINGS.get('region') or kwargs.get('region') or boto.swf.layer1.Layer1.DefaultRegionName) self.connection = (kwargs.pop('connection', None) or boto.swf.connect_to_region(self.region, **settings_)) if self.connection is None: raise ValueError('invalid region: {}'.format(self.region))
Fix ConnectedSWFObject: restrict attributes set by constructor
Fix ConnectedSWFObject: restrict attributes set by constructor - credentials: SETTINGS | kwargs - region: SETTINGS | kwargs | boto.swf.layer1.Layer1.DefaultRegionName - connection: kwargs
Python
mit
botify-labs/python-simple-workflow,botify-labs/python-simple-workflow
import boto.swf from . import settings SETTINGS = settings.get() class ConnectedSWFObject(object): """Authenticated object interface Provides the instance attributes: - `region`: name of the AWS region - `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object): """ __slots__ = [ 'region', 'connection' ] def __init__(self, *args, **kwargs): - settings_ = {k: v for k, v in SETTINGS.iteritems()} - settings_.update(kwargs) + settings_ = {key: SETTINGS.get(key, kwargs.get(key)) for key in + ('aws_access_key_id', + 'aws_secret_access_key')} - self.region = (settings_.pop('region', None) or + self.region = (SETTINGS.get('region') or + kwargs.get('region') or boto.swf.layer1.Layer1.DefaultRegionName) + self.connection = (kwargs.pop('connection', None) or - self.connection = boto.swf.connect_to_region(self.region, **settings_) + boto.swf.connect_to_region(self.region, **settings_)) if self.connection is None: raise ValueError('invalid region: {}'.format(self.region))
Fix ConnectedSWFObject: restrict attributes set by constructor
## Code Before: import boto.swf from . import settings SETTINGS = settings.get() class ConnectedSWFObject(object): """Authenticated object interface Provides the instance attributes: - `region`: name of the AWS region - `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object): """ __slots__ = [ 'region', 'connection' ] def __init__(self, *args, **kwargs): settings_ = {k: v for k, v in SETTINGS.iteritems()} settings_.update(kwargs) self.region = (settings_.pop('region', None) or boto.swf.layer1.Layer1.DefaultRegionName) self.connection = boto.swf.connect_to_region(self.region, **settings_) if self.connection is None: raise ValueError('invalid region: {}'.format(self.region)) ## Instruction: Fix ConnectedSWFObject: restrict attributes set by constructor ## Code After: import boto.swf from . import settings SETTINGS = settings.get() class ConnectedSWFObject(object): """Authenticated object interface Provides the instance attributes: - `region`: name of the AWS region - `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object): """ __slots__ = [ 'region', 'connection' ] def __init__(self, *args, **kwargs): settings_ = {key: SETTINGS.get(key, kwargs.get(key)) for key in ('aws_access_key_id', 'aws_secret_access_key')} self.region = (SETTINGS.get('region') or kwargs.get('region') or boto.swf.layer1.Layer1.DefaultRegionName) self.connection = (kwargs.pop('connection', None) or boto.swf.connect_to_region(self.region, **settings_)) if self.connection is None: raise ValueError('invalid region: {}'.format(self.region))
import boto.swf from . import settings SETTINGS = settings.get() class ConnectedSWFObject(object): """Authenticated object interface Provides the instance attributes: - `region`: name of the AWS region - `connection`: to the SWF endpoint (`boto.swf.layer1.Layer1` object): """ __slots__ = [ 'region', 'connection' ] def __init__(self, *args, **kwargs): - settings_ = {k: v for k, v in SETTINGS.iteritems()} - settings_.update(kwargs) + settings_ = {key: SETTINGS.get(key, kwargs.get(key)) for key in + ('aws_access_key_id', + 'aws_secret_access_key')} - self.region = (settings_.pop('region', None) or + self.region = (SETTINGS.get('region') or + kwargs.get('region') or boto.swf.layer1.Layer1.DefaultRegionName) + self.connection = (kwargs.pop('connection', None) or - self.connection = boto.swf.connect_to_region(self.region, **settings_) ? --------------- ^ + boto.swf.connect_to_region(self.region, **settings_)) ? ^^^^^^^^^^^^^^^^^ + if self.connection is None: raise ValueError('invalid region: {}'.format(self.region))
3661edd55553ff2dff27cb102a83d4751e033f2a
painter/management/commands/import_cards.py
painter/management/commands/import_cards.py
import tablib from django.core.management.base import BaseCommand from painter.models import Card class Command(BaseCommand): def handle(self, *args, **options): dataset = tablib.Dataset()
import tablib from django.core.management.base import BaseCommand from painter.models import Card class Command(BaseCommand): help = ('Clears the database of cards, then fills it with the contents of one or' + ' more specified CSV files.') def add_arguments(self, parser): parser.add_argument( 'filenames', nargs='+', type=str, help='One or more CSV file names. The extension is optional.', ) def handle(self, *args, **options): dataset = tablib.Dataset() for filename in options['filenames']: print(filename)
Add help text and a 'filenames' argument.
Add help text and a 'filenames' argument. * Make it print the filenames it's receiving for the sake of good testing output.
Python
mit
adam-incuna/imperial-painter,adam-thomas/imperial-painter,adam-thomas/imperial-painter,adam-incuna/imperial-painter
import tablib from django.core.management.base import BaseCommand from painter.models import Card class Command(BaseCommand): + help = ('Clears the database of cards, then fills it with the contents of one or' + + ' more specified CSV files.') + + def add_arguments(self, parser): + parser.add_argument( + 'filenames', + nargs='+', + type=str, + help='One or more CSV file names. The extension is optional.', + ) + def handle(self, *args, **options): dataset = tablib.Dataset() + for filename in options['filenames']: + print(filename)
Add help text and a 'filenames' argument.
## Code Before: import tablib from django.core.management.base import BaseCommand from painter.models import Card class Command(BaseCommand): def handle(self, *args, **options): dataset = tablib.Dataset() ## Instruction: Add help text and a 'filenames' argument. ## Code After: import tablib from django.core.management.base import BaseCommand from painter.models import Card class Command(BaseCommand): help = ('Clears the database of cards, then fills it with the contents of one or' + ' more specified CSV files.') def add_arguments(self, parser): parser.add_argument( 'filenames', nargs='+', type=str, help='One or more CSV file names. The extension is optional.', ) def handle(self, *args, **options): dataset = tablib.Dataset() for filename in options['filenames']: print(filename)
import tablib from django.core.management.base import BaseCommand from painter.models import Card class Command(BaseCommand): + help = ('Clears the database of cards, then fills it with the contents of one or' + + ' more specified CSV files.') + + def add_arguments(self, parser): + parser.add_argument( + 'filenames', + nargs='+', + type=str, + help='One or more CSV file names. The extension is optional.', + ) + def handle(self, *args, **options): dataset = tablib.Dataset() + for filename in options['filenames']: + print(filename)
ff0da634e1fa0f8b190a3ba2cac3a03f7df75f91
memegen/test/test_routes__common.py
memegen/test/test_routes__common.py
from unittest.mock import patch, Mock from memegen.app import create_app from memegen.settings import get_config from memegen.routes._common import display def describe_display(): app = create_app(get_config('test')) app.config['GOOGLE_ANALYTICS_TID'] = 'my_tid' request_html = Mock() request_html.headers.get = Mock(return_value="text/html") request_html.path = "it's a path" @patch('memegen.routes._common.request', request_html) def it_returns_html_for_browsers(): with app.test_request_context(): html = display("my_title", "my_path", raw=True) print(html) assert "<title>my_title</title>" in html assert 'url("it\'s a path")' in html assert "ga('create', 'my_tid', 'auto');" in html
from unittest.mock import patch, call, Mock import pytest from expecter import expect from memegen.app import create_app from memegen.settings import get_config from memegen.routes._common import display def describe_display(): @pytest.fixture def app(): app = create_app(get_config('test')) app.config['GOOGLE_ANALYTICS_TID'] = 'my_tid' return app request_html = Mock(path="it's a path") request_html.headers.get = Mock(return_value="text/html") request_image = Mock(path="it's a path") request_image.headers.get = Mock(return_value="(not a browser)") @patch('memegen.routes._common.request', request_html) def it_returns_html_for_browsers(app): with app.test_request_context(): html = display("my_title", "my_path", raw=True) print(html) assert "<title>my_title</title>" in html assert 'url("it\'s a path")' in html assert "ga('create', 'my_tid', 'auto');" in html @patch('memegen.routes._common._track') @patch('memegen.routes._common.send_file') @patch('memegen.routes._common.request', request_image) def it_returns_an_image_otherwise(mock_send_file, mock_track): display("my_title", "my_path") expect(mock_track.mock_calls) == [ call("my_title"), ] expect(mock_send_file.mock_calls) == [ call("my_path", mimetype='image/jpeg'), ]
Test that a request defaults to sending an image
Test that a request defaults to sending an image
Python
mit
joshfriend/memegen,joshfriend/memegen,DanLindeman/memegen,DanLindeman/memegen,DanLindeman/memegen,DanLindeman/memegen,joshfriend/memegen,joshfriend/memegen
- from unittest.mock import patch, Mock + from unittest.mock import patch, call, Mock + + import pytest + from expecter import expect from memegen.app import create_app from memegen.settings import get_config from memegen.routes._common import display def describe_display(): + @pytest.fixture + def app(): - app = create_app(get_config('test')) + app = create_app(get_config('test')) - app.config['GOOGLE_ANALYTICS_TID'] = 'my_tid' + app.config['GOOGLE_ANALYTICS_TID'] = 'my_tid' + return app - request_html = Mock() + request_html = Mock(path="it's a path") request_html.headers.get = Mock(return_value="text/html") + - request_html.path = "it's a path" + request_image = Mock(path="it's a path") + request_image.headers.get = Mock(return_value="(not a browser)") @patch('memegen.routes._common.request', request_html) - def it_returns_html_for_browsers(): + def it_returns_html_for_browsers(app): with app.test_request_context(): html = display("my_title", "my_path", raw=True) print(html) assert "<title>my_title</title>" in html assert 'url("it\'s a path")' in html assert "ga('create', 'my_tid', 'auto');" in html + @patch('memegen.routes._common._track') + @patch('memegen.routes._common.send_file') + @patch('memegen.routes._common.request', request_image) + def it_returns_an_image_otherwise(mock_send_file, mock_track): + + display("my_title", "my_path") + + expect(mock_track.mock_calls) == [ + call("my_title"), + ] + expect(mock_send_file.mock_calls) == [ + call("my_path", mimetype='image/jpeg'), + ] +
Test that a request defaults to sending an image
## Code Before: from unittest.mock import patch, Mock from memegen.app import create_app from memegen.settings import get_config from memegen.routes._common import display def describe_display(): app = create_app(get_config('test')) app.config['GOOGLE_ANALYTICS_TID'] = 'my_tid' request_html = Mock() request_html.headers.get = Mock(return_value="text/html") request_html.path = "it's a path" @patch('memegen.routes._common.request', request_html) def it_returns_html_for_browsers(): with app.test_request_context(): html = display("my_title", "my_path", raw=True) print(html) assert "<title>my_title</title>" in html assert 'url("it\'s a path")' in html assert "ga('create', 'my_tid', 'auto');" in html ## Instruction: Test that a request defaults to sending an image ## Code After: from unittest.mock import patch, call, Mock import pytest from expecter import expect from memegen.app import create_app from memegen.settings import get_config from memegen.routes._common import display def describe_display(): @pytest.fixture def app(): app = create_app(get_config('test')) app.config['GOOGLE_ANALYTICS_TID'] = 'my_tid' return app request_html = Mock(path="it's a path") request_html.headers.get = Mock(return_value="text/html") request_image = Mock(path="it's a path") request_image.headers.get = Mock(return_value="(not a browser)") @patch('memegen.routes._common.request', request_html) def it_returns_html_for_browsers(app): with app.test_request_context(): html = display("my_title", "my_path", raw=True) print(html) assert "<title>my_title</title>" in html assert 'url("it\'s a path")' in html assert "ga('create', 'my_tid', 'auto');" in html @patch('memegen.routes._common._track') @patch('memegen.routes._common.send_file') @patch('memegen.routes._common.request', request_image) def it_returns_an_image_otherwise(mock_send_file, mock_track): display("my_title", "my_path") expect(mock_track.mock_calls) == [ call("my_title"), ] expect(mock_send_file.mock_calls) == [ call("my_path", 
mimetype='image/jpeg'), ]
- from unittest.mock import patch, Mock + from unittest.mock import patch, call, Mock ? ++++++ + + import pytest + from expecter import expect from memegen.app import create_app from memegen.settings import get_config from memegen.routes._common import display def describe_display(): + @pytest.fixture + def app(): - app = create_app(get_config('test')) + app = create_app(get_config('test')) ? ++++ - app.config['GOOGLE_ANALYTICS_TID'] = 'my_tid' + app.config['GOOGLE_ANALYTICS_TID'] = 'my_tid' ? ++++ + return app - request_html = Mock() + request_html = Mock(path="it's a path") request_html.headers.get = Mock(return_value="text/html") + - request_html.path = "it's a path" ? ^^ ^^ - - + request_image = Mock(path="it's a path") ? ^ ^^^^^^^^^^^ + + request_image.headers.get = Mock(return_value="(not a browser)") @patch('memegen.routes._common.request', request_html) - def it_returns_html_for_browsers(): + def it_returns_html_for_browsers(app): ? +++ with app.test_request_context(): html = display("my_title", "my_path", raw=True) print(html) assert "<title>my_title</title>" in html assert 'url("it\'s a path")' in html assert "ga('create', 'my_tid', 'auto');" in html + + @patch('memegen.routes._common._track') + @patch('memegen.routes._common.send_file') + @patch('memegen.routes._common.request', request_image) + def it_returns_an_image_otherwise(mock_send_file, mock_track): + + display("my_title", "my_path") + + expect(mock_track.mock_calls) == [ + call("my_title"), + ] + expect(mock_send_file.mock_calls) == [ + call("my_path", mimetype='image/jpeg'), + ]
8a8d4905c169b9a1060f1283d0286c433af24f43
word2gauss/words.py
word2gauss/words.py
from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to drawn for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize(doc, remove_oov=False, return_ids=True) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size))
from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to drawn for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize_ids(doc, remove_oov=False) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size))
Change the interface on tokenize in vocabulary
Change the interface on tokenize in vocabulary
Python
mit
seomoz/word2gauss,seomoz/word2gauss
from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to drawn for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ - vocab.tokenize(doc, remove_oov=False, return_ids=True) + vocab.tokenize_ids(doc, remove_oov=False) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size))
Change the interface on tokenize in vocabulary
## Code Before: from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to drawn for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize(doc, remove_oov=False, return_ids=True) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size)) ## Instruction: Change the interface on tokenize in vocabulary ## Code After: from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. 
a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to drawn for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ vocab.tokenize_ids(doc, remove_oov=False) for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size))
from itertools import islice from .embeddings import text_to_pairs def iter_pairs(fin, vocab, batch_size=10, nsamples=2, window=5): ''' Convert a document stream to batches of pairs used for training embeddings. iter_pairs is a generator that yields batches of pairs that can be passed to GaussianEmbedding.train fin = an iterator of documents / sentences (e.g. a file like object) Each element is a string of raw text vocab = something implementing the Vocabulary interface batch_size = size of batches window = Number of words to the left and right of center word to include as positive pairs nsamples = number of negative samples to drawn for each center word ''' documents = iter(fin) batch = list(islice(documents, batch_size)) while len(batch) > 0: text = [ - vocab.tokenize(doc, remove_oov=False, return_ids=True) ? ----------------- + vocab.tokenize_ids(doc, remove_oov=False) ? ++++ for doc in batch ] pairs = text_to_pairs(text, vocab.random_ids, nsamples_per_word=nsamples, half_window_size=window) yield pairs batch = list(islice(documents, batch_size))