Dataset columns (type and value-size range):

commit: stringlengths, 40 to 40
old_file: stringlengths, 4 to 106
new_file: stringlengths, 4 to 106
old_contents: stringlengths, 10 to 2.94k
new_contents: stringlengths, 21 to 2.95k
subject: stringlengths, 16 to 444
message: stringlengths, 17 to 2.63k
lang: stringclasses, 1 value
license: stringclasses, 13 values
repos: stringlengths, 7 to 43k
ndiff: stringlengths, 52 to 3.31k
instruction: stringlengths, 16 to 444
content: stringlengths, 133 to 4.32k
diff: stringlengths, 49 to 3.61k
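Judging from the example rows below, the composite `content` column appears to be assembled from `old_contents`, `instruction`, and `new_contents` under "## Code Before:", "## Instruction:", and "## Code After:" headings, with `instruction` mirroring `subject`, and `ndiff`/`diff` holding diff-style renderings of the same before/after pair. A minimal sketch of that assembly, assuming each row is available as a plain dict keyed by the column names above (the exact whitespace placement is an assumption, not a documented spec):

```python
def build_content(row):
    # Compose the `content` field from its apparent parts; the precise
    # newline placement is inferred from the sample rows, not specified.
    return (
        f"## Code Before:\n{row['old_contents']}\n"
        f"## Instruction: {row['instruction']}\n"
        f"## Code After:\n{row['new_contents']}\n"
    )
```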
9daac0977933238929eda5e05c635e3a626cbe21
tests/test_example.py
tests/test_example.py
import unittest import object_storage_tensorflow as obj_tf class TestStringMethods(unittest.TestCase): def test_upper(self): self.assertEqual('foo'.upper(), 'FOO') def test_isupper(self): self.assertTrue('FOO'.isupper()) self.assertFalse('Foo'.isupper()) def test_split(self): s = 'hello world' self.assertEqual(s.split(), ['hello', 'world']) # check that s.split fails when the separator is not a string with self.assertRaises(TypeError): s.split(2) class TestS3Connection(unittest.TestCase): def test_buckets(self): conn = obj_tf.s3.getConnection() names = [] for bucket in conn.buckets.all(): names.append(bucket.name) self.assertTrue(len(names) > 0) if __name__ == '__main__': unittest.main()
import os import unittest import object_storage_tensorflow as obj_tf class TestStringMethods(unittest.TestCase): def test_upper(self): self.assertEqual('foo'.upper(), 'FOO') def test_isupper(self): self.assertTrue('FOO'.isupper()) self.assertFalse('Foo'.isupper()) def test_split(self): s = 'hello world' self.assertEqual(s.split(), ['hello', 'world']) # check that s.split fails when the separator is not a string with self.assertRaises(TypeError): s.split(2) class TestS3Connection(unittest.TestCase): @unittest.skipUnless(os.environ.get("TRAVIS_PULL_REQUEST") == 'false', "S3 tests will fail for Pull Requests due to lack of secrets.") def test_buckets(self): conn = obj_tf.s3.getConnection() names = [] for bucket in conn.buckets.all(): names.append(bucket.name) self.assertTrue(len(names) > 0) if __name__ == '__main__': unittest.main()
Add intelligent skip for missing secret info
Add intelligent skip for missing secret info
Python
apache-2.0
marshallford/ndsu-ibm-capstone,marshallford/ndsu-ibm-capstone
+ import os import unittest + import object_storage_tensorflow as obj_tf class TestStringMethods(unittest.TestCase): def test_upper(self): self.assertEqual('foo'.upper(), 'FOO') def test_isupper(self): self.assertTrue('FOO'.isupper()) self.assertFalse('Foo'.isupper()) def test_split(self): s = 'hello world' self.assertEqual(s.split(), ['hello', 'world']) # check that s.split fails when the separator is not a string with self.assertRaises(TypeError): s.split(2) class TestS3Connection(unittest.TestCase): + @unittest.skipUnless(os.environ.get("TRAVIS_PULL_REQUEST") == 'false', + "S3 tests will fail for Pull Requests due to lack of secrets.") def test_buckets(self): conn = obj_tf.s3.getConnection() names = [] for bucket in conn.buckets.all(): names.append(bucket.name) self.assertTrue(len(names) > 0) if __name__ == '__main__': unittest.main()
Add intelligent skip for missing secret info
## Code Before: import unittest import object_storage_tensorflow as obj_tf class TestStringMethods(unittest.TestCase): def test_upper(self): self.assertEqual('foo'.upper(), 'FOO') def test_isupper(self): self.assertTrue('FOO'.isupper()) self.assertFalse('Foo'.isupper()) def test_split(self): s = 'hello world' self.assertEqual(s.split(), ['hello', 'world']) # check that s.split fails when the separator is not a string with self.assertRaises(TypeError): s.split(2) class TestS3Connection(unittest.TestCase): def test_buckets(self): conn = obj_tf.s3.getConnection() names = [] for bucket in conn.buckets.all(): names.append(bucket.name) self.assertTrue(len(names) > 0) if __name__ == '__main__': unittest.main() ## Instruction: Add intelligent skip for missing secret info ## Code After: import os import unittest import object_storage_tensorflow as obj_tf class TestStringMethods(unittest.TestCase): def test_upper(self): self.assertEqual('foo'.upper(), 'FOO') def test_isupper(self): self.assertTrue('FOO'.isupper()) self.assertFalse('Foo'.isupper()) def test_split(self): s = 'hello world' self.assertEqual(s.split(), ['hello', 'world']) # check that s.split fails when the separator is not a string with self.assertRaises(TypeError): s.split(2) class TestS3Connection(unittest.TestCase): @unittest.skipUnless(os.environ.get("TRAVIS_PULL_REQUEST") == 'false', "S3 tests will fail for Pull Requests due to lack of secrets.") def test_buckets(self): conn = obj_tf.s3.getConnection() names = [] for bucket in conn.buckets.all(): names.append(bucket.name) self.assertTrue(len(names) > 0) if __name__ == '__main__': unittest.main()
+ import os import unittest + import object_storage_tensorflow as obj_tf class TestStringMethods(unittest.TestCase): def test_upper(self): self.assertEqual('foo'.upper(), 'FOO') def test_isupper(self): self.assertTrue('FOO'.isupper()) self.assertFalse('Foo'.isupper()) def test_split(self): s = 'hello world' self.assertEqual(s.split(), ['hello', 'world']) # check that s.split fails when the separator is not a string with self.assertRaises(TypeError): s.split(2) class TestS3Connection(unittest.TestCase): + @unittest.skipUnless(os.environ.get("TRAVIS_PULL_REQUEST") == 'false', + "S3 tests will fail for Pull Requests due to lack of secrets.") def test_buckets(self): conn = obj_tf.s3.getConnection() names = [] for bucket in conn.buckets.all(): names.append(bucket.name) self.assertTrue(len(names) > 0) if __name__ == '__main__': unittest.main()
b636affedea494f1733bf413986a8546d3495c53
chipy_org/apps/meetings/urls.py
chipy_org/apps/meetings/urls.py
from django.conf.urls.defaults import * from django.contrib.auth.decorators import login_required from meetings.views import (PastMeetings, ProposeTopic, MyTopics, RSVP, PastTopics, ) urlpatterns = patterns("", url(r'^past/$', PastMeetings.as_view(), name='past_meetings'), url(r'^rsvp/$', RSVP.as_view(), name='rsvp'), url(r'^rsvp/anonymous/$', RSVP.as_view(), name='anonymous_rsvp'), url(r'^topics/propose$', login_required(ProposeTopic.as_view()), name='propose_topic'), url(r'^topics/mine$', login_required(MyTopics.as_view()), name='my_topics'), url(r'^topics/past$', PastTopics.as_view(), name='past_topics'), )
from django.conf.urls.defaults import * from django.contrib.auth.decorators import login_required from meetings.views import (PastMeetings, ProposeTopic, MyTopics, RSVP, PastTopics, ) urlpatterns = patterns("", url(r'^past/$', PastMeetings.as_view(), name='past_meetings'), url(r'^rsvp/$', RSVP.as_view(), name='rsvp'), url(r'^rsvp/anonymous/$', RSVP.as_view(), name='anonymous_rsvp'), url(r'^rsvp/anonymous/(?P<rsvp_key>[a-z0-1]{40})/$', RSVP.as_view(), name='anonymous_rsvp_with_key'), url(r'^topics/propose$', login_required(ProposeTopic.as_view()), name='propose_topic'), url(r'^topics/mine$', login_required(MyTopics.as_view()), name='my_topics'), url(r'^topics/past$', PastTopics.as_view(), name='past_topics'), )
Add url for anonymous rsvp with key
Add url for anonymous rsvp with key
Python
mit
agfor/chipy.org,bharathelangovan/chipy.org,bharathelangovan/chipy.org,agfor/chipy.org,tanyaschlusser/chipy.org,bharathelangovan/chipy.org,brianray/chipy.org,brianray/chipy.org,tanyaschlusser/chipy.org,brianray/chipy.org,chicagopython/chipy.org,chicagopython/chipy.org,tanyaschlusser/chipy.org,chicagopython/chipy.org,chicagopython/chipy.org,agfor/chipy.org
from django.conf.urls.defaults import * from django.contrib.auth.decorators import login_required from meetings.views import (PastMeetings, ProposeTopic, MyTopics, RSVP, PastTopics, ) urlpatterns = patterns("", url(r'^past/$', PastMeetings.as_view(), name='past_meetings'), url(r'^rsvp/$', RSVP.as_view(), name='rsvp'), url(r'^rsvp/anonymous/$', RSVP.as_view(), name='anonymous_rsvp'), + url(r'^rsvp/anonymous/(?P<rsvp_key>[a-z0-1]{40})/$', RSVP.as_view(), name='anonymous_rsvp_with_key'), url(r'^topics/propose$', login_required(ProposeTopic.as_view()), name='propose_topic'), url(r'^topics/mine$', login_required(MyTopics.as_view()), name='my_topics'), url(r'^topics/past$', PastTopics.as_view(), name='past_topics'), )
Add url for anonymous rsvp with key
## Code Before: from django.conf.urls.defaults import * from django.contrib.auth.decorators import login_required from meetings.views import (PastMeetings, ProposeTopic, MyTopics, RSVP, PastTopics, ) urlpatterns = patterns("", url(r'^past/$', PastMeetings.as_view(), name='past_meetings'), url(r'^rsvp/$', RSVP.as_view(), name='rsvp'), url(r'^rsvp/anonymous/$', RSVP.as_view(), name='anonymous_rsvp'), url(r'^topics/propose$', login_required(ProposeTopic.as_view()), name='propose_topic'), url(r'^topics/mine$', login_required(MyTopics.as_view()), name='my_topics'), url(r'^topics/past$', PastTopics.as_view(), name='past_topics'), ) ## Instruction: Add url for anonymous rsvp with key ## Code After: from django.conf.urls.defaults import * from django.contrib.auth.decorators import login_required from meetings.views import (PastMeetings, ProposeTopic, MyTopics, RSVP, PastTopics, ) urlpatterns = patterns("", url(r'^past/$', PastMeetings.as_view(), name='past_meetings'), url(r'^rsvp/$', RSVP.as_view(), name='rsvp'), url(r'^rsvp/anonymous/$', RSVP.as_view(), name='anonymous_rsvp'), url(r'^rsvp/anonymous/(?P<rsvp_key>[a-z0-1]{40})/$', RSVP.as_view(), name='anonymous_rsvp_with_key'), url(r'^topics/propose$', login_required(ProposeTopic.as_view()), name='propose_topic'), url(r'^topics/mine$', login_required(MyTopics.as_view()), name='my_topics'), url(r'^topics/past$', PastTopics.as_view(), name='past_topics'), )
from django.conf.urls.defaults import * from django.contrib.auth.decorators import login_required from meetings.views import (PastMeetings, ProposeTopic, MyTopics, RSVP, PastTopics, ) urlpatterns = patterns("", url(r'^past/$', PastMeetings.as_view(), name='past_meetings'), url(r'^rsvp/$', RSVP.as_view(), name='rsvp'), url(r'^rsvp/anonymous/$', RSVP.as_view(), name='anonymous_rsvp'), + url(r'^rsvp/anonymous/(?P<rsvp_key>[a-z0-1]{40})/$', RSVP.as_view(), name='anonymous_rsvp_with_key'), url(r'^topics/propose$', login_required(ProposeTopic.as_view()), name='propose_topic'), url(r'^topics/mine$', login_required(MyTopics.as_view()), name='my_topics'), url(r'^topics/past$', PastTopics.as_view(), name='past_topics'), )
440593615adca029b11575e604d251c7b68942b4
api/licenses/serializers.py
api/licenses/serializers.py
from rest_framework import serializers as ser from api.base.serializers import ( JSONAPISerializer, LinksField, IDField, TypeField ) from api.base.utils import absolute_reverse class LicenseSerializer(JSONAPISerializer): filterable_fields = frozenset([ 'name', 'id', ]) non_anonymized_fields = ['type'] id = IDField(source='_id', read_only=True) type = TypeField() name = ser.CharField(required=True, help_text='License name') text = ser.CharField(required=True, help_text='Full text of the license') required_fields = ser.ListField(source='properties', read_only=True, help_text='Fields required for this license (provided to help front-end validators)') links = LinksField({'self': 'get_absolute_url'}) class Meta: type_ = 'licenses' def get_absolute_url(self, obj): return absolute_reverse('licenses:license-detail', kwargs={ 'license_id': obj._id, 'version': self.context['request'].parser_context['kwargs']['version'] })
from rest_framework import serializers as ser from api.base.serializers import ( JSONAPISerializer, LinksField, IDField, TypeField ) from api.base.utils import absolute_reverse class LicenseSerializer(JSONAPISerializer): filterable_fields = frozenset([ 'name', 'id', ]) non_anonymized_fields = ['type'] id = IDField(source='_id', read_only=True) type = TypeField() name = ser.CharField(required=True, help_text='License name') text = ser.CharField(required=True, help_text='Full text of the license') url = ser.URLField(required=False, help_text='URL for the license') required_fields = ser.ListField(source='properties', read_only=True, help_text='Fields required for this license (provided to help front-end validators)') links = LinksField({'self': 'get_absolute_url'}) class Meta: type_ = 'licenses' def get_absolute_url(self, obj): return absolute_reverse('licenses:license-detail', kwargs={ 'license_id': obj._id, 'version': self.context['request'].parser_context['kwargs']['version'] })
Add url to the license api serializer
Add url to the license api serializer
Python
apache-2.0
felliott/osf.io,baylee-d/osf.io,sloria/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,adlius/osf.io,HalcyonChimera/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,felliott/osf.io,mfraezz/osf.io,cslzchen/osf.io,icereval/osf.io,felliott/osf.io,adlius/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,caseyrollins/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,saradbowman/osf.io,mfraezz/osf.io,adlius/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,cslzchen/osf.io,caseyrollins/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,adlius/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,aaxelb/osf.io,aaxelb/osf.io,mfraezz/osf.io,erinspace/osf.io,pattisdr/osf.io,mattclark/osf.io,erinspace/osf.io,icereval/osf.io,cslzchen/osf.io,caseyrollins/osf.io,aaxelb/osf.io,cslzchen/osf.io,mfraezz/osf.io,felliott/osf.io,brianjgeiger/osf.io,sloria/osf.io,icereval/osf.io,mattclark/osf.io,sloria/osf.io,erinspace/osf.io
from rest_framework import serializers as ser from api.base.serializers import ( JSONAPISerializer, LinksField, IDField, TypeField ) from api.base.utils import absolute_reverse class LicenseSerializer(JSONAPISerializer): filterable_fields = frozenset([ 'name', 'id', ]) non_anonymized_fields = ['type'] id = IDField(source='_id', read_only=True) type = TypeField() name = ser.CharField(required=True, help_text='License name') text = ser.CharField(required=True, help_text='Full text of the license') + url = ser.URLField(required=False, help_text='URL for the license') required_fields = ser.ListField(source='properties', read_only=True, help_text='Fields required for this license (provided to help front-end validators)') links = LinksField({'self': 'get_absolute_url'}) class Meta: type_ = 'licenses' def get_absolute_url(self, obj): return absolute_reverse('licenses:license-detail', kwargs={ 'license_id': obj._id, 'version': self.context['request'].parser_context['kwargs']['version'] })
Add url to the license api serializer
## Code Before: from rest_framework import serializers as ser from api.base.serializers import ( JSONAPISerializer, LinksField, IDField, TypeField ) from api.base.utils import absolute_reverse class LicenseSerializer(JSONAPISerializer): filterable_fields = frozenset([ 'name', 'id', ]) non_anonymized_fields = ['type'] id = IDField(source='_id', read_only=True) type = TypeField() name = ser.CharField(required=True, help_text='License name') text = ser.CharField(required=True, help_text='Full text of the license') required_fields = ser.ListField(source='properties', read_only=True, help_text='Fields required for this license (provided to help front-end validators)') links = LinksField({'self': 'get_absolute_url'}) class Meta: type_ = 'licenses' def get_absolute_url(self, obj): return absolute_reverse('licenses:license-detail', kwargs={ 'license_id': obj._id, 'version': self.context['request'].parser_context['kwargs']['version'] }) ## Instruction: Add url to the license api serializer ## Code After: from rest_framework import serializers as ser from api.base.serializers import ( JSONAPISerializer, LinksField, IDField, TypeField ) from api.base.utils import absolute_reverse class LicenseSerializer(JSONAPISerializer): filterable_fields = frozenset([ 'name', 'id', ]) non_anonymized_fields = ['type'] id = IDField(source='_id', read_only=True) type = TypeField() name = ser.CharField(required=True, help_text='License name') text = ser.CharField(required=True, help_text='Full text of the license') url = ser.URLField(required=False, help_text='URL for the license') required_fields = ser.ListField(source='properties', read_only=True, help_text='Fields required for this license (provided to help front-end validators)') links = LinksField({'self': 'get_absolute_url'}) class Meta: type_ = 'licenses' def get_absolute_url(self, obj): return absolute_reverse('licenses:license-detail', kwargs={ 'license_id': obj._id, 'version': self.context['request'].parser_context['kwargs']['version'] })
from rest_framework import serializers as ser from api.base.serializers import ( JSONAPISerializer, LinksField, IDField, TypeField ) from api.base.utils import absolute_reverse class LicenseSerializer(JSONAPISerializer): filterable_fields = frozenset([ 'name', 'id', ]) non_anonymized_fields = ['type'] id = IDField(source='_id', read_only=True) type = TypeField() name = ser.CharField(required=True, help_text='License name') text = ser.CharField(required=True, help_text='Full text of the license') + url = ser.URLField(required=False, help_text='URL for the license') required_fields = ser.ListField(source='properties', read_only=True, help_text='Fields required for this license (provided to help front-end validators)') links = LinksField({'self': 'get_absolute_url'}) class Meta: type_ = 'licenses' def get_absolute_url(self, obj): return absolute_reverse('licenses:license-detail', kwargs={ 'license_id': obj._id, 'version': self.context['request'].parser_context['kwargs']['version'] })
fbd99212c7af806137f996ac3c1d6c018f9402a7
puffin/core/compose.py
puffin/core/compose.py
from .applications import get_application_domain, get_application_name from .machine import get_env_vars from .. import app from subprocess import Popen, STDOUT, PIPE from os import environ from os.path import join def init(): pass def compose_start(machine, user, application, **environment): compose_run(machine, user, application, "up", "-d", **environment) def compose_stop(machine, user, application): compose_run(machine, user, application, "down") def compose_run(machine, user, application, *arguments, **environment): name = get_application_name(user, application) args = ["docker-compose", "-f", application.compose, "-p", name] args += arguments domain = get_application_domain(user, application) env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain) env.update(get_env_vars(machine)) env.update(**environment) process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env) process.wait() out, err = process.communicate() print(out) #app.logger.info("Compose:", out)
from .applications import get_application_domain, get_application_name from .machine import get_env_vars from .. import app from subprocess import Popen, STDOUT, PIPE from os import environ from os.path import join def init(): pass def compose_start(machine, user, application, **environment): compose_run(machine, user, application, "up", "-d", **environment) def compose_stop(machine, user, application): compose_run(machine, user, application, "down") def compose_run(machine, user, application, *arguments, **environment): name = get_application_name(user, application) args = ["docker-compose", "-f", application.compose, "-p", name] args += arguments domain = get_application_domain(user, application) env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain, LETSENCRYPT_HOST=domain) env.update(get_env_vars(machine)) env.update(**environment) process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env) process.wait() out, err = process.communicate() print(out) #app.logger.info("Compose:", out)
Add Let's Encrypt env var
Add Let's Encrypt env var
Python
agpl-3.0
puffinrocks/puffin,loomchild/puffin,loomchild/puffin,loomchild/puffin,puffinrocks/puffin,loomchild/jenca-puffin,loomchild/puffin,loomchild/puffin,loomchild/jenca-puffin
from .applications import get_application_domain, get_application_name from .machine import get_env_vars from .. import app from subprocess import Popen, STDOUT, PIPE from os import environ from os.path import join def init(): pass def compose_start(machine, user, application, **environment): compose_run(machine, user, application, "up", "-d", **environment) def compose_stop(machine, user, application): compose_run(machine, user, application, "down") def compose_run(machine, user, application, *arguments, **environment): name = get_application_name(user, application) args = ["docker-compose", "-f", application.compose, "-p", name] args += arguments domain = get_application_domain(user, application) - env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain) + env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain, + LETSENCRYPT_HOST=domain) env.update(get_env_vars(machine)) env.update(**environment) process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env) process.wait() out, err = process.communicate() print(out) #app.logger.info("Compose:", out)
Add Let's Encrypt env var
## Code Before: from .applications import get_application_domain, get_application_name from .machine import get_env_vars from .. import app from subprocess import Popen, STDOUT, PIPE from os import environ from os.path import join def init(): pass def compose_start(machine, user, application, **environment): compose_run(machine, user, application, "up", "-d", **environment) def compose_stop(machine, user, application): compose_run(machine, user, application, "down") def compose_run(machine, user, application, *arguments, **environment): name = get_application_name(user, application) args = ["docker-compose", "-f", application.compose, "-p", name] args += arguments domain = get_application_domain(user, application) env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain) env.update(get_env_vars(machine)) env.update(**environment) process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env) process.wait() out, err = process.communicate() print(out) #app.logger.info("Compose:", out) ## Instruction: Add Let's Encrypt env var ## Code After: from .applications import get_application_domain, get_application_name from .machine import get_env_vars from .. import app from subprocess import Popen, STDOUT, PIPE from os import environ from os.path import join def init(): pass def compose_start(machine, user, application, **environment): compose_run(machine, user, application, "up", "-d", **environment) def compose_stop(machine, user, application): compose_run(machine, user, application, "down") def compose_run(machine, user, application, *arguments, **environment): name = get_application_name(user, application) args = ["docker-compose", "-f", application.compose, "-p", name] args += arguments domain = get_application_domain(user, application) env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain, LETSENCRYPT_HOST=domain) env.update(get_env_vars(machine)) env.update(**environment) process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env) process.wait() out, err = process.communicate() print(out) #app.logger.info("Compose:", out)
from .applications import get_application_domain, get_application_name from .machine import get_env_vars from .. import app from subprocess import Popen, STDOUT, PIPE from os import environ from os.path import join def init(): pass def compose_start(machine, user, application, **environment): compose_run(machine, user, application, "up", "-d", **environment) def compose_stop(machine, user, application): compose_run(machine, user, application, "down") def compose_run(machine, user, application, *arguments, **environment): name = get_application_name(user, application) args = ["docker-compose", "-f", application.compose, "-p", name] args += arguments domain = get_application_domain(user, application) - env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain) ? ^ + env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain, ? ^ + LETSENCRYPT_HOST=domain) env.update(get_env_vars(machine)) env.update(**environment) process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env) process.wait() out, err = process.communicate() print(out) #app.logger.info("Compose:", out)
8ddc1e40dd505aeb1b28d05238fa198eb3260f94
fireplace/cards/tgt/hunter.py
fireplace/cards/tgt/hunter.py
from ..utils import * ## # Minions # Ram Wrangler class AT_010: play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast()) ## # Spells # Lock and Load class AT_061: play = Buff(FRIENDLY_HERO, "AT_061e") class AT_061e: events = OWN_SPELL_PLAY.on( Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER)) ) # Ball of Spiders class AT_062: play = Summon(CONTROLLER, "FP1_011") * 3
from ..utils import * ## # Minions # Ram Wrangler class AT_010: play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast()) # Stablemaster class AT_057: play = Buff(TARGET, "AT_057o") # Brave Archer class AT_059: inspire = Find(CONTROLLER_HAND) | Hit(ENEMY_HERO, 2) ## # Spells # Powershot class AT_056: play = Hit(TARGET | TARGET_ADJACENT, 2) # Lock and Load class AT_061: play = Buff(FRIENDLY_HERO, "AT_061e") class AT_061e: events = OWN_SPELL_PLAY.on( Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER)) ) # Ball of Spiders class AT_062: play = Summon(CONTROLLER, "FP1_011") * 3 ## # Secrets # Bear Trap class AT_060: events = Attack(CHARACTER, FRIENDLY_HERO).after(Summon(CONTROLLER, "CS2_125"))
Implement more TGT Hunter cards
Implement more TGT Hunter cards
Python
agpl-3.0
smallnamespace/fireplace,Ragowit/fireplace,Ragowit/fireplace,amw2104/fireplace,NightKev/fireplace,liujimj/fireplace,jleclanche/fireplace,Meerkov/fireplace,oftc-ftw/fireplace,liujimj/fireplace,amw2104/fireplace,Meerkov/fireplace,oftc-ftw/fireplace,beheh/fireplace,smallnamespace/fireplace
from ..utils import * ## # Minions # Ram Wrangler class AT_010: play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast()) + # Stablemaster + class AT_057: + play = Buff(TARGET, "AT_057o") + + + # Brave Archer + class AT_059: + inspire = Find(CONTROLLER_HAND) | Hit(ENEMY_HERO, 2) + + ## # Spells + + # Powershot + class AT_056: + play = Hit(TARGET | TARGET_ADJACENT, 2) + # Lock and Load class AT_061: play = Buff(FRIENDLY_HERO, "AT_061e") class AT_061e: events = OWN_SPELL_PLAY.on( Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER)) ) # Ball of Spiders class AT_062: play = Summon(CONTROLLER, "FP1_011") * 3 + + ## + # Secrets + + # Bear Trap + class AT_060: + events = Attack(CHARACTER, FRIENDLY_HERO).after(Summon(CONTROLLER, "CS2_125")) +
Implement more TGT Hunter cards
## Code Before: from ..utils import * ## # Minions # Ram Wrangler class AT_010: play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast()) ## # Spells # Lock and Load class AT_061: play = Buff(FRIENDLY_HERO, "AT_061e") class AT_061e: events = OWN_SPELL_PLAY.on( Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER)) ) # Ball of Spiders class AT_062: play = Summon(CONTROLLER, "FP1_011") * 3 ## Instruction: Implement more TGT Hunter cards ## Code After: from ..utils import * ## # Minions # Ram Wrangler class AT_010: play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast()) # Stablemaster class AT_057: play = Buff(TARGET, "AT_057o") # Brave Archer class AT_059: inspire = Find(CONTROLLER_HAND) | Hit(ENEMY_HERO, 2) ## # Spells # Powershot class AT_056: play = Hit(TARGET | TARGET_ADJACENT, 2) # Lock and Load class AT_061: play = Buff(FRIENDLY_HERO, "AT_061e") class AT_061e: events = OWN_SPELL_PLAY.on( Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER)) ) # Ball of Spiders class AT_062: play = Summon(CONTROLLER, "FP1_011") * 3 ## # Secrets # Bear Trap class AT_060: events = Attack(CHARACTER, FRIENDLY_HERO).after(Summon(CONTROLLER, "CS2_125"))
from ..utils import * ## # Minions # Ram Wrangler class AT_010: play = Find(FRIENDLY_MINIONS + BEAST) & Summon(CONTROLLER, RandomBeast()) + # Stablemaster + class AT_057: + play = Buff(TARGET, "AT_057o") + + + # Brave Archer + class AT_059: + inspire = Find(CONTROLLER_HAND) | Hit(ENEMY_HERO, 2) + + ## # Spells + + # Powershot + class AT_056: + play = Hit(TARGET | TARGET_ADJACENT, 2) + # Lock and Load class AT_061: play = Buff(FRIENDLY_HERO, "AT_061e") class AT_061e: events = OWN_SPELL_PLAY.on( Give(CONTROLLER, RandomCollectible(card_class=CardClass.HUNTER)) ) # Ball of Spiders class AT_062: play = Summon(CONTROLLER, "FP1_011") * 3 + + + ## + # Secrets + + # Bear Trap + class AT_060: + events = Attack(CHARACTER, FRIENDLY_HERO).after(Summon(CONTROLLER, "CS2_125"))
2bf5809b651bc85e922c853d2dab27d35fee85b8
test/bibliopixel/threads/sub_test.py
test/bibliopixel/threads/sub_test.py
import functools, time, unittest from bibliopixel.util.threads import sub WAIT_FOR_SUB = 0.1 def pause(delay=0.01): time.sleep(delay) def run(input, output, *arg, **kwds): pause() output.put('first') if arg != (1, 2, 3): raise ValueError('1 2 3') if kwds != dict(a=1): raise ValueError('a=1') pause() output.put('second') class SubTest(unittest.TestCase): def do_test(self, use_subprocess): s, input, output = sub.run( run, 1, 2, 3, a=1, use_subprocess=use_subprocess) self.assertTrue(s.is_alive()) self.assertEqual(output.get(), 'first') self.assertTrue(s.is_alive()) self.assertEqual(output.get(), 'second') pause(WAIT_FOR_SUB) self.assertFalse(s.is_alive()) def test_subprocess(self): self.do_test(True) def test_threading(self): self.do_test(False)
import functools, time, unittest from bibliopixel.util.threads import sub from .. import mark_tests WAIT_FOR_SUB = 0.1 def pause(delay=0.01): time.sleep(delay) def run(input, output, *arg, **kwds): pause() output.put('first') if arg != (1, 2, 3): raise ValueError('1 2 3') if kwds != dict(a=1): raise ValueError('a=1') pause() output.put('second') class SubTest(unittest.TestCase): def do_test(self, use_subprocess): s, input, output = sub.run( run, 1, 2, 3, a=1, use_subprocess=use_subprocess) self.assertTrue(s.is_alive()) self.assertEqual(output.get(), 'first') self.assertTrue(s.is_alive()) self.assertEqual(output.get(), 'second') pause(WAIT_FOR_SUB) self.assertFalse(s.is_alive()) @mark_tests.fails_on_windows def test_subprocess(self): self.do_test(True) def test_threading(self): self.do_test(False)
Disable one test in windows-only
Disable one test in windows-only
Python
mit
ManiacalLabs/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,rec/BiblioPixel,rec/BiblioPixel,ManiacalLabs/BiblioPixel,ManiacalLabs/BiblioPixel
import functools, time, unittest from bibliopixel.util.threads import sub + from .. import mark_tests WAIT_FOR_SUB = 0.1 def pause(delay=0.01): time.sleep(delay) def run(input, output, *arg, **kwds): pause() output.put('first') if arg != (1, 2, 3): raise ValueError('1 2 3') if kwds != dict(a=1): raise ValueError('a=1') pause() output.put('second') class SubTest(unittest.TestCase): def do_test(self, use_subprocess): s, input, output = sub.run( run, 1, 2, 3, a=1, use_subprocess=use_subprocess) self.assertTrue(s.is_alive()) self.assertEqual(output.get(), 'first') self.assertTrue(s.is_alive()) self.assertEqual(output.get(), 'second') pause(WAIT_FOR_SUB) self.assertFalse(s.is_alive()) + @mark_tests.fails_on_windows def test_subprocess(self): self.do_test(True) def test_threading(self): self.do_test(False)
Disable one test in windows-only
## Code Before: import functools, time, unittest from bibliopixel.util.threads import sub WAIT_FOR_SUB = 0.1 def pause(delay=0.01): time.sleep(delay) def run(input, output, *arg, **kwds): pause() output.put('first') if arg != (1, 2, 3): raise ValueError('1 2 3') if kwds != dict(a=1): raise ValueError('a=1') pause() output.put('second') class SubTest(unittest.TestCase): def do_test(self, use_subprocess): s, input, output = sub.run( run, 1, 2, 3, a=1, use_subprocess=use_subprocess) self.assertTrue(s.is_alive()) self.assertEqual(output.get(), 'first') self.assertTrue(s.is_alive()) self.assertEqual(output.get(), 'second') pause(WAIT_FOR_SUB) self.assertFalse(s.is_alive()) def test_subprocess(self): self.do_test(True) def test_threading(self): self.do_test(False) ## Instruction: Disable one test in windows-only ## Code After: import functools, time, unittest from bibliopixel.util.threads import sub from .. import mark_tests WAIT_FOR_SUB = 0.1 def pause(delay=0.01): time.sleep(delay) def run(input, output, *arg, **kwds): pause() output.put('first') if arg != (1, 2, 3): raise ValueError('1 2 3') if kwds != dict(a=1): raise ValueError('a=1') pause() output.put('second') class SubTest(unittest.TestCase): def do_test(self, use_subprocess): s, input, output = sub.run( run, 1, 2, 3, a=1, use_subprocess=use_subprocess) self.assertTrue(s.is_alive()) self.assertEqual(output.get(), 'first') self.assertTrue(s.is_alive()) self.assertEqual(output.get(), 'second') pause(WAIT_FOR_SUB) self.assertFalse(s.is_alive()) @mark_tests.fails_on_windows def test_subprocess(self): self.do_test(True) def test_threading(self): self.do_test(False)
import functools, time, unittest from bibliopixel.util.threads import sub + from .. import mark_tests WAIT_FOR_SUB = 0.1 def pause(delay=0.01): time.sleep(delay) def run(input, output, *arg, **kwds): pause() output.put('first') if arg != (1, 2, 3): raise ValueError('1 2 3') if kwds != dict(a=1): raise ValueError('a=1') pause() output.put('second') class SubTest(unittest.TestCase): def do_test(self, use_subprocess): s, input, output = sub.run( run, 1, 2, 3, a=1, use_subprocess=use_subprocess) self.assertTrue(s.is_alive()) self.assertEqual(output.get(), 'first') self.assertTrue(s.is_alive()) self.assertEqual(output.get(), 'second') pause(WAIT_FOR_SUB) self.assertFalse(s.is_alive()) + @mark_tests.fails_on_windows def test_subprocess(self): self.do_test(True) def test_threading(self): self.do_test(False)
d3c7ae5389f2fd90ae35d87f87e4f7dd01572f4a
numpy/f2py/__init__.py
numpy/f2py/__init__.py
__all__ = ['run_main','compile','f2py_testing'] import os import sys import commands from info import __doc__ import f2py2e run_main = f2py2e.run_main main = f2py2e.main import f2py_testing def compile(source, modulename = 'untitled', extra_args = '', verbose = 1, source_fn = None ): ''' Build extension module from processing source with f2py. Read the source of this function for more information. ''' from numpy.distutils.exec_command import exec_command import tempfile if source_fn is None: fname = os.path.join(tempfile.mktemp()+'.f') else: fname = source_fn f = open(fname,'w') f.write(source) f.close() args = ' -c -m %s %s %s'%(modulename,fname,extra_args) c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args) s,o = exec_command(c) if source_fn is None: try: os.remove(fname) except OSError: pass return s
__all__ = ['run_main','compile','f2py_testing'] import os import sys import commands import f2py2e import f2py_testing import diagnose from info import __doc__ run_main = f2py2e.run_main main = f2py2e.main def compile(source, modulename = 'untitled', extra_args = '', verbose = 1, source_fn = None ): ''' Build extension module from processing source with f2py. Read the source of this function for more information. ''' from numpy.distutils.exec_command import exec_command import tempfile if source_fn is None: fname = os.path.join(tempfile.mktemp()+'.f') else: fname = source_fn f = open(fname,'w') f.write(source) f.close() args = ' -c -m %s %s %s'%(modulename,fname,extra_args) c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args) s,o = exec_command(c) if source_fn is None: try: os.remove(fname) except OSError: pass return s
Add diagnose to f2py package. This makes the tests a bit easier to fix.
ENH: Add diagnose to f2py package. This makes the tests a bit easier to fix.
Python
bsd-3-clause
ChristopherHogan/numpy,ChristopherHogan/numpy,seberg/numpy,bmorris3/numpy,njase/numpy,BabeNovelty/numpy,mhvk/numpy,tdsmith/numpy,cowlicks/numpy,MaPePeR/numpy,rmcgibbo/numpy,utke1/numpy,simongibbons/numpy,GrimDerp/numpy,shoyer/numpy,numpy/numpy-refactor,has2k1/numpy,ESSS/numpy,githubmlai/numpy,rgommers/numpy,Srisai85/numpy,pizzathief/numpy,joferkington/numpy,rhythmsosad/numpy,embray/numpy,dch312/numpy,embray/numpy,jonathanunderwood/numpy,trankmichael/numpy,has2k1/numpy,SunghanKim/numpy,bmorris3/numpy,madphysicist/numpy,jschueller/numpy,musically-ut/numpy,felipebetancur/numpy,WillieMaddox/numpy,numpy/numpy,SunghanKim/numpy,sigma-random/numpy,astrofrog/numpy,WarrenWeckesser/numpy,NextThought/pypy-numpy,ajdawson/numpy,MaPePeR/numpy,rudimeier/numpy,pizzathief/numpy,grlee77/numpy,Dapid/numpy,stefanv/numpy,ajdawson/numpy,CMartelLML/numpy,GrimDerp/numpy,jonathanunderwood/numpy,ddasilva/numpy,shoyer/numpy,jorisvandenbossche/numpy,rherault-insa/numpy,MichaelAquilina/numpy,Eric89GXL/numpy,SunghanKim/numpy,mwiebe/numpy,rajathkumarmp/numpy,jankoslavic/numpy,larsmans/numpy,tdsmith/numpy,KaelChen/numpy,brandon-rhodes/numpy,SiccarPoint/numpy,rajathkumarmp/numpy,numpy/numpy,dato-code/numpy,seberg/numpy,pdebuyl/numpy,KaelChen/numpy,Anwesh43/numpy,leifdenby/numpy,joferkington/numpy,matthew-brett/numpy,BabeNovelty/numpy,gmcastil/numpy,brandon-rhodes/numpy,MichaelAquilina/numpy,anntzer/numpy,Eric89GXL/numpy,mortada/numpy,groutr/numpy,ahaldane/numpy,simongibbons/numpy,anntzer/numpy,astrofrog/numpy,stuarteberg/numpy,ChanderG/numpy,ContinuumIO/numpy,jakirkham/numpy,grlee77/numpy,pizzathief/numpy,kirillzhuravlev/numpy,NextThought/pypy-numpy,dimasad/numpy,astrofrog/numpy,GaZ3ll3/numpy,andsor/numpy,ViralLeadership/numpy,MichaelAquilina/numpy,WillieMaddox/numpy,mingwpy/numpy,dimasad/numpy,anntzer/numpy,rhythmsosad/numpy,simongibbons/numpy,numpy/numpy-refactor,sigma-random/numpy,tacaswell/numpy,bringingheavendown/numpy,ahaldane/numpy,b-carter/numpy,jakirkham/numpy,BabeNovelty/numpy,mathdd/numpy,mwiebe/numpy,drasmuss/numpy,WarrenWeckesser/numpy,ChristopherHogan/numpy,b-carter/numpy,andsor/numpy,endolith/numpy,bertrand-l/numpy,mwiebe/numpy,endolith/numpy,ogrisel/numpy,abalkin/numpy,rmcgibbo/numpy,mhvk/numpy,ChanderG/numpy,GrimDerp/numpy,seberg/numpy,jorisvandenbossche/numpy,mingwpy/numpy,ogrisel/numpy,ogrisel/numpy,mortada/numpy,ewmoore/numpy,mindw/numpy,madphysicist/numpy,pelson/numpy,mindw/numpy,dato-code/numpy,matthew-brett/numpy,mhvk/numpy,sinhrks/numpy,stuarteberg/numpy,pelson/numpy,njase/numpy,SiccarPoint/numpy,skymanaditya1/numpy,ViralLeadership/numpy,ssanderson/numpy,Yusa95/numpy,trankmichael/numpy,Anwesh43/numpy,WillieMaddox/numpy,jonathanunderwood/numpy,has2k1/numpy,GaZ3ll3/numpy,ajdawson/numpy,Yusa95/numpy,ChristopherHogan/numpy,dwillmer/numpy,Anwesh43/numpy,rudimeier/numpy,anntzer/numpy,ahaldane/numpy,shoyer/numpy,MSeifert04/numpy,charris/numpy,ChanderG/numpy,Dapid/numpy,grlee77/numpy,pbrod/numpy,pbrod/numpy,nguyentu1602/numpy,endolith/numpy,ekalosak/numpy,SunghanKim/numpy,numpy/numpy-refactor,MSeifert04/numpy,ewmoore/numpy,behzadnouri/numpy,gfyoung/numpy,ContinuumIO/numpy,mattip/numpy,NextThought/pypy-numpy,rgommers/numpy,solarjoe/numpy,astrofrog/numpy,gmcastil/numpy,naritta/numpy,chiffa/numpy,sigma-random/numpy,tdsmith/numpy,ewmoore/numpy,behzadnouri/numpy,ssanderson/numpy,nguyentu1602/numpy,felipebetancur/numpy,grlee77/numpy,rgommers/numpy,yiakwy/numpy,ekalosak/numpy,MaPePeR/numpy,b-carter/numpy,pizzathief/numpy,jankoslavic/numpy,dwf/numpy,nguyentu1602/numpy,SiccarPoint/numpy,dch312/numpy,trankmichael/nump
y,BabeNovelty/numpy,tacaswell/numpy,skwbc/numpy,kirillzhuravlev/numpy,behzadnouri/numpy,andsor/numpy,dwillmer/numpy,grlee77/numpy,moreati/numpy,dwf/numpy,cowlicks/numpy,chiffa/numpy,utke1/numpy,pyparallel/numpy,sonnyhu/numpy,chiffa/numpy,mhvk/numpy,andsor/numpy,AustereCuriosity/numpy,drasmuss/numpy,rudimeier/numpy,ESSS/numpy,cjermain/numpy,Linkid/numpy,tacaswell/numpy,mathdd/numpy,argriffing/numpy,ddasilva/numpy,AustereCuriosity/numpy,sigma-random/numpy,pyparallel/numpy,ogrisel/numpy,dwillmer/numpy,rhythmsosad/numpy,abalkin/numpy,Srisai85/numpy,pbrod/numpy,ddasilva/numpy,bringingheavendown/numpy,simongibbons/numpy,pelson/numpy,embray/numpy,stuarteberg/numpy,mindw/numpy,bertrand-l/numpy,GrimDerp/numpy,tynn/numpy,argriffing/numpy,embray/numpy,nbeaver/numpy,musically-ut/numpy,MSeifert04/numpy,shoyer/numpy,ESSS/numpy,kiwifb/numpy,skymanaditya1/numpy,tynn/numpy,rherault-insa/numpy,sonnyhu/numpy,cjermain/numpy,yiakwy/numpy,naritta/numpy,maniteja123/numpy,dato-code/numpy,pdebuyl/numpy,WarrenWeckesser/numpy,KaelChen/numpy,WarrenWeckesser/numpy,gmcastil/numpy,astrofrog/numpy,nbeaver/numpy,KaelChen/numpy,hainm/numpy,larsmans/numpy,numpy/numpy-refactor,SiccarPoint/numpy,bertrand-l/numpy,hainm/numpy,jakirkham/numpy,Yusa95/numpy,jorisvandenbossche/numpy,naritta/numpy,pelson/numpy,numpy/numpy-refactor,CMartelLML/numpy,matthew-brett/numpy,mortada/numpy,cjermain/numpy,seberg/numpy,hainm/numpy,ChanderG/numpy,drasmuss/numpy,jankoslavic/numpy,kiwifb/numpy,dwf/numpy,BMJHayward/numpy,ahaldane/numpy,rmcgibbo/numpy,tynn/numpy,chatcannon/numpy,mortada/numpy,dch312/numpy,joferkington/numpy,groutr/numpy,empeeu/numpy,ekalosak/numpy,groutr/numpy,BMJHayward/numpy,WarrenWeckesser/numpy,matthew-brett/numpy,sonnyhu/numpy,bmorris3/numpy,stefanv/numpy,embray/numpy,abalkin/numpy,brandon-rhodes/numpy,stefanv/numpy,madphysicist/numpy,dimasad/numpy,bmorris3/numpy,mattip/numpy,GaZ3ll3/numpy,charris/numpy,empeeu/numpy,mathdd/numpy,jschueller/numpy,MSeifert04/numpy,pdebuyl/numpy,jakirkham/numpy,mattip/numpy,yiakwy/numpy,felipebetancur/numpy,bringingheavendown/numpy,ContinuumIO/numpy,BMJHayward/numpy,skwbc/numpy,numpy/numpy,Linkid/numpy,maniteja123/numpy,empeeu/numpy,githubmlai/numpy,madphysicist/numpy,empeeu/numpy,MaPePeR/numpy,utke1/numpy,jorisvandenbossche/numpy,ekalosak/numpy,naritta/numpy,leifdenby/numpy,pbrod/numpy,Eric89GXL/numpy,kirillzhuravlev/numpy,jorisvandenbossche/numpy,Yusa95/numpy,musically-ut/numpy,kirillzhuravlev/numpy,rhythmsosad/numpy,matthew-brett/numpy,mingwpy/numpy,immerrr/numpy,mhvk/numpy,pyparallel/numpy,cjermain/numpy,solarjoe/numpy,ssanderson/numpy,githubmlai/numpy,mingwpy/numpy,skymanaditya1/numpy,stuarteberg/numpy,skymanaditya1/numpy,has2k1/numpy,BMJHayward/numpy,MichaelAquilina/numpy,AustereCuriosity/numpy,gfyoung/numpy,sinhrks/numpy,rgommers/numpy,stefanv/numpy,charris/numpy,immerrr/numpy,sonnyhu/numpy,moreati/numpy,ewmoore/numpy,dwillmer/numpy,dch312/numpy,joferkington/numpy,sinhrks/numpy,dimasad/numpy,jschueller/numpy,pizzathief/numpy,nbeaver/numpy,leifdenby/numpy,endolith/numpy,CMartelLML/numpy,simongibbons/numpy,Dapid/numpy,dwf/numpy,felipebetancur/numpy,githubmlai/numpy,chatcannon/numpy,rajathkumarmp/numpy,mathdd/numpy,Linkid/numpy,GaZ3ll3/numpy,njase/numpy,solarjoe/numpy,skwbc/numpy,larsmans/numpy,pdebuyl/numpy,jakirkham/numpy,NextThought/pypy-numpy,cowlicks/numpy,rajathkumarmp/numpy,hainm/numpy,chatcannon/numpy,trankmichael/numpy,pbrod/numpy,ewmoore/numpy,numpy/numpy,jschueller/numpy,Srisai85/numpy,cowlicks/numpy,rmcgibbo/numpy,MSeifert04/numpy,dato-code/numpy,madphysicist/numpy,musically-ut/num
py,maniteja123/numpy,Srisai85/numpy,immerrr/numpy,Anwesh43/numpy,ajdawson/numpy,Eric89GXL/numpy,ahaldane/numpy,charris/numpy,mindw/numpy,nguyentu1602/numpy,kiwifb/numpy,ogrisel/numpy,sinhrks/numpy,CMartelLML/numpy,Linkid/numpy,brandon-rhodes/numpy,rudimeier/numpy,larsmans/numpy,immerrr/numpy,moreati/numpy,yiakwy/numpy,mattip/numpy,pelson/numpy,tdsmith/numpy,argriffing/numpy,shoyer/numpy,ViralLeadership/numpy,rherault-insa/numpy,jankoslavic/numpy,gfyoung/numpy,stefanv/numpy,dwf/numpy
__all__ = ['run_main','compile','f2py_testing'] import os import sys import commands + import f2py2e + import f2py_testing + import diagnose + from info import __doc__ - import f2py2e run_main = f2py2e.run_main main = f2py2e.main - import f2py_testing def compile(source, modulename = 'untitled', extra_args = '', verbose = 1, source_fn = None ): ''' Build extension module from processing source with f2py. Read the source of this function for more information. ''' from numpy.distutils.exec_command import exec_command import tempfile if source_fn is None: fname = os.path.join(tempfile.mktemp()+'.f') else: fname = source_fn f = open(fname,'w') f.write(source) f.close() args = ' -c -m %s %s %s'%(modulename,fname,extra_args) c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args) s,o = exec_command(c) if source_fn is None: try: os.remove(fname) except OSError: pass return s
Add diagnose to f2py package. This makes the tests a bit easier to fix.
## Code Before: __all__ = ['run_main','compile','f2py_testing'] import os import sys import commands from info import __doc__ import f2py2e run_main = f2py2e.run_main main = f2py2e.main import f2py_testing def compile(source, modulename = 'untitled', extra_args = '', verbose = 1, source_fn = None ): ''' Build extension module from processing source with f2py. Read the source of this function for more information. ''' from numpy.distutils.exec_command import exec_command import tempfile if source_fn is None: fname = os.path.join(tempfile.mktemp()+'.f') else: fname = source_fn f = open(fname,'w') f.write(source) f.close() args = ' -c -m %s %s %s'%(modulename,fname,extra_args) c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args) s,o = exec_command(c) if source_fn is None: try: os.remove(fname) except OSError: pass return s ## Instruction: Add diagnose to f2py package. This makes the tests a bit easier to fix. ## Code After: __all__ = ['run_main','compile','f2py_testing'] import os import sys import commands import f2py2e import f2py_testing import diagnose from info import __doc__ run_main = f2py2e.run_main main = f2py2e.main def compile(source, modulename = 'untitled', extra_args = '', verbose = 1, source_fn = None ): ''' Build extension module from processing source with f2py. Read the source of this function for more information. ''' from numpy.distutils.exec_command import exec_command import tempfile if source_fn is None: fname = os.path.join(tempfile.mktemp()+'.f') else: fname = source_fn f = open(fname,'w') f.write(source) f.close() args = ' -c -m %s %s %s'%(modulename,fname,extra_args) c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args) s,o = exec_command(c) if source_fn is None: try: os.remove(fname) except OSError: pass return s
__all__ = ['run_main','compile','f2py_testing'] import os import sys import commands + import f2py2e + import f2py_testing + import diagnose + from info import __doc__ - import f2py2e run_main = f2py2e.run_main main = f2py2e.main - import f2py_testing def compile(source, modulename = 'untitled', extra_args = '', verbose = 1, source_fn = None ): ''' Build extension module from processing source with f2py. Read the source of this function for more information. ''' from numpy.distutils.exec_command import exec_command import tempfile if source_fn is None: fname = os.path.join(tempfile.mktemp()+'.f') else: fname = source_fn f = open(fname,'w') f.write(source) f.close() args = ' -c -m %s %s %s'%(modulename,fname,extra_args) c = '%s -c "import numpy.f2py as f2py2e;f2py2e.main()" %s' %(sys.executable,args) s,o = exec_command(c) if source_fn is None: try: os.remove(fname) except OSError: pass return s
0158579b9a6c729e7af9a543caeef25018e07834
conda_build/ldd.py
conda_build/ldd.py
from __future__ import absolute_import, division, print_function import re import subprocess from conda_build import post LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)') LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found') def ldd(path): "thin wrapper around ldd" lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines() res = [] for line in lines: if '=>' not in line: continue assert line[0] == '\t', (path, line) m = LDD_RE.match(line) if m: res.append(m.groups()) continue m = LDD_NOT_FOUND_RE.match(line) if m: res.append((m.group(1), 'not found')) continue if 'ld-linux' in line: continue raise RuntimeError("Unexpected output from ldd: %s" % line) return res
from __future__ import absolute_import, division, print_function import re import subprocess import json from os.path import join from conda.install import rm_rf from conda_build import post from conda_build.config import config from conda_build.build import create_env LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)') LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found') def ldd(path): "thin wrapper around ldd" lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines() res = [] for line in lines: if '=>' not in line: continue assert line[0] == '\t', (path, line) m = LDD_RE.match(line) if m: res.append(m.groups()) continue m = LDD_NOT_FOUND_RE.match(line) if m: res.append((m.group(1), 'not found')) continue if 'ld-linux' in line: continue raise RuntimeError("Unexpected output from ldd: %s" % line) return res def get_package_linkages(pkg): rm_rf(config.test_prefix) specs = ['%s %s %s' % (pkg.rsplit('.tar.bz2', 1)[0].rsplit('-', 2))] create_env(config.test_prefix, specs) res = {} with open(join(config.test_prefix, 'conda-meta', '-'.join(specs[0]) + '.json')) as f: data = json.load(f) files = data['files'] for f in files: if post.is_obj(f): res[f] = ldd(f) return res
Add first pass at a get_package_linkages function
Add first pass at a get_package_linkages function
Python
bsd-3-clause
takluyver/conda-build,takluyver/conda-build,sandhujasmine/conda-build,frol/conda-build,frol/conda-build,ilastik/conda-build,dan-blanchard/conda-build,mwcraig/conda-build,rmcgibbo/conda-build,dan-blanchard/conda-build,sandhujasmine/conda-build,ilastik/conda-build,ilastik/conda-build,shastings517/conda-build,sandhujasmine/conda-build,rmcgibbo/conda-build,mwcraig/conda-build,dan-blanchard/conda-build,shastings517/conda-build,rmcgibbo/conda-build,takluyver/conda-build,shastings517/conda-build,mwcraig/conda-build,frol/conda-build
from __future__ import absolute_import, division, print_function import re import subprocess + import json + from os.path import join + + from conda.install import rm_rf from conda_build import post + from conda_build.config import config + from conda_build.build import create_env + LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)') LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found') def ldd(path): "thin wrapper around ldd" lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines() res = [] for line in lines: if '=>' not in line: continue assert line[0] == '\t', (path, line) m = LDD_RE.match(line) if m: res.append(m.groups()) continue m = LDD_NOT_FOUND_RE.match(line) if m: res.append((m.group(1), 'not found')) continue if 'ld-linux' in line: continue raise RuntimeError("Unexpected output from ldd: %s" % line) return res + def get_package_linkages(pkg): + rm_rf(config.test_prefix) + specs = ['%s %s %s' % (pkg.rsplit('.tar.bz2', 1)[0].rsplit('-', 2))] + + create_env(config.test_prefix, specs) + + res = {} + + with open(join(config.test_prefix, 'conda-meta', '-'.join(specs[0]) + + '.json')) as f: + data = json.load(f) + + files = data['files'] + for f in files: + if post.is_obj(f): + res[f] = ldd(f) + + return res +
Add first pass at a get_package_linkages function
## Code Before: from __future__ import absolute_import, division, print_function import re import subprocess from conda_build import post LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)') LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found') def ldd(path): "thin wrapper around ldd" lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines() res = [] for line in lines: if '=>' not in line: continue assert line[0] == '\t', (path, line) m = LDD_RE.match(line) if m: res.append(m.groups()) continue m = LDD_NOT_FOUND_RE.match(line) if m: res.append((m.group(1), 'not found')) continue if 'ld-linux' in line: continue raise RuntimeError("Unexpected output from ldd: %s" % line) return res ## Instruction: Add first pass at a get_package_linkages function ## Code After: from __future__ import absolute_import, division, print_function import re import subprocess import json from os.path import join from conda.install import rm_rf from conda_build import post from conda_build.config import config from conda_build.build import create_env LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)') LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found') def ldd(path): "thin wrapper around ldd" lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines() res = [] for line in lines: if '=>' not in line: continue assert line[0] == '\t', (path, line) m = LDD_RE.match(line) if m: res.append(m.groups()) continue m = LDD_NOT_FOUND_RE.match(line) if m: res.append((m.group(1), 'not found')) continue if 'ld-linux' in line: continue raise RuntimeError("Unexpected output from ldd: %s" % line) return res def get_package_linkages(pkg): rm_rf(config.test_prefix) specs = ['%s %s %s' % (pkg.rsplit('.tar.bz2', 1)[0].rsplit('-', 2))] create_env(config.test_prefix, specs) res = {} with open(join(config.test_prefix, 'conda-meta', '-'.join(specs[0]) + '.json')) as f: data = json.load(f) files = data['files'] for f in files: if post.is_obj(f): res[f] = ldd(f) return res
from __future__ import absolute_import, division, print_function import re import subprocess + import json + from os.path import join + + from conda.install import rm_rf from conda_build import post + from conda_build.config import config + from conda_build.build import create_env + LDD_RE = re.compile(r'\s*(.*?)\s*=>\s*(.*?)\s*\(.*\)') LDD_NOT_FOUND_RE = re.compile(r'\s*(.*?)\s*=>\s*not found') def ldd(path): "thin wrapper around ldd" lines = subprocess.check_output(['ldd', path]).decode('utf-8').splitlines() res = [] for line in lines: if '=>' not in line: continue assert line[0] == '\t', (path, line) m = LDD_RE.match(line) if m: res.append(m.groups()) continue m = LDD_NOT_FOUND_RE.match(line) if m: res.append((m.group(1), 'not found')) continue if 'ld-linux' in line: continue raise RuntimeError("Unexpected output from ldd: %s" % line) return res + + def get_package_linkages(pkg): + rm_rf(config.test_prefix) + specs = ['%s %s %s' % (pkg.rsplit('.tar.bz2', 1)[0].rsplit('-', 2))] + + create_env(config.test_prefix, specs) + + res = {} + + with open(join(config.test_prefix, 'conda-meta', '-'.join(specs[0]) + + '.json')) as f: + data = json.load(f) + + files = data['files'] + for f in files: + if post.is_obj(f): + res[f] = ldd(f) + + return res
6e6aaac438a18220db20ad480a8a82af49c44caa
pages/serializers.py
pages/serializers.py
from rest_framework import serializers from rest_framework.reverse import reverse from pages import fields, mixins, models from pages.utils import build_url class PageSerializer(serializers.HyperlinkedModelSerializer): name = serializers.CharField() regions = serializers.SerializerMethodField('rendered_regions') class Meta: fields = ('id', 'url', 'name', 'slug', 'regions') model = models.Page view_name = 'pages:page-detail' extra_kwargs = { 'url': {'lookup_field': 'slug'}, } def rendered_regions(self, obj): return obj.rendered_regions(self.context['request']) class JsonPageSerializer(PageSerializer): def rendered_regions(self, obj): """Render regions as a json-serializable dictionary.""" return obj.render_json(self.context.get('request')) class GroupSerializer(mixins.LinksMixin, serializers.HyperlinkedModelSerializer): url = fields.AbsoluteURLIdentityField() pages = serializers.SerializerMethodField('get_pages_link') links_fields = ['pages'] class Meta: model = models.Group def get_pages_link(self, obj): return build_url( reverse('pages:page-list', request=self.context.get('request')), {'group': obj.slug}, )
from rest_framework import serializers from rest_framework.reverse import reverse from pages import fields, mixins, models from pages.utils import build_url class PageSerializer(serializers.HyperlinkedModelSerializer): name = serializers.CharField() regions = serializers.SerializerMethodField('rendered_regions') class Meta: fields = ('id', 'url', 'name', 'slug', 'regions') model = models.Page view_name = 'pages:page-detail' extra_kwargs = { 'url': {'lookup_field': 'slug', 'view_name': 'pages:page-detail'}, } def rendered_regions(self, obj): return obj.rendered_regions(self.context['request']) class JsonPageSerializer(PageSerializer): def rendered_regions(self, obj): """Render regions as a json-serializable dictionary.""" return obj.render_json(self.context.get('request')) class GroupSerializer(mixins.LinksMixin, serializers.HyperlinkedModelSerializer): url = fields.AbsoluteURLIdentityField() pages = serializers.SerializerMethodField('get_pages_link') links_fields = ['pages'] class Meta: model = models.Group def get_pages_link(self, obj): return build_url( reverse('pages:page-list', request=self.context.get('request')), {'group': obj.slug}, )
Add 'view_name' to url extra kwargs
Add 'view_name' to url extra kwargs
Python
bsd-2-clause
incuna/feincms-pages-api
from rest_framework import serializers from rest_framework.reverse import reverse from pages import fields, mixins, models from pages.utils import build_url class PageSerializer(serializers.HyperlinkedModelSerializer): name = serializers.CharField() regions = serializers.SerializerMethodField('rendered_regions') class Meta: fields = ('id', 'url', 'name', 'slug', 'regions') model = models.Page view_name = 'pages:page-detail' extra_kwargs = { - 'url': {'lookup_field': 'slug'}, + 'url': {'lookup_field': 'slug', 'view_name': 'pages:page-detail'}, } def rendered_regions(self, obj): return obj.rendered_regions(self.context['request']) class JsonPageSerializer(PageSerializer): def rendered_regions(self, obj): """Render regions as a json-serializable dictionary.""" return obj.render_json(self.context.get('request')) class GroupSerializer(mixins.LinksMixin, serializers.HyperlinkedModelSerializer): url = fields.AbsoluteURLIdentityField() pages = serializers.SerializerMethodField('get_pages_link') links_fields = ['pages'] class Meta: model = models.Group def get_pages_link(self, obj): return build_url( reverse('pages:page-list', request=self.context.get('request')), {'group': obj.slug}, )
Add 'view_name' to url extra kwargs
## Code Before: from rest_framework import serializers from rest_framework.reverse import reverse from pages import fields, mixins, models from pages.utils import build_url class PageSerializer(serializers.HyperlinkedModelSerializer): name = serializers.CharField() regions = serializers.SerializerMethodField('rendered_regions') class Meta: fields = ('id', 'url', 'name', 'slug', 'regions') model = models.Page view_name = 'pages:page-detail' extra_kwargs = { 'url': {'lookup_field': 'slug'}, } def rendered_regions(self, obj): return obj.rendered_regions(self.context['request']) class JsonPageSerializer(PageSerializer): def rendered_regions(self, obj): """Render regions as a json-serializable dictionary.""" return obj.render_json(self.context.get('request')) class GroupSerializer(mixins.LinksMixin, serializers.HyperlinkedModelSerializer): url = fields.AbsoluteURLIdentityField() pages = serializers.SerializerMethodField('get_pages_link') links_fields = ['pages'] class Meta: model = models.Group def get_pages_link(self, obj): return build_url( reverse('pages:page-list', request=self.context.get('request')), {'group': obj.slug}, ) ## Instruction: Add 'view_name' to url extra kwargs ## Code After: from rest_framework import serializers from rest_framework.reverse import reverse from pages import fields, mixins, models from pages.utils import build_url class PageSerializer(serializers.HyperlinkedModelSerializer): name = serializers.CharField() regions = serializers.SerializerMethodField('rendered_regions') class Meta: fields = ('id', 'url', 'name', 'slug', 'regions') model = models.Page view_name = 'pages:page-detail' extra_kwargs = { 'url': {'lookup_field': 'slug', 'view_name': 'pages:page-detail'}, } def rendered_regions(self, obj): return obj.rendered_regions(self.context['request']) class JsonPageSerializer(PageSerializer): def rendered_regions(self, obj): """Render regions as a json-serializable dictionary.""" return obj.render_json(self.context.get('request')) class GroupSerializer(mixins.LinksMixin, serializers.HyperlinkedModelSerializer): url = fields.AbsoluteURLIdentityField() pages = serializers.SerializerMethodField('get_pages_link') links_fields = ['pages'] class Meta: model = models.Group def get_pages_link(self, obj): return build_url( reverse('pages:page-list', request=self.context.get('request')), {'group': obj.slug}, )
from rest_framework import serializers from rest_framework.reverse import reverse from pages import fields, mixins, models from pages.utils import build_url class PageSerializer(serializers.HyperlinkedModelSerializer): name = serializers.CharField() regions = serializers.SerializerMethodField('rendered_regions') class Meta: fields = ('id', 'url', 'name', 'slug', 'regions') model = models.Page view_name = 'pages:page-detail' extra_kwargs = { - 'url': {'lookup_field': 'slug'}, + 'url': {'lookup_field': 'slug', 'view_name': 'pages:page-detail'}, } def rendered_regions(self, obj): return obj.rendered_regions(self.context['request']) class JsonPageSerializer(PageSerializer): def rendered_regions(self, obj): """Render regions as a json-serializable dictionary.""" return obj.render_json(self.context.get('request')) class GroupSerializer(mixins.LinksMixin, serializers.HyperlinkedModelSerializer): url = fields.AbsoluteURLIdentityField() pages = serializers.SerializerMethodField('get_pages_link') links_fields = ['pages'] class Meta: model = models.Group def get_pages_link(self, obj): return build_url( reverse('pages:page-list', request=self.context.get('request')), {'group': obj.slug}, )
8ffd26f4fddb0c367e61a46af6427eab6c244ea8
south/signals.py
south/signals.py
from django.dispatch import Signal # Sent at the start of the migration of an app pre_migrate = Signal(providing_args=["app"]) # Sent after each successful migration of an app post_migrate = Signal(providing_args=["app"]) # Sent after each run of a particular migration in a direction ran_migration = Signal(providing_args=["app","migration","method"])
from django.dispatch import Signal from django.conf import settings # Sent at the start of the migration of an app pre_migrate = Signal(providing_args=["app"]) # Sent after each successful migration of an app post_migrate = Signal(providing_args=["app"]) # Sent after each run of a particular migration in a direction ran_migration = Signal(providing_args=["app","migration","method"]) # Compatibility code for django.contrib.auth if 'django.contrib.auth' in settings.INSTALLED_APPS: def create_permissions_compat(app, **kwargs): from django.db.models import get_app from django.contrib.auth.management import create_permissions create_permissions(get_app(app), (), 0) post_migrate.connect(create_permissions_compat)
Add a compatibility hook to deal with creating django.contrib.auth permissions on migrated models.
Add a compatibility hook to deal with creating django.contrib.auth permissions on migrated models.
Python
apache-2.0
theatlantic/django-south,theatlantic/django-south
from django.dispatch import Signal + from django.conf import settings # Sent at the start of the migration of an app pre_migrate = Signal(providing_args=["app"]) # Sent after each successful migration of an app post_migrate = Signal(providing_args=["app"]) # Sent after each run of a particular migration in a direction ran_migration = Signal(providing_args=["app","migration","method"]) + # Compatibility code for django.contrib.auth + if 'django.contrib.auth' in settings.INSTALLED_APPS: + def create_permissions_compat(app, **kwargs): + from django.db.models import get_app + from django.contrib.auth.management import create_permissions + create_permissions(get_app(app), (), 0) + post_migrate.connect(create_permissions_compat) +
Add a compatibility hook to deal with creating django.contrib.auth permissions on migrated models.
## Code Before: from django.dispatch import Signal # Sent at the start of the migration of an app pre_migrate = Signal(providing_args=["app"]) # Sent after each successful migration of an app post_migrate = Signal(providing_args=["app"]) # Sent after each run of a particular migration in a direction ran_migration = Signal(providing_args=["app","migration","method"]) ## Instruction: Add a compatibility hook to deal with creating django.contrib.auth permissions on migrated models. ## Code After: from django.dispatch import Signal from django.conf import settings # Sent at the start of the migration of an app pre_migrate = Signal(providing_args=["app"]) # Sent after each successful migration of an app post_migrate = Signal(providing_args=["app"]) # Sent after each run of a particular migration in a direction ran_migration = Signal(providing_args=["app","migration","method"]) # Compatibility code for django.contrib.auth if 'django.contrib.auth' in settings.INSTALLED_APPS: def create_permissions_compat(app, **kwargs): from django.db.models import get_app from django.contrib.auth.management import create_permissions create_permissions(get_app(app), (), 0) post_migrate.connect(create_permissions_compat)
from django.dispatch import Signal + from django.conf import settings # Sent at the start of the migration of an app pre_migrate = Signal(providing_args=["app"]) # Sent after each successful migration of an app post_migrate = Signal(providing_args=["app"]) # Sent after each run of a particular migration in a direction ran_migration = Signal(providing_args=["app","migration","method"]) + + # Compatibility code for django.contrib.auth + if 'django.contrib.auth' in settings.INSTALLED_APPS: + def create_permissions_compat(app, **kwargs): + from django.db.models import get_app + from django.contrib.auth.management import create_permissions + create_permissions(get_app(app), (), 0) + post_migrate.connect(create_permissions_compat)
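The hook above connects a receiver to South's post_migrate signal so that django.contrib.auth permissions get created for apps whose tables were built by migrations. A minimal sketch of the same connect-and-send pattern using only django.dispatch (no configured project is needed; the signal and receiver names here are invented for illustration):

from django.dispatch import Signal

# a signal fired once per migrated app; the "app" keyword is chosen for this example
post_migrate_example = Signal()

def announce_migration(sender, app, **kwargs):
    # receivers run synchronously, in connection order, whenever the signal is sent
    print("migrated app:", app)

post_migrate_example.connect(announce_migration)

if __name__ == "__main__":
    post_migrate_example.send(sender=None, app="auth")

As in the change above, performing the connect at import time and guarding it with an INSTALLED_APPS check keeps the compatibility shim inert when django.contrib.auth is not installed.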
6a5413ce81a606476734d9b37b33f683ed0c85e3
cards/card.py
cards/card.py
from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): return f"{self._rank} of {self._suit}" @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """
from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): # return f"{self.rank} of {self.suit}" return "{0} of {1}".format(self.rank, self.suit) @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """
Switch to pre-python 3.6 string formatting for Codeship
Switch to pre-python 3.6 string formatting for Codeship
Python
mit
johnpapa2/twenty-one,johnpapa2/twenty-one
from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): - return f"{self._rank} of {self._suit}" + # return f"{self.rank} of {self.suit}" + return "{0} of {1}".format(self.rank, self.suit) @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """
Switch to pre-python 3.6 string formatting for Codeship
## Code Before: from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): return f"{self._rank} of {self._suit}" @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """ ## Instruction: Switch to pre-python 3.6 string formatting for Codeship ## Code After: from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): # return f"{self.rank} of {self.suit}" return "{0} of {1}".format(self.rank, self.suit) @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """
from abc import ABCMeta, abstractproperty class Card(metaclass=ABCMeta): def __init__(self, suit, rank): self._rank = rank self._suit = suit self._value = None def __str__(self): - return f"{self._rank} of {self._suit}" ? - - + # return f"{self.rank} of {self.suit}" ? ++ + return "{0} of {1}".format(self.rank, self.suit) @property def rank(self): return self._rank @property def suit(self): return self._suit @abstractproperty def value(self): """ Returns the value of the card used for scoring the game """
678ddb9813edbc8a0013e8cf9ae5ff17cf3c72f7
test/test_grid.py
test/test_grid.py
import pytest import torch from torch import Tensor import syft as sy def test_virtual_grid(workers): """This tests our ability to simplify tuple types. This test is pretty simple since tuples just serialize to themselves, with a tuple wrapper with the correct ID (1) for tuples so that the detailer knows how to interpret it.""" print(len(workers)) print(workers) bob = workers["bob"] alice = workers["alice"] james = workers["james"] grid = sy.grid.VirtualGrid(*[bob, alice, james]) x = torch.tensor([1, 2, 3, 4]).tag("#bob", "#male").send(bob) y = torch.tensor([1, 2, 3, 4]).tag("#alice", "#female").send(alice) z = torch.tensor([1, 2, 3, 4]).tag("#james", "#male").send(james) results, tags = grid.search("#bob") assert len(results) == 3 assert "bob" in results.keys() assert "alice" in results.keys() assert "james" in results.keys() results, tags = grid.search("#bob") assert len(results["bob"]) == 1 assert len(results["alice"]) == 0 assert len(results["james"]) == 0 results, tags = grid.search("#male") assert len(results["bob"]) == 1 assert len(results["alice"]) == 0 assert len(results["james"]) == 1
import pytest import torch from torch import Tensor import syft as sy def test_virtual_grid(workers): """This tests our ability to simplify tuple types. This test is pretty simple since tuples just serialize to themselves, with a tuple wrapper with the correct ID (1) for tuples so that the detailer knows how to interpret it.""" print(len(workers)) print(workers) bob = workers["bob"] alice = workers["alice"] james = workers["james"] grid = sy.grid.VirtualGrid(*[bob, alice, james]) x = torch.tensor([1, 2, 3, 4]).tag("#bob", "#male").send(bob) y = torch.tensor([1, 2, 3, 4]).tag("#alice", "#female").send(alice) z = torch.tensor([1, 2, 3, 4]).tag("#james", "#male").send(james) results, tags = grid.search() assert len(results) == 3 assert "bob" in results.keys() assert "alice" in results.keys() assert "james" in results.keys() results, tags = grid.search("#bob") assert len(results["bob"]) == 1 assert "alice" not in results assert "james" not in results results, tags = grid.search("#male") assert len(results["bob"]) == 1 assert "alice" not in results assert len(results["james"]) == 1
Fix test on grid search
Fix test on grid search
Python
apache-2.0
OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft,OpenMined/PySyft
import pytest import torch from torch import Tensor import syft as sy def test_virtual_grid(workers): """This tests our ability to simplify tuple types. This test is pretty simple since tuples just serialize to themselves, with a tuple wrapper with the correct ID (1) for tuples so that the detailer knows how to interpret it.""" print(len(workers)) print(workers) bob = workers["bob"] alice = workers["alice"] james = workers["james"] grid = sy.grid.VirtualGrid(*[bob, alice, james]) x = torch.tensor([1, 2, 3, 4]).tag("#bob", "#male").send(bob) y = torch.tensor([1, 2, 3, 4]).tag("#alice", "#female").send(alice) z = torch.tensor([1, 2, 3, 4]).tag("#james", "#male").send(james) - results, tags = grid.search("#bob") + results, tags = grid.search() assert len(results) == 3 assert "bob" in results.keys() assert "alice" in results.keys() assert "james" in results.keys() results, tags = grid.search("#bob") assert len(results["bob"]) == 1 - assert len(results["alice"]) == 0 - assert len(results["james"]) == 0 + assert "alice" not in results + assert "james" not in results results, tags = grid.search("#male") assert len(results["bob"]) == 1 - assert len(results["alice"]) == 0 + assert "alice" not in results assert len(results["james"]) == 1
Fix test on grid search
## Code Before: import pytest import torch from torch import Tensor import syft as sy def test_virtual_grid(workers): """This tests our ability to simplify tuple types. This test is pretty simple since tuples just serialize to themselves, with a tuple wrapper with the correct ID (1) for tuples so that the detailer knows how to interpret it.""" print(len(workers)) print(workers) bob = workers["bob"] alice = workers["alice"] james = workers["james"] grid = sy.grid.VirtualGrid(*[bob, alice, james]) x = torch.tensor([1, 2, 3, 4]).tag("#bob", "#male").send(bob) y = torch.tensor([1, 2, 3, 4]).tag("#alice", "#female").send(alice) z = torch.tensor([1, 2, 3, 4]).tag("#james", "#male").send(james) results, tags = grid.search("#bob") assert len(results) == 3 assert "bob" in results.keys() assert "alice" in results.keys() assert "james" in results.keys() results, tags = grid.search("#bob") assert len(results["bob"]) == 1 assert len(results["alice"]) == 0 assert len(results["james"]) == 0 results, tags = grid.search("#male") assert len(results["bob"]) == 1 assert len(results["alice"]) == 0 assert len(results["james"]) == 1 ## Instruction: Fix test on grid search ## Code After: import pytest import torch from torch import Tensor import syft as sy def test_virtual_grid(workers): """This tests our ability to simplify tuple types. This test is pretty simple since tuples just serialize to themselves, with a tuple wrapper with the correct ID (1) for tuples so that the detailer knows how to interpret it.""" print(len(workers)) print(workers) bob = workers["bob"] alice = workers["alice"] james = workers["james"] grid = sy.grid.VirtualGrid(*[bob, alice, james]) x = torch.tensor([1, 2, 3, 4]).tag("#bob", "#male").send(bob) y = torch.tensor([1, 2, 3, 4]).tag("#alice", "#female").send(alice) z = torch.tensor([1, 2, 3, 4]).tag("#james", "#male").send(james) results, tags = grid.search() assert len(results) == 3 assert "bob" in results.keys() assert "alice" in results.keys() assert "james" in results.keys() results, tags = grid.search("#bob") assert len(results["bob"]) == 1 assert "alice" not in results assert "james" not in results results, tags = grid.search("#male") assert len(results["bob"]) == 1 assert "alice" not in results assert len(results["james"]) == 1
import pytest import torch from torch import Tensor import syft as sy def test_virtual_grid(workers): """This tests our ability to simplify tuple types. This test is pretty simple since tuples just serialize to themselves, with a tuple wrapper with the correct ID (1) for tuples so that the detailer knows how to interpret it.""" print(len(workers)) print(workers) bob = workers["bob"] alice = workers["alice"] james = workers["james"] grid = sy.grid.VirtualGrid(*[bob, alice, james]) x = torch.tensor([1, 2, 3, 4]).tag("#bob", "#male").send(bob) y = torch.tensor([1, 2, 3, 4]).tag("#alice", "#female").send(alice) z = torch.tensor([1, 2, 3, 4]).tag("#james", "#male").send(james) - results, tags = grid.search("#bob") ? ------ + results, tags = grid.search() assert len(results) == 3 assert "bob" in results.keys() assert "alice" in results.keys() assert "james" in results.keys() results, tags = grid.search("#bob") assert len(results["bob"]) == 1 - assert len(results["alice"]) == 0 - assert len(results["james"]) == 0 + assert "alice" not in results + assert "james" not in results results, tags = grid.search("#male") assert len(results["bob"]) == 1 - assert len(results["alice"]) == 0 + assert "alice" not in results assert len(results["james"]) == 1
53cca5180ec5ad04694ce28d0fc0d945004c33b3
src/unifind.py
src/unifind.py
class UnionFind: def __init__(self, it=None): self.uf = {} if it is None else {i : i for i in it} self.count = len(self.uf) def __iter__(self): return iter(self.uf.keys()) def __getitem__(self, key): return self.uf[key] def __setitem__(self, key, val): if key is not val: raise RuntimeError("key and val must be the same object") self.uf[key] = key class QuickFind(UnionFind): def find(self, key): return self.uf[key] def union(self, key1, key2): u1 = self.find(key1) u2 = self.find(key2) if u1 == u2: return for i in range(len(self.uf)): if self.uf[i] == u1: self.uf[i] = u2 self.count -= 1 class QuickUnion(UnionFind): def find(self, key): while self.uf[key] != key: key = self.uf[key] return key def union(self, key1, key2): u1 = self.find(key1) u2 = self.find(key2) if u1 == u2: return self.uf[u1] = u2 self.count -= 1
class UnionFind: def __init__(self, it=None): self.uf = {} if it is None else {i : i for i in it} self.count = len(self.uf) def __iter__(self): return iter(self.uf.keys()) def __getitem__(self, key): return self.uf[key] def __setitem__(self, key, val): if key is not val: raise RuntimeError("key and val must be the same object") self.uf[key] = key class QuickFind(UnionFind): def find(self, key): return self.uf[key] def union(self, key1, key2): u1 = self.find(key1) u2 = self.find(key2) if u1 == u2: return for k in self.uf: if self.uf[k] == u1: self.uf[k] = u2 self.count -= 1 class QuickUnion(UnionFind): def find(self, key): while self.uf[key] != key: key = self.uf[key] return key def union(self, key1, key2): u1 = self.find(key1) u2 = self.find(key2) if u1 == u2: return self.uf[u1] = u2 self.count -= 1
Fix QuickFind: stop relying on keys being integers
Fix QuickFind: stop relying on keys being integers
Python
mit
all3fox/algos-py
class UnionFind: def __init__(self, it=None): self.uf = {} if it is None else {i : i for i in it} self.count = len(self.uf) def __iter__(self): return iter(self.uf.keys()) def __getitem__(self, key): return self.uf[key] def __setitem__(self, key, val): if key is not val: raise RuntimeError("key and val must be the same object") self.uf[key] = key class QuickFind(UnionFind): def find(self, key): return self.uf[key] def union(self, key1, key2): u1 = self.find(key1) u2 = self.find(key2) if u1 == u2: return - for i in range(len(self.uf)): + for k in self.uf: - if self.uf[i] == u1: + if self.uf[k] == u1: - self.uf[i] = u2 + self.uf[k] = u2 self.count -= 1 class QuickUnion(UnionFind): def find(self, key): while self.uf[key] != key: key = self.uf[key] return key def union(self, key1, key2): u1 = self.find(key1) u2 = self.find(key2) if u1 == u2: return self.uf[u1] = u2 self.count -= 1
Fix QuickFind: stop relying on keys being integers
## Code Before: class UnionFind: def __init__(self, it=None): self.uf = {} if it is None else {i : i for i in it} self.count = len(self.uf) def __iter__(self): return iter(self.uf.keys()) def __getitem__(self, key): return self.uf[key] def __setitem__(self, key, val): if key is not val: raise RuntimeError("key and val must be the same object") self.uf[key] = key class QuickFind(UnionFind): def find(self, key): return self.uf[key] def union(self, key1, key2): u1 = self.find(key1) u2 = self.find(key2) if u1 == u2: return for i in range(len(self.uf)): if self.uf[i] == u1: self.uf[i] = u2 self.count -= 1 class QuickUnion(UnionFind): def find(self, key): while self.uf[key] != key: key = self.uf[key] return key def union(self, key1, key2): u1 = self.find(key1) u2 = self.find(key2) if u1 == u2: return self.uf[u1] = u2 self.count -= 1 ## Instruction: Fix QuickFind: stop relying on keys being integers ## Code After: class UnionFind: def __init__(self, it=None): self.uf = {} if it is None else {i : i for i in it} self.count = len(self.uf) def __iter__(self): return iter(self.uf.keys()) def __getitem__(self, key): return self.uf[key] def __setitem__(self, key, val): if key is not val: raise RuntimeError("key and val must be the same object") self.uf[key] = key class QuickFind(UnionFind): def find(self, key): return self.uf[key] def union(self, key1, key2): u1 = self.find(key1) u2 = self.find(key2) if u1 == u2: return for k in self.uf: if self.uf[k] == u1: self.uf[k] = u2 self.count -= 1 class QuickUnion(UnionFind): def find(self, key): while self.uf[key] != key: key = self.uf[key] return key def union(self, key1, key2): u1 = self.find(key1) u2 = self.find(key2) if u1 == u2: return self.uf[u1] = u2 self.count -= 1
class UnionFind: def __init__(self, it=None): self.uf = {} if it is None else {i : i for i in it} self.count = len(self.uf) def __iter__(self): return iter(self.uf.keys()) def __getitem__(self, key): return self.uf[key] def __setitem__(self, key, val): if key is not val: raise RuntimeError("key and val must be the same object") self.uf[key] = key class QuickFind(UnionFind): def find(self, key): return self.uf[key] def union(self, key1, key2): u1 = self.find(key1) u2 = self.find(key2) if u1 == u2: return - for i in range(len(self.uf)): ? ^ ---------- -- + for k in self.uf: ? ^ - if self.uf[i] == u1: ? ^ + if self.uf[k] == u1: ? ^ - self.uf[i] = u2 ? ^ + self.uf[k] = u2 ? ^ self.count -= 1 class QuickUnion(UnionFind): def find(self, key): while self.uf[key] != key: key = self.uf[key] return key def union(self, key1, key2): u1 = self.find(key1) u2 = self.find(key2) if u1 == u2: return self.uf[u1] = u2 self.count -= 1
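The QuickFind fix above swaps index-based iteration (range(len(...))) for iteration over the dict's keys, so the structure no longer assumes integer keys. A self-contained sketch of the same idea with string keys, written as a small dict-backed quick-union; the path-halving line is an extra optimisation added here and is not part of the change above:

class DictUnionFind:
    def __init__(self, items):
        self.parent = {item: item for item in items}

    def find(self, key):
        # walk up to the root, halving the path as we go
        while self.parent[key] != key:
            self.parent[key] = self.parent[self.parent[key]]
            key = self.parent[key]
        return key

    def union(self, a, b):
        root_a, root_b = self.find(a), self.find(b)
        if root_a != root_b:
            self.parent[root_a] = root_b

uf = DictUnionFind(["alice", "bob", "carol"])
uf.union("alice", "bob")
assert uf.find("alice") == uf.find("bob")
assert uf.find("carol") != uf.find("alice")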
9c923b8a94ea534c5b18983e81add7301a1dfa66
waterbutler/core/streams/file.py
waterbutler/core/streams/file.py
import os from waterbutler.core.streams import BaseStream class FileStreamReader(BaseStream): def __init__(self, file_pointer): super().__init__() self.file_gen = None self.file_pointer = file_pointer self.read_size = None self.content_type = 'application/octet-stream' @property def size(self): cursor = self.file_pointer.tell() self.file_pointer.seek(0, os.SEEK_END) ret = self.file_pointer.tell() self.file_pointer.seek(cursor) return ret def close(self): self.file_pointer.close() self.feed_eof() def read_as_gen(self): self.file_pointer.seek(0) while True: chunk = self.file_pointer.read(self.read_size) if not chunk: self.feed_eof() chunk = b'' yield chunk async def _read(self, size): self.file_gen = self.file_gen or self.read_as_gen() self.read_size = size return next(self.file_gen)
import os # import asyncio from waterbutler.core.streams import BaseStream class FileStreamReader(BaseStream): def __init__(self, file_pointer): super().__init__() self.file_gen = None self.file_pointer = file_pointer self.read_size = None self.content_type = 'application/octet-stream' @property def size(self): cursor = self.file_pointer.tell() self.file_pointer.seek(0, os.SEEK_END) ret = self.file_pointer.tell() self.file_pointer.seek(cursor) return ret def close(self): self.file_pointer.close() self.feed_eof() class read_chunks: def __init__(self, read_size, fp): self.done = False self.read_size = read_size self.fp = fp async def __aiter__(self): return self async def __anext__(self): if self.done: raise StopAsyncIteration return await self.get_chunk() async def get_chunk(self): while True: chunk = self.fp.read(self.read_size) if not chunk: chunk = b'' self.done = True return chunk async def _read(self, read_size): async for chunk in self.read_chunks(read_size, self.file_pointer): if chunk == b'': self.feed_eof() return chunk
Update FileStreamReader to use async generator
Update FileStreamReader to use async generator
Python
apache-2.0
RCOSDP/waterbutler,TomBaxter/waterbutler,rdhyee/waterbutler,CenterForOpenScience/waterbutler,felliott/waterbutler,Johnetordoff/waterbutler
import os + # import asyncio from waterbutler.core.streams import BaseStream class FileStreamReader(BaseStream): def __init__(self, file_pointer): super().__init__() self.file_gen = None self.file_pointer = file_pointer self.read_size = None self.content_type = 'application/octet-stream' @property def size(self): cursor = self.file_pointer.tell() self.file_pointer.seek(0, os.SEEK_END) ret = self.file_pointer.tell() self.file_pointer.seek(cursor) return ret def close(self): self.file_pointer.close() self.feed_eof() - def read_as_gen(self): - self.file_pointer.seek(0) + class read_chunks: + def __init__(self, read_size, fp): + self.done = False + self.read_size = read_size + self.fp = fp + + async def __aiter__(self): + return self + + async def __anext__(self): + if self.done: + raise StopAsyncIteration + return await self.get_chunk() + + async def get_chunk(self): - while True: + while True: - chunk = self.file_pointer.read(self.read_size) + chunk = self.fp.read(self.read_size) - if not chunk: + if not chunk: + chunk = b'' + self.done = True + return chunk + + async def _read(self, read_size): + async for chunk in self.read_chunks(read_size, self.file_pointer): + if chunk == b'': self.feed_eof() - chunk = b'' - yield chunk + return chunk - async def _read(self, size): - self.file_gen = self.file_gen or self.read_as_gen() - self.read_size = size - return next(self.file_gen) -
Update FileStreamReader to use async generator
## Code Before: import os from waterbutler.core.streams import BaseStream class FileStreamReader(BaseStream): def __init__(self, file_pointer): super().__init__() self.file_gen = None self.file_pointer = file_pointer self.read_size = None self.content_type = 'application/octet-stream' @property def size(self): cursor = self.file_pointer.tell() self.file_pointer.seek(0, os.SEEK_END) ret = self.file_pointer.tell() self.file_pointer.seek(cursor) return ret def close(self): self.file_pointer.close() self.feed_eof() def read_as_gen(self): self.file_pointer.seek(0) while True: chunk = self.file_pointer.read(self.read_size) if not chunk: self.feed_eof() chunk = b'' yield chunk async def _read(self, size): self.file_gen = self.file_gen or self.read_as_gen() self.read_size = size return next(self.file_gen) ## Instruction: Update FileStreamReader to use async generator ## Code After: import os # import asyncio from waterbutler.core.streams import BaseStream class FileStreamReader(BaseStream): def __init__(self, file_pointer): super().__init__() self.file_gen = None self.file_pointer = file_pointer self.read_size = None self.content_type = 'application/octet-stream' @property def size(self): cursor = self.file_pointer.tell() self.file_pointer.seek(0, os.SEEK_END) ret = self.file_pointer.tell() self.file_pointer.seek(cursor) return ret def close(self): self.file_pointer.close() self.feed_eof() class read_chunks: def __init__(self, read_size, fp): self.done = False self.read_size = read_size self.fp = fp async def __aiter__(self): return self async def __anext__(self): if self.done: raise StopAsyncIteration return await self.get_chunk() async def get_chunk(self): while True: chunk = self.fp.read(self.read_size) if not chunk: chunk = b'' self.done = True return chunk async def _read(self, read_size): async for chunk in self.read_chunks(read_size, self.file_pointer): if chunk == b'': self.feed_eof() return chunk
import os + # import asyncio from waterbutler.core.streams import BaseStream class FileStreamReader(BaseStream): def __init__(self, file_pointer): super().__init__() self.file_gen = None self.file_pointer = file_pointer self.read_size = None self.content_type = 'application/octet-stream' @property def size(self): cursor = self.file_pointer.tell() self.file_pointer.seek(0, os.SEEK_END) ret = self.file_pointer.tell() self.file_pointer.seek(cursor) return ret def close(self): self.file_pointer.close() self.feed_eof() - def read_as_gen(self): - self.file_pointer.seek(0) + class read_chunks: + def __init__(self, read_size, fp): + self.done = False + self.read_size = read_size + self.fp = fp + + async def __aiter__(self): + return self + + async def __anext__(self): + if self.done: + raise StopAsyncIteration + return await self.get_chunk() + + async def get_chunk(self): - while True: + while True: ? ++++ - chunk = self.file_pointer.read(self.read_size) ? ---- ------ + chunk = self.fp.read(self.read_size) ? ++++ - if not chunk: + if not chunk: ? ++++ + chunk = b'' + self.done = True + return chunk + + async def _read(self, read_size): + async for chunk in self.read_chunks(read_size, self.file_pointer): + if chunk == b'': self.feed_eof() - chunk = b'' - yield chunk ? ^^ ^^ + return chunk ? ^ ^^^^ - - async def _read(self, size): - self.file_gen = self.file_gen or self.read_as_gen() - self.read_size = size - return next(self.file_gen)
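The rewrite above turns chunked file reading into an object implementing the async-iterator protocol, with __aiter__ returning the iterator and __anext__ producing one chunk per await. A minimal runnable sketch of that protocol over an in-memory buffer, assuming Python 3.7+ for asyncio.run; __aiter__ is written here as a plain method, which is what current interpreters expect:

import asyncio
import io

class ChunkReader:
    def __init__(self, fp, chunk_size):
        self.fp = fp
        self.chunk_size = chunk_size

    def __aiter__(self):
        return self

    async def __anext__(self):
        chunk = self.fp.read(self.chunk_size)
        if not chunk:
            # an empty read signals exhaustion of the async iterator
            raise StopAsyncIteration
        return chunk

async def main():
    reader = ChunkReader(io.BytesIO(b"hello async world"), chunk_size=5)
    async for chunk in reader:
        print(chunk)

asyncio.run(main())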
81069682d724c0a1e2cd292e286e4148cd9c3d9d
scraping/IEEE/main.py
scraping/IEEE/main.py
from scraping.tools import * from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__, version='IEEE Xplore API Request') parameters = [arguments['-au'], arguments['-ti'], arguments['-ab'], arguments['-py'], arguments['-hc']] standard = 'http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?' url = create_url_search(parameters=parameters, standard=standard) root = fetch_xml(url) parents = root.getchildren() [parents.remove(parents[0]) for _ in range(2)] for document in parents: article = xml_to_dict(document) post = iee_to_axelbib(article) send = post_to_axelbib(post)
from scraping.tools import * from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__, version='IEEE Xplore API Request') parameters = [arguments['-au'], arguments['-ti'], arguments['-ab'], arguments['-py'], arguments['-hc']] standard = 'http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?' url = create_url_search(parameters=parameters, standard=standard) root = fetch_xml(url) parents = root.getchildren() for _ in range(2): parents.remove(parents[0]) for document in parents: article = xml_to_dict(document) post = iee_to_axelbib(article) send = post_to_axelbib(post)
Fix loop to delete branches from xml.
Fix loop to delete branches from xml.
Python
mit
ArcasProject/Arcas
from scraping.tools import * from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__, version='IEEE Xplore API Request') parameters = [arguments['-au'], arguments['-ti'], arguments['-ab'], arguments['-py'], arguments['-hc']] standard = 'http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?' url = create_url_search(parameters=parameters, standard=standard) root = fetch_xml(url) parents = root.getchildren() - [parents.remove(parents[0]) for _ in range(2)] + for _ in range(2): parents.remove(parents[0]) for document in parents: article = xml_to_dict(document) post = iee_to_axelbib(article) send = post_to_axelbib(post)
Fix loop to delete branches from xml.
## Code Before: from scraping.tools import * from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__, version='IEEE Xplore API Request') parameters = [arguments['-au'], arguments['-ti'], arguments['-ab'], arguments['-py'], arguments['-hc']] standard = 'http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?' url = create_url_search(parameters=parameters, standard=standard) root = fetch_xml(url) parents = root.getchildren() [parents.remove(parents[0]) for _ in range(2)] for document in parents: article = xml_to_dict(document) post = iee_to_axelbib(article) send = post_to_axelbib(post) ## Instruction: Fix loop to delete branches from xml. ## Code After: from scraping.tools import * from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__, version='IEEE Xplore API Request') parameters = [arguments['-au'], arguments['-ti'], arguments['-ab'], arguments['-py'], arguments['-hc']] standard = 'http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?' url = create_url_search(parameters=parameters, standard=standard) root = fetch_xml(url) parents = root.getchildren() for _ in range(2): parents.remove(parents[0]) for document in parents: article = xml_to_dict(document) post = iee_to_axelbib(article) send = post_to_axelbib(post)
from scraping.tools import * from docopt import docopt if __name__ == '__main__': arguments = docopt(__doc__, version='IEEE Xplore API Request') parameters = [arguments['-au'], arguments['-ti'], arguments['-ab'], arguments['-py'], arguments['-hc']] standard = 'http://ieeexplore.ieee.org/gateway/ipsSearch.jsp?' url = create_url_search(parameters=parameters, standard=standard) root = fetch_xml(url) parents = root.getchildren() - [parents.remove(parents[0]) for _ in range(2)] + for _ in range(2): parents.remove(parents[0]) for document in parents: article = xml_to_dict(document) post = iee_to_axelbib(article) send = post_to_axelbib(post)
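The change above replaces a side-effecting list comprehension with a plain loop that drops the first two children of the parsed XML response before iterating the remaining document nodes. The same pruning step on a standard-library ElementTree looks roughly like this; the tag names are invented for the example, and list(root) stands in for getchildren(), which newer Python versions no longer provide:

import xml.etree.ElementTree as ET

xml_text = """
<root>
  <totalfound>2</totalfound>
  <totalsearched>100</totalsearched>
  <document><title>First paper</title></document>
  <document><title>Second paper</title></document>
</root>
"""

root = ET.fromstring(xml_text)
children = list(root)

# drop the two summary elements that precede the actual document records
for _ in range(2):
    children.remove(children[0])

for document in children:
    print(document.find("title").text)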
5da7189f195d0daf9595c61f05156c85031ba0c5
tests/testapp/tests/test_scheduling.py
tests/testapp/tests/test_scheduling.py
from django.test import TestCase from testapp.tests import factories class SchedulingTestCase(TestCase): def test_scheduling(self): for i in range(20): factories.AssignmentFactory.create() factories.WaitListFactory.create() admin = factories.UserFactory.create(is_staff=True, is_superuser=True) self.client.login(username=admin.username, password='test') self.client.get('/zivinetz/admin/scheduling/')
from django.test import TestCase from zivinetz.models import Assignment from testapp.tests import factories class SchedulingTestCase(TestCase): def test_scheduling(self): for i in range(20): factories.AssignmentFactory.create() factories.WaitListFactory.create() admin = factories.UserFactory.create(is_staff=True, is_superuser=True) self.client.login(username=admin.username, password='test') self.assertEqual( self.client.get('/zivinetz/admin/scheduling/').status_code, 200) Assignment.objects.all().delete() self.assertEqual( self.client.get('/zivinetz/admin/scheduling/').status_code, 200)
Test that scheduling does not crash
Test that scheduling does not crash
Python
mit
matthiask/zivinetz,matthiask/zivinetz,matthiask/zivinetz,matthiask/zivinetz
from django.test import TestCase + + from zivinetz.models import Assignment from testapp.tests import factories class SchedulingTestCase(TestCase): def test_scheduling(self): for i in range(20): factories.AssignmentFactory.create() factories.WaitListFactory.create() admin = factories.UserFactory.create(is_staff=True, is_superuser=True) self.client.login(username=admin.username, password='test') + self.assertEqual( - self.client.get('/zivinetz/admin/scheduling/') + self.client.get('/zivinetz/admin/scheduling/').status_code, + 200) + Assignment.objects.all().delete() + self.assertEqual( + self.client.get('/zivinetz/admin/scheduling/').status_code, + 200) +
Test that scheduling does not crash
## Code Before: from django.test import TestCase from testapp.tests import factories class SchedulingTestCase(TestCase): def test_scheduling(self): for i in range(20): factories.AssignmentFactory.create() factories.WaitListFactory.create() admin = factories.UserFactory.create(is_staff=True, is_superuser=True) self.client.login(username=admin.username, password='test') self.client.get('/zivinetz/admin/scheduling/') ## Instruction: Test that scheduling does not crash ## Code After: from django.test import TestCase from zivinetz.models import Assignment from testapp.tests import factories class SchedulingTestCase(TestCase): def test_scheduling(self): for i in range(20): factories.AssignmentFactory.create() factories.WaitListFactory.create() admin = factories.UserFactory.create(is_staff=True, is_superuser=True) self.client.login(username=admin.username, password='test') self.assertEqual( self.client.get('/zivinetz/admin/scheduling/').status_code, 200) Assignment.objects.all().delete() self.assertEqual( self.client.get('/zivinetz/admin/scheduling/').status_code, 200)
from django.test import TestCase + + from zivinetz.models import Assignment from testapp.tests import factories class SchedulingTestCase(TestCase): def test_scheduling(self): for i in range(20): factories.AssignmentFactory.create() factories.WaitListFactory.create() admin = factories.UserFactory.create(is_staff=True, is_superuser=True) self.client.login(username=admin.username, password='test') + self.assertEqual( - self.client.get('/zivinetz/admin/scheduling/') + self.client.get('/zivinetz/admin/scheduling/').status_code, ? ++++ +++++++++++++ + 200) + + Assignment.objects.all().delete() + self.assertEqual( + self.client.get('/zivinetz/admin/scheduling/').status_code, + 200)
19bda5c7a2ebe38e856283423f64e1151eff4e80
parktain/tests/test_bot.py
parktain/tests/test_bot.py
"""Tests for the bot.""" def test_no_logger_crash_if_no_user(): # Given from parktain.main import logger user, channel, message = None, '#CTESTING', 'Hello!' # When # Then: Test fails if exception gets raised. logger(user, channel, message)
"""Tests for the bot.""" # Just setup db without any fixture magic. from parktain.main import Base, engine Base.metadata.create_all(engine) def test_no_logger_crash_if_no_user(): # Given from parktain.main import logger user, channel, message = None, '#CTESTING', 'Hello!' # When # Then: Test fails if exception gets raised. logger(user, channel, message)
Create db before running tests.
Create db before running tests.
Python
bsd-3-clause
punchagan/parktain,punchagan/parktain,punchagan/parktain
"""Tests for the bot.""" + + # Just setup db without any fixture magic. + from parktain.main import Base, engine + Base.metadata.create_all(engine) + def test_no_logger_crash_if_no_user(): # Given from parktain.main import logger user, channel, message = None, '#CTESTING', 'Hello!' # When # Then: Test fails if exception gets raised. logger(user, channel, message)
Create db before running tests.
## Code Before: """Tests for the bot.""" def test_no_logger_crash_if_no_user(): # Given from parktain.main import logger user, channel, message = None, '#CTESTING', 'Hello!' # When # Then: Test fails if exception gets raised. logger(user, channel, message) ## Instruction: Create db before running tests. ## Code After: """Tests for the bot.""" # Just setup db without any fixture magic. from parktain.main import Base, engine Base.metadata.create_all(engine) def test_no_logger_crash_if_no_user(): # Given from parktain.main import logger user, channel, message = None, '#CTESTING', 'Hello!' # When # Then: Test fails if exception gets raised. logger(user, channel, message)
"""Tests for the bot.""" + + # Just setup db without any fixture magic. + from parktain.main import Base, engine + Base.metadata.create_all(engine) + def test_no_logger_crash_if_no_user(): # Given from parktain.main import logger user, channel, message = None, '#CTESTING', 'Hello!' # When # Then: Test fails if exception gets raised. logger(user, channel, message)
90655c89fcf56af06a69f8110a9f7154294ca11c
ritter/analytics/sentiment_analyzer.py
ritter/analytics/sentiment_analyzer.py
import re, math from collections import Counter import itertools from sentimental import sentimental class SentimentAnalyzer(): _sentimental = sentimental.Sentimental(max_ngrams=2) path = sentimental.Sentimental.get_datafolder() _sentimental.train([path + '/sv/ruhburg']) def calculate_friend_scores(marked_tree): reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)') scores = {} for item in marked_tree: if 'text' in item: m = reg.findall(item['text']) c = sorted(list(Counter(m))) pairs = list(itertools.combinations(c, 2)) senti = SentimentAnalyzer.sentiment(item['text']) for pair in pairs: s = scores.get(pair, [0, 0]) if senti == 1: s[0] = s[0] + 1 elif senti == -1: s[1] = s[1] + 1 scores[pair] = s return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()} def sentiment(text): label = max(SentimentAnalyzer._sentimental.sentiment(text)) if label == 'positive': return 1 elif label == 'negative': return -1 else: return 0
import re, math from collections import Counter import itertools from sentimental import sentimental class SentimentAnalyzer(): _sentimental = sentimental.Sentimental(max_ngrams=2, undersample=True) path = sentimental.Sentimental.get_datafolder() _sentimental.train([path + '/sv/ruhburg']) def calculate_friend_scores(marked_tree): reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)') scores = {} for item in marked_tree: if 'text' in item: m = reg.findall(item['text']) c = sorted(list(Counter(m))) pairs = list(itertools.combinations(c, 2)) senti = SentimentAnalyzer.sentiment(item['text']) for pair in pairs: s = scores.get(pair, [0, 0]) if senti == 1: s[0] = s[0] + 1 elif senti == -1: s[1] = s[1] + 1 scores[pair] = s return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()} def sentiment(text): label = max(SentimentAnalyzer._sentimental.sentiment(text)) if label == 'positive': return 1 elif label == 'negative': return -1 else: return 0
Update to Sentimental 2.2.x with undersampling
feat: Update to Sentimental 2.2.x with undersampling
Python
mit
ErikGartner/ghostdoc-ritter
import re, math from collections import Counter import itertools from sentimental import sentimental class SentimentAnalyzer(): - _sentimental = sentimental.Sentimental(max_ngrams=2) + _sentimental = sentimental.Sentimental(max_ngrams=2, undersample=True) path = sentimental.Sentimental.get_datafolder() _sentimental.train([path + '/sv/ruhburg']) def calculate_friend_scores(marked_tree): reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)') scores = {} for item in marked_tree: if 'text' in item: m = reg.findall(item['text']) c = sorted(list(Counter(m))) pairs = list(itertools.combinations(c, 2)) senti = SentimentAnalyzer.sentiment(item['text']) for pair in pairs: s = scores.get(pair, [0, 0]) if senti == 1: s[0] = s[0] + 1 elif senti == -1: s[1] = s[1] + 1 scores[pair] = s return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()} def sentiment(text): label = max(SentimentAnalyzer._sentimental.sentiment(text)) if label == 'positive': return 1 elif label == 'negative': return -1 else: return 0
Update to Sentimental 2.2.x with undersampling
## Code Before: import re, math from collections import Counter import itertools from sentimental import sentimental class SentimentAnalyzer(): _sentimental = sentimental.Sentimental(max_ngrams=2) path = sentimental.Sentimental.get_datafolder() _sentimental.train([path + '/sv/ruhburg']) def calculate_friend_scores(marked_tree): reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)') scores = {} for item in marked_tree: if 'text' in item: m = reg.findall(item['text']) c = sorted(list(Counter(m))) pairs = list(itertools.combinations(c, 2)) senti = SentimentAnalyzer.sentiment(item['text']) for pair in pairs: s = scores.get(pair, [0, 0]) if senti == 1: s[0] = s[0] + 1 elif senti == -1: s[1] = s[1] + 1 scores[pair] = s return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()} def sentiment(text): label = max(SentimentAnalyzer._sentimental.sentiment(text)) if label == 'positive': return 1 elif label == 'negative': return -1 else: return 0 ## Instruction: Update to Sentimental 2.2.x with undersampling ## Code After: import re, math from collections import Counter import itertools from sentimental import sentimental class SentimentAnalyzer(): _sentimental = sentimental.Sentimental(max_ngrams=2, undersample=True) path = sentimental.Sentimental.get_datafolder() _sentimental.train([path + '/sv/ruhburg']) def calculate_friend_scores(marked_tree): reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)') scores = {} for item in marked_tree: if 'text' in item: m = reg.findall(item['text']) c = sorted(list(Counter(m))) pairs = list(itertools.combinations(c, 2)) senti = SentimentAnalyzer.sentiment(item['text']) for pair in pairs: s = scores.get(pair, [0, 0]) if senti == 1: s[0] = s[0] + 1 elif senti == -1: s[1] = s[1] + 1 scores[pair] = s return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()} def sentiment(text): label = max(SentimentAnalyzer._sentimental.sentiment(text)) if label == 'positive': return 1 elif label == 'negative': return -1 else: return 0
import re, math from collections import Counter import itertools from sentimental import sentimental class SentimentAnalyzer(): - _sentimental = sentimental.Sentimental(max_ngrams=2) + _sentimental = sentimental.Sentimental(max_ngrams=2, undersample=True) ? ++++++++++++++++++ path = sentimental.Sentimental.get_datafolder() _sentimental.train([path + '/sv/ruhburg']) def calculate_friend_scores(marked_tree): reg = re.compile('\(([\w]+) \\\"GHOSTDOC-TOKEN\\\"\)') scores = {} for item in marked_tree: if 'text' in item: m = reg.findall(item['text']) c = sorted(list(Counter(m))) pairs = list(itertools.combinations(c, 2)) senti = SentimentAnalyzer.sentiment(item['text']) for pair in pairs: s = scores.get(pair, [0, 0]) if senti == 1: s[0] = s[0] + 1 elif senti == -1: s[1] = s[1] + 1 scores[pair] = s return {_id: (vals[0] - vals[1]) * math.exp(max(vals) / (vals[0] + vals[1] + 1)) for _id, vals in scores.items()} def sentiment(text): label = max(SentimentAnalyzer._sentimental.sentiment(text)) if label == 'positive': return 1 elif label == 'negative': return -1 else: return 0
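calculate_friend_scores above tallies positive and negative co-mentions per pair of names, building the pairs with a Counter of the matched tokens and itertools.combinations. The pairing-and-tallying core, stripped of the sentiment model and the regular expression, can be sketched with stdlib pieces alone; the names and sentence labels here are invented:

import itertools
from collections import Counter

sentences = [
    (["anna", "erik"], 1),    # the pair appears in a positive sentence
    (["anna", "erik"], -1),   # and in a negative one
    (["anna", "maja"], 1),
]

scores = {}
for names, sentiment in sentences:
    # one entry per unordered pair of distinct names in the sentence
    for pair in itertools.combinations(sorted(Counter(names)), 2):
        pos, neg = scores.get(pair, (0, 0))
        if sentiment == 1:
            pos += 1
        elif sentiment == -1:
            neg += 1
        scores[pair] = (pos, neg)

print(scores)   # {('anna', 'erik'): (1, 1), ('anna', 'maja'): (1, 0)}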
8207d86b7b2a6e1f81454eefea4784d89c8674a8
resolver_test/django_test.py
resolver_test/django_test.py
from urlparse import urljoin from mock import Mock from resolver_test import ResolverTestMixins import django from django.conf import settings from django.contrib.auth.models import AnonymousUser, User from django.http import HttpRequest class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins): maxDiff = None class ResolverViewTestCase(ResolverDjangoTestCase): def setUp(self): self.user = User(username='cherie') self.user.save() self.request = HttpRequest() self.request.session = Mock() self.request.user = self.user self.client.force_login(self.user) def assert_login_required(self, view_to_call): self.owner = self.request.user = AnonymousUser() self.request.get_full_path = lambda: "my_path" self.request.build_absolute_uri = lambda: "my_path" response = view_to_call() self.assertEquals(response.status_code, 302) self.assertEquals( response['Location'], urljoin(settings.LOGIN_URL, '?next=my_path') )
from urlparse import urljoin from mock import Mock from resolver_test import ResolverTestMixins import django from django.conf import settings from django.contrib.auth.models import AnonymousUser, User from django.http import HttpRequest class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins): maxDiff = None usernumber = 0 class ResolverViewTestCase(ResolverDjangoTestCase): def setUp(self): global usernumber self.user = User.objects.create(username='cherie{}'.format(usernumber)) usernumber += 1 self.request = HttpRequest() self.request.session = Mock() self.request.user = self.user self.client.force_login(self.user) def assert_login_required(self, view_to_call): self.owner = self.request.user = AnonymousUser() self.request.get_full_path = lambda: "my_path" self.request.build_absolute_uri = lambda: "my_path" response = view_to_call() self.assertEquals(response.status_code, 302) self.assertEquals( response['Location'], urljoin(settings.LOGIN_URL, '?next=my_path') )
Use different usernames for each test. by: Glenn, Giles
Use different usernames for each test. by: Glenn, Giles
Python
mit
pythonanywhere/resolver_test
from urlparse import urljoin from mock import Mock from resolver_test import ResolverTestMixins import django from django.conf import settings from django.contrib.auth.models import AnonymousUser, User from django.http import HttpRequest class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins): maxDiff = None + + usernumber = 0 + class ResolverViewTestCase(ResolverDjangoTestCase): def setUp(self): - self.user = User(username='cherie') - self.user.save() + global usernumber + self.user = User.objects.create(username='cherie{}'.format(usernumber)) + usernumber += 1 self.request = HttpRequest() self.request.session = Mock() self.request.user = self.user self.client.force_login(self.user) def assert_login_required(self, view_to_call): self.owner = self.request.user = AnonymousUser() self.request.get_full_path = lambda: "my_path" self.request.build_absolute_uri = lambda: "my_path" response = view_to_call() self.assertEquals(response.status_code, 302) self.assertEquals( response['Location'], urljoin(settings.LOGIN_URL, '?next=my_path') )
Use different usernames for each test. by: Glenn, Giles
## Code Before: from urlparse import urljoin from mock import Mock from resolver_test import ResolverTestMixins import django from django.conf import settings from django.contrib.auth.models import AnonymousUser, User from django.http import HttpRequest class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins): maxDiff = None class ResolverViewTestCase(ResolverDjangoTestCase): def setUp(self): self.user = User(username='cherie') self.user.save() self.request = HttpRequest() self.request.session = Mock() self.request.user = self.user self.client.force_login(self.user) def assert_login_required(self, view_to_call): self.owner = self.request.user = AnonymousUser() self.request.get_full_path = lambda: "my_path" self.request.build_absolute_uri = lambda: "my_path" response = view_to_call() self.assertEquals(response.status_code, 302) self.assertEquals( response['Location'], urljoin(settings.LOGIN_URL, '?next=my_path') ) ## Instruction: Use different usernames for each test. by: Glenn, Giles ## Code After: from urlparse import urljoin from mock import Mock from resolver_test import ResolverTestMixins import django from django.conf import settings from django.contrib.auth.models import AnonymousUser, User from django.http import HttpRequest class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins): maxDiff = None usernumber = 0 class ResolverViewTestCase(ResolverDjangoTestCase): def setUp(self): global usernumber self.user = User.objects.create(username='cherie{}'.format(usernumber)) usernumber += 1 self.request = HttpRequest() self.request.session = Mock() self.request.user = self.user self.client.force_login(self.user) def assert_login_required(self, view_to_call): self.owner = self.request.user = AnonymousUser() self.request.get_full_path = lambda: "my_path" self.request.build_absolute_uri = lambda: "my_path" response = view_to_call() self.assertEquals(response.status_code, 302) self.assertEquals( response['Location'], urljoin(settings.LOGIN_URL, '?next=my_path') )
from urlparse import urljoin from mock import Mock from resolver_test import ResolverTestMixins import django from django.conf import settings from django.contrib.auth.models import AnonymousUser, User from django.http import HttpRequest class ResolverDjangoTestCase(django.test.TestCase, ResolverTestMixins): maxDiff = None + + usernumber = 0 + class ResolverViewTestCase(ResolverDjangoTestCase): def setUp(self): - self.user = User(username='cherie') - self.user.save() + global usernumber + self.user = User.objects.create(username='cherie{}'.format(usernumber)) + usernumber += 1 self.request = HttpRequest() self.request.session = Mock() self.request.user = self.user self.client.force_login(self.user) def assert_login_required(self, view_to_call): self.owner = self.request.user = AnonymousUser() self.request.get_full_path = lambda: "my_path" self.request.build_absolute_uri = lambda: "my_path" response = view_to_call() self.assertEquals(response.status_code, 302) self.assertEquals( response['Location'], urljoin(settings.LOGIN_URL, '?next=my_path') )
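The fix above appends a module-level counter to each username so repeated setUp calls never collide on the unique username column. The counter-backed name generator on its own is a few lines of stdlib Python; the prefix is arbitrary here:

import itertools

_user_numbers = itertools.count()

def next_username(prefix="cherie"):
    # each call returns a name that has not been handed out before
    return "{}{}".format(prefix, next(_user_numbers))

assert next_username() == "cherie0"
assert next_username() == "cherie1"
assert next_username("guest") == "guest2"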
7b71425a4434ac2544340d651f52c0d87ff37132
web/impact/impact/v1/helpers/refund_code_helper.py
web/impact/impact/v1/helpers/refund_code_helper.py
from impact.models import RefundCode from impact.v1.helpers.model_helper import( INTEGER_ARRAY_FIELD, INTEGER_FIELD, ModelHelper, PK_FIELD, STRING_FIELD, ) PROGRAMS_FIELD = { "json-schema": { "type": "array", "items": {"type": "string"}, }, "POST": {"required": False}, "PATCH": {"required": False}, } REFUND_CODE_FIELDS = { "id": PK_FIELD, "issued_to": INTEGER_FIELD, "created_at": STRING_FIELD, "unique_code": STRING_FIELD, "discount": INTEGER_FIELD, "maximum_uses": INTEGER_FIELD, "programs": INTEGER_ARRAY_FIELD, } class RefundCodeHelper(ModelHelper): model = RefundCode @classmethod def fields(self): return REFUND_CODE_FIELDS @property def issued_to(self): return self.field_pk("issued_to") @property def programs(self): if hasattr(self.subject, "programs"): programs = self.subject.programs if programs: return [program.pk for program in programs.all()]
from impact.models import RefundCode from impact.v1.helpers.model_helper import( BOOLEAN_FIELD, INTEGER_ARRAY_FIELD, INTEGER_FIELD, ModelHelper, PK_FIELD, STRING_FIELD, ) PROGRAMS_FIELD = { "json-schema": { "type": "array", "items": {"type": "string"}, }, "POST": {"required": False}, "PATCH": {"required": False}, } REFUND_CODE_FIELDS = { "id": PK_FIELD, "issued_to": INTEGER_FIELD, "created_at": STRING_FIELD, "unique_code": STRING_FIELD, "discount": INTEGER_FIELD, "maximum_uses": INTEGER_FIELD, "programs": INTEGER_ARRAY_FIELD, "notes": STRING_FIELD, "internal": BOOLEAN_FIELD, } class RefundCodeHelper(ModelHelper): model = RefundCode @classmethod def fields(self): return REFUND_CODE_FIELDS @property def issued_to(self): return self.field_pk("issued_to") @property def programs(self): if hasattr(self.subject, "programs"): programs = self.subject.programs if programs: return [program.pk for program in programs.all()]
Add Notes and Internal Fields
[AC-5291] Add Notes and Internal Fields
Python
mit
masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api,masschallenge/impact-api
from impact.models import RefundCode from impact.v1.helpers.model_helper import( + BOOLEAN_FIELD, INTEGER_ARRAY_FIELD, INTEGER_FIELD, ModelHelper, PK_FIELD, STRING_FIELD, ) PROGRAMS_FIELD = { "json-schema": { "type": "array", "items": {"type": "string"}, }, "POST": {"required": False}, "PATCH": {"required": False}, } REFUND_CODE_FIELDS = { "id": PK_FIELD, "issued_to": INTEGER_FIELD, "created_at": STRING_FIELD, "unique_code": STRING_FIELD, "discount": INTEGER_FIELD, "maximum_uses": INTEGER_FIELD, "programs": INTEGER_ARRAY_FIELD, + "notes": STRING_FIELD, + "internal": BOOLEAN_FIELD, } class RefundCodeHelper(ModelHelper): model = RefundCode @classmethod def fields(self): return REFUND_CODE_FIELDS @property def issued_to(self): return self.field_pk("issued_to") @property def programs(self): if hasattr(self.subject, "programs"): programs = self.subject.programs if programs: return [program.pk for program in programs.all()]
Add Notes and Internal Fields
## Code Before: from impact.models import RefundCode from impact.v1.helpers.model_helper import( INTEGER_ARRAY_FIELD, INTEGER_FIELD, ModelHelper, PK_FIELD, STRING_FIELD, ) PROGRAMS_FIELD = { "json-schema": { "type": "array", "items": {"type": "string"}, }, "POST": {"required": False}, "PATCH": {"required": False}, } REFUND_CODE_FIELDS = { "id": PK_FIELD, "issued_to": INTEGER_FIELD, "created_at": STRING_FIELD, "unique_code": STRING_FIELD, "discount": INTEGER_FIELD, "maximum_uses": INTEGER_FIELD, "programs": INTEGER_ARRAY_FIELD, } class RefundCodeHelper(ModelHelper): model = RefundCode @classmethod def fields(self): return REFUND_CODE_FIELDS @property def issued_to(self): return self.field_pk("issued_to") @property def programs(self): if hasattr(self.subject, "programs"): programs = self.subject.programs if programs: return [program.pk for program in programs.all()] ## Instruction: Add Notes and Internal Fields ## Code After: from impact.models import RefundCode from impact.v1.helpers.model_helper import( BOOLEAN_FIELD, INTEGER_ARRAY_FIELD, INTEGER_FIELD, ModelHelper, PK_FIELD, STRING_FIELD, ) PROGRAMS_FIELD = { "json-schema": { "type": "array", "items": {"type": "string"}, }, "POST": {"required": False}, "PATCH": {"required": False}, } REFUND_CODE_FIELDS = { "id": PK_FIELD, "issued_to": INTEGER_FIELD, "created_at": STRING_FIELD, "unique_code": STRING_FIELD, "discount": INTEGER_FIELD, "maximum_uses": INTEGER_FIELD, "programs": INTEGER_ARRAY_FIELD, "notes": STRING_FIELD, "internal": BOOLEAN_FIELD, } class RefundCodeHelper(ModelHelper): model = RefundCode @classmethod def fields(self): return REFUND_CODE_FIELDS @property def issued_to(self): return self.field_pk("issued_to") @property def programs(self): if hasattr(self.subject, "programs"): programs = self.subject.programs if programs: return [program.pk for program in programs.all()]
from impact.models import RefundCode from impact.v1.helpers.model_helper import( + BOOLEAN_FIELD, INTEGER_ARRAY_FIELD, INTEGER_FIELD, ModelHelper, PK_FIELD, STRING_FIELD, ) PROGRAMS_FIELD = { "json-schema": { "type": "array", "items": {"type": "string"}, }, "POST": {"required": False}, "PATCH": {"required": False}, } REFUND_CODE_FIELDS = { "id": PK_FIELD, "issued_to": INTEGER_FIELD, "created_at": STRING_FIELD, "unique_code": STRING_FIELD, "discount": INTEGER_FIELD, "maximum_uses": INTEGER_FIELD, "programs": INTEGER_ARRAY_FIELD, + "notes": STRING_FIELD, + "internal": BOOLEAN_FIELD, } class RefundCodeHelper(ModelHelper): model = RefundCode @classmethod def fields(self): return REFUND_CODE_FIELDS @property def issued_to(self): return self.field_pk("issued_to") @property def programs(self): if hasattr(self.subject, "programs"): programs = self.subject.programs if programs: return [program.pk for program in programs.all()]
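For illustration only: with "notes" and "internal" added to REFUND_CODE_FIELDS above, a write request could now carry those two keys. The endpoint path, authentication and exact API semantics below are assumptions, not taken from the repository.

# Hypothetical PATCH payload touching the two new refund-code fields.
payload = {
    "notes": "Issued at the 2017 finale; do not reuse.",   # STRING_FIELD
    "internal": True,                                       # BOOLEAN_FIELD
}
# e.g. requests.patch(api_root + "/refund_codes/42/", json=payload, headers=auth_headers)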
e85883389dd14377d63fc8c0b4decf486b3b7c2c
conveyor/exceptions.py
conveyor/exceptions.py
class HashMismatch(ValueError): """ Raised when the incoming hash of a file does not match the expected. """
from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals class HashMismatch(ValueError): """ Raised when the incoming hash of a file does not match the expected. """
Bring the standard imports over
Bring the standard imports over
Python
bsd-2-clause
crateio/carrier
+ from __future__ import absolute_import + from __future__ import division + from __future__ import unicode_literals + + class HashMismatch(ValueError): """ Raised when the incoming hash of a file does not match the expected. """
Bring the standard imports over
## Code Before: class HashMismatch(ValueError): """ Raised when the incoming hash of a file does not match the expected. """ ## Instruction: Bring the standard imports over ## Code After: from __future__ import absolute_import from __future__ import division from __future__ import unicode_literals class HashMismatch(ValueError): """ Raised when the incoming hash of a file does not match the expected. """
+ from __future__ import absolute_import + from __future__ import division + from __future__ import unicode_literals + + class HashMismatch(ValueError): """ Raised when the incoming hash of a file does not match the expected. """
48ef416352870ae5c695ada006f1855d03d893df
dlexperiment.py
dlexperiment.py
class Experiment(object): def __init__(self, epochs=1): self.epochs = epochs def get_epochs(self): return self.epochs def train(self): raise NotImplementedError def test(self): raise NotImplementedError def set_loss(self): raise NotImplementedError def checkpoint(self): raise NotImplementedError def save(self): raise NotImplementedError def load(self): raise NotImplementedError def is_done(self): raise NotImplementedError class PyTorchExperiment(object): def save(self): pass
class Experiment(object): def __init__(self, model, optimizer, train_data, test_data, epochs=1): self.model = model self.optimizer = optimizer self.train_data = train_data self.test_data = test_data self.epochs = epochs self.loss = 0 self.current_epoch = 0 def get_epoch(self): return self.current_epoch def train(self): raise NotImplementedError def test(self): raise NotImplementedError def set_loss(self, loss): self.loss = loss def checkpoint(self): raise NotImplementedError def save(self): raise NotImplementedError def load(self): raise NotImplementedError def is_done(self): raise NotImplementedError class PyTorchExperiment(object): def save(self): pass
Add necessary params to Experiment.
Add necessary params to Experiment.
Python
apache-2.0
sagelywizard/dlex
class Experiment(object): - def __init__(self, epochs=1): + def __init__(self, model, optimizer, train_data, test_data, epochs=1): + self.model = model + self.optimizer = optimizer + self.train_data = train_data + self.test_data = test_data self.epochs = epochs + self.loss = 0 + self.current_epoch = 0 - def get_epochs(self): + def get_epoch(self): - return self.epochs + return self.current_epoch def train(self): raise NotImplementedError def test(self): raise NotImplementedError - def set_loss(self): + def set_loss(self, loss): - raise NotImplementedError + self.loss = loss def checkpoint(self): raise NotImplementedError def save(self): raise NotImplementedError def load(self): raise NotImplementedError def is_done(self): raise NotImplementedError class PyTorchExperiment(object): def save(self): pass
Add necessary params to Experiment.
## Code Before: class Experiment(object): def __init__(self, epochs=1): self.epochs = epochs def get_epochs(self): return self.epochs def train(self): raise NotImplementedError def test(self): raise NotImplementedError def set_loss(self): raise NotImplementedError def checkpoint(self): raise NotImplementedError def save(self): raise NotImplementedError def load(self): raise NotImplementedError def is_done(self): raise NotImplementedError class PyTorchExperiment(object): def save(self): pass ## Instruction: Add necessary params to Experiment. ## Code After: class Experiment(object): def __init__(self, model, optimizer, train_data, test_data, epochs=1): self.model = model self.optimizer = optimizer self.train_data = train_data self.test_data = test_data self.epochs = epochs self.loss = 0 self.current_epoch = 0 def get_epoch(self): return self.current_epoch def train(self): raise NotImplementedError def test(self): raise NotImplementedError def set_loss(self, loss): self.loss = loss def checkpoint(self): raise NotImplementedError def save(self): raise NotImplementedError def load(self): raise NotImplementedError def is_done(self): raise NotImplementedError class PyTorchExperiment(object): def save(self): pass
class Experiment(object): - def __init__(self, epochs=1): + def __init__(self, model, optimizer, train_data, test_data, epochs=1): + self.model = model + self.optimizer = optimizer + self.train_data = train_data + self.test_data = test_data self.epochs = epochs + self.loss = 0 + self.current_epoch = 0 - def get_epochs(self): ? - + def get_epoch(self): - return self.epochs ? - + return self.current_epoch ? ++++++++ def train(self): raise NotImplementedError def test(self): raise NotImplementedError - def set_loss(self): + def set_loss(self, loss): ? ++++++ - raise NotImplementedError + self.loss = loss def checkpoint(self): raise NotImplementedError def save(self): raise NotImplementedError def load(self): raise NotImplementedError def is_done(self): raise NotImplementedError class PyTorchExperiment(object): def save(self): pass
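A hedged usage sketch for the Experiment base class above. The subclass, its "training" rule and the driver loop are invented; only the constructor signature, set_loss, get_epoch and the abstract train/is_done hooks come from the class itself.

# Hypothetical subclass of the Experiment base class shown above.
class AveragingExperiment(Experiment):
    def train(self):
        # Placeholder "training": pretend the loss is the mean of the data.
        loss = sum(self.train_data) / len(self.train_data)
        self.set_loss(loss)
        self.current_epoch += 1

    def test(self):
        return sum(self.test_data) / len(self.test_data)

    def is_done(self):
        return self.current_epoch >= self.epochs

# Driver loop: run until the experiment reports it is done.
exp = AveragingExperiment(model=None, optimizer=None,
                          train_data=[1.0, 2.0, 3.0], test_data=[2.0, 4.0],
                          epochs=3)
while not exp.is_done():
    exp.train()
print(exp.get_epoch(), exp.loss)   # -> 3 2.0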
4e9de4dd4c408a056f72c833d89832a1981a7b0d
features/tags/forms.py
features/tags/forms.py
from django import forms from django.db.models.functions import Lower from . import models class TagGroup(forms.ModelForm): class Meta: model = models.Tagged fields = [] group = forms.ModelChoiceField(label='Gruppe', queryset=None) def __init__(self, **kwargs): tagger = kwargs.pop('tagger') super().__init__(**kwargs) self.fields['group'].queryset = tagger.groups.exclude( tags__tag=self.instance.tag).order_by(Lower('name')) def save(self, commit=True): self.instance.tagged = self.cleaned_data['group'] return super().save(commit)
from django import forms from django.db.models.functions import Lower from . import models class TagGroup(forms.ModelForm): class Meta: model = models.Tagged fields = [] group = forms.ModelChoiceField(label='Gruppe', queryset=None) def __init__(self, **kwargs): tagger = kwargs.pop('tagger') super().__init__(**kwargs) self.fields['group'].queryset = tagger.groups.exclude( tags__tag=self.instance.tag).order_by(Lower('name')) def save(self, commit=True): if commit and not self.instance.tag.pk: self.instance.tag.save() self.instance.tag = self.instance.tag self.instance.tagged = self.cleaned_data['group'] return super().save(commit)
Fix save for empty tags
Fix save for empty tags
Python
agpl-3.0
stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten
from django import forms from django.db.models.functions import Lower from . import models class TagGroup(forms.ModelForm): class Meta: model = models.Tagged fields = [] group = forms.ModelChoiceField(label='Gruppe', queryset=None) def __init__(self, **kwargs): tagger = kwargs.pop('tagger') super().__init__(**kwargs) self.fields['group'].queryset = tagger.groups.exclude( tags__tag=self.instance.tag).order_by(Lower('name')) def save(self, commit=True): + if commit and not self.instance.tag.pk: + self.instance.tag.save() + self.instance.tag = self.instance.tag self.instance.tagged = self.cleaned_data['group'] return super().save(commit)
Fix save for empty tags
## Code Before: from django import forms from django.db.models.functions import Lower from . import models class TagGroup(forms.ModelForm): class Meta: model = models.Tagged fields = [] group = forms.ModelChoiceField(label='Gruppe', queryset=None) def __init__(self, **kwargs): tagger = kwargs.pop('tagger') super().__init__(**kwargs) self.fields['group'].queryset = tagger.groups.exclude( tags__tag=self.instance.tag).order_by(Lower('name')) def save(self, commit=True): self.instance.tagged = self.cleaned_data['group'] return super().save(commit) ## Instruction: Fix save for empty tags ## Code After: from django import forms from django.db.models.functions import Lower from . import models class TagGroup(forms.ModelForm): class Meta: model = models.Tagged fields = [] group = forms.ModelChoiceField(label='Gruppe', queryset=None) def __init__(self, **kwargs): tagger = kwargs.pop('tagger') super().__init__(**kwargs) self.fields['group'].queryset = tagger.groups.exclude( tags__tag=self.instance.tag).order_by(Lower('name')) def save(self, commit=True): if commit and not self.instance.tag.pk: self.instance.tag.save() self.instance.tag = self.instance.tag self.instance.tagged = self.cleaned_data['group'] return super().save(commit)
from django import forms from django.db.models.functions import Lower from . import models class TagGroup(forms.ModelForm): class Meta: model = models.Tagged fields = [] group = forms.ModelChoiceField(label='Gruppe', queryset=None) def __init__(self, **kwargs): tagger = kwargs.pop('tagger') super().__init__(**kwargs) self.fields['group'].queryset = tagger.groups.exclude( tags__tag=self.instance.tag).order_by(Lower('name')) def save(self, commit=True): + if commit and not self.instance.tag.pk: + self.instance.tag.save() + self.instance.tag = self.instance.tag self.instance.tagged = self.cleaned_data['group'] return super().save(commit)
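A sketch of how a view might drive the TagGroup form above. The `request.user.gestalt` tagger object, the Tagged instance and the URL name are assumptions; only the `tagger` keyword and the save() behaviour come from the form.

# Hypothetical Django view using the TagGroup form above.
from django.shortcuts import redirect, render

def tag_group_view(request, tagged):
    # `tagged` is a Tagged instance whose .tag is already set;
    # request.user.gestalt is assumed to expose .groups for the queryset.
    form = TagGroup(data=request.POST or None,
                    instance=tagged,
                    tagger=request.user.gestalt)
    if request.method == 'POST' and form.is_valid():
        form.save()   # stores the chosen group on instance.tagged (and saves a new tag)
        return redirect('tag-detail', pk=tagged.tag.pk)
    return render(request, 'tags/tag_group.html', {'form': form})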
c78d9c63238b5535b1881f4eee54700f5a138b04
lupa/__init__.py
lupa/__init__.py
def _try_import_with_global_library_symbols(): try: import DLFCN dlopen_flags = DLFCN.RTLD_NOW | DLFCN.RTLD_GLOBAL except ImportError: import ctypes dlopen_flags = ctypes.RTLD_GLOBAL import sys old_flags = sys.getdlopenflags() try: sys.setdlopenflags(dlopen_flags) import lupa._lupa finally: sys.setdlopenflags(old_flags) try: _try_import_with_global_library_symbols() except: pass del _try_import_with_global_library_symbols # the following is all that should stay in the namespace: from lupa._lupa import * try: from lupa.version import __version__ except ImportError: pass
from __future__ import absolute_import # We need to enable global symbol visibility for lupa in order to # support binary module loading in Lua. If we can enable it here, we # do it temporarily. def _try_import_with_global_library_symbols(): try: from os import RTLD_NOW, RTLD_GLOBAL except ImportError: from DLFCN import RTLD_NOW, RTLD_GLOBAL # Py2.7 dlopen_flags = RTLD_NOW | RTLD_GLOBAL import sys old_flags = sys.getdlopenflags() try: sys.setdlopenflags(dlopen_flags) import lupa._lupa finally: sys.setdlopenflags(old_flags) try: _try_import_with_global_library_symbols() except: pass del _try_import_with_global_library_symbols # the following is all that should stay in the namespace: from lupa._lupa import * try: from lupa.version import __version__ except ImportError: pass
Use "os.RTLD_*" flags in Py3, as "DLFCN.*" is only the right thing to use in Py2.
Use "os.RTLD_*" flags in Py3, as "DLFCN.*" is only the right thing to use in Py2.
Python
mit
pombredanne/lupa,pombredanne/lupa
+ from __future__ import absolute_import + + + # We need to enable global symbol visibility for lupa in order to + # support binary module loading in Lua. If we can enable it here, we + # do it temporarily. def _try_import_with_global_library_symbols(): try: + from os import RTLD_NOW, RTLD_GLOBAL - import DLFCN - dlopen_flags = DLFCN.RTLD_NOW | DLFCN.RTLD_GLOBAL except ImportError: - import ctypes - dlopen_flags = ctypes.RTLD_GLOBAL + from DLFCN import RTLD_NOW, RTLD_GLOBAL # Py2.7 + dlopen_flags = RTLD_NOW | RTLD_GLOBAL import sys old_flags = sys.getdlopenflags() try: sys.setdlopenflags(dlopen_flags) import lupa._lupa finally: sys.setdlopenflags(old_flags) try: _try_import_with_global_library_symbols() except: pass del _try_import_with_global_library_symbols # the following is all that should stay in the namespace: from lupa._lupa import * try: from lupa.version import __version__ except ImportError: pass
Use "os.RTLD_*" flags in Py3, as "DLFCN.*" is only the right thing to use in Py2.
## Code Before: def _try_import_with_global_library_symbols(): try: import DLFCN dlopen_flags = DLFCN.RTLD_NOW | DLFCN.RTLD_GLOBAL except ImportError: import ctypes dlopen_flags = ctypes.RTLD_GLOBAL import sys old_flags = sys.getdlopenflags() try: sys.setdlopenflags(dlopen_flags) import lupa._lupa finally: sys.setdlopenflags(old_flags) try: _try_import_with_global_library_symbols() except: pass del _try_import_with_global_library_symbols # the following is all that should stay in the namespace: from lupa._lupa import * try: from lupa.version import __version__ except ImportError: pass ## Instruction: Use "os.RTLD_*" flags in Py3, as "DLFCN.*" is only the right thing to use in Py2. ## Code After: from __future__ import absolute_import # We need to enable global symbol visibility for lupa in order to # support binary module loading in Lua. If we can enable it here, we # do it temporarily. def _try_import_with_global_library_symbols(): try: from os import RTLD_NOW, RTLD_GLOBAL except ImportError: from DLFCN import RTLD_NOW, RTLD_GLOBAL # Py2.7 dlopen_flags = RTLD_NOW | RTLD_GLOBAL import sys old_flags = sys.getdlopenflags() try: sys.setdlopenflags(dlopen_flags) import lupa._lupa finally: sys.setdlopenflags(old_flags) try: _try_import_with_global_library_symbols() except: pass del _try_import_with_global_library_symbols # the following is all that should stay in the namespace: from lupa._lupa import * try: from lupa.version import __version__ except ImportError: pass
+ from __future__ import absolute_import + + + # We need to enable global symbol visibility for lupa in order to + # support binary module loading in Lua. If we can enable it here, we + # do it temporarily. def _try_import_with_global_library_symbols(): try: + from os import RTLD_NOW, RTLD_GLOBAL - import DLFCN - dlopen_flags = DLFCN.RTLD_NOW | DLFCN.RTLD_GLOBAL except ImportError: - import ctypes - dlopen_flags = ctypes.RTLD_GLOBAL + from DLFCN import RTLD_NOW, RTLD_GLOBAL # Py2.7 + dlopen_flags = RTLD_NOW | RTLD_GLOBAL import sys old_flags = sys.getdlopenflags() try: sys.setdlopenflags(dlopen_flags) import lupa._lupa finally: sys.setdlopenflags(old_flags) try: _try_import_with_global_library_symbols() except: pass del _try_import_with_global_library_symbols # the following is all that should stay in the namespace: from lupa._lupa import * try: from lupa.version import __version__ except ImportError: pass
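The temporary dlopen-flag dance used in the lupa change above can be written as a small reusable helper. This is a standalone sketch with POSIX semantics assumed (os.RTLD_* and sys.setdlopenflags only exist on Unix-like Pythons).

import contextlib
import importlib
import os
import sys

@contextlib.contextmanager
def global_dlopen_flags():
    """Temporarily load extension modules with RTLD_GLOBAL symbol visibility."""
    old_flags = sys.getdlopenflags()
    sys.setdlopenflags(os.RTLD_NOW | os.RTLD_GLOBAL)
    try:
        yield
    finally:
        sys.setdlopenflags(old_flags)

# Usage: import a C extension whose symbols other shared objects must see.
# with global_dlopen_flags():
#     importlib.import_module("lupa._lupa")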
d500e290f8c1422f74b1d8c8d2bbb8ec9e5529cb
misc/singleton.py
misc/singleton.py
class Singleton(object): """ This is a class that implements singleton for its subclasses. The technique is based on a variant of other techniques found in: http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python https://gist.github.com/werediver/4396488 The technique is simply to build a map of classes to their unique instances. The first time called for some particular class the class is mapped to the instance. On other class to the same class, the mapped instance is returned. """ _instances = {} @classmethod def instance(cls): if cls not in cls._instances: cls._instances[cls] = cls() return cls._instances[cls]
class Singleton(object): """ This is a class that implements singleton for its subclasses. The technique is based on a variant of other techniques found in: http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python https://gist.github.com/werediver/4396488 The technique is simply to build a map of classes to their unique instances. The first time called for some particular class the class is mapped to the instance. On other class to the same class, the mapped instance is returned. Classes that use this must: 1) Add Singleton as a superclass. 2) Have this signature for the constructor: __init__(self, *args, **kwargs) """ _instances = {} @classmethod def instance(cls, *args, **kwargs): if cls not in cls._instances: cls._instances[cls] = cls(*args, **kwargs) return cls._instances[cls]
Add comments to Singleton about usage.
Add comments to Singleton about usage.
Python
mit
dpazel/music_rep
- class Singleton(object): """ This is a class that implements singleton for its subclasses. The technique is based on a variant of other techniques found in: http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python https://gist.github.com/werediver/4396488 The technique is simply to build a map of classes to their unique instances. The first time called for some particular class the class is mapped to the instance. On other class to the same class, the mapped instance is returned. + Classes that use this must: + 1) Add Singleton as a superclass. + 2) Have this signature for the constructor: __init__(self, *args, **kwargs) """ _instances = {} @classmethod - def instance(cls): + def instance(cls, *args, **kwargs): if cls not in cls._instances: - cls._instances[cls] = cls() + cls._instances[cls] = cls(*args, **kwargs) return cls._instances[cls]
Add comments to Singleton about usage.
## Code Before: class Singleton(object): """ This is a class that implements singleton for its subclasses. The technique is based on a variant of other techniques found in: http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python https://gist.github.com/werediver/4396488 The technique is simply to build a map of classes to their unique instances. The first time called for some particular class the class is mapped to the instance. On other class to the same class, the mapped instance is returned. """ _instances = {} @classmethod def instance(cls): if cls not in cls._instances: cls._instances[cls] = cls() return cls._instances[cls] ## Instruction: Add comments to Singleton about usage. ## Code After: class Singleton(object): """ This is a class that implements singleton for its subclasses. The technique is based on a variant of other techniques found in: http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python https://gist.github.com/werediver/4396488 The technique is simply to build a map of classes to their unique instances. The first time called for some particular class the class is mapped to the instance. On other class to the same class, the mapped instance is returned. Classes that use this must: 1) Add Singleton as a superclass. 2) Have this signature for the constructor: __init__(self, *args, **kwargs) """ _instances = {} @classmethod def instance(cls, *args, **kwargs): if cls not in cls._instances: cls._instances[cls] = cls(*args, **kwargs) return cls._instances[cls]
- class Singleton(object): """ This is a class that implements singleton for its subclasses. The technique is based on a variant of other techniques found in: http://stackoverflow.com/questions/6760685/creating-a-singleton-in-python https://gist.github.com/werediver/4396488 The technique is simply to build a map of classes to their unique instances. The first time called for some particular class the class is mapped to the instance. On other class to the same class, the mapped instance is returned. + Classes that use this must: + 1) Add Singleton as a superclass. + 2) Have this signature for the constructor: __init__(self, *args, **kwargs) """ _instances = {} @classmethod - def instance(cls): + def instance(cls, *args, **kwargs): if cls not in cls._instances: - cls._instances[cls] = cls() + cls._instances[cls] = cls(*args, **kwargs) ? +++++++++++++++ return cls._instances[cls]
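A small usage sketch of the Singleton helper above, following the two rules its docstring now states; the Settings class and its constructor arguments are made up for illustration.

# Hypothetical subclass: inherit from Singleton and accept *args/**kwargs.
class Settings(Singleton):
    def __init__(self, *args, **kwargs):
        self.path = kwargs.get('path', '/tmp/defaults.cfg')

first = Settings.instance(path='/etc/app.cfg')   # constructed on the first call
second = Settings.instance()                      # later calls ignore arguments
assert first is second
assert second.path == '/etc/app.cfg'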
9b968e8cf4c5fd8e4bd120255b8eb3c7bc4e6943
mygpoauth/authorization/admin.py
mygpoauth/authorization/admin.py
from django.contrib import admin from .models import Authorization @admin.register(Authorization) class ApplicationAdmin(admin.ModelAdmin): pass
from django.contrib import admin from .models import Authorization @admin.register(Authorization) class ApplicationAdmin(admin.ModelAdmin): def scope_list(self, app): return ', '.join(app.scopes) list_display = ['user', 'application', 'scope_list'] list_select_related = ['user', 'application'] readonly_fields = ['user'] fields = ['user', 'application', 'scopes', 'code']
Improve Admin for Authorization objects
Improve Admin for Authorization objects
Python
agpl-3.0
gpodder/mygpo-auth,gpodder/mygpo-auth
from django.contrib import admin from .models import Authorization @admin.register(Authorization) class ApplicationAdmin(admin.ModelAdmin): - pass + def scope_list(self, app): + return ', '.join(app.scopes) + + list_display = ['user', 'application', 'scope_list'] + + list_select_related = ['user', 'application'] + + readonly_fields = ['user'] + + fields = ['user', 'application', 'scopes', 'code'] +
Improve Admin for Authorization objects
## Code Before: from django.contrib import admin from .models import Authorization @admin.register(Authorization) class ApplicationAdmin(admin.ModelAdmin): pass ## Instruction: Improve Admin for Authorization objects ## Code After: from django.contrib import admin from .models import Authorization @admin.register(Authorization) class ApplicationAdmin(admin.ModelAdmin): def scope_list(self, app): return ', '.join(app.scopes) list_display = ['user', 'application', 'scope_list'] list_select_related = ['user', 'application'] readonly_fields = ['user'] fields = ['user', 'application', 'scopes', 'code']
from django.contrib import admin from .models import Authorization @admin.register(Authorization) class ApplicationAdmin(admin.ModelAdmin): - pass + + def scope_list(self, app): + return ', '.join(app.scopes) + + list_display = ['user', 'application', 'scope_list'] + + list_select_related = ['user', 'application'] + + readonly_fields = ['user'] + + fields = ['user', 'application', 'scopes', 'code']
7e15c50628f5d0a03b5407923a1dc2db99932ba3
partner_company_group/models/res_partner.py
partner_company_group/models/res_partner.py
from odoo import fields, models class Contact(models.Model): _inherit = "res.partner" company_group_id = fields.Many2one( "res.partner", "Company group", domain=[("is_company", "=", True)] ) def _commercial_fields(self): return super()._commercial_fields() + ["company_group_id"]
from odoo import fields, models class Contact(models.Model): _inherit = "res.partner" company_group_id = fields.Many2one( "res.partner", "Company group", domain=[("is_company", "=", True)] ) company_group_member_ids = fields.One2many( comodel_name="res.partner", inverse_name="company_group_id", string="Company group members", ) def _commercial_fields(self): return super()._commercial_fields() + ["company_group_id"]
Add one2many counterpart for company_group_id TT34815
[IMP] partner_company_group: Add one2many counterpart for company_group_id TT34815
Python
agpl-3.0
OCA/partner-contact,OCA/partner-contact
from odoo import fields, models class Contact(models.Model): _inherit = "res.partner" company_group_id = fields.Many2one( "res.partner", "Company group", domain=[("is_company", "=", True)] ) + company_group_member_ids = fields.One2many( + comodel_name="res.partner", + inverse_name="company_group_id", + string="Company group members", + ) def _commercial_fields(self): return super()._commercial_fields() + ["company_group_id"]
Add one2many counterpart for company_group_id TT34815
## Code Before: from odoo import fields, models class Contact(models.Model): _inherit = "res.partner" company_group_id = fields.Many2one( "res.partner", "Company group", domain=[("is_company", "=", True)] ) def _commercial_fields(self): return super()._commercial_fields() + ["company_group_id"] ## Instruction: Add one2many counterpart for company_group_id TT34815 ## Code After: from odoo import fields, models class Contact(models.Model): _inherit = "res.partner" company_group_id = fields.Many2one( "res.partner", "Company group", domain=[("is_company", "=", True)] ) company_group_member_ids = fields.One2many( comodel_name="res.partner", inverse_name="company_group_id", string="Company group members", ) def _commercial_fields(self): return super()._commercial_fields() + ["company_group_id"]
from odoo import fields, models class Contact(models.Model): _inherit = "res.partner" company_group_id = fields.Many2one( "res.partner", "Company group", domain=[("is_company", "=", True)] ) + company_group_member_ids = fields.One2many( + comodel_name="res.partner", + inverse_name="company_group_id", + string="Company group members", + ) def _commercial_fields(self): return super()._commercial_fields() + ["company_group_id"]
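A hedged Odoo-shell sketch of what the new one2many provides; partner names and the `env` are placeholders.

# Hypothetical usage of the new company_group_member_ids field.
group = env['res.partner'].create({'name': 'ACME Group', 'is_company': True})
member = env['res.partner'].create({'name': 'ACME France',
                                    'is_company': True,
                                    'company_group_id': group.id})
# The one2many is the inverse of company_group_id, so the member shows up here:
assert member in group.company_group_member_ids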
366d7abd63d3f70ad206336a0278a0968b04b678
panoptes_aggregation/extractors/poly_line_text_extractor.py
panoptes_aggregation/extractors/poly_line_text_extractor.py
from collections import OrderedDict def classification_to_extract(classification): extract = OrderedDict([ ('points', OrderedDict([('x', []), ('y', [])])), ('text', []), ('frame', []) ]) annotation = classification['annotations'][0] for value in annotation['value']: text = value['details'][0]['value'] words = text.split(' ') for word, point in zip(words, value['points']): extract['frame'].append(value['frame']) extract['text'].append(word) extract['points']['x'].append(point['x']) extract['points']['y'].append(point['y']) return extract def poly_line_text_extractor_request(request): data = request.get_json() return classification_to_extract(data)
from collections import OrderedDict def classification_to_extract(classification): extract = OrderedDict([ ('points', OrderedDict([('x', []), ('y', [])])), ('text', []), ('frame', []) ]) annotation = classification['annotations'][0] for value in annotation['value']: text = value['details'][0]['value'] words = text.split(' ') # NOTE: if `words` and `points` are differnt lengths # the extract will only contain the *shorter* of the # two lists (assuming they match from the front) for word, point in zip(words, value['points']): extract['frame'].append(value['frame']) extract['text'].append(word) extract['points']['x'].append(point['x']) extract['points']['y'].append(point['y']) return extract def poly_line_text_extractor_request(request): data = request.get_json() return classification_to_extract(data)
Add clarification comment to extractor
Add clarification comment to extractor Add a comment about the behavior of the extractor when the length of the `words` list does not match the length of the `points` list. The extract will only contain the *shorter* of the two lists and assume they match from the front.
Python
apache-2.0
CKrawczyk/python-reducers-for-caesar
from collections import OrderedDict def classification_to_extract(classification): extract = OrderedDict([ ('points', OrderedDict([('x', []), ('y', [])])), ('text', []), ('frame', []) ]) annotation = classification['annotations'][0] for value in annotation['value']: text = value['details'][0]['value'] words = text.split(' ') + # NOTE: if `words` and `points` are differnt lengths + # the extract will only contain the *shorter* of the + # two lists (assuming they match from the front) for word, point in zip(words, value['points']): extract['frame'].append(value['frame']) extract['text'].append(word) extract['points']['x'].append(point['x']) extract['points']['y'].append(point['y']) return extract def poly_line_text_extractor_request(request): data = request.get_json() return classification_to_extract(data)
Add clarification comment to extractor
## Code Before: from collections import OrderedDict def classification_to_extract(classification): extract = OrderedDict([ ('points', OrderedDict([('x', []), ('y', [])])), ('text', []), ('frame', []) ]) annotation = classification['annotations'][0] for value in annotation['value']: text = value['details'][0]['value'] words = text.split(' ') for word, point in zip(words, value['points']): extract['frame'].append(value['frame']) extract['text'].append(word) extract['points']['x'].append(point['x']) extract['points']['y'].append(point['y']) return extract def poly_line_text_extractor_request(request): data = request.get_json() return classification_to_extract(data) ## Instruction: Add clarification comment to extractor ## Code After: from collections import OrderedDict def classification_to_extract(classification): extract = OrderedDict([ ('points', OrderedDict([('x', []), ('y', [])])), ('text', []), ('frame', []) ]) annotation = classification['annotations'][0] for value in annotation['value']: text = value['details'][0]['value'] words = text.split(' ') # NOTE: if `words` and `points` are differnt lengths # the extract will only contain the *shorter* of the # two lists (assuming they match from the front) for word, point in zip(words, value['points']): extract['frame'].append(value['frame']) extract['text'].append(word) extract['points']['x'].append(point['x']) extract['points']['y'].append(point['y']) return extract def poly_line_text_extractor_request(request): data = request.get_json() return classification_to_extract(data)
from collections import OrderedDict def classification_to_extract(classification): extract = OrderedDict([ ('points', OrderedDict([('x', []), ('y', [])])), ('text', []), ('frame', []) ]) annotation = classification['annotations'][0] for value in annotation['value']: text = value['details'][0]['value'] words = text.split(' ') + # NOTE: if `words` and `points` are differnt lengths + # the extract will only contain the *shorter* of the + # two lists (assuming they match from the front) for word, point in zip(words, value['points']): extract['frame'].append(value['frame']) extract['text'].append(word) extract['points']['x'].append(point['x']) extract['points']['y'].append(point['y']) return extract def poly_line_text_extractor_request(request): data = request.get_json() return classification_to_extract(data)
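A tiny worked example of the truncation behaviour that the new comment documents; the classification payload below is fabricated but follows the structure the extractor reads.

# Fabricated classification: three words in the text but only two marked points.
classification = {
    'annotations': [{
        'value': [{
            'frame': 0,
            'points': [{'x': 10, 'y': 5}, {'x': 40, 'y': 5}],
            'details': [{'value': 'John Smith Esq'}],
        }],
    }],
}

extract = classification_to_extract(classification)
# zip() stops at the shorter list, so the third word is dropped:
# extract == {'points': {'x': [10, 40], 'y': [5, 5]},
#             'text': ['John', 'Smith'],
#             'frame': [0, 0]}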
8e0d61aa69a15a9efc967ec263bc73c3018f9b3d
process_to_only_word.py
process_to_only_word.py
import re import sys ########################################################################### # This code is developing yet!! # Target file(Already morphological analysis file) to process to word only. ########################################################################### argvs = sys.argv argc = len(argvs) if (argc != 2): print('Usage: # python %s filename' % argvs[0]) quit() with open(argvs[1],mode='r', encoding='utf-8') as read_file: line = read_file.readlines() for l in line: split_text_list = str(l).split() if(len(split_text_list) == 0): continue print(split_text_list[0])
import re import sys ########################################################################### # This code is developing yet!! # Target file(Already morphological analysis file) to process to word only. # <How to use> # python process_to_only_word.py <Target file(Already morphological analysis file)> > result.txt ########################################################################### argvs = sys.argv argc = len(argvs) if (argc != 2): print('Usage: # python %s filename' % argvs[0]) quit() with open(argvs[1],mode='r', encoding='utf-8') as read_file: line = read_file.readlines() for l in line: split_text_list = str(l).split() if(len(split_text_list) == 0): continue print(split_text_list[0])
Add a "How to use(comment)"
Add a "How to use(comment)"
Python
mit
shinshin86/little-magnifying-py-glass,shinshin86/little-magnifying-py-glass
import re import sys ########################################################################### # This code is developing yet!! # Target file(Already morphological analysis file) to process to word only. + # <How to use> + # python process_to_only_word.py <Target file(Already morphological analysis file)> > result.txt ########################################################################### argvs = sys.argv argc = len(argvs) if (argc != 2): print('Usage: # python %s filename' % argvs[0]) quit() with open(argvs[1],mode='r', encoding='utf-8') as read_file: line = read_file.readlines() for l in line: split_text_list = str(l).split() if(len(split_text_list) == 0): continue print(split_text_list[0])
Add a "How to use(comment)"
## Code Before: import re import sys ########################################################################### # This code is developing yet!! # Target file(Already morphological analysis file) to process to word only. ########################################################################### argvs = sys.argv argc = len(argvs) if (argc != 2): print('Usage: # python %s filename' % argvs[0]) quit() with open(argvs[1],mode='r', encoding='utf-8') as read_file: line = read_file.readlines() for l in line: split_text_list = str(l).split() if(len(split_text_list) == 0): continue print(split_text_list[0]) ## Instruction: Add a "How to use(comment)" ## Code After: import re import sys ########################################################################### # This code is developing yet!! # Target file(Already morphological analysis file) to process to word only. # <How to use> # python process_to_only_word.py <Target file(Already morphological analysis file)> > result.txt ########################################################################### argvs = sys.argv argc = len(argvs) if (argc != 2): print('Usage: # python %s filename' % argvs[0]) quit() with open(argvs[1],mode='r', encoding='utf-8') as read_file: line = read_file.readlines() for l in line: split_text_list = str(l).split() if(len(split_text_list) == 0): continue print(split_text_list[0])
import re import sys ########################################################################### # This code is developing yet!! # Target file(Already morphological analysis file) to process to word only. + # <How to use> + # python process_to_only_word.py <Target file(Already morphological analysis file)> > result.txt ########################################################################### argvs = sys.argv argc = len(argvs) if (argc != 2): print('Usage: # python %s filename' % argvs[0]) quit() with open(argvs[1],mode='r', encoding='utf-8') as read_file: line = read_file.readlines() for l in line: split_text_list = str(l).split() if(len(split_text_list) == 0): continue print(split_text_list[0])
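For context, a guess at the expected input: one token per line of MeCab-style morphological output (the script itself only relies on whitespace-separated columns, keeping column 0).

# Minimal stand-in for the loop above: keep only the first column of each line.
sample = "走る\t動詞,自立,*,*\n犬\t名詞,一般,*,*\n"
for l in sample.splitlines():
    cols = l.split()
    if cols:
        print(cols[0])
# prints:
# 走る
# 犬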
361af42be2c3044a15480572befb1405a603b4ab
VALDprepare.py
VALDprepare.py
import argparse import gzip def _parser(): parser = argparse.ArgumentParser(description='Prepare the data downloaded ' 'from VALD.') parser.add_argument('input', help='input compressed file') parser.add_argument('-o', '--output', help='Optional output', default=False) return parser.parse_args() def main(input, output=False): if not isinstance(input, str): raise TypeError('Input must be a str. A %s was parsed' % type(input)) if not isinstance(output, str) and output: raise TypeError('Output must be a str. A %s was parsed' % type(output)) # TODO: Check if the input exists fname = input.rpartition('.')[0] if not output: output = '%s.dat' % fname oref = '%s.ref' % fname fout = '' fref = '' with gzip.open(input, 'r') as lines: for i, line in enumerate(lines): if i < 2: fout += '# %s' % line.replace("'", '') else: fout += line.replace("'", '') if 'References' in line: break with open(output, 'w') as fo: fo.write(fout) if __name__ == '__main__': args = _parser() input, output = args.input, args.output main(input, output)
import argparse import gzip import os def _parser(): parser = argparse.ArgumentParser(description='Prepare the data downloaded ' 'from VALD.') parser.add_argument('input', help='input compressed file', type=str) parser.add_argument('-o', '--output', help='Optional output', default=False, type=str) return parser.parse_args() def main(input, output=False): if not os.path.isfile(input): raise IOError('File: %s does not exists' % input) fname = input.rpartition('.')[0] if not output: output = '%s.dat' % fname oref = '%s.ref' % fname fout = '' fref = '' with gzip.open(input, 'r') as lines: for i, line in enumerate(lines): if i < 2: fout += '# %s' % line.replace("'", '') else: fout += line.replace("'", '') if 'References' in line: break with open(output, 'w') as fo: fo.write(fout) if __name__ == '__main__': args = _parser() input, output = args.input, args.output main(input, output)
Check if the file exists before doing anything else.
Check if the file exists before doing anything else.
Python
mit
DanielAndreasen/astro_scripts
import argparse import gzip + import os def _parser(): parser = argparse.ArgumentParser(description='Prepare the data downloaded ' 'from VALD.') - parser.add_argument('input', help='input compressed file') + parser.add_argument('input', help='input compressed file', type=str) parser.add_argument('-o', '--output', help='Optional output', - default=False) + default=False, type=str) return parser.parse_args() def main(input, output=False): - if not isinstance(input, str): - raise TypeError('Input must be a str. A %s was parsed' % type(input)) - if not isinstance(output, str) and output: - raise TypeError('Output must be a str. A %s was parsed' % type(output)) - # TODO: Check if the input exists + if not os.path.isfile(input): + raise IOError('File: %s does not exists' % input) fname = input.rpartition('.')[0] if not output: output = '%s.dat' % fname oref = '%s.ref' % fname fout = '' fref = '' with gzip.open(input, 'r') as lines: for i, line in enumerate(lines): if i < 2: fout += '# %s' % line.replace("'", '') else: fout += line.replace("'", '') if 'References' in line: break with open(output, 'w') as fo: fo.write(fout) if __name__ == '__main__': args = _parser() input, output = args.input, args.output main(input, output)
Check if the file exists before doing anything else.
## Code Before: import argparse import gzip def _parser(): parser = argparse.ArgumentParser(description='Prepare the data downloaded ' 'from VALD.') parser.add_argument('input', help='input compressed file') parser.add_argument('-o', '--output', help='Optional output', default=False) return parser.parse_args() def main(input, output=False): if not isinstance(input, str): raise TypeError('Input must be a str. A %s was parsed' % type(input)) if not isinstance(output, str) and output: raise TypeError('Output must be a str. A %s was parsed' % type(output)) # TODO: Check if the input exists fname = input.rpartition('.')[0] if not output: output = '%s.dat' % fname oref = '%s.ref' % fname fout = '' fref = '' with gzip.open(input, 'r') as lines: for i, line in enumerate(lines): if i < 2: fout += '# %s' % line.replace("'", '') else: fout += line.replace("'", '') if 'References' in line: break with open(output, 'w') as fo: fo.write(fout) if __name__ == '__main__': args = _parser() input, output = args.input, args.output main(input, output) ## Instruction: Check if the file exists before doing anything else. ## Code After: import argparse import gzip import os def _parser(): parser = argparse.ArgumentParser(description='Prepare the data downloaded ' 'from VALD.') parser.add_argument('input', help='input compressed file', type=str) parser.add_argument('-o', '--output', help='Optional output', default=False, type=str) return parser.parse_args() def main(input, output=False): if not os.path.isfile(input): raise IOError('File: %s does not exists' % input) fname = input.rpartition('.')[0] if not output: output = '%s.dat' % fname oref = '%s.ref' % fname fout = '' fref = '' with gzip.open(input, 'r') as lines: for i, line in enumerate(lines): if i < 2: fout += '# %s' % line.replace("'", '') else: fout += line.replace("'", '') if 'References' in line: break with open(output, 'w') as fo: fo.write(fout) if __name__ == '__main__': args = _parser() input, output = args.input, args.output main(input, output)
import argparse import gzip + import os def _parser(): parser = argparse.ArgumentParser(description='Prepare the data downloaded ' 'from VALD.') - parser.add_argument('input', help='input compressed file') + parser.add_argument('input', help='input compressed file', type=str) ? ++++++++++ parser.add_argument('-o', '--output', help='Optional output', - default=False) + default=False, type=str) ? ++++++++++ return parser.parse_args() def main(input, output=False): - if not isinstance(input, str): - raise TypeError('Input must be a str. A %s was parsed' % type(input)) - if not isinstance(output, str) and output: - raise TypeError('Output must be a str. A %s was parsed' % type(output)) - # TODO: Check if the input exists + if not os.path.isfile(input): + raise IOError('File: %s does not exists' % input) fname = input.rpartition('.')[0] if not output: output = '%s.dat' % fname oref = '%s.ref' % fname fout = '' fref = '' with gzip.open(input, 'r') as lines: for i, line in enumerate(lines): if i < 2: fout += '# %s' % line.replace("'", '') else: fout += line.replace("'", '') if 'References' in line: break with open(output, 'w') as fo: fo.write(fout) if __name__ == '__main__': args = _parser() input, output = args.input, args.output main(input, output)
be27ec6d2567b85b94b40c79570ca5d9c20fd0bf
modeltrans/admin.py
modeltrans/admin.py
from .conf import get_default_language from .translator import get_i18n_field from .utils import get_language class ActiveLanguageMixin(object): ''' Hide all translated fields, except: - The field for the default language (settings.LANGUAGE_CODE) - The field for the currently active language. ''' def get_exclude(self, request, obj=None): i18n_field = get_i18n_field(self.model) if i18n_field is None: return super(ActiveLanguageMixin, self).get_exclude(request) language = get_language() if language == get_default_language(): language = False excludes = [] for field in i18n_field.get_translated_fields(): if field.language is None or field.language == language: continue excludes.append(field.name) return excludes
from .conf import get_default_language from .translator import get_i18n_field from .utils import get_language class ActiveLanguageMixin(object): ''' Add this mixin to your admin class to hide the untranslated field and all translated fields, except: - The field for the default language (settings.LANGUAGE_CODE) - The field for the currently active language. ''' def get_exclude(self, request, obj=None): # use default implementation for models without i18n-field i18n_field = get_i18n_field(self.model) if i18n_field is None: return super(ActiveLanguageMixin, self).get_exclude(request) language = get_language() if language == get_default_language(): language = False excludes = [] for field in i18n_field.get_translated_fields(): if field.language is None or field.language == language: continue excludes.append(field.name) # also add the name of the original field, as it is added excludes.append(field.original_field.name) # de-duplicate return list(set(excludes))
Improve ActiveLanguageMixin to hide original field
Improve ActiveLanguageMixin to hide original field
Python
bsd-3-clause
zostera/django-modeltrans,zostera/django-modeltrans
from .conf import get_default_language from .translator import get_i18n_field from .utils import get_language class ActiveLanguageMixin(object): ''' + Add this mixin to your admin class to hide the untranslated field and all - Hide all translated fields, except: + translated fields, except: + - The field for the default language (settings.LANGUAGE_CODE) - The field for the currently active language. ''' def get_exclude(self, request, obj=None): - + # use default implementation for models without i18n-field i18n_field = get_i18n_field(self.model) if i18n_field is None: return super(ActiveLanguageMixin, self).get_exclude(request) language = get_language() if language == get_default_language(): language = False excludes = [] for field in i18n_field.get_translated_fields(): if field.language is None or field.language == language: continue excludes.append(field.name) - return excludes + # also add the name of the original field, as it is added + excludes.append(field.original_field.name) + # de-duplicate + return list(set(excludes)) +
Improve ActiveLanguageMixin to hide original field
## Code Before: from .conf import get_default_language from .translator import get_i18n_field from .utils import get_language class ActiveLanguageMixin(object): ''' Hide all translated fields, except: - The field for the default language (settings.LANGUAGE_CODE) - The field for the currently active language. ''' def get_exclude(self, request, obj=None): i18n_field = get_i18n_field(self.model) if i18n_field is None: return super(ActiveLanguageMixin, self).get_exclude(request) language = get_language() if language == get_default_language(): language = False excludes = [] for field in i18n_field.get_translated_fields(): if field.language is None or field.language == language: continue excludes.append(field.name) return excludes ## Instruction: Improve ActiveLanguageMixin to hide original field ## Code After: from .conf import get_default_language from .translator import get_i18n_field from .utils import get_language class ActiveLanguageMixin(object): ''' Add this mixin to your admin class to hide the untranslated field and all translated fields, except: - The field for the default language (settings.LANGUAGE_CODE) - The field for the currently active language. ''' def get_exclude(self, request, obj=None): # use default implementation for models without i18n-field i18n_field = get_i18n_field(self.model) if i18n_field is None: return super(ActiveLanguageMixin, self).get_exclude(request) language = get_language() if language == get_default_language(): language = False excludes = [] for field in i18n_field.get_translated_fields(): if field.language is None or field.language == language: continue excludes.append(field.name) # also add the name of the original field, as it is added excludes.append(field.original_field.name) # de-duplicate return list(set(excludes))
from .conf import get_default_language from .translator import get_i18n_field from .utils import get_language class ActiveLanguageMixin(object): ''' + Add this mixin to your admin class to hide the untranslated field and all - Hide all translated fields, except: ? --------- + translated fields, except: + - The field for the default language (settings.LANGUAGE_CODE) - The field for the currently active language. ''' def get_exclude(self, request, obj=None): - + # use default implementation for models without i18n-field i18n_field = get_i18n_field(self.model) if i18n_field is None: return super(ActiveLanguageMixin, self).get_exclude(request) language = get_language() if language == get_default_language(): language = False excludes = [] for field in i18n_field.get_translated_fields(): if field.language is None or field.language == language: continue excludes.append(field.name) + # also add the name of the original field, as it is added + excludes.append(field.original_field.name) + + # de-duplicate - return excludes + return list(set(excludes)) ? +++++++++ ++
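A short usage sketch for the mixin above. The Blog model and its fields are invented; it is only assumed to carry a modeltrans TranslationField so that per-language variants (title_en, title_de, ...) exist on it.

# Hypothetical admin registration that hides inactive-language fields.
from django.contrib import admin

class BlogAdmin(ActiveLanguageMixin, admin.ModelAdmin):
    list_display = ('title_i18n',)   # active-language value, per the modeltrans naming convention

# admin.site.register(Blog, BlogAdmin)   # Blog import omitted; illustrative only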
a0d10e419b504dc2e7f4ba45a5d10a2d9d47019c
knights/base.py
knights/base.py
import ast from . import parse class Template: def __init__(self, raw): self.raw = raw self.root = parse.parse(raw) code = ast.Expression( body=ast.ListComp( elt=ast.Call( func=ast.Name(id='str', ctx=ast.Load()), args=[ ast.Call( func=ast.Attribute( value=ast.Name(id='x', ctx=ast.Load()), attr='render', ctx=ast.Load() ), args=[ ast.Name(id='context', ctx=ast.Load()), ], keywords=[], starargs=None, kwargs=None ), ], keywords=[], starargs=None, kwargs=None ), generators=[ ast.comprehension( target=ast.Name(id='x', ctx=ast.Store()), iter=ast.Name(id='nodelist', ctx=ast.Load()), ifs=[] ), ] ) ) ast.fix_missing_locations(code) self.code = compile(code, filename='<template>', mode='eval') def render(self, context): global_ctx = { 'nodelist': self.root.nodelist, 'context': dict(context), } return ''.join(eval(self.code, global_ctx, {}))
import ast from . import parse class Template: def __init__(self, raw): self.raw = raw self.nodelist = parse.parse(raw) code = ast.Expression( body=ast.GeneratorExp( elt=ast.Call( func=ast.Name(id='str', ctx=ast.Load()), args=[ ast.Call( func=ast.Attribute( value=ast.Name(id='x', ctx=ast.Load()), attr='render', ctx=ast.Load() ), args=[ ast.Name(id='context', ctx=ast.Load()), ], keywords=[], starargs=None, kwargs=None ), ], keywords=[], starargs=None, kwargs=None ), generators=[ ast.comprehension( target=ast.Name(id='x', ctx=ast.Store()), iter=ast.Name(id='nodelist', ctx=ast.Load()), ifs=[] ), ] ) ) ast.fix_missing_locations(code) self.code = compile(code, filename='<template>', mode='eval') def render(self, context): global_ctx = { 'nodelist': self.nodelist, 'context': dict(context), } return ''.join(eval(self.code, global_ctx, {}))
Use a generator for rendering, and pass nodelist unwrapped
Use a generator for rendering, and pass nodelist unwrapped
Python
mit
funkybob/knights-templater,funkybob/knights-templater
import ast from . import parse class Template: def __init__(self, raw): self.raw = raw - self.root = parse.parse(raw) + self.nodelist = parse.parse(raw) code = ast.Expression( - body=ast.ListComp( + body=ast.GeneratorExp( elt=ast.Call( func=ast.Name(id='str', ctx=ast.Load()), args=[ ast.Call( func=ast.Attribute( value=ast.Name(id='x', ctx=ast.Load()), attr='render', ctx=ast.Load() ), - args=[ - ast.Name(id='context', ctx=ast.Load()), + args=[ ast.Name(id='context', ctx=ast.Load()), ], - ], keywords=[], starargs=None, kwargs=None + keywords=[], starargs=None, kwargs=None ), + ], - ], keywords=[], starargs=None, kwargs=None + keywords=[], starargs=None, kwargs=None ), generators=[ ast.comprehension( target=ast.Name(id='x', ctx=ast.Store()), iter=ast.Name(id='nodelist', ctx=ast.Load()), ifs=[] ), ] ) ) ast.fix_missing_locations(code) self.code = compile(code, filename='<template>', mode='eval') def render(self, context): global_ctx = { - 'nodelist': self.root.nodelist, + 'nodelist': self.nodelist, 'context': dict(context), } return ''.join(eval(self.code, global_ctx, {}))
Use a generator for rendering, and pass nodelist unwrapped
## Code Before: import ast from . import parse class Template: def __init__(self, raw): self.raw = raw self.root = parse.parse(raw) code = ast.Expression( body=ast.ListComp( elt=ast.Call( func=ast.Name(id='str', ctx=ast.Load()), args=[ ast.Call( func=ast.Attribute( value=ast.Name(id='x', ctx=ast.Load()), attr='render', ctx=ast.Load() ), args=[ ast.Name(id='context', ctx=ast.Load()), ], keywords=[], starargs=None, kwargs=None ), ], keywords=[], starargs=None, kwargs=None ), generators=[ ast.comprehension( target=ast.Name(id='x', ctx=ast.Store()), iter=ast.Name(id='nodelist', ctx=ast.Load()), ifs=[] ), ] ) ) ast.fix_missing_locations(code) self.code = compile(code, filename='<template>', mode='eval') def render(self, context): global_ctx = { 'nodelist': self.root.nodelist, 'context': dict(context), } return ''.join(eval(self.code, global_ctx, {})) ## Instruction: Use a generator for rendering, and pass nodelist unwrapped ## Code After: import ast from . import parse class Template: def __init__(self, raw): self.raw = raw self.nodelist = parse.parse(raw) code = ast.Expression( body=ast.GeneratorExp( elt=ast.Call( func=ast.Name(id='str', ctx=ast.Load()), args=[ ast.Call( func=ast.Attribute( value=ast.Name(id='x', ctx=ast.Load()), attr='render', ctx=ast.Load() ), args=[ ast.Name(id='context', ctx=ast.Load()), ], keywords=[], starargs=None, kwargs=None ), ], keywords=[], starargs=None, kwargs=None ), generators=[ ast.comprehension( target=ast.Name(id='x', ctx=ast.Store()), iter=ast.Name(id='nodelist', ctx=ast.Load()), ifs=[] ), ] ) ) ast.fix_missing_locations(code) self.code = compile(code, filename='<template>', mode='eval') def render(self, context): global_ctx = { 'nodelist': self.nodelist, 'context': dict(context), } return ''.join(eval(self.code, global_ctx, {}))
import ast from . import parse class Template: def __init__(self, raw): self.raw = raw - self.root = parse.parse(raw) ? ^ ^ + self.nodelist = parse.parse(raw) ? ^ ^^^^^ code = ast.Expression( - body=ast.ListComp( ? ^^^ - ^ + body=ast.GeneratorExp( ? ^^^^^^ ^^^ elt=ast.Call( func=ast.Name(id='str', ctx=ast.Load()), args=[ ast.Call( func=ast.Attribute( value=ast.Name(id='x', ctx=ast.Load()), attr='render', ctx=ast.Load() ), - args=[ - ast.Name(id='context', ctx=ast.Load()), ? ^^^ + args=[ ast.Name(id='context', ctx=ast.Load()), ], ? ^^^^^^ +++ - ], keywords=[], starargs=None, kwargs=None ? --- + keywords=[], starargs=None, kwargs=None ), + ], - ], keywords=[], starargs=None, kwargs=None ? --- + keywords=[], starargs=None, kwargs=None ), generators=[ ast.comprehension( target=ast.Name(id='x', ctx=ast.Store()), iter=ast.Name(id='nodelist', ctx=ast.Load()), ifs=[] ), ] ) ) ast.fix_missing_locations(code) self.code = compile(code, filename='<template>', mode='eval') def render(self, context): global_ctx = { - 'nodelist': self.root.nodelist, ? ----- + 'nodelist': self.nodelist, 'context': dict(context), } return ''.join(eval(self.code, global_ctx, {}))
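The AST assembled in Template.__init__ above is just the compiled form of a one-line generator expression; spelled out in ordinary Python, render() roughly evaluates this:

# Hand-written equivalent of the constructed ast.Expression.
def render_by_hand(nodelist, context):
    return ''.join(str(x.render(context)) for x in nodelist)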
e5b1eeab4486d2182396a7f8e64d0a05207a3f5f
bom_data_parser/__init__.py
bom_data_parser/__init__.py
from climate_data_online import read_climate_data_online_csv from acorn_sat import read_acorn_sat_csv from hrs import read_hrs_csv from observations_json import read_obs_json
from bom_data_parser.acorn_sat import read_acorn_sat_csv from bom_data_parser.climate_data_online import read_climate_data_online_csv from bom_data_parser.hrs import read_hrs_csv from bom_data_parser.observations_json import read_obs_json
Fix up imports in package.
Fix up imports in package.
Python
bsd-3-clause
amacd31/bom_data_parser,amacd31/bom_data_parser
+ from bom_data_parser.acorn_sat import read_acorn_sat_csv - from climate_data_online import read_climate_data_online_csv + from bom_data_parser.climate_data_online import read_climate_data_online_csv - from acorn_sat import read_acorn_sat_csv - from hrs import read_hrs_csv + from bom_data_parser.hrs import read_hrs_csv - from observations_json import read_obs_json + from bom_data_parser.observations_json import read_obs_json
Fix up imports in package.
## Code Before: from climate_data_online import read_climate_data_online_csv from acorn_sat import read_acorn_sat_csv from hrs import read_hrs_csv from observations_json import read_obs_json ## Instruction: Fix up imports in package. ## Code After: from bom_data_parser.acorn_sat import read_acorn_sat_csv from bom_data_parser.climate_data_online import read_climate_data_online_csv from bom_data_parser.hrs import read_hrs_csv from bom_data_parser.observations_json import read_obs_json
+ from bom_data_parser.acorn_sat import read_acorn_sat_csv - from climate_data_online import read_climate_data_online_csv + from bom_data_parser.climate_data_online import read_climate_data_online_csv ? ++++++++++++++++ - from acorn_sat import read_acorn_sat_csv - from hrs import read_hrs_csv + from bom_data_parser.hrs import read_hrs_csv ? ++++++++++++++++ - from observations_json import read_obs_json + from bom_data_parser.observations_json import read_obs_json ? ++++++++++++++++
c1e6b61b6da9f17f11ce41bbcdaad61fadc075db
serenata_toolbox/datasets/remote.py
serenata_toolbox/datasets/remote.py
import os from functools import partial import boto3 from decouple import config from serenata_toolbox import log from serenata_toolbox.datasets.contextmanager import status_message class RemoteDatasets: def __init__(self): self.client = None self.credentials = { 'aws_access_key_id': config('AMAZON_ACCESS_KEY'), 'aws_secret_access_key': config('AMAZON_SECRET_KEY'), 'region_name': config('AMAZON_REGION'), } @property def bucket(self): return config('AMAZON_BUCKET') @property def s3(self): if not self.client: self.client = boto3.client('s3', **self.credentials) return self.client @property def all(self): response = self.s3.list_objects(Bucket=self.bucket) yield from (obj.get('Key') for obj in response.get('Contents', [])) def upload(self, file_path): _, file_name = os.path.split(file_path) with status_message('Uploading {}…'.format(file_name)): self.s3.upload_file(file_path, self.bucket, file_name) def delete(self, file_name): with status_message('Deleting {}…'.format(file_name)): self.s3.delete_object(Bucket=self.bucket, Key=file_name)
import os
from functools import partial

import boto3
from decouple import config

from serenata_toolbox import log
from serenata_toolbox.datasets.contextmanager import status_message


class RemoteDatasets:

    def __init__(self):
        self.client = None
        self.credentials = {
            'aws_access_key_id': config('AMAZON_ACCESS_KEY', default=None),
            'aws_secret_access_key': config('AMAZON_SECRET_KEY', default=None),
            'region_name': config('AMAZON_REGION'),
        }

    @property
    def bucket(self):
        return config('AMAZON_BUCKET')

    @property
    def s3(self):
        if not self.client:
            self.client = boto3.client('s3', **self.credentials)
        return self.client

    @property
    def all(self):
        response = self.s3.list_objects(Bucket=self.bucket)
        yield from (obj.get('Key') for obj in response.get('Contents', []))

    def upload(self, file_path):
        _, file_name = os.path.split(file_path)
        with status_message('Uploading {}…'.format(file_name)):
            self.s3.upload_file(file_path, self.bucket, file_name)

    def delete(self, file_name):
        with status_message('Deleting {}…'.format(file_name)):
            self.s3.delete_object(Bucket=self.bucket, Key=file_name)
Make Amazon keys non required
Make Amazon keys non required
Python
mit
datasciencebr/serenata-toolbox
import os from functools import partial import boto3 from decouple import config from serenata_toolbox import log from serenata_toolbox.datasets.contextmanager import status_message class RemoteDatasets: def __init__(self): self.client = None self.credentials = { - 'aws_access_key_id': config('AMAZON_ACCESS_KEY'), + 'aws_access_key_id': config('AMAZON_ACCESS_KEY', default=None), - 'aws_secret_access_key': config('AMAZON_SECRET_KEY'), + 'aws_secret_access_key': config('AMAZON_SECRET_KEY', default=None), 'region_name': config('AMAZON_REGION'), } @property def bucket(self): return config('AMAZON_BUCKET') @property def s3(self): if not self.client: self.client = boto3.client('s3', **self.credentials) return self.client @property def all(self): response = self.s3.list_objects(Bucket=self.bucket) yield from (obj.get('Key') for obj in response.get('Contents', [])) def upload(self, file_path): _, file_name = os.path.split(file_path) with status_message('Uploading {}…'.format(file_name)): self.s3.upload_file(file_path, self.bucket, file_name) def delete(self, file_name): with status_message('Deleting {}…'.format(file_name)): self.s3.delete_object(Bucket=self.bucket, Key=file_name)
Make Amazon keys non required
## Code Before: import os from functools import partial import boto3 from decouple import config from serenata_toolbox import log from serenata_toolbox.datasets.contextmanager import status_message class RemoteDatasets: def __init__(self): self.client = None self.credentials = { 'aws_access_key_id': config('AMAZON_ACCESS_KEY'), 'aws_secret_access_key': config('AMAZON_SECRET_KEY'), 'region_name': config('AMAZON_REGION'), } @property def bucket(self): return config('AMAZON_BUCKET') @property def s3(self): if not self.client: self.client = boto3.client('s3', **self.credentials) return self.client @property def all(self): response = self.s3.list_objects(Bucket=self.bucket) yield from (obj.get('Key') for obj in response.get('Contents', [])) def upload(self, file_path): _, file_name = os.path.split(file_path) with status_message('Uploading {}…'.format(file_name)): self.s3.upload_file(file_path, self.bucket, file_name) def delete(self, file_name): with status_message('Deleting {}…'.format(file_name)): self.s3.delete_object(Bucket=self.bucket, Key=file_name) ## Instruction: Make Amazon keys non required ## Code After: import os from functools import partial import boto3 from decouple import config from serenata_toolbox import log from serenata_toolbox.datasets.contextmanager import status_message class RemoteDatasets: def __init__(self): self.client = None self.credentials = { 'aws_access_key_id': config('AMAZON_ACCESS_KEY', default=None), 'aws_secret_access_key': config('AMAZON_SECRET_KEY', default=None), 'region_name': config('AMAZON_REGION'), } @property def bucket(self): return config('AMAZON_BUCKET') @property def s3(self): if not self.client: self.client = boto3.client('s3', **self.credentials) return self.client @property def all(self): response = self.s3.list_objects(Bucket=self.bucket) yield from (obj.get('Key') for obj in response.get('Contents', [])) def upload(self, file_path): _, file_name = os.path.split(file_path) with status_message('Uploading {}…'.format(file_name)): self.s3.upload_file(file_path, self.bucket, file_name) def delete(self, file_name): with status_message('Deleting {}…'.format(file_name)): self.s3.delete_object(Bucket=self.bucket, Key=file_name)
import os from functools import partial import boto3 from decouple import config from serenata_toolbox import log from serenata_toolbox.datasets.contextmanager import status_message class RemoteDatasets: def __init__(self): self.client = None self.credentials = { - 'aws_access_key_id': config('AMAZON_ACCESS_KEY'), + 'aws_access_key_id': config('AMAZON_ACCESS_KEY', default=None), ? ++++++++++++++ - 'aws_secret_access_key': config('AMAZON_SECRET_KEY'), + 'aws_secret_access_key': config('AMAZON_SECRET_KEY', default=None), ? ++++++++++++++ 'region_name': config('AMAZON_REGION'), } @property def bucket(self): return config('AMAZON_BUCKET') @property def s3(self): if not self.client: self.client = boto3.client('s3', **self.credentials) return self.client @property def all(self): response = self.s3.list_objects(Bucket=self.bucket) yield from (obj.get('Key') for obj in response.get('Contents', [])) def upload(self, file_path): _, file_name = os.path.split(file_path) with status_message('Uploading {}…'.format(file_name)): self.s3.upload_file(file_path, self.bucket, file_name) def delete(self, file_name): with status_message('Deleting {}…'.format(file_name)): self.s3.delete_object(Bucket=self.bucket, Key=file_name)
2f4b57b2b7c5b391af615a204ad85dd04cc780d3
chatroom/views.py
chatroom/views.py
from django.shortcuts import render
from django.http import HttpResponse
from django.http import HttpResponseRedirect


def index(request):
    return render(request, 'index.html')


def append(request):
    # open("data", "a").write(str(request.args.get("msg")) + "\n\r")
    open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8'))
    return HttpResponse("")


def retreive(request):
    fil = open("/tmp/data", "rb")
    payload = fil.read()
    return HttpResponse(payload)


def order(request):
    return render(request, 'order.html')


def faq(request):
    return render(request, 'faq.html')


def about_us(request):
    return render(request, 'about_us.html')


def progress(request):
    return render(request, 'progress.html')


def exchange(request):
    return render(request, 'exchange.html')


def chatroom(request):
    return render(request, 'chatroom.html')
from django.shortcuts import render, redirect
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from login.views import isLogin
from login import auth


def index(request):
    return render(request, 'index.html')


def append(request):
    # open("data", "a").write(str(request.args.get("msg")) + "\n\r")
    open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8'))
    return HttpResponse("")


def retreive(request):
    fil = open("/tmp/data", "rb")
    payload = fil.read()
    return HttpResponse(payload)


def order(request):
    if isLogin(request):
        data = auth.get_user_data(request)
        if auth.hasProfile(data.uuid):
            profile = auth.get_user_profile(request)
            return render(request, "order.html", {'realname' : profile.real_name,
                                                  'email' : profile.email,
                                                  'shipping_address' : profile.default_shipping_address,
                                                  'phone' : profile.phone_number})

        else:
            redirect("/profile/")


def faq(request):
    return render(request, 'faq.html')


def about_us(request):
    return render(request, 'about_us.html')


def progress(request):
    return render(request, 'progress.html')


def exchange(request):
    return render(request, 'exchange.html')


def chatroom(request):
    return render(request, 'chatroom.html')
Load profiles on the order page
Load profiles on the order page
Python
mit
sonicyang/chiphub,sonicyang/chiphub,sonicyang/chiphub
- from django.shortcuts import render + from django.shortcuts import render, redirect from django.http import HttpResponse from django.http import HttpResponseRedirect + from login.views import isLogin + from login import auth def index(request): return render(request, 'index.html') def append(request): # open("data", "a").write(str(request.args.get("msg")) + "\n\r") open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8')) return HttpResponse("") def retreive(request): fil = open("/tmp/data", "rb") payload = fil.read() return HttpResponse(payload) def order(request): - return render(request, 'order.html') + if isLogin(request): + data = auth.get_user_data(request) + if auth.hasProfile(data.uuid): + profile = auth.get_user_profile(request) + return render(request, "order.html", {'realname' : profile.real_name, + 'email' : profile.email, + 'shipping_address' : profile.default_shipping_address, + 'phone' : profile.phone_number}) + + else: + redirect("/profile/") def faq(request): return render(request, 'faq.html') def about_us(request): return render(request, 'about_us.html') def progress(request): return render(request, 'progress.html') def exchange(request): return render(request, 'exchange.html') def chatroom(request): return render(request, 'chatroom.html')
Load profiles on the order page
## Code Before: from django.shortcuts import render from django.http import HttpResponse from django.http import HttpResponseRedirect def index(request): return render(request, 'index.html') def append(request): # open("data", "a").write(str(request.args.get("msg")) + "\n\r") open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8')) return HttpResponse("") def retreive(request): fil = open("/tmp/data", "rb") payload = fil.read() return HttpResponse(payload) def order(request): return render(request, 'order.html') def faq(request): return render(request, 'faq.html') def about_us(request): return render(request, 'about_us.html') def progress(request): return render(request, 'progress.html') def exchange(request): return render(request, 'exchange.html') def chatroom(request): return render(request, 'chatroom.html') ## Instruction: Load profiles on the order page ## Code After: from django.shortcuts import render, redirect from django.http import HttpResponse from django.http import HttpResponseRedirect from login.views import isLogin from login import auth def index(request): return render(request, 'index.html') def append(request): # open("data", "a").write(str(request.args.get("msg")) + "\n\r") open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8')) return HttpResponse("") def retreive(request): fil = open("/tmp/data", "rb") payload = fil.read() return HttpResponse(payload) def order(request): if isLogin(request): data = auth.get_user_data(request) if auth.hasProfile(data.uuid): profile = auth.get_user_profile(request) return render(request, "order.html", {'realname' : profile.real_name, 'email' : profile.email, 'shipping_address' : profile.default_shipping_address, 'phone' : profile.phone_number}) else: redirect("/profile/") def faq(request): return render(request, 'faq.html') def about_us(request): return render(request, 'about_us.html') def progress(request): return render(request, 'progress.html') def exchange(request): return render(request, 'exchange.html') def chatroom(request): return render(request, 'chatroom.html')
- from django.shortcuts import render + from django.shortcuts import render, redirect ? ++++++++++ from django.http import HttpResponse from django.http import HttpResponseRedirect + from login.views import isLogin + from login import auth def index(request): return render(request, 'index.html') def append(request): # open("data", "a").write(str(request.args.get("msg")) + "\n\r") open("/tmp/data", "ab").write(request.GET['msg'].encode('utf8') + "\n\r".encode('utf-8')) return HttpResponse("") def retreive(request): fil = open("/tmp/data", "rb") payload = fil.read() return HttpResponse(payload) def order(request): - return render(request, 'order.html') + if isLogin(request): + data = auth.get_user_data(request) + if auth.hasProfile(data.uuid): + profile = auth.get_user_profile(request) + return render(request, "order.html", {'realname' : profile.real_name, + 'email' : profile.email, + 'shipping_address' : profile.default_shipping_address, + 'phone' : profile.phone_number}) + + else: + redirect("/profile/") def faq(request): return render(request, 'faq.html') def about_us(request): return render(request, 'about_us.html') def progress(request): return render(request, 'progress.html') def exchange(request): return render(request, 'exchange.html') def chatroom(request): return render(request, 'chatroom.html')
91aa7ed06d168700692a33fd3c51add585d60ac0
backend/uclapi/roombookings/migrations/0007_auto_20170327_1323.py
backend/uclapi/roombookings/migrations/0007_auto_20170327_1323.py
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('roombookings', '0006_bookinga_bookingb_lock'),
    ]

    operations = [
        migrations.AddField(
            model_name='bookinga',
            name='id',
            field=models.AutoField(primary_key=True, serialize=False),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='bookingb',
            name='id',
            field=models.AutoField(primary_key=True, serialize=False),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='bookinga',
            name='slotid',
            field=models.BigIntegerField(),
        ),
        migrations.AlterField(
            model_name='bookingb',
            name='slotid',
            field=models.BigIntegerField(),
        ),
    ]
from __future__ import unicode_literals

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('roombookings', '0006_bookinga_bookingb_lock'),
    ]

    operations = [
        migrations.AlterField(
            model_name='bookinga',
            name='slotid',
            field=models.BigIntegerField(null=True, primary_key=False),
        ),
        migrations.AlterField(
            model_name='bookingb',
            name='slotid',
            field=models.BigIntegerField(null=True, primary_key=False),
        ),
        migrations.AddField(
            model_name='bookinga',
            name='id',
            field=models.AutoField(primary_key=True, serialize=False),
            preserve_default=False,
        ),
        migrations.AddField(
            model_name='bookingb',
            name='id',
            field=models.AutoField(primary_key=True, serialize=False),
            preserve_default=False,
        ),
    ]
Fix up migration to have only one PK
Fix up migration to have only one PK
Python
mit
uclapi/uclapi,uclapi/uclapi,uclapi/uclapi,uclapi/uclapi
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('roombookings', '0006_bookinga_bookingb_lock'), ] operations = [ + migrations.AlterField( + model_name='bookinga', + name='slotid', + field=models.BigIntegerField(null=True, primary_key=False), + ), + migrations.AlterField( + model_name='bookingb', + name='slotid', + field=models.BigIntegerField(null=True, primary_key=False), + ), migrations.AddField( model_name='bookinga', name='id', field=models.AutoField(primary_key=True, serialize=False), preserve_default=False, ), migrations.AddField( model_name='bookingb', name='id', field=models.AutoField(primary_key=True, serialize=False), preserve_default=False, ), - migrations.AlterField( - model_name='bookinga', - name='slotid', - field=models.BigIntegerField(), - ), - migrations.AlterField( - model_name='bookingb', - name='slotid', - field=models.BigIntegerField(), - ), ]
Fix up migration to have only one PK
## Code Before: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('roombookings', '0006_bookinga_bookingb_lock'), ] operations = [ migrations.AddField( model_name='bookinga', name='id', field=models.AutoField(primary_key=True, serialize=False), preserve_default=False, ), migrations.AddField( model_name='bookingb', name='id', field=models.AutoField(primary_key=True, serialize=False), preserve_default=False, ), migrations.AlterField( model_name='bookinga', name='slotid', field=models.BigIntegerField(), ), migrations.AlterField( model_name='bookingb', name='slotid', field=models.BigIntegerField(), ), ] ## Instruction: Fix up migration to have only one PK ## Code After: from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('roombookings', '0006_bookinga_bookingb_lock'), ] operations = [ migrations.AlterField( model_name='bookinga', name='slotid', field=models.BigIntegerField(null=True, primary_key=False), ), migrations.AlterField( model_name='bookingb', name='slotid', field=models.BigIntegerField(null=True, primary_key=False), ), migrations.AddField( model_name='bookinga', name='id', field=models.AutoField(primary_key=True, serialize=False), preserve_default=False, ), migrations.AddField( model_name='bookingb', name='id', field=models.AutoField(primary_key=True, serialize=False), preserve_default=False, ), ]
from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('roombookings', '0006_bookinga_bookingb_lock'), ] operations = [ + migrations.AlterField( + model_name='bookinga', + name='slotid', + field=models.BigIntegerField(null=True, primary_key=False), + ), + migrations.AlterField( + model_name='bookingb', + name='slotid', + field=models.BigIntegerField(null=True, primary_key=False), + ), migrations.AddField( model_name='bookinga', name='id', field=models.AutoField(primary_key=True, serialize=False), preserve_default=False, ), migrations.AddField( model_name='bookingb', name='id', field=models.AutoField(primary_key=True, serialize=False), preserve_default=False, ), - migrations.AlterField( - model_name='bookinga', - name='slotid', - field=models.BigIntegerField(), - ), - migrations.AlterField( - model_name='bookingb', - name='slotid', - field=models.BigIntegerField(), - ), ]
6b9d9c33b4d68a008bb992b9a11ab2f02a4d5cbd
shelltest/tests/test_runner.py
shelltest/tests/test_runner.py
import tempfile
import StringIO

import pytest

from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner


@pytest.fixture
def tests():
    return [ShellTest('echo hello', 'hello\n', ShellTestSource('', 0)),
            ShellTest('echo $?', '0\n', ShellTestSource('', 2))]


def test_run(tests):
    r = ShellTestRunner(tests)
    results = r.run()
    assert len(results) == 2
    assert results[0].success
    assert results[0].ret_code == 0
    assert results[0].test == tests[0]
    assert results[0].actual_output == tests[0].expected_output
    assert results[1].success
    assert results[1].ret_code == 0
    assert results[1].test == tests[1]
    assert results[1].actual_output == tests[1].expected_output
import tempfile
import StringIO

import pytest

from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner


def runner(tests):
    tests = [ShellTest(cmd, output, ShellTestSource('', 0)) for cmd, output in tests]
    return ShellTestRunner(tests)


@pytest.mark.parametrize("cmd,output,ret_code,success", (
    ('echo hello', 'hello\n', 0, True),
    ('echo $?', '0\n', 0, True),
    ('exit 42', '', 42, True)))
def test_echo(cmd, output, ret_code, success):
    r = runner([(cmd, output)])
    res = r.run()[0]
    assert res.success == success
    assert res.ret_code == ret_code
    assert res.test == r.tests[0]
    assert res.actual_output == output
Update runner tests to use parameters
Update runner tests to use parameters
Python
mit
jthacker/shelltest,jthacker/shelltest
import tempfile import StringIO import pytest from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner + def runner(tests): + tests = [ShellTest(cmd, output, ShellTestSource('', 0)) for cmd, output in tests] + return ShellTestRunner(tests) - @pytest.fixture - def tests(): - return [ShellTest('echo hello', 'hello\n', ShellTestSource('', 0)), - ShellTest('echo $?', '0\n', ShellTestSource('', 2))] - def test_run(tests): - r = ShellTestRunner(tests) + @pytest.mark.parametrize("cmd,output,ret_code,success", ( + ('echo hello', 'hello\n', 0, True), + ('echo $?', '0\n', 0, True), + ('exit 42', '', 42, True))) + def test_echo(cmd, output, ret_code, success): + r = runner([(cmd, output)]) - results = r.run() + res = r.run()[0] + assert res.success == success - assert len(results) == 2 - assert results[0].success - assert results[0].ret_code == 0 + assert res.ret_code == ret_code - assert results[0].test == tests[0] + assert res.test == r.tests[0] - assert results[0].actual_output == tests[0].expected_output + assert res.actual_output == output - assert results[1].success - assert results[1].ret_code == 0 - assert results[1].test == tests[1] - assert results[1].actual_output == tests[1].expected_output
Update runner tests to use parameters
## Code Before: import tempfile import StringIO import pytest from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner @pytest.fixture def tests(): return [ShellTest('echo hello', 'hello\n', ShellTestSource('', 0)), ShellTest('echo $?', '0\n', ShellTestSource('', 2))] def test_run(tests): r = ShellTestRunner(tests) results = r.run() assert len(results) == 2 assert results[0].success assert results[0].ret_code == 0 assert results[0].test == tests[0] assert results[0].actual_output == tests[0].expected_output assert results[1].success assert results[1].ret_code == 0 assert results[1].test == tests[1] assert results[1].actual_output == tests[1].expected_output ## Instruction: Update runner tests to use parameters ## Code After: import tempfile import StringIO import pytest from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner def runner(tests): tests = [ShellTest(cmd, output, ShellTestSource('', 0)) for cmd, output in tests] return ShellTestRunner(tests) @pytest.mark.parametrize("cmd,output,ret_code,success", ( ('echo hello', 'hello\n', 0, True), ('echo $?', '0\n', 0, True), ('exit 42', '', 42, True))) def test_echo(cmd, output, ret_code, success): r = runner([(cmd, output)]) res = r.run()[0] assert res.success == success assert res.ret_code == ret_code assert res.test == r.tests[0] assert res.actual_output == output
import tempfile import StringIO import pytest from shelltest.shelltest import ShellTest, ShellTestSource, ShellTestRunner + def runner(tests): + tests = [ShellTest(cmd, output, ShellTestSource('', 0)) for cmd, output in tests] + return ShellTestRunner(tests) - @pytest.fixture - def tests(): - return [ShellTest('echo hello', 'hello\n', ShellTestSource('', 0)), - ShellTest('echo $?', '0\n', ShellTestSource('', 2))] - def test_run(tests): - r = ShellTestRunner(tests) + @pytest.mark.parametrize("cmd,output,ret_code,success", ( + ('echo hello', 'hello\n', 0, True), + ('echo $?', '0\n', 0, True), + ('exit 42', '', 42, True))) + def test_echo(cmd, output, ret_code, success): + r = runner([(cmd, output)]) - results = r.run() ? ---- + res = r.run()[0] ? +++ + assert res.success == success - assert len(results) == 2 - assert results[0].success - assert results[0].ret_code == 0 ? ------- ^ + assert res.ret_code == ret_code ? ^^^^^^^^ - assert results[0].test == tests[0] ? ------- + assert res.test == r.tests[0] ? ++ - assert results[0].actual_output == tests[0].expected_output ? ------- ------------------ + assert res.actual_output == output - assert results[1].success - assert results[1].ret_code == 0 - assert results[1].test == tests[1] - assert results[1].actual_output == tests[1].expected_output
e159465d4495ed2ebcbd1515d82f4f85fc28c8f7
corral/views/private.py
corral/views/private.py
"""These JSON-formatted views require authentication.""" from flask import Blueprint, jsonify, request, current_app, g from werkzeug.exceptions import NotFound from os.path import join from ..dl import download from ..error import handle_errors from ..util import enforce_auth private = Blueprint(__name__, 'private') private.before_request(enforce_auth) @handle_errors(private) def json_error(e): """Return an error response like {"msg":"Method not allowed"}.""" return jsonify({'msg': e.name}), e.code @private.route('/download/<site_id>/<int:param>', methods=['POST']) def home(site_id, param): """Attempt to download the file.""" if site_id in current_app.config['SITES']: site = current_app.config['SITES'][site_id] g.site = site url = site['url'].format(param) filename = site['filename'].format(param) path = join(site['path'], filename) download(url, path) return jsonify({}) raise NotFound @private.after_request def cors(response): """Handle browser cross-origin requests.""" if 'origin' in request.headers: site = g.get('site') if site: allowed_origin = site['origin'] response.headers['Access-Control-Allow-Origin'] = allowed_origin return response
"""These JSON-formatted views require authentication.""" from flask import Blueprint, jsonify, request, current_app, g from werkzeug.exceptions import NotFound from os.path import join from ..dl import download from ..error import handle_errors from ..util import enforce_auth private = Blueprint(__name__, 'private') private.before_request(enforce_auth) @handle_errors(private) def json_error(e): """Return an error response like {"msg":"Not Found"}.""" return jsonify({'msg': e.name}), e.code @private.route('/download/<site_id>/<int:param>', methods=['POST']) def home(site_id, param): """Attempt to download the file.""" if site_id in current_app.config['SITES']: site = current_app.config['SITES'][site_id] g.site = site url = site['url'].format(param) filename = site['filename'].format(param) path = join(site['path'], filename) download(url, path) return jsonify({}) raise NotFound @private.after_request def cors(response): """Handle browser cross-origin requests.""" if 'origin' in request.headers: site = g.get('site') if site: allowed_origin = site['origin'] response.headers['Access-Control-Allow-Origin'] = allowed_origin return response
Use a better example error message
Use a better example error message
Python
mit
nickfrostatx/corral,nickfrostatx/corral,nickfrostatx/corral
"""These JSON-formatted views require authentication.""" from flask import Blueprint, jsonify, request, current_app, g from werkzeug.exceptions import NotFound from os.path import join from ..dl import download from ..error import handle_errors from ..util import enforce_auth private = Blueprint(__name__, 'private') private.before_request(enforce_auth) @handle_errors(private) def json_error(e): - """Return an error response like {"msg":"Method not allowed"}.""" + """Return an error response like {"msg":"Not Found"}.""" return jsonify({'msg': e.name}), e.code @private.route('/download/<site_id>/<int:param>', methods=['POST']) def home(site_id, param): """Attempt to download the file.""" if site_id in current_app.config['SITES']: site = current_app.config['SITES'][site_id] g.site = site url = site['url'].format(param) filename = site['filename'].format(param) path = join(site['path'], filename) download(url, path) return jsonify({}) raise NotFound @private.after_request def cors(response): """Handle browser cross-origin requests.""" if 'origin' in request.headers: site = g.get('site') if site: allowed_origin = site['origin'] response.headers['Access-Control-Allow-Origin'] = allowed_origin return response
Use a better example error message
## Code Before: """These JSON-formatted views require authentication.""" from flask import Blueprint, jsonify, request, current_app, g from werkzeug.exceptions import NotFound from os.path import join from ..dl import download from ..error import handle_errors from ..util import enforce_auth private = Blueprint(__name__, 'private') private.before_request(enforce_auth) @handle_errors(private) def json_error(e): """Return an error response like {"msg":"Method not allowed"}.""" return jsonify({'msg': e.name}), e.code @private.route('/download/<site_id>/<int:param>', methods=['POST']) def home(site_id, param): """Attempt to download the file.""" if site_id in current_app.config['SITES']: site = current_app.config['SITES'][site_id] g.site = site url = site['url'].format(param) filename = site['filename'].format(param) path = join(site['path'], filename) download(url, path) return jsonify({}) raise NotFound @private.after_request def cors(response): """Handle browser cross-origin requests.""" if 'origin' in request.headers: site = g.get('site') if site: allowed_origin = site['origin'] response.headers['Access-Control-Allow-Origin'] = allowed_origin return response ## Instruction: Use a better example error message ## Code After: """These JSON-formatted views require authentication.""" from flask import Blueprint, jsonify, request, current_app, g from werkzeug.exceptions import NotFound from os.path import join from ..dl import download from ..error import handle_errors from ..util import enforce_auth private = Blueprint(__name__, 'private') private.before_request(enforce_auth) @handle_errors(private) def json_error(e): """Return an error response like {"msg":"Not Found"}.""" return jsonify({'msg': e.name}), e.code @private.route('/download/<site_id>/<int:param>', methods=['POST']) def home(site_id, param): """Attempt to download the file.""" if site_id in current_app.config['SITES']: site = current_app.config['SITES'][site_id] g.site = site url = site['url'].format(param) filename = site['filename'].format(param) path = join(site['path'], filename) download(url, path) return jsonify({}) raise NotFound @private.after_request def cors(response): """Handle browser cross-origin requests.""" if 'origin' in request.headers: site = g.get('site') if site: allowed_origin = site['origin'] response.headers['Access-Control-Allow-Origin'] = allowed_origin return response
"""These JSON-formatted views require authentication.""" from flask import Blueprint, jsonify, request, current_app, g from werkzeug.exceptions import NotFound from os.path import join from ..dl import download from ..error import handle_errors from ..util import enforce_auth private = Blueprint(__name__, 'private') private.before_request(enforce_auth) @handle_errors(private) def json_error(e): - """Return an error response like {"msg":"Method not allowed"}.""" ? ^^^^^^^^ ^^^ ^^ + """Return an error response like {"msg":"Not Found"}.""" ? ^ ^ ^^ return jsonify({'msg': e.name}), e.code @private.route('/download/<site_id>/<int:param>', methods=['POST']) def home(site_id, param): """Attempt to download the file.""" if site_id in current_app.config['SITES']: site = current_app.config['SITES'][site_id] g.site = site url = site['url'].format(param) filename = site['filename'].format(param) path = join(site['path'], filename) download(url, path) return jsonify({}) raise NotFound @private.after_request def cors(response): """Handle browser cross-origin requests.""" if 'origin' in request.headers: site = g.get('site') if site: allowed_origin = site['origin'] response.headers['Access-Control-Allow-Origin'] = allowed_origin return response
b7b41a160294edd987f73be7817c8b08aa8ed70e
herders/templatetags/utils.py
herders/templatetags/utils.py
from django import template

register = template.Library()


@register.filter
def get_range(value):
    return range(value)


@register.filter
def absolute(value):
    return abs(value)


@register.filter
def subtract(value, arg):
    return value - arg


@register.filter
def multiply(value, arg):
    return value * arg


@register.filter
def remove_extension(string):
    return string.replace('.png', '').replace("'", "").replace('(', '_').replace(')', '_')
from django import template

register = template.Library()


@register.filter
def get_range(value):
    if value:
        return range(value)
    else:
        return 0


@register.filter
def absolute(value):
    return abs(value)


@register.filter
def subtract(value, arg):
    return value - arg


@register.filter
def multiply(value, arg):
    return value * arg


@register.filter
def remove_extension(string):
    return string.replace('.png', '').replace("'", "").replace('(', '_').replace(')', '_')
Return 0 with the get_range filter if value is invalid instead of raise exception
Return 0 with the get_range filter if value is invalid instead of raise exception
Python
apache-2.0
porksmash/swarfarm,PeteAndersen/swarfarm,PeteAndersen/swarfarm,porksmash/swarfarm,PeteAndersen/swarfarm,porksmash/swarfarm,porksmash/swarfarm,PeteAndersen/swarfarm
from django import template register = template.Library() @register.filter def get_range(value): + if value: - return range(value) + return range(value) + else: + return 0 @register.filter def absolute(value): return abs(value) @register.filter def subtract(value, arg): return value - arg @register.filter def multiply(value, arg): return value * arg @register.filter def remove_extension(string): return string.replace('.png', '').replace("'", "").replace('(', '_').replace(')', '_')
Return 0 with the get_range filter if value is invalid instead of raise exception
## Code Before: from django import template register = template.Library() @register.filter def get_range(value): return range(value) @register.filter def absolute(value): return abs(value) @register.filter def subtract(value, arg): return value - arg @register.filter def multiply(value, arg): return value * arg @register.filter def remove_extension(string): return string.replace('.png', '').replace("'", "").replace('(', '_').replace(')', '_') ## Instruction: Return 0 with the get_range filter if value is invalid instead of raise exception ## Code After: from django import template register = template.Library() @register.filter def get_range(value): if value: return range(value) else: return 0 @register.filter def absolute(value): return abs(value) @register.filter def subtract(value, arg): return value - arg @register.filter def multiply(value, arg): return value * arg @register.filter def remove_extension(string): return string.replace('.png', '').replace("'", "").replace('(', '_').replace(')', '_')
from django import template register = template.Library() @register.filter def get_range(value): + if value: - return range(value) + return range(value) ? ++++ + else: + return 0 @register.filter def absolute(value): return abs(value) @register.filter def subtract(value, arg): return value - arg @register.filter def multiply(value, arg): return value * arg @register.filter def remove_extension(string): return string.replace('.png', '').replace("'", "").replace('(', '_').replace(')', '_')
74db127246b7111a35c64079eec91d46f88ebd55
src/test/stresstest.py
src/test/stresstest.py
import unittest

if __name__ == '__main__':
    testLoader = unittest.defaultTestLoader
    module = __import__('__main__')
    test = testLoader.loadTestsFromModule(module)
    testRunner = unittest.TextTestRunner(verbosity=2)
    for i in xrange(100):
        result = testRunner.run(test)
import unittest

from test_openwire_async import *
from test_openwire_sync import *
from test_stomp_async import *
from test_stomp_sync import *
from test_types import *

if __name__ == '__main__':
    testLoader = unittest.defaultTestLoader
    module = __import__('__main__')
    test = testLoader.loadTestsFromModule(module)
    testRunner = unittest.TextTestRunner(verbosity=2)
    for i in xrange(100):
        result = testRunner.run(test)
Test everything with stress test.
Test everything with stress test.
Python
apache-2.0
tabish121/pyActiveMQ,tabish121/pyActiveMQ,tabish121/pyActiveMQ
import unittest + + from test_openwire_async import * + from test_openwire_sync import * + from test_stomp_async import * + from test_stomp_sync import * + from test_types import * if __name__ == '__main__': testLoader = unittest.defaultTestLoader module = __import__('__main__') test = testLoader.loadTestsFromModule(module) testRunner = unittest.TextTestRunner(verbosity=2) for i in xrange(100): result = testRunner.run(test)
Test everything with stress test.
## Code Before: import unittest if __name__ == '__main__': testLoader = unittest.defaultTestLoader module = __import__('__main__') test = testLoader.loadTestsFromModule(module) testRunner = unittest.TextTestRunner(verbosity=2) for i in xrange(100): result = testRunner.run(test) ## Instruction: Test everything with stress test. ## Code After: import unittest from test_openwire_async import * from test_openwire_sync import * from test_stomp_async import * from test_stomp_sync import * from test_types import * if __name__ == '__main__': testLoader = unittest.defaultTestLoader module = __import__('__main__') test = testLoader.loadTestsFromModule(module) testRunner = unittest.TextTestRunner(verbosity=2) for i in xrange(100): result = testRunner.run(test)
import unittest + + from test_openwire_async import * + from test_openwire_sync import * + from test_stomp_async import * + from test_stomp_sync import * + from test_types import * if __name__ == '__main__': testLoader = unittest.defaultTestLoader module = __import__('__main__') test = testLoader.loadTestsFromModule(module) testRunner = unittest.TextTestRunner(verbosity=2) for i in xrange(100): result = testRunner.run(test)
ab73b2132825e9415ff24306a9d89da10294d79e
icekit/utils/management/base.py
icekit/utils/management/base.py
import time

from django import db
from django.core.management.base import BaseCommand
from optparse import make_option


class CronBaseCommand(BaseCommand):
    help = ('Long running process (indefinitely) that executes task on a '
            'specified interval (default is 1 min). The intent for the '
            'management command is to be used with `django-supervisor` or '
            'similar.')
    option_list = BaseCommand.option_list + (
        make_option(
            '-i', '--interval',
            dest='interval',
            type='int',
            help='Number of minutes to wait before executing task.',
            default=1
        ),
    )

    def handle(self, *args, **options):
        while True:
            self.task(*args, **options)
            self.cleanup()
            self.stdout.write('Sleeping for %s min.' % options['interval'])
            time.sleep(60 * options['interval'])

    def cleanup(self):
        """
        Performs clean-up after task is completed before it is executed
        again in the next internal.
        """
        # Closes connections to all databases to avoid the long running process
        # from holding connections indefinitely.
        for alias in db.connections.databases:
            self.stdout.write('Closing database connection: %s' % alias)
            db.connections[alias].close()

    def task(self, *args, **options):
        """
        The actual logic of the task to execute. Subclasses must implement
        this method.
        """
        raise NotImplementedError(
            'subclasses of CronBaseCommand must provide a task() method')
import logging
import time

from django import db
from django.core.management.base import BaseCommand
from optparse import make_option

logger = logging.getLogger(__name__)


class CronBaseCommand(BaseCommand):
    help = ('Long running process (indefinitely) that executes task on a '
            'specified interval (default is 1 min). The intent for the '
            'management command is to be used with `django-supervisor` or '
            'similar.')
    option_list = BaseCommand.option_list + (
        make_option(
            '-i', '--interval',
            dest='interval',
            type='int',
            help='Number of minutes to wait before executing task.',
            default=1
        ),
    )

    def handle(self, *args, **options):
        while True:
            self.task(*args, **options)
            self.cleanup()
            logger.info('Sleeping for %s min.', options['interval'])
            time.sleep(60 * options['interval'])

    def cleanup(self):
        """
        Performs clean-up after task is completed before it is executed
        again in the next internal.
        """
        # Closes connections to all databases to avoid the long running process
        # from holding connections indefinitely.
        for alias in db.connections.databases:
            logger.info('Closing database connection: %s', alias)
            db.connections[alias].close()

    def task(self, *args, **options):
        """
        The actual logic of the task to execute. Subclasses must implement
        this method.
        """
        raise NotImplementedError(
            'subclasses of CronBaseCommand must provide a task() method')
Use `logging` instead of printing to stdout by default.
Use `logging` instead of printing to stdout by default.
Python
mit
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
+ import logging import time from django import db from django.core.management.base import BaseCommand from optparse import make_option + + logger = logging.getLogger(__name__) class CronBaseCommand(BaseCommand): help = ('Long running process (indefinitely) that executes task on a ' 'specified interval (default is 1 min). The intent for the ' 'management command is to be used with `django-supervisor` or ' 'similar.') option_list = BaseCommand.option_list + ( make_option( '-i', '--interval', dest='interval', type='int', help='Number of minutes to wait before executing task.', default=1 ), ) def handle(self, *args, **options): while True: self.task(*args, **options) self.cleanup() - self.stdout.write('Sleeping for %s min.' % options['interval']) + logger.info('Sleeping for %s min.', options['interval']) time.sleep(60 * options['interval']) def cleanup(self): """ Performs clean-up after task is completed before it is executed again in the next internal. """ # Closes connections to all databases to avoid the long running process # from holding connections indefinitely. for alias in db.connections.databases: - self.stdout.write('Closing database connection: %s' % alias) + logger.info('Closing database connection: %s', alias) db.connections[alias].close() def task(self, *args, **options): """ The actual logic of the task to execute. Subclasses must implement this method. """ raise NotImplementedError( 'subclasses of CronBaseCommand must provide a task() method')
Use `logging` instead of printing to stdout by default.
## Code Before: import time from django import db from django.core.management.base import BaseCommand from optparse import make_option class CronBaseCommand(BaseCommand): help = ('Long running process (indefinitely) that executes task on a ' 'specified interval (default is 1 min). The intent for the ' 'management command is to be used with `django-supervisor` or ' 'similar.') option_list = BaseCommand.option_list + ( make_option( '-i', '--interval', dest='interval', type='int', help='Number of minutes to wait before executing task.', default=1 ), ) def handle(self, *args, **options): while True: self.task(*args, **options) self.cleanup() self.stdout.write('Sleeping for %s min.' % options['interval']) time.sleep(60 * options['interval']) def cleanup(self): """ Performs clean-up after task is completed before it is executed again in the next internal. """ # Closes connections to all databases to avoid the long running process # from holding connections indefinitely. for alias in db.connections.databases: self.stdout.write('Closing database connection: %s' % alias) db.connections[alias].close() def task(self, *args, **options): """ The actual logic of the task to execute. Subclasses must implement this method. """ raise NotImplementedError( 'subclasses of CronBaseCommand must provide a task() method') ## Instruction: Use `logging` instead of printing to stdout by default. ## Code After: import logging import time from django import db from django.core.management.base import BaseCommand from optparse import make_option logger = logging.getLogger(__name__) class CronBaseCommand(BaseCommand): help = ('Long running process (indefinitely) that executes task on a ' 'specified interval (default is 1 min). The intent for the ' 'management command is to be used with `django-supervisor` or ' 'similar.') option_list = BaseCommand.option_list + ( make_option( '-i', '--interval', dest='interval', type='int', help='Number of minutes to wait before executing task.', default=1 ), ) def handle(self, *args, **options): while True: self.task(*args, **options) self.cleanup() logger.info('Sleeping for %s min.', options['interval']) time.sleep(60 * options['interval']) def cleanup(self): """ Performs clean-up after task is completed before it is executed again in the next internal. """ # Closes connections to all databases to avoid the long running process # from holding connections indefinitely. for alias in db.connections.databases: logger.info('Closing database connection: %s', alias) db.connections[alias].close() def task(self, *args, **options): """ The actual logic of the task to execute. Subclasses must implement this method. """ raise NotImplementedError( 'subclasses of CronBaseCommand must provide a task() method')
+ import logging import time from django import db from django.core.management.base import BaseCommand from optparse import make_option + + logger = logging.getLogger(__name__) class CronBaseCommand(BaseCommand): help = ('Long running process (indefinitely) that executes task on a ' 'specified interval (default is 1 min). The intent for the ' 'management command is to be used with `django-supervisor` or ' 'similar.') option_list = BaseCommand.option_list + ( make_option( '-i', '--interval', dest='interval', type='int', help='Number of minutes to wait before executing task.', default=1 ), ) def handle(self, *args, **options): while True: self.task(*args, **options) self.cleanup() - self.stdout.write('Sleeping for %s min.' % options['interval']) ? ^ ^ ---- -------- ^^ + logger.info('Sleeping for %s min.', options['interval']) ? ^^^^ ^^^^ ^ time.sleep(60 * options['interval']) def cleanup(self): """ Performs clean-up after task is completed before it is executed again in the next internal. """ # Closes connections to all databases to avoid the long running process # from holding connections indefinitely. for alias in db.connections.databases: - self.stdout.write('Closing database connection: %s' % alias) ? ^ ^ ---- -------- ^^ + logger.info('Closing database connection: %s', alias) ? ^^^^ ^^^^ ^ db.connections[alias].close() def task(self, *args, **options): """ The actual logic of the task to execute. Subclasses must implement this method. """ raise NotImplementedError( 'subclasses of CronBaseCommand must provide a task() method')
626c74d727140646d6123e2d86a828401d87abe0
spam.py
spam.py
from sklearn.cross_validation import train_test_split

from spam.common import DATASET_META
from spam.common.utils import get_file_path_list
from spam.preprocess import Preprocess


file_path_list = get_file_path_list(DATASET_META)

# transform list of tuple into two list
# e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam']
path, classification = zip(*file_path_list)

# split the data into unlabeled labeled
unlabeled_path, labeled_path, \
    unlabeled_class, labeled_class = train_test_split(
        path,
        classification,
        test_size=0.1,
        random_state=0,
    )

# Preprocess
preprocess = Preprocess()
import pandas as pd
from sklearn.cross_validation import train_test_split

from spam.common import DATASET_META
from spam.common.utils import get_file_path_list
from spam.preprocess import preprocess


file_path_list = get_file_path_list(DATASET_META)

# transform list of tuple into two list
# e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam']
path, classification = zip(*file_path_list)

# split the data into unlabeled and labeled data
unlabeled_path, labeled_path, \
    _, labeled_class = train_test_split(
        path,
        classification,
        test_size=0.1,
        random_state=0,
    )

# split data into train and test data
train_path, test_path, \
    train_class, test_class = train_test_split(
        labeled_path,
        labeled_class,
        test_size=0.2,
        random_state=0,
    )

unlabeled_data = pd.DataFrame(
    data=[preprocess.read_email(path) for path in unlabeled_path],
    columns=['email'],
)

train_data = pd.DataFrame(
    data={
        'email': [preprocess.read_email(path) for path in train_path],
        'class': [1 if cl == 'spam' else 0 for cl in train_class]
    },
    columns=['email', 'class'],
)

test_data = pd.DataFrame(
    data={
        'email': [preprocess.read_email(path) for path in test_path],
        'class': [1 if cl == 'spam' else 0 for cl in test_class]
    },
    columns=['email', 'class', 'class2'],
)

unlabeled_data.to_csv('unlabel_data.csv')
train_data.to_csv('train_data.csv')
test_data.to_csv('test_data.csv')
Add pandas to generate csv.
Add pandas to generate csv.
Python
mit
benigls/spam,benigls/spam
+ import pandas as pd from sklearn.cross_validation import train_test_split from spam.common import DATASET_META from spam.common.utils import get_file_path_list - from spam.preprocess import Preprocess + from spam.preprocess import preprocess file_path_list = get_file_path_list(DATASET_META) # transform list of tuple into two list # e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam'] path, classification = zip(*file_path_list) - # split the data into unlabeled labeled + # split the data into unlabeled and labeled data unlabeled_path, labeled_path, \ - unlabeled_class, labeled_class = train_test_split( + _, labeled_class = train_test_split( path, classification, test_size=0.1, random_state=0, ) - # Preprocess - preprocess = Preprocess() + # split data into train and test data + train_path, test_path, \ + train_class, test_class = train_test_split( + labeled_path, + labeled_class, + test_size=0.2, + random_state=0, + ) + unlabeled_data = pd.DataFrame( + data=[preprocess.read_email(path) for path in unlabeled_path], + columns=['email'], + ) + + train_data = pd.DataFrame( + data={ + 'email': [preprocess.read_email(path) for path in train_path], + 'class': [1 if cl == 'spam' else 0 for cl in train_class] + }, + columns=['email', 'class'], + ) + + test_data = pd.DataFrame( + data={ + 'email': [preprocess.read_email(path) for path in test_path], + 'class': [1 if cl == 'spam' else 0 for cl in test_class] + }, + columns=['email', 'class', 'class2'], + ) + + unlabeled_data.to_csv('unlabel_data.csv') + train_data.to_csv('train_data.csv') + test_data.to_csv('test_data.csv') +
Add pandas to generate csv.
## Code Before: from sklearn.cross_validation import train_test_split from spam.common import DATASET_META from spam.common.utils import get_file_path_list from spam.preprocess import Preprocess file_path_list = get_file_path_list(DATASET_META) # transform list of tuple into two list # e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam'] path, classification = zip(*file_path_list) # split the data into unlabeled labeled unlabeled_path, labeled_path, \ unlabeled_class, labeled_class = train_test_split( path, classification, test_size=0.1, random_state=0, ) # Preprocess preprocess = Preprocess() ## Instruction: Add pandas to generate csv. ## Code After: import pandas as pd from sklearn.cross_validation import train_test_split from spam.common import DATASET_META from spam.common.utils import get_file_path_list from spam.preprocess import preprocess file_path_list = get_file_path_list(DATASET_META) # transform list of tuple into two list # e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam'] path, classification = zip(*file_path_list) # split the data into unlabeled and labeled data unlabeled_path, labeled_path, \ _, labeled_class = train_test_split( path, classification, test_size=0.1, random_state=0, ) # split data into train and test data train_path, test_path, \ train_class, test_class = train_test_split( labeled_path, labeled_class, test_size=0.2, random_state=0, ) unlabeled_data = pd.DataFrame( data=[preprocess.read_email(path) for path in unlabeled_path], columns=['email'], ) train_data = pd.DataFrame( data={ 'email': [preprocess.read_email(path) for path in train_path], 'class': [1 if cl == 'spam' else 0 for cl in train_class] }, columns=['email', 'class'], ) test_data = pd.DataFrame( data={ 'email': [preprocess.read_email(path) for path in test_path], 'class': [1 if cl == 'spam' else 0 for cl in test_class] }, columns=['email', 'class', 'class2'], ) unlabeled_data.to_csv('unlabel_data.csv') train_data.to_csv('train_data.csv') test_data.to_csv('test_data.csv')
+ import pandas as pd from sklearn.cross_validation import train_test_split from spam.common import DATASET_META from spam.common.utils import get_file_path_list - from spam.preprocess import Preprocess ? ^ + from spam.preprocess import preprocess ? ^ file_path_list = get_file_path_list(DATASET_META) # transform list of tuple into two list # e.g. [('/path/to/file', 'spam')] ==> ['path/to/file'], ['spam'] path, classification = zip(*file_path_list) - # split the data into unlabeled labeled + # split the data into unlabeled and labeled data ? ++++ +++++ unlabeled_path, labeled_path, \ - unlabeled_class, labeled_class = train_test_split( ? --------- ----- + _, labeled_class = train_test_split( path, classification, test_size=0.1, random_state=0, ) - # Preprocess - preprocess = Preprocess() + # split data into train and test data + train_path, test_path, \ + train_class, test_class = train_test_split( + labeled_path, + labeled_class, + test_size=0.2, + random_state=0, + ) + + unlabeled_data = pd.DataFrame( + data=[preprocess.read_email(path) for path in unlabeled_path], + columns=['email'], + ) + + train_data = pd.DataFrame( + data={ + 'email': [preprocess.read_email(path) for path in train_path], + 'class': [1 if cl == 'spam' else 0 for cl in train_class] + }, + columns=['email', 'class'], + ) + + test_data = pd.DataFrame( + data={ + 'email': [preprocess.read_email(path) for path in test_path], + 'class': [1 if cl == 'spam' else 0 for cl in test_class] + }, + columns=['email', 'class', 'class2'], + ) + + unlabeled_data.to_csv('unlabel_data.csv') + train_data.to_csv('train_data.csv') + test_data.to_csv('test_data.csv')
4d5af4869871b45839952dd9f881635bd07595c1
parsers/RPOnline.py
parsers/RPOnline.py
from baseparser import BaseParser
from BeautifulSoup import BeautifulSoup, Tag


class RPOParser(BaseParser):
    domains = ['www.rp-online.de']

    feeder_pat = '1\.\d*$'
    feeder_pages = ['http://www.rp-online.de/']

    def _parse(self, html):
        soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES,
                             fromEncoding='utf-8')

        self.meta = soup.findAll('meta')

        # category
        keywords = soup.find('meta', {'property': 'vr:category'})
        self.category = self.compute_category(keywords['content'] if keywords else '')

        #article headline
        elt = soup.find('meta', {'property': 'og:title'})
        if elt is None:
            self.real_article = False
            return
        self.title = elt['content']

        # byline / author
        author = soup.find('meta', {'itemprop': 'author'})
        self.byline = author['content'] if author else ''

        # article date
        created_at = soup.find('meta', {'property': 'vr:published_time'})
        self.date = created_at['content'] if created_at else ''

        #article content
        div = soup.find('div', {'class': 'main-text '})
        if div is None:
            self.real_article = False
            return
        div = self.remove_non_content(div)
        self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator()
                                        if isinstance(x, Tag) and x.name == 'p'])
from baseparser import BaseParser
from BeautifulSoup import BeautifulSoup, Tag


class RPOParser(BaseParser):
    domains = ['www.rp-online.de']

    feeder_pat = '(?<!(vid|bid|iid))(-1\.\d*)$'
    feeder_pages = ['http://www.rp-online.de/']

    def _parse(self, html):
        soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES,
                             fromEncoding='utf-8')

        self.meta = soup.findAll('meta')

        # category
        keywords = soup.find('meta', {'property': 'vr:category'})
        self.category = self.compute_category(keywords['content'] if keywords else '')

        #article headline
        elt = soup.find('meta', {'property': 'og:title'})
        if elt is None:
            self.real_article = False
            return
        self.title = elt['content']

        # byline / author
        author = soup.find('meta', {'itemprop': 'author'})
        self.byline = author['content'] if author else ''

        # article date
        created_at = soup.find('meta', {'property': 'vr:published_time'})
        self.date = created_at['content'] if created_at else ''

        #article content
        div = soup.find('div', {'class': 'main-text '})
        if div is None:
            self.real_article = False
            return
        div = self.remove_non_content(div)
        self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator()
                                        if isinstance(x, Tag) and x.name == 'p'])
Exclude Videos article and non-scrapable info-galleries and picture-galleries via URL-pattern
Exclude Videos article and non-scrapable info-galleries and picture-galleries via URL-pattern
Python
mit
catcosmo/newsdiffs,catcosmo/newsdiffs,catcosmo/newsdiffs
from baseparser import BaseParser from BeautifulSoup import BeautifulSoup, Tag class RPOParser(BaseParser): domains = ['www.rp-online.de'] - feeder_pat = '1\.\d*$' + feeder_pat = '(?<!(vid|bid|iid))(-1\.\d*)$' feeder_pages = ['http://www.rp-online.de/'] def _parse(self, html): soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES, fromEncoding='utf-8') self.meta = soup.findAll('meta') # category keywords = soup.find('meta', {'property': 'vr:category'}) self.category = self.compute_category(keywords['content'] if keywords else '') #article headline elt = soup.find('meta', {'property': 'og:title'}) if elt is None: self.real_article = False return self.title = elt['content'] # byline / author author = soup.find('meta', {'itemprop': 'author'}) self.byline = author['content'] if author else '' # article date created_at = soup.find('meta', {'property': 'vr:published_time'}) self.date = created_at['content'] if created_at else '' #article content div = soup.find('div', {'class': 'main-text '}) if div is None: self.real_article = False return div = self.remove_non_content(div) self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator() if isinstance(x, Tag) and x.name == 'p'])
Exclude video articles and non-scrapable info-galleries and picture-galleries via URL pattern
## Code Before: from baseparser import BaseParser from BeautifulSoup import BeautifulSoup, Tag class RPOParser(BaseParser): domains = ['www.rp-online.de'] feeder_pat = '1\.\d*$' feeder_pages = ['http://www.rp-online.de/'] def _parse(self, html): soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES, fromEncoding='utf-8') self.meta = soup.findAll('meta') # category keywords = soup.find('meta', {'property': 'vr:category'}) self.category = self.compute_category(keywords['content'] if keywords else '') #article headline elt = soup.find('meta', {'property': 'og:title'}) if elt is None: self.real_article = False return self.title = elt['content'] # byline / author author = soup.find('meta', {'itemprop': 'author'}) self.byline = author['content'] if author else '' # article date created_at = soup.find('meta', {'property': 'vr:published_time'}) self.date = created_at['content'] if created_at else '' #article content div = soup.find('div', {'class': 'main-text '}) if div is None: self.real_article = False return div = self.remove_non_content(div) self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator() if isinstance(x, Tag) and x.name == 'p']) ## Instruction: Exclude Videos article and non-scrapable info-galleries and picture-galleries via URL-pattern ## Code After: from baseparser import BaseParser from BeautifulSoup import BeautifulSoup, Tag class RPOParser(BaseParser): domains = ['www.rp-online.de'] feeder_pat = '(?<!(vid|bid|iid))(-1\.\d*)$' feeder_pages = ['http://www.rp-online.de/'] def _parse(self, html): soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES, fromEncoding='utf-8') self.meta = soup.findAll('meta') # category keywords = soup.find('meta', {'property': 'vr:category'}) self.category = self.compute_category(keywords['content'] if keywords else '') #article headline elt = soup.find('meta', {'property': 'og:title'}) if elt is None: self.real_article = False return self.title = elt['content'] # byline / author author = soup.find('meta', {'itemprop': 'author'}) self.byline = author['content'] if author else '' # article date created_at = soup.find('meta', {'property': 'vr:published_time'}) self.date = created_at['content'] if created_at else '' #article content div = soup.find('div', {'class': 'main-text '}) if div is None: self.real_article = False return div = self.remove_non_content(div) self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator() if isinstance(x, Tag) and x.name == 'p'])
from baseparser import BaseParser from BeautifulSoup import BeautifulSoup, Tag class RPOParser(BaseParser): domains = ['www.rp-online.de'] - feeder_pat = '1\.\d*$' + feeder_pat = '(?<!(vid|bid|iid))(-1\.\d*)$' feeder_pages = ['http://www.rp-online.de/'] def _parse(self, html): soup = BeautifulSoup(html, convertEntities=BeautifulSoup.HTML_ENTITIES, fromEncoding='utf-8') self.meta = soup.findAll('meta') # category keywords = soup.find('meta', {'property': 'vr:category'}) self.category = self.compute_category(keywords['content'] if keywords else '') #article headline elt = soup.find('meta', {'property': 'og:title'}) if elt is None: self.real_article = False return self.title = elt['content'] # byline / author author = soup.find('meta', {'itemprop': 'author'}) self.byline = author['content'] if author else '' # article date created_at = soup.find('meta', {'property': 'vr:published_time'}) self.date = created_at['content'] if created_at else '' #article content div = soup.find('div', {'class': 'main-text '}) if div is None: self.real_article = False return div = self.remove_non_content(div) self.body = '\n' + '\n\n'.join([x.getText() for x in div.childGenerator() if isinstance(x, Tag) and x.name == 'p'])
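The revised feeder_pat above works through a fixed-width negative lookbehind: a candidate URL is accepted only if the trailing "-1.<digits>" id is not preceded by the vid/bid/iid markers used for video and gallery pages. A quick way to sanity-check that behaviour, assuming the pattern is applied with a plain re.search over candidate links (the URLs below are made-up examples, not real rp-online.de paths):

import re

feeder_pat = r'(?<!(vid|bid|iid))(-1\.\d*)$'

samples = [
    'http://www.rp-online.de/politik/some-article-1.6789012',        # plain article
    'http://www.rp-online.de/panorama/some-fotostrecke-bid-1.1234',  # picture gallery
    'http://www.rp-online.de/sport/some-clip-vid-1.5678',            # video page
]
for url in samples:
    # only the plain article URL should survive the lookbehind
    print(url, '->', bool(re.search(feeder_pat, url)))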
d36e624acb349b3fd78bb3fb91ba0bcc696719c2
imagekit/utils.py
imagekit/utils.py
import tempfile import types from django.utils.functional import wraps from imagekit.lib import Image def img_to_fobj(img, format, **kwargs): tmp = tempfile.TemporaryFile() # Preserve transparency if the image is in Pallette (P) mode. if img.mode == 'P': kwargs['transparency'] = len(img.split()[-1].getcolors()) else: img.convert('RGB') img.save(tmp, format, **kwargs) tmp.seek(0) return tmp def get_spec_files(instance): try: ik = getattr(instance, '_ik') except AttributeError: return [] else: return [getattr(instance, n) for n in ik.spec_file_names] def open_image(target): img = Image.open(target) img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__) return img def _wrap_copy(f): @wraps(f) def copy(self): img = f() try: img.app = self.app except AttributeError: pass try: img._getexif = self._getexif except AttributeError: pass return img return copy
import tempfile import types from django.utils.functional import wraps from imagekit.lib import Image def img_to_fobj(img, format, **kwargs): tmp = tempfile.TemporaryFile() # Preserve transparency if the image is in Pallette (P) mode. transparency_formats = ('PNG', 'GIF', ) if img.mode == 'P' and format in transparency_formats: kwargs['transparency'] = len(img.split()[-1].getcolors()) else: img = img.convert('RGB') img.save(tmp, format, **kwargs) tmp.seek(0) return tmp def get_spec_files(instance): try: ik = getattr(instance, '_ik') except AttributeError: return [] else: return [getattr(instance, n) for n in ik.spec_file_names] def open_image(target): img = Image.open(target) img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__) return img def _wrap_copy(f): @wraps(f) def copy(self): img = f() try: img.app = self.app except AttributeError: pass try: img._getexif = self._getexif except AttributeError: pass return img return copy
Fix conversion of PNG "palette" or "P" mode images to JPEG. "P" mode images need to be converted to 'RGB' if target image format is not PNG or GIF.
Fix conversion of PNG "palette" or "P" mode images to JPEG. "P" mode images need to be converted to 'RGB' if target image format is not PNG or GIF.
Python
bsd-3-clause
pcompassion/django-imagekit,tawanda/django-imagekit,tawanda/django-imagekit,FundedByMe/django-imagekit,pcompassion/django-imagekit,pcompassion/django-imagekit,FundedByMe/django-imagekit
import tempfile import types from django.utils.functional import wraps from imagekit.lib import Image def img_to_fobj(img, format, **kwargs): tmp = tempfile.TemporaryFile() # Preserve transparency if the image is in Pallette (P) mode. - if img.mode == 'P': + transparency_formats = ('PNG', 'GIF', ) + if img.mode == 'P' and format in transparency_formats: kwargs['transparency'] = len(img.split()[-1].getcolors()) else: - img.convert('RGB') + img = img.convert('RGB') img.save(tmp, format, **kwargs) tmp.seek(0) return tmp def get_spec_files(instance): try: ik = getattr(instance, '_ik') except AttributeError: return [] else: return [getattr(instance, n) for n in ik.spec_file_names] def open_image(target): img = Image.open(target) img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__) return img def _wrap_copy(f): @wraps(f) def copy(self): img = f() try: img.app = self.app except AttributeError: pass try: img._getexif = self._getexif except AttributeError: pass return img return copy
Fix conversion of PNG "palette" or "P" mode images to JPEG. "P" mode images need to be converted to 'RGB' if target image format is not PNG or GIF.
## Code Before: import tempfile import types from django.utils.functional import wraps from imagekit.lib import Image def img_to_fobj(img, format, **kwargs): tmp = tempfile.TemporaryFile() # Preserve transparency if the image is in Pallette (P) mode. if img.mode == 'P': kwargs['transparency'] = len(img.split()[-1].getcolors()) else: img.convert('RGB') img.save(tmp, format, **kwargs) tmp.seek(0) return tmp def get_spec_files(instance): try: ik = getattr(instance, '_ik') except AttributeError: return [] else: return [getattr(instance, n) for n in ik.spec_file_names] def open_image(target): img = Image.open(target) img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__) return img def _wrap_copy(f): @wraps(f) def copy(self): img = f() try: img.app = self.app except AttributeError: pass try: img._getexif = self._getexif except AttributeError: pass return img return copy ## Instruction: Fix conversion of PNG "palette" or "P" mode images to JPEG. "P" mode images need to be converted to 'RGB' if target image format is not PNG or GIF. ## Code After: import tempfile import types from django.utils.functional import wraps from imagekit.lib import Image def img_to_fobj(img, format, **kwargs): tmp = tempfile.TemporaryFile() # Preserve transparency if the image is in Pallette (P) mode. transparency_formats = ('PNG', 'GIF', ) if img.mode == 'P' and format in transparency_formats: kwargs['transparency'] = len(img.split()[-1].getcolors()) else: img = img.convert('RGB') img.save(tmp, format, **kwargs) tmp.seek(0) return tmp def get_spec_files(instance): try: ik = getattr(instance, '_ik') except AttributeError: return [] else: return [getattr(instance, n) for n in ik.spec_file_names] def open_image(target): img = Image.open(target) img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__) return img def _wrap_copy(f): @wraps(f) def copy(self): img = f() try: img.app = self.app except AttributeError: pass try: img._getexif = self._getexif except AttributeError: pass return img return copy
import tempfile import types from django.utils.functional import wraps from imagekit.lib import Image def img_to_fobj(img, format, **kwargs): tmp = tempfile.TemporaryFile() # Preserve transparency if the image is in Pallette (P) mode. - if img.mode == 'P': + transparency_formats = ('PNG', 'GIF', ) + if img.mode == 'P' and format in transparency_formats: kwargs['transparency'] = len(img.split()[-1].getcolors()) else: - img.convert('RGB') + img = img.convert('RGB') ? ++++++ img.save(tmp, format, **kwargs) tmp.seek(0) return tmp def get_spec_files(instance): try: ik = getattr(instance, '_ik') except AttributeError: return [] else: return [getattr(instance, n) for n in ik.spec_file_names] def open_image(target): img = Image.open(target) img.copy = types.MethodType(_wrap_copy(img.copy), img, img.__class__) return img def _wrap_copy(f): @wraps(f) def copy(self): img = f() try: img.app = self.app except AttributeError: pass try: img._getexif = self._getexif except AttributeError: pass return img return copy
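The fix in this record rests on two Pillow/PIL details: palette ('P') images can only keep a transparency index in formats such as PNG or GIF, and Image.convert() returns a new image rather than modifying the original (which is why the bare img.convert('RGB') call in the old code had no effect). A minimal sketch of the same idea outside ImageKit, with placeholder file names:

from PIL import Image

img = Image.open('palette_input.png')    # assume a 'P' (palette-mode) source image

if img.mode == 'P':
    # JPEG supports neither palettes nor transparency, so convert first;
    # convert() returns a copy, it does not change img in place.
    rgb = img.convert('RGB')
else:
    rgb = img
rgb.save('output.jpg', 'JPEG')

# PNG can be written straight from 'P' mode, optionally with transparency.
img.save('output.png', 'PNG')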
4494f4835245990ed5380cbf9800eef5d74986e6
utils.py
utils.py
import argparse import sys def parse_basic_args(args=sys.argv[1:]): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( '--input', '-i', metavar='FILE', default=sys.stdin, type=argparse.FileType('r'), help='the file to process (default: stdin)', ) parser.add_argument( '--output', '-o', metavar='FILE', default=sys.stdout, type=argparse.FileType('w'), help='the file to write to (default: stdout)', ) return parser.parse_args()
import argparse import sys def parse_basic_args(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( '--input', '-i', metavar='FILE', default=sys.stdin, type=argparse.FileType('r'), help='the file to process (default: stdin)', ) parser.add_argument( '--output', '-o', metavar='FILE', default=sys.stdout, type=argparse.FileType('w'), help='the file to write to (default: stdout)', ) return parser.parse_args()
Remove args parameter from parse_basic_args
Remove args parameter from parse_basic_args This is already handled by argparse.
Python
mit
cdown/srt
import argparse import sys - def parse_basic_args(args=sys.argv[1:]): + def parse_basic_args(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( '--input', '-i', metavar='FILE', default=sys.stdin, type=argparse.FileType('r'), help='the file to process (default: stdin)', ) parser.add_argument( '--output', '-o', metavar='FILE', default=sys.stdout, type=argparse.FileType('w'), help='the file to write to (default: stdout)', ) return parser.parse_args()
Remove args parameter from parse_basic_args
## Code Before: import argparse import sys def parse_basic_args(args=sys.argv[1:]): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( '--input', '-i', metavar='FILE', default=sys.stdin, type=argparse.FileType('r'), help='the file to process (default: stdin)', ) parser.add_argument( '--output', '-o', metavar='FILE', default=sys.stdout, type=argparse.FileType('w'), help='the file to write to (default: stdout)', ) return parser.parse_args() ## Instruction: Remove args parameter from parse_basic_args ## Code After: import argparse import sys def parse_basic_args(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( '--input', '-i', metavar='FILE', default=sys.stdin, type=argparse.FileType('r'), help='the file to process (default: stdin)', ) parser.add_argument( '--output', '-o', metavar='FILE', default=sys.stdout, type=argparse.FileType('w'), help='the file to write to (default: stdout)', ) return parser.parse_args()
import argparse import sys - def parse_basic_args(args=sys.argv[1:]): + def parse_basic_args(): parser = argparse.ArgumentParser(description=__doc__) parser.add_argument( '--input', '-i', metavar='FILE', default=sys.stdin, type=argparse.FileType('r'), help='the file to process (default: stdin)', ) parser.add_argument( '--output', '-o', metavar='FILE', default=sys.stdout, type=argparse.FileType('w'), help='the file to write to (default: stdout)', ) return parser.parse_args()
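Background for the change: argparse's parse_args() already falls back to sys.argv[1:] when called without arguments, so passing that default in explicitly added nothing. A hypothetical caller of the helper (script and module names are assumptions for illustration) stays just as short after the change:

# cat_upper.py -- toy filter built on the shared parser in utils.py
from utils import parse_basic_args

def main():
    args = parse_basic_args()       # argparse reads sys.argv[1:] on its own
    for line in args.input:         # file object from --input, defaults to stdin
        args.output.write(line.upper())

if __name__ == '__main__':
    main()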
c730184a6ec826f9773fa4130e59121c0fd06e4d
api_v3/misc/oauth2.py
api_v3/misc/oauth2.py
from urlparse import urljoin from django.conf import settings import jwt from social_core.backends.oauth import BaseOAuth2 class KeycloakOAuth2(BaseOAuth2): """Keycloak OAuth authentication backend""" name = 'keycloak' ID_KEY = 'email' BASE_URL = settings.SOCIAL_AUTH_KEYCLOAK_BASE USERINFO_URL = urljoin(BASE_URL, 'protocol/openid-connect/userinfo') AUTHORIZATION_URL = urljoin(BASE_URL, 'protocol/openid-connect/auth') ACCESS_TOKEN_URL = urljoin(BASE_URL, 'protocol/openid-connect/token') ACCESS_TOKEN_METHOD = 'POST' def get_user_details(self, response): clients = response.get('resource_access', {}) client = clients.get(settings.SOCIAL_AUTH_KEYCLOAK_KEY, {}) roles = set(client.get('roles', [])) return { 'username': response.get('preferred_username'), 'email': response.get('email'), 'first_name': response.get('given_name'), 'last_name': response.get('family_name'), 'is_staff': 'staff' in roles, 'is_superuser': 'superuser' in roles, } def user_data(self, access_token, *args, **kwargs): return jwt.decode(access_token, verify=False) def activate_user(backend, user, response, *args, **kwargs): user.is_active = True user.save()
from urlparse import urljoin from django.conf import settings import jwt from social_core.backends.oauth import BaseOAuth2 class KeycloakOAuth2(BaseOAuth2): """Keycloak OAuth authentication backend""" name = 'keycloak' ID_KEY = 'email' BASE_URL = settings.SOCIAL_AUTH_KEYCLOAK_BASE USERINFO_URL = urljoin(BASE_URL, 'protocol/openid-connect/userinfo') AUTHORIZATION_URL = urljoin(BASE_URL, 'protocol/openid-connect/auth') ACCESS_TOKEN_URL = urljoin(BASE_URL, 'protocol/openid-connect/token') ACCESS_TOKEN_METHOD = 'POST' def get_user_details(self, response): clients = response.get('resource_access', {}) client = clients.get(settings.SOCIAL_AUTH_KEYCLOAK_KEY, {}) roles = set(client.get('roles', [])) return { 'email': response.get('email'), 'first_name': response.get('given_name'), 'last_name': response.get('family_name'), 'is_staff': 'staff' in roles, 'is_superuser': 'superuser' in roles, } def user_data(self, access_token, *args, **kwargs): return jwt.decode(access_token, verify=False) def activate_user(backend, user, response, *args, **kwargs): user.is_active = True user.save()
Remove `username` from keycloak payload.
Remove `username` from keycloak payload.
Python
mit
occrp/id-backend
from urlparse import urljoin from django.conf import settings import jwt from social_core.backends.oauth import BaseOAuth2 class KeycloakOAuth2(BaseOAuth2): """Keycloak OAuth authentication backend""" name = 'keycloak' ID_KEY = 'email' BASE_URL = settings.SOCIAL_AUTH_KEYCLOAK_BASE USERINFO_URL = urljoin(BASE_URL, 'protocol/openid-connect/userinfo') AUTHORIZATION_URL = urljoin(BASE_URL, 'protocol/openid-connect/auth') ACCESS_TOKEN_URL = urljoin(BASE_URL, 'protocol/openid-connect/token') ACCESS_TOKEN_METHOD = 'POST' def get_user_details(self, response): clients = response.get('resource_access', {}) client = clients.get(settings.SOCIAL_AUTH_KEYCLOAK_KEY, {}) roles = set(client.get('roles', [])) return { - 'username': response.get('preferred_username'), 'email': response.get('email'), 'first_name': response.get('given_name'), 'last_name': response.get('family_name'), 'is_staff': 'staff' in roles, 'is_superuser': 'superuser' in roles, } def user_data(self, access_token, *args, **kwargs): return jwt.decode(access_token, verify=False) def activate_user(backend, user, response, *args, **kwargs): user.is_active = True user.save()
Remove `username` from keycloak payload.
## Code Before: from urlparse import urljoin from django.conf import settings import jwt from social_core.backends.oauth import BaseOAuth2 class KeycloakOAuth2(BaseOAuth2): """Keycloak OAuth authentication backend""" name = 'keycloak' ID_KEY = 'email' BASE_URL = settings.SOCIAL_AUTH_KEYCLOAK_BASE USERINFO_URL = urljoin(BASE_URL, 'protocol/openid-connect/userinfo') AUTHORIZATION_URL = urljoin(BASE_URL, 'protocol/openid-connect/auth') ACCESS_TOKEN_URL = urljoin(BASE_URL, 'protocol/openid-connect/token') ACCESS_TOKEN_METHOD = 'POST' def get_user_details(self, response): clients = response.get('resource_access', {}) client = clients.get(settings.SOCIAL_AUTH_KEYCLOAK_KEY, {}) roles = set(client.get('roles', [])) return { 'username': response.get('preferred_username'), 'email': response.get('email'), 'first_name': response.get('given_name'), 'last_name': response.get('family_name'), 'is_staff': 'staff' in roles, 'is_superuser': 'superuser' in roles, } def user_data(self, access_token, *args, **kwargs): return jwt.decode(access_token, verify=False) def activate_user(backend, user, response, *args, **kwargs): user.is_active = True user.save() ## Instruction: Remove `username` from keycloack payload. ## Code After: from urlparse import urljoin from django.conf import settings import jwt from social_core.backends.oauth import BaseOAuth2 class KeycloakOAuth2(BaseOAuth2): """Keycloak OAuth authentication backend""" name = 'keycloak' ID_KEY = 'email' BASE_URL = settings.SOCIAL_AUTH_KEYCLOAK_BASE USERINFO_URL = urljoin(BASE_URL, 'protocol/openid-connect/userinfo') AUTHORIZATION_URL = urljoin(BASE_URL, 'protocol/openid-connect/auth') ACCESS_TOKEN_URL = urljoin(BASE_URL, 'protocol/openid-connect/token') ACCESS_TOKEN_METHOD = 'POST' def get_user_details(self, response): clients = response.get('resource_access', {}) client = clients.get(settings.SOCIAL_AUTH_KEYCLOAK_KEY, {}) roles = set(client.get('roles', [])) return { 'email': response.get('email'), 'first_name': response.get('given_name'), 'last_name': response.get('family_name'), 'is_staff': 'staff' in roles, 'is_superuser': 'superuser' in roles, } def user_data(self, access_token, *args, **kwargs): return jwt.decode(access_token, verify=False) def activate_user(backend, user, response, *args, **kwargs): user.is_active = True user.save()
from urlparse import urljoin from django.conf import settings import jwt from social_core.backends.oauth import BaseOAuth2 class KeycloakOAuth2(BaseOAuth2): """Keycloak OAuth authentication backend""" name = 'keycloak' ID_KEY = 'email' BASE_URL = settings.SOCIAL_AUTH_KEYCLOAK_BASE USERINFO_URL = urljoin(BASE_URL, 'protocol/openid-connect/userinfo') AUTHORIZATION_URL = urljoin(BASE_URL, 'protocol/openid-connect/auth') ACCESS_TOKEN_URL = urljoin(BASE_URL, 'protocol/openid-connect/token') ACCESS_TOKEN_METHOD = 'POST' def get_user_details(self, response): clients = response.get('resource_access', {}) client = clients.get(settings.SOCIAL_AUTH_KEYCLOAK_KEY, {}) roles = set(client.get('roles', [])) return { - 'username': response.get('preferred_username'), 'email': response.get('email'), 'first_name': response.get('given_name'), 'last_name': response.get('family_name'), 'is_staff': 'staff' in roles, 'is_superuser': 'superuser' in roles, } def user_data(self, access_token, *args, **kwargs): return jwt.decode(access_token, verify=False) def activate_user(backend, user, response, *args, **kwargs): user.is_active = True user.save()
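For context, a backend like KeycloakOAuth2 only takes effect once it is registered in the Django / python-social-auth settings. The sketch below uses placeholder values; the SOCIAL_AUTH_KEYCLOAK_KEY/SECRET names follow the usual social-core convention for a backend named 'keycloak', SOCIAL_AUTH_KEYCLOAK_BASE is the custom setting the class reads, and activate_user() is assumed to be appended as an extra pipeline step:

# settings.py (illustrative values only)
AUTHENTICATION_BACKENDS = (
    'api_v3.misc.oauth2.KeycloakOAuth2',
    'django.contrib.auth.backends.ModelBackend',
)

SOCIAL_AUTH_KEYCLOAK_BASE = 'https://sso.example.org/auth/realms/main/'
SOCIAL_AUTH_KEYCLOAK_KEY = 'my-client-id'     # also used to look up client roles
SOCIAL_AUTH_KEYCLOAK_SECRET = 'change-me'

SOCIAL_AUTH_PIPELINE = (
    'social_core.pipeline.social_auth.social_details',
    'social_core.pipeline.social_auth.social_uid',
    'social_core.pipeline.social_auth.social_user',
    'social_core.pipeline.user.create_user',
    'social_core.pipeline.social_auth.associate_user',
    'social_core.pipeline.user.user_details',
    'api_v3.misc.oauth2.activate_user',       # project-specific activation step
)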
d30355ace2c84cad198fd4bfcc3d6a211275fb76
src/ggrc_basic_permissions/roles/AuditorReader.py
src/ggrc_basic_permissions/roles/AuditorReader.py
scope = "System Implied" description = """ A user with Auditor role for a program audit will also have this role in the default object context so that the auditor will have access to the objects required to perform the audit. """ permissions = { "read": [ "Categorization", "Category", "ControlCategory", "ControlAssertion", "Control", "Assessment", "Issue", "ControlControl", "DataAsset", "AccessGroup", "Directive", "Contract", "Policy", "Regulation", "Standard", "Document", "Facility", "Help", "Market", "Objective", "ObjectDocument", "ObjectPerson", "Option", "OrgGroup", "Vendor", "PopulationSample", "Product", "Project", "Relationship", "Section", "Clause", "SystemOrProcess", "System", "Process", "SystemControl", "SystemSystem", "ObjectOwner", "Person", "Program", "Role", "Context", { "type": "BackgroundTask", "terms": { "property_name": "modified_by", "value": "$current_user" }, "condition": "is" }, ], "create": [], "view_object_page": [], "update": [], "delete": [] }
scope = "System Implied" description = """ A user with Auditor role for a program audit will also have this role in the default object context so that the auditor will have access to the objects required to perform the audit. """ permissions = { "read": [ "Snapshot", "Categorization", "Category", "ControlCategory", "ControlAssertion", "Control", "Assessment", "Issue", "ControlControl", "DataAsset", "AccessGroup", "Directive", "Contract", "Policy", "Regulation", "Standard", "Document", "Facility", "Help", "Market", "Objective", "ObjectDocument", "ObjectPerson", "Option", "OrgGroup", "Vendor", "PopulationSample", "Product", "Project", "Relationship", "Section", "Clause", "SystemOrProcess", "System", "Process", "SystemControl", "SystemSystem", "ObjectOwner", "Person", "Program", "Role", "Context", { "type": "BackgroundTask", "terms": { "property_name": "modified_by", "value": "$current_user" }, "condition": "is" }, ], "create": [], "view_object_page": [], "update": [], "delete": [] }
Add support for reading snapshots for auditor reader
Add support for reading snapshots for auditor reader
Python
apache-2.0
AleksNeStu/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,plamut/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,VinnieJohns/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,AleksNeStu/ggrc-core,plamut/ggrc-core
scope = "System Implied" description = """ A user with Auditor role for a program audit will also have this role in the default object context so that the auditor will have access to the objects required to perform the audit. """ permissions = { "read": [ + "Snapshot", "Categorization", "Category", "ControlCategory", "ControlAssertion", "Control", "Assessment", "Issue", "ControlControl", "DataAsset", "AccessGroup", "Directive", "Contract", "Policy", "Regulation", "Standard", "Document", "Facility", "Help", "Market", "Objective", "ObjectDocument", "ObjectPerson", "Option", "OrgGroup", "Vendor", "PopulationSample", "Product", "Project", "Relationship", "Section", "Clause", "SystemOrProcess", "System", "Process", "SystemControl", "SystemSystem", "ObjectOwner", "Person", "Program", "Role", "Context", { "type": "BackgroundTask", "terms": { "property_name": "modified_by", "value": "$current_user" }, "condition": "is" }, ], "create": [], "view_object_page": [], "update": [], "delete": [] }
Add support for reading snapshots for auditor reader
## Code Before: scope = "System Implied" description = """ A user with Auditor role for a program audit will also have this role in the default object context so that the auditor will have access to the objects required to perform the audit. """ permissions = { "read": [ "Categorization", "Category", "ControlCategory", "ControlAssertion", "Control", "Assessment", "Issue", "ControlControl", "DataAsset", "AccessGroup", "Directive", "Contract", "Policy", "Regulation", "Standard", "Document", "Facility", "Help", "Market", "Objective", "ObjectDocument", "ObjectPerson", "Option", "OrgGroup", "Vendor", "PopulationSample", "Product", "Project", "Relationship", "Section", "Clause", "SystemOrProcess", "System", "Process", "SystemControl", "SystemSystem", "ObjectOwner", "Person", "Program", "Role", "Context", { "type": "BackgroundTask", "terms": { "property_name": "modified_by", "value": "$current_user" }, "condition": "is" }, ], "create": [], "view_object_page": [], "update": [], "delete": [] } ## Instruction: Add support for reading snapshots for auditor reader ## Code After: scope = "System Implied" description = """ A user with Auditor role for a program audit will also have this role in the default object context so that the auditor will have access to the objects required to perform the audit. """ permissions = { "read": [ "Snapshot", "Categorization", "Category", "ControlCategory", "ControlAssertion", "Control", "Assessment", "Issue", "ControlControl", "DataAsset", "AccessGroup", "Directive", "Contract", "Policy", "Regulation", "Standard", "Document", "Facility", "Help", "Market", "Objective", "ObjectDocument", "ObjectPerson", "Option", "OrgGroup", "Vendor", "PopulationSample", "Product", "Project", "Relationship", "Section", "Clause", "SystemOrProcess", "System", "Process", "SystemControl", "SystemSystem", "ObjectOwner", "Person", "Program", "Role", "Context", { "type": "BackgroundTask", "terms": { "property_name": "modified_by", "value": "$current_user" }, "condition": "is" }, ], "create": [], "view_object_page": [], "update": [], "delete": [] }
scope = "System Implied" description = """ A user with Auditor role for a program audit will also have this role in the default object context so that the auditor will have access to the objects required to perform the audit. """ permissions = { "read": [ + "Snapshot", "Categorization", "Category", "ControlCategory", "ControlAssertion", "Control", "Assessment", "Issue", "ControlControl", "DataAsset", "AccessGroup", "Directive", "Contract", "Policy", "Regulation", "Standard", "Document", "Facility", "Help", "Market", "Objective", "ObjectDocument", "ObjectPerson", "Option", "OrgGroup", "Vendor", "PopulationSample", "Product", "Project", "Relationship", "Section", "Clause", "SystemOrProcess", "System", "Process", "SystemControl", "SystemSystem", "ObjectOwner", "Person", "Program", "Role", "Context", { "type": "BackgroundTask", "terms": { "property_name": "modified_by", "value": "$current_user" }, "condition": "is" }, ], "create": [], "view_object_page": [], "update": [], "delete": [] }
97c66e1cbbc6fd691c2fec4f4e72ba22892fa13c
base/components/accounts/backends.py
base/components/accounts/backends.py
from django.contrib.auth import get_user_model from django.contrib.auth.backends import ModelBackend User = get_user_model() class HelloBaseIDBackend(ModelBackend): def authenticate(self, username=None): try: user = User.objects.filter(username=username)[0] except IndexError: return None else: return user def get_user(self, user_id): try: return User.objects.get(pk=user_id) except User.DoesNotExist: return None
from django.contrib.auth import get_user_model from django.contrib.auth.backends import ModelBackend User = get_user_model() class HelloBaseIDBackend(ModelBackend): def authenticate(self, username=None): try: user = User.objects.filter(username=username)[0] except IndexError: return None else: return user def get_user(self, user_id): return User.objects.get(pk=user_id)
Remove the try/except clause from get_user().
Remove the try/except clause from get_user(). It doesn't seem like the code will -ever- hit the except clause as the method that calls this has fallbacks of its own.
Python
apache-2.0
hello-base/web,hello-base/web,hello-base/web,hello-base/web
from django.contrib.auth import get_user_model from django.contrib.auth.backends import ModelBackend User = get_user_model() class HelloBaseIDBackend(ModelBackend): def authenticate(self, username=None): try: user = User.objects.filter(username=username)[0] except IndexError: return None else: return user def get_user(self, user_id): - try: - return User.objects.get(pk=user_id) + return User.objects.get(pk=user_id) - except User.DoesNotExist: - return None
Remove the try/except clause from get_user().
## Code Before: from django.contrib.auth import get_user_model from django.contrib.auth.backends import ModelBackend User = get_user_model() class HelloBaseIDBackend(ModelBackend): def authenticate(self, username=None): try: user = User.objects.filter(username=username)[0] except IndexError: return None else: return user def get_user(self, user_id): try: return User.objects.get(pk=user_id) except User.DoesNotExist: return None ## Instruction: Remove the try/except clause from get_user(). ## Code After: from django.contrib.auth import get_user_model from django.contrib.auth.backends import ModelBackend User = get_user_model() class HelloBaseIDBackend(ModelBackend): def authenticate(self, username=None): try: user = User.objects.filter(username=username)[0] except IndexError: return None else: return user def get_user(self, user_id): return User.objects.get(pk=user_id)
from django.contrib.auth import get_user_model from django.contrib.auth.backends import ModelBackend User = get_user_model() class HelloBaseIDBackend(ModelBackend): def authenticate(self, username=None): try: user = User.objects.filter(username=username)[0] except IndexError: return None else: return user def get_user(self, user_id): - try: - return User.objects.get(pk=user_id) ? ---- + return User.objects.get(pk=user_id) - except User.DoesNotExist: - return None
0f0e0e91db679f18ad9dc7568047b76e447ac589
stock_inventory_chatter/__openerp__.py
stock_inventory_chatter/__openerp__.py
{ 'name': 'Stock Inventory Chatter', 'version': '9.0.1.0.0', 'author': "Eficent, " "Odoo Community Association (OCA)", "website": "https://github.com/OCA/stock-logistics-warehouse", 'category': 'Warehouse', 'summary': "Log changes being done in Inventory Adjustments", 'depends': ['stock'], "data": [ 'data/stock_data.xml', 'views/stock_inventory_view.xml', ], 'license': 'AGPL-3', 'installable': True, 'application': False, }
{ 'name': 'Stock Inventory Chatter', 'version': '8.0.1.0.0', 'author': "Eficent, " "initOS GmbH, " "Odoo Community Association (OCA)", "website": "https://github.com/OCA/stock-logistics-warehouse", 'category': 'Warehouse', 'summary': "Log changes being done in Inventory Adjustments", 'depends': ['stock'], "data": [ 'data/stock_data.xml', 'views/stock_inventory_view.xml', ], 'license': 'AGPL-3', 'installable': True, 'application': False, }
Change of the module version
Change of the module version
Python
agpl-3.0
kmee/stock-logistics-warehouse,acsone/stock-logistics-warehouse,open-synergy/stock-logistics-warehouse
{ 'name': 'Stock Inventory Chatter', - 'version': '9.0.1.0.0', + 'version': '8.0.1.0.0', 'author': "Eficent, " + "initOS GmbH, " "Odoo Community Association (OCA)", "website": "https://github.com/OCA/stock-logistics-warehouse", 'category': 'Warehouse', 'summary': "Log changes being done in Inventory Adjustments", 'depends': ['stock'], "data": [ 'data/stock_data.xml', 'views/stock_inventory_view.xml', ], 'license': 'AGPL-3', 'installable': True, 'application': False, }
Change of the module version
## Code Before: { 'name': 'Stock Inventory Chatter', 'version': '9.0.1.0.0', 'author': "Eficent, " "Odoo Community Association (OCA)", "website": "https://github.com/OCA/stock-logistics-warehouse", 'category': 'Warehouse', 'summary': "Log changes being done in Inventory Adjustments", 'depends': ['stock'], "data": [ 'data/stock_data.xml', 'views/stock_inventory_view.xml', ], 'license': 'AGPL-3', 'installable': True, 'application': False, } ## Instruction: Change of the module version ## Code After: { 'name': 'Stock Inventory Chatter', 'version': '8.0.1.0.0', 'author': "Eficent, " "initOS GmbH, " "Odoo Community Association (OCA)", "website": "https://github.com/OCA/stock-logistics-warehouse", 'category': 'Warehouse', 'summary': "Log changes being done in Inventory Adjustments", 'depends': ['stock'], "data": [ 'data/stock_data.xml', 'views/stock_inventory_view.xml', ], 'license': 'AGPL-3', 'installable': True, 'application': False, }
{ 'name': 'Stock Inventory Chatter', - 'version': '9.0.1.0.0', ? ^ + 'version': '8.0.1.0.0', ? ^ 'author': "Eficent, " + "initOS GmbH, " "Odoo Community Association (OCA)", "website": "https://github.com/OCA/stock-logistics-warehouse", 'category': 'Warehouse', 'summary': "Log changes being done in Inventory Adjustments", 'depends': ['stock'], "data": [ 'data/stock_data.xml', 'views/stock_inventory_view.xml', ], 'license': 'AGPL-3', 'installable': True, 'application': False, }
9504529dd4b9140be0026d0b30a0e88e5dea5e25
rtrss/config.py
rtrss/config.py
import os import logging import importlib # All configuration defaults are set in this module TRACKER_HOST = 'rutracker.org' # Timeone for the tracker times TZNAME = 'Europe/Moscow' LOGLEVEL = logging.INFO LOG_FORMAT_LOGENTRIES = '%(levelname)s %(name)s %(message)s' LOG_FORMAT_BRIEF = '%(asctime)s %(levelname)s %(name)s %(message)s' ADMIN_LOGIN = os.environ.get('ADMIN_LOGIN', 'admin') ADMIN_PASSWORD = os.environ.get('ADMIN_PASSWORD', 'admin') ADMIN_EMAIL = os.environ.get('ADMIN_EMAIL', 'admin@localhost') # path to save torrent files TORRENT_PATH_PATTERN = 'torrents/{}.torrent' APP_ENVIRONMENT = os.environ.get('RTRSS_ENVIRONMENT') if not APP_ENVIRONMENT: raise EnvironmentError('RTRSS_ENVIRONMENT must be set') _mod = importlib.import_module('rtrss.config_{}'.format(APP_ENVIRONMENT)) _envconf = {k: v for k, v in _mod.__dict__.items() if k == k.upper()} globals().update(_envconf)
import os import logging import importlib # All configuration defaults are set in this module TRACKER_HOST = 'rutracker.org' # Timeone for the tracker times TZNAME = 'Europe/Moscow' LOGLEVEL = logging.INFO LOG_FORMAT_LOGENTRIES = '%(levelname)s %(name)s %(message)s' LOG_FORMAT_BRIEF = '%(asctime)s %(levelname)s %(name)s %(message)s' ADMIN_LOGIN = os.environ.get('ADMIN_LOGIN', 'admin') ADMIN_PASSWORD = os.environ.get('ADMIN_PASSWORD', 'admin') ADMIN_EMAIL = os.environ.get('ADMIN_EMAIL', 'admin@localhost') # path to save torrent files TORRENT_PATH_PATTERN = 'torrents/{}.torrent' APP_ENVIRONMENT = os.environ.get('RTRSS_ENVIRONMENT') if not APP_ENVIRONMENT: raise EnvironmentError('RTRSS_ENVIRONMENT must be set') IP = '0.0.0.0' PORT = 8080 _mod = importlib.import_module('rtrss.config_{}'.format(APP_ENVIRONMENT)) _envconf = {k: v for k, v in _mod.__dict__.items() if k == k.upper()} globals().update(_envconf)
Add default IP and PORT
Add default IP and PORT
Python
apache-2.0
notapresent/rtrss,notapresent/rtrss,notapresent/rtrss,notapresent/rtrss
import os import logging import importlib # All configuration defaults are set in this module TRACKER_HOST = 'rutracker.org' # Timeone for the tracker times TZNAME = 'Europe/Moscow' LOGLEVEL = logging.INFO LOG_FORMAT_LOGENTRIES = '%(levelname)s %(name)s %(message)s' LOG_FORMAT_BRIEF = '%(asctime)s %(levelname)s %(name)s %(message)s' ADMIN_LOGIN = os.environ.get('ADMIN_LOGIN', 'admin') ADMIN_PASSWORD = os.environ.get('ADMIN_PASSWORD', 'admin') ADMIN_EMAIL = os.environ.get('ADMIN_EMAIL', 'admin@localhost') # path to save torrent files TORRENT_PATH_PATTERN = 'torrents/{}.torrent' APP_ENVIRONMENT = os.environ.get('RTRSS_ENVIRONMENT') if not APP_ENVIRONMENT: raise EnvironmentError('RTRSS_ENVIRONMENT must be set') + IP = '0.0.0.0' + PORT = 8080 + _mod = importlib.import_module('rtrss.config_{}'.format(APP_ENVIRONMENT)) _envconf = {k: v for k, v in _mod.__dict__.items() if k == k.upper()} globals().update(_envconf)
Add default IP and PORT
## Code Before: import os import logging import importlib # All configuration defaults are set in this module TRACKER_HOST = 'rutracker.org' # Timeone for the tracker times TZNAME = 'Europe/Moscow' LOGLEVEL = logging.INFO LOG_FORMAT_LOGENTRIES = '%(levelname)s %(name)s %(message)s' LOG_FORMAT_BRIEF = '%(asctime)s %(levelname)s %(name)s %(message)s' ADMIN_LOGIN = os.environ.get('ADMIN_LOGIN', 'admin') ADMIN_PASSWORD = os.environ.get('ADMIN_PASSWORD', 'admin') ADMIN_EMAIL = os.environ.get('ADMIN_EMAIL', 'admin@localhost') # path to save torrent files TORRENT_PATH_PATTERN = 'torrents/{}.torrent' APP_ENVIRONMENT = os.environ.get('RTRSS_ENVIRONMENT') if not APP_ENVIRONMENT: raise EnvironmentError('RTRSS_ENVIRONMENT must be set') _mod = importlib.import_module('rtrss.config_{}'.format(APP_ENVIRONMENT)) _envconf = {k: v for k, v in _mod.__dict__.items() if k == k.upper()} globals().update(_envconf) ## Instruction: Add default IP and PORT ## Code After: import os import logging import importlib # All configuration defaults are set in this module TRACKER_HOST = 'rutracker.org' # Timeone for the tracker times TZNAME = 'Europe/Moscow' LOGLEVEL = logging.INFO LOG_FORMAT_LOGENTRIES = '%(levelname)s %(name)s %(message)s' LOG_FORMAT_BRIEF = '%(asctime)s %(levelname)s %(name)s %(message)s' ADMIN_LOGIN = os.environ.get('ADMIN_LOGIN', 'admin') ADMIN_PASSWORD = os.environ.get('ADMIN_PASSWORD', 'admin') ADMIN_EMAIL = os.environ.get('ADMIN_EMAIL', 'admin@localhost') # path to save torrent files TORRENT_PATH_PATTERN = 'torrents/{}.torrent' APP_ENVIRONMENT = os.environ.get('RTRSS_ENVIRONMENT') if not APP_ENVIRONMENT: raise EnvironmentError('RTRSS_ENVIRONMENT must be set') IP = '0.0.0.0' PORT = 8080 _mod = importlib.import_module('rtrss.config_{}'.format(APP_ENVIRONMENT)) _envconf = {k: v for k, v in _mod.__dict__.items() if k == k.upper()} globals().update(_envconf)
import os import logging import importlib # All configuration defaults are set in this module TRACKER_HOST = 'rutracker.org' # Timeone for the tracker times TZNAME = 'Europe/Moscow' LOGLEVEL = logging.INFO LOG_FORMAT_LOGENTRIES = '%(levelname)s %(name)s %(message)s' LOG_FORMAT_BRIEF = '%(asctime)s %(levelname)s %(name)s %(message)s' ADMIN_LOGIN = os.environ.get('ADMIN_LOGIN', 'admin') ADMIN_PASSWORD = os.environ.get('ADMIN_PASSWORD', 'admin') ADMIN_EMAIL = os.environ.get('ADMIN_EMAIL', 'admin@localhost') # path to save torrent files TORRENT_PATH_PATTERN = 'torrents/{}.torrent' APP_ENVIRONMENT = os.environ.get('RTRSS_ENVIRONMENT') if not APP_ENVIRONMENT: raise EnvironmentError('RTRSS_ENVIRONMENT must be set') + IP = '0.0.0.0' + PORT = 8080 + _mod = importlib.import_module('rtrss.config_{}'.format(APP_ENVIRONMENT)) _envconf = {k: v for k, v in _mod.__dict__.items() if k == k.upper()} globals().update(_envconf)
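The importlib block at the bottom of this module copies every UPPER_CASE name from rtrss.config_<environment> over the defaults, so per-environment files stay tiny. A hypothetical development override (file name and values are examples, not taken from the project):

# rtrss/config_development.py -- used when RTRSS_ENVIRONMENT=development
import logging

LOGLEVEL = logging.DEBUG      # overrides the INFO default
PORT = 5000                   # overrides the new default of 8080

# lower-case names are skipped by the k == k.upper() filter,
# so private helpers can live here without leaking into the config.
_local_helper = 'not exported'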
93d0f11658c7417371ec2e040397c7a572559585
django_remote_submission/consumers.py
django_remote_submission/consumers.py
"""Manage websocket connections.""" # -*- coding: utf-8 -*- import json from channels import Group from channels.auth import channel_session_user_from_http, channel_session_user from .models import Job @channel_session_user_from_http def ws_connect(message): message.reply_channel.send({ 'accept': True, }) Group('job-user-{}'.format(message.user.username)).add( message.reply_channel, ) @channel_session_user def ws_disconnect(message): Group('job-user-{}'.format(message.user.username)).discard( message.reply_channel, )
"""Manage websocket connections.""" # -*- coding: utf-8 -*- import json from channels import Group from channels.auth import channel_session_user_from_http, channel_session_user from .models import Job import json @channel_session_user_from_http def ws_connect(message): last_jobs = message.user.jobs.order_by('-modified')[:10] for job in last_jobs: message.reply_channel.send({ 'text': json.dumps({ 'job_id': job.id, 'title': job.title, 'status': job.status, }), }) Group('job-user-{}'.format(message.user.username)).add( message.reply_channel, ) @channel_session_user def ws_disconnect(message): Group('job-user-{}'.format(message.user.username)).discard( message.reply_channel, )
Send last jobs on initial connection
Send last jobs on initial connection
Python
isc
ornl-ndav/django-remote-submission,ornl-ndav/django-remote-submission,ornl-ndav/django-remote-submission
"""Manage websocket connections.""" # -*- coding: utf-8 -*- import json from channels import Group from channels.auth import channel_session_user_from_http, channel_session_user from .models import Job + import json + @channel_session_user_from_http def ws_connect(message): + last_jobs = message.user.jobs.order_by('-modified')[:10] + + for job in last_jobs: - message.reply_channel.send({ + message.reply_channel.send({ - 'accept': True, + 'text': json.dumps({ + 'job_id': job.id, + 'title': job.title, + 'status': job.status, + }), - }) + }) Group('job-user-{}'.format(message.user.username)).add( message.reply_channel, ) @channel_session_user def ws_disconnect(message): Group('job-user-{}'.format(message.user.username)).discard( message.reply_channel, )
Send last jobs on initial connection
## Code Before: """Manage websocket connections.""" # -*- coding: utf-8 -*- import json from channels import Group from channels.auth import channel_session_user_from_http, channel_session_user from .models import Job @channel_session_user_from_http def ws_connect(message): message.reply_channel.send({ 'accept': True, }) Group('job-user-{}'.format(message.user.username)).add( message.reply_channel, ) @channel_session_user def ws_disconnect(message): Group('job-user-{}'.format(message.user.username)).discard( message.reply_channel, ) ## Instruction: Send last jobs on initial connection ## Code After: """Manage websocket connections.""" # -*- coding: utf-8 -*- import json from channels import Group from channels.auth import channel_session_user_from_http, channel_session_user from .models import Job import json @channel_session_user_from_http def ws_connect(message): last_jobs = message.user.jobs.order_by('-modified')[:10] for job in last_jobs: message.reply_channel.send({ 'text': json.dumps({ 'job_id': job.id, 'title': job.title, 'status': job.status, }), }) Group('job-user-{}'.format(message.user.username)).add( message.reply_channel, ) @channel_session_user def ws_disconnect(message): Group('job-user-{}'.format(message.user.username)).discard( message.reply_channel, )
"""Manage websocket connections.""" # -*- coding: utf-8 -*- import json from channels import Group from channels.auth import channel_session_user_from_http, channel_session_user from .models import Job + import json + @channel_session_user_from_http def ws_connect(message): + last_jobs = message.user.jobs.order_by('-modified')[:10] + + for job in last_jobs: - message.reply_channel.send({ + message.reply_channel.send({ ? ++++ - 'accept': True, + 'text': json.dumps({ + 'job_id': job.id, + 'title': job.title, + 'status': job.status, + }), - }) + }) ? ++++ Group('job-user-{}'.format(message.user.username)).add( message.reply_channel, ) @channel_session_user def ws_disconnect(message): Group('job-user-{}'.format(message.user.username)).discard( message.reply_channel, )
93a7616d949494888f5357f5491aa3278e7de234
cupy/logic/truth.py
cupy/logic/truth.py
import cupy def all(a, axis=None, out=None, keepdims=False): assert isinstance(a, cupy.ndarray) return a.all(axis=axis, out=out, keepdims=keepdims) def any(a, axis=None, out=None, keepdims=False): assert isinstance(a, cupy.ndarray) return a.any(axis=axis, out=out, keepdims=keepdims)
import cupy def all(a, axis=None, out=None, keepdims=False): """Tests whether all array elements along a given axis evaluate to True. Args: a (cupy.ndarray): Input array. axis (int or tuple of ints): Along which axis to compute all. The flattened array is used by default. out (cupy.ndarray): Output array. keepdims (bool): If ``True``, the axis is remained as an axis of size one. Returns: cupy.ndarray: An array reduced of the input array along the axis. .. seealso:: :data:`numpy.all` """ assert isinstance(a, cupy.ndarray) return a.all(axis=axis, out=out, keepdims=keepdims) def any(a, axis=None, out=None, keepdims=False): """Tests whether any array elements along a given axis evaluate to True. Args: a (cupy.ndarray): Input array. axis (int or tuple of ints): Along which axis to compute all. The flattened array is used by default. out (cupy.ndarray): Output array. keepdims (bool): If ``True``, the axis is remained as an axis of size one. Returns: cupy.ndarray: An array reduced of the input array along the axis. .. seealso:: :data:`numpy.any` """ assert isinstance(a, cupy.ndarray) return a.any(axis=axis, out=out, keepdims=keepdims)
Add documentation for the cupy.all and cupy.any functions
Add documentation for the cupy.all and cupy.any functions
Python
mit
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
import cupy def all(a, axis=None, out=None, keepdims=False): + """Tests whether all array elements along a given axis evaluate to True. + + Args: + a (cupy.ndarray): Input array. + axis (int or tuple of ints): Along which axis to compute all. + The flattened array is used by default. + out (cupy.ndarray): Output array. + keepdims (bool): If ``True``, the axis is remained as an axis of + size one. + + Returns: + cupy.ndarray: An array reduced of the input array along the axis. + + .. seealso:: :data:`numpy.all` + + """ assert isinstance(a, cupy.ndarray) return a.all(axis=axis, out=out, keepdims=keepdims) def any(a, axis=None, out=None, keepdims=False): + """Tests whether any array elements along a given axis evaluate to True. + + Args: + a (cupy.ndarray): Input array. + axis (int or tuple of ints): Along which axis to compute all. + The flattened array is used by default. + out (cupy.ndarray): Output array. + keepdims (bool): If ``True``, the axis is remained as an axis of + size one. + + Returns: + cupy.ndarray: An array reduced of the input array along the axis. + + .. seealso:: :data:`numpy.any` + + """ assert isinstance(a, cupy.ndarray) return a.any(axis=axis, out=out, keepdims=keepdims)
Add documentation for the cupy.all and cupy.any functions
## Code Before: import cupy def all(a, axis=None, out=None, keepdims=False): assert isinstance(a, cupy.ndarray) return a.all(axis=axis, out=out, keepdims=keepdims) def any(a, axis=None, out=None, keepdims=False): assert isinstance(a, cupy.ndarray) return a.any(axis=axis, out=out, keepdims=keepdims) ## Instruction: Add documents of cupy.all and cupy.any function ## Code After: import cupy def all(a, axis=None, out=None, keepdims=False): """Tests whether all array elements along a given axis evaluate to True. Args: a (cupy.ndarray): Input array. axis (int or tuple of ints): Along which axis to compute all. The flattened array is used by default. out (cupy.ndarray): Output array. keepdims (bool): If ``True``, the axis is remained as an axis of size one. Returns: cupy.ndarray: An array reduced of the input array along the axis. .. seealso:: :data:`numpy.all` """ assert isinstance(a, cupy.ndarray) return a.all(axis=axis, out=out, keepdims=keepdims) def any(a, axis=None, out=None, keepdims=False): """Tests whether any array elements along a given axis evaluate to True. Args: a (cupy.ndarray): Input array. axis (int or tuple of ints): Along which axis to compute all. The flattened array is used by default. out (cupy.ndarray): Output array. keepdims (bool): If ``True``, the axis is remained as an axis of size one. Returns: cupy.ndarray: An array reduced of the input array along the axis. .. seealso:: :data:`numpy.any` """ assert isinstance(a, cupy.ndarray) return a.any(axis=axis, out=out, keepdims=keepdims)
import cupy def all(a, axis=None, out=None, keepdims=False): + """Tests whether all array elements along a given axis evaluate to True. + + Args: + a (cupy.ndarray): Input array. + axis (int or tuple of ints): Along which axis to compute all. + The flattened array is used by default. + out (cupy.ndarray): Output array. + keepdims (bool): If ``True``, the axis is remained as an axis of + size one. + + Returns: + cupy.ndarray: An array reduced of the input array along the axis. + + .. seealso:: :data:`numpy.all` + + """ assert isinstance(a, cupy.ndarray) return a.all(axis=axis, out=out, keepdims=keepdims) def any(a, axis=None, out=None, keepdims=False): + """Tests whether any array elements along a given axis evaluate to True. + + Args: + a (cupy.ndarray): Input array. + axis (int or tuple of ints): Along which axis to compute all. + The flattened array is used by default. + out (cupy.ndarray): Output array. + keepdims (bool): If ``True``, the axis is remained as an axis of + size one. + + Returns: + cupy.ndarray: An array reduced of the input array along the axis. + + .. seealso:: :data:`numpy.any` + + """ assert isinstance(a, cupy.ndarray) return a.any(axis=axis, out=out, keepdims=keepdims)
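A brief usage illustration of the two documented reductions (input values chosen arbitrarily; results are shown as comments):

import cupy

x = cupy.array([[True, False],
                [True, True]])

cupy.all(x)                          # 0-d array containing False
cupy.all(x, axis=0)                  # array([ True, False])
cupy.any(x, axis=1, keepdims=True)   # array([[ True], [ True]])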
a5befe542e857ec36717f7f8da53ff9f2c2af7e6
natasha/__init__.py
natasha/__init__.py
from copy import copy from collections import deque from yargy import FactParser from natasha.grammars import Person, Geo class Combinator(object): DEFAULT_GRAMMARS = [ Person, Geo, ] def __init__(self, grammars=None, cache_size=50000): self.grammars = grammars or self.DEFAULT_GRAMMARS self.parser = FactParser(cache_size=cache_size) def extract(self, text): tokens = deque(self.parser.tokenizer.transform(text)) for grammar in self.grammars: for grammar_type, rule in grammar.__members__.items(): for match in self.parser.extract(copy(tokens), rule.value): yield (grammar, grammar_type, match)
from copy import copy from collections import deque from yargy import FactParser from natasha.grammars import Person, Geo, Money, Date class Combinator(object): DEFAULT_GRAMMARS = [ Money, Person, Geo, Date, ] def __init__(self, grammars=None, cache_size=50000): self.grammars = grammars or self.DEFAULT_GRAMMARS self.parser = FactParser(cache_size=cache_size) def extract(self, text): tokens = deque(self.parser.tokenizer.transform(text)) for grammar in self.grammars: for grammar_type, rule in grammar.__members__.items(): for match in self.parser.extract(copy(tokens), rule.value): yield (grammar, grammar_type, match)
Add new grammars to Combinator.DEFAULT_GRAMMARS
Add new grammars to Combinator.DEFAULT_GRAMMARS
Python
mit
natasha/natasha
from copy import copy from collections import deque from yargy import FactParser - from natasha.grammars import Person, Geo + from natasha.grammars import Person, Geo, Money, Date class Combinator(object): DEFAULT_GRAMMARS = [ + Money, Person, Geo, + Date, ] def __init__(self, grammars=None, cache_size=50000): self.grammars = grammars or self.DEFAULT_GRAMMARS self.parser = FactParser(cache_size=cache_size) def extract(self, text): tokens = deque(self.parser.tokenizer.transform(text)) for grammar in self.grammars: for grammar_type, rule in grammar.__members__.items(): for match in self.parser.extract(copy(tokens), rule.value): yield (grammar, grammar_type, match)
Add new grammars to Combinator.DEFAULT_GRAMMARS
## Code Before: from copy import copy from collections import deque from yargy import FactParser from natasha.grammars import Person, Geo class Combinator(object): DEFAULT_GRAMMARS = [ Person, Geo, ] def __init__(self, grammars=None, cache_size=50000): self.grammars = grammars or self.DEFAULT_GRAMMARS self.parser = FactParser(cache_size=cache_size) def extract(self, text): tokens = deque(self.parser.tokenizer.transform(text)) for grammar in self.grammars: for grammar_type, rule in grammar.__members__.items(): for match in self.parser.extract(copy(tokens), rule.value): yield (grammar, grammar_type, match) ## Instruction: Add new grammars to Combinator.DEFAULT_GRAMMARS ## Code After: from copy import copy from collections import deque from yargy import FactParser from natasha.grammars import Person, Geo, Money, Date class Combinator(object): DEFAULT_GRAMMARS = [ Money, Person, Geo, Date, ] def __init__(self, grammars=None, cache_size=50000): self.grammars = grammars or self.DEFAULT_GRAMMARS self.parser = FactParser(cache_size=cache_size) def extract(self, text): tokens = deque(self.parser.tokenizer.transform(text)) for grammar in self.grammars: for grammar_type, rule in grammar.__members__.items(): for match in self.parser.extract(copy(tokens), rule.value): yield (grammar, grammar_type, match)
from copy import copy from collections import deque from yargy import FactParser - from natasha.grammars import Person, Geo + from natasha.grammars import Person, Geo, Money, Date ? +++++++++++++ class Combinator(object): DEFAULT_GRAMMARS = [ + Money, Person, Geo, + Date, ] def __init__(self, grammars=None, cache_size=50000): self.grammars = grammars or self.DEFAULT_GRAMMARS self.parser = FactParser(cache_size=cache_size) def extract(self, text): tokens = deque(self.parser.tokenizer.transform(text)) for grammar in self.grammars: for grammar_type, rule in grammar.__members__.items(): for match in self.parser.extract(copy(tokens), rule.value): yield (grammar, grammar_type, match)
58ed8c24288ee8f470acfa85cc6ae267f0ad2fd8
pbag/tests/test_serialize.py
pbag/tests/test_serialize.py
from pbag.serialize import dump, load data = [b'Hello\n', 1, b'world!', None] def test_core(): with open('_foo.pack', 'wb') as f: dump(data, f) with open('_foo.pack', 'rb') as f: data2 = load(f) assert data == data2 def test_multiple_dumps(): with open('_foo.pack', 'wb') as f: dump(1, f) dump(data, f) dump(2, f) with open('_foo.pack', 'rb') as f: a = load(f) b = load(f) c = load(f) assert a == 1 assert b == data assert c == 2
from tempfile import TemporaryFile from pbag.serialize import dump, load data = [b'Hello\n', 1, b'world!', None] def test_core(): with TemporaryFile(mode='wb+') as f: dump(data, f) f.seek(0) data2 = load(f) assert data == data2 def test_multiple_dumps(): with TemporaryFile(mode='wb+') as f: dump(1, f) dump(data, f) dump(2, f) f.seek(0) a = load(f) b = load(f) c = load(f) assert a == 1 assert b == data assert c == 2
Use temporary files for testing.
Use temporary files for testing.
Python
bsd-3-clause
jakirkham/dask,clarkfitzg/dask,marianotepper/dask,ContinuumIO/dask,esc/dask,pombredanne/dask,wiso/dask,jcrist/dask,mraspaud/dask,hainm/dask,pombredanne/dask,jakirkham/dask,ssanderson/dask,mraspaud/dask,cpcloud/dask,wiso/dask,mrocklin/dask,minrk/dask,ContinuumIO/dask,PhE/dask,PhE/dask,esc/dask,jayhetee/dask,blaze/dask,dask/dask,mrocklin/dask,cowlicks/dask,mikegraham/dask,chrisbarber/dask,hainm/dask,freeman-lab/dask,gameduell/dask,dask/dask,marianotepper/dask,jcrist/dask,clarkfitzg/dask,minrk/dask,simudream/dask,vikhyat/dask,simudream/dask,freeman-lab/dask,jayhetee/dask,vikhyat/dask,ssanderson/dask,blaze/dask
+ from tempfile import TemporaryFile from pbag.serialize import dump, load data = [b'Hello\n', 1, b'world!', None] def test_core(): - with open('_foo.pack', 'wb') as f: + with TemporaryFile(mode='wb+') as f: dump(data, f) + f.seek(0) - - with open('_foo.pack', 'rb') as f: data2 = load(f) assert data == data2 def test_multiple_dumps(): - with open('_foo.pack', 'wb') as f: + with TemporaryFile(mode='wb+') as f: dump(1, f) dump(data, f) dump(2, f) - with open('_foo.pack', 'rb') as f: + f.seek(0) + a = load(f) b = load(f) c = load(f) assert a == 1 assert b == data assert c == 2
Use temporary files for testing.
## Code Before: from pbag.serialize import dump, load data = [b'Hello\n', 1, b'world!', None] def test_core(): with open('_foo.pack', 'wb') as f: dump(data, f) with open('_foo.pack', 'rb') as f: data2 = load(f) assert data == data2 def test_multiple_dumps(): with open('_foo.pack', 'wb') as f: dump(1, f) dump(data, f) dump(2, f) with open('_foo.pack', 'rb') as f: a = load(f) b = load(f) c = load(f) assert a == 1 assert b == data assert c == 2 ## Instruction: Use temporary files for testing. ## Code After: from tempfile import TemporaryFile from pbag.serialize import dump, load data = [b'Hello\n', 1, b'world!', None] def test_core(): with TemporaryFile(mode='wb+') as f: dump(data, f) f.seek(0) data2 = load(f) assert data == data2 def test_multiple_dumps(): with TemporaryFile(mode='wb+') as f: dump(1, f) dump(data, f) dump(2, f) f.seek(0) a = load(f) b = load(f) c = load(f) assert a == 1 assert b == data assert c == 2
+ from tempfile import TemporaryFile from pbag.serialize import dump, load data = [b'Hello\n', 1, b'world!', None] def test_core(): - with open('_foo.pack', 'wb') as f: + with TemporaryFile(mode='wb+') as f: dump(data, f) + f.seek(0) - - with open('_foo.pack', 'rb') as f: data2 = load(f) assert data == data2 def test_multiple_dumps(): - with open('_foo.pack', 'wb') as f: + with TemporaryFile(mode='wb+') as f: dump(1, f) dump(data, f) dump(2, f) - with open('_foo.pack', 'rb') as f: + f.seek(0) + a = load(f) b = load(f) c = load(f) assert a == 1 assert b == data assert c == 2
6a9d8a10d6fdf4f4cfdf8ae5af9b172d9b53e8e9
drawer.py
drawer.py
import matplotlib.pyplot as plt import numpy as np def display_result(vectors, clusters): colors = [np.random.rand(3, 1) for i in range(len(clusters))] for cluster_index, (centroid, cluster) in enumerate(clusters.items()): current_cluster = [vectors[i] for i in cluster] xs = list(map(lambda x: x[0], current_cluster)) ys = list(map(lambda x: x[1], current_cluster)) plt.scatter(xs, ys, c=colors[cluster_index]) plt.scatter(centroid[0], centroid[1], c=colors[cluster_index], marker='x') plt.show()
import matplotlib.pyplot as plt import numpy as np def display_result(vectors, clusters): colors = [np.random.rand(3, 1) for i in range(len(clusters))] centroids_colors = [[1-x for x in color] for color in colors] for cluster_index, (centroid, cluster) in enumerate(clusters.items()): current_cluster = [vectors[i] for i in cluster] xs = list(map(lambda x: x[0], current_cluster)) ys = list(map(lambda x: x[1], current_cluster)) plt.scatter(xs, ys, c=colors[cluster_index]) plt.scatter(centroid[0], centroid[1], c=centroids_colors[cluster_index], marker='x') plt.show()
Add drawing of centroids with inverted colors
Add drawing of centroids with inverted colors
Python
mit
vanashimko/k-means
import matplotlib.pyplot as plt import numpy as np def display_result(vectors, clusters): colors = [np.random.rand(3, 1) for i in range(len(clusters))] + centroids_colors = [[1-x for x in color] for color in colors] for cluster_index, (centroid, cluster) in enumerate(clusters.items()): current_cluster = [vectors[i] for i in cluster] xs = list(map(lambda x: x[0], current_cluster)) ys = list(map(lambda x: x[1], current_cluster)) plt.scatter(xs, ys, c=colors[cluster_index]) - plt.scatter(centroid[0], centroid[1], c=colors[cluster_index], marker='x') + plt.scatter(centroid[0], centroid[1], c=centroids_colors[cluster_index], marker='x') plt.show()
Add drawing centroids with inverted colors
## Code Before: import matplotlib.pyplot as plt import numpy as np def display_result(vectors, clusters): colors = [np.random.rand(3, 1) for i in range(len(clusters))] for cluster_index, (centroid, cluster) in enumerate(clusters.items()): current_cluster = [vectors[i] for i in cluster] xs = list(map(lambda x: x[0], current_cluster)) ys = list(map(lambda x: x[1], current_cluster)) plt.scatter(xs, ys, c=colors[cluster_index]) plt.scatter(centroid[0], centroid[1], c=colors[cluster_index], marker='x') plt.show() ## Instruction: Add drawing centroids with inverted colors ## Code After: import matplotlib.pyplot as plt import numpy as np def display_result(vectors, clusters): colors = [np.random.rand(3, 1) for i in range(len(clusters))] centroids_colors = [[1-x for x in color] for color in colors] for cluster_index, (centroid, cluster) in enumerate(clusters.items()): current_cluster = [vectors[i] for i in cluster] xs = list(map(lambda x: x[0], current_cluster)) ys = list(map(lambda x: x[1], current_cluster)) plt.scatter(xs, ys, c=colors[cluster_index]) plt.scatter(centroid[0], centroid[1], c=centroids_colors[cluster_index], marker='x') plt.show()
import matplotlib.pyplot as plt import numpy as np def display_result(vectors, clusters): colors = [np.random.rand(3, 1) for i in range(len(clusters))] + centroids_colors = [[1-x for x in color] for color in colors] for cluster_index, (centroid, cluster) in enumerate(clusters.items()): current_cluster = [vectors[i] for i in cluster] xs = list(map(lambda x: x[0], current_cluster)) ys = list(map(lambda x: x[1], current_cluster)) plt.scatter(xs, ys, c=colors[cluster_index]) - plt.scatter(centroid[0], centroid[1], c=colors[cluster_index], marker='x') + plt.scatter(centroid[0], centroid[1], c=centroids_colors[cluster_index], marker='x') ? ++++++++++ plt.show()
634e389ed260b404327e303afb4f5a1dc931ee36
storm/db.py
storm/db.py
from random import randrange import time from storm import error class Connection(object): def __init__(self, host='localhost', port=None, database=None, user=None, password=None): self.host = host self.port = port self.database = database self.user = user self.password = password class ConnectionPool(object): def __init__(self, connection, count=10, lifetime=3600): self.connection = connection self.count = count self.lifetime = lifetime self.db_connections = []; def create_new_connection(self): cls = self.get_db_class() instance = cls(self.connection) self.db_connections.append(instance) return instance def get_db(self): if len(self.db_connections) < self.count: return self.create_new_connection() index = randrange(0, len(self.db_connections)) connection = self.db_connections[index] if (time.time() - connection.start_time) > self.lifetime: removed = self.db_connections.pop(index) removed.close() return self.create_new_connection() return self.db_connections[index] def get_db_class(self): raise NotImplementedError('The "get_db_class" method is not implemented') class Database(object): def __init__(self, connection): if not isinstance(connection, Connection): raise error.StormError('connection must be instance of storm.db.Connection') self.connection = connection self.is_connected = False self.start_time = time.time()
import time from storm import error from tornado import gen class Connection(object): def __init__(self, host='localhost', port=None, database=None, user=None, password=None): self.host = host self.port = port self.database = database self.user = user self.password = password class ConnectionPool(object): def __init__(self, connection, count=10, lifetime=3600): self.connection = connection self.count = count self.lifetime = lifetime @gen.coroutine def get_db(self, callback=None): raise NotImplementedError('The "get_db" method is not implemented') def get_db_class(self): raise NotImplementedError('The "get_db_class" method is not implemented') class Database(object): def __init__(self, connection): if not isinstance(connection, Connection): raise error.StormError('connection must be instance of storm.db.Connection') self.connection = connection self.is_connected = False self.start_time = time.time()
Make connection pool less smart
Make connection pool less smart You have to extend it and implement your own get_db function to use a connection pool now
Python
mit
liujiantong/storm,ccampbell/storm
- from random import randrange import time from storm import error + from tornado import gen class Connection(object): def __init__(self, host='localhost', port=None, database=None, user=None, password=None): self.host = host self.port = port self.database = database self.user = user self.password = password class ConnectionPool(object): def __init__(self, connection, count=10, lifetime=3600): self.connection = connection self.count = count self.lifetime = lifetime - self.db_connections = []; + @gen.coroutine + def get_db(self, callback=None): + raise NotImplementedError('The "get_db" method is not implemented') - def create_new_connection(self): - cls = self.get_db_class() - instance = cls(self.connection) - self.db_connections.append(instance) - return instance - - def get_db(self): - if len(self.db_connections) < self.count: - return self.create_new_connection() - - index = randrange(0, len(self.db_connections)) - connection = self.db_connections[index] - if (time.time() - connection.start_time) > self.lifetime: - removed = self.db_connections.pop(index) - removed.close() - return self.create_new_connection() - - return self.db_connections[index] def get_db_class(self): raise NotImplementedError('The "get_db_class" method is not implemented') class Database(object): def __init__(self, connection): if not isinstance(connection, Connection): raise error.StormError('connection must be instance of storm.db.Connection') self.connection = connection self.is_connected = False self.start_time = time.time()
Make connection pool less smart
## Code Before: from random import randrange import time from storm import error class Connection(object): def __init__(self, host='localhost', port=None, database=None, user=None, password=None): self.host = host self.port = port self.database = database self.user = user self.password = password class ConnectionPool(object): def __init__(self, connection, count=10, lifetime=3600): self.connection = connection self.count = count self.lifetime = lifetime self.db_connections = []; def create_new_connection(self): cls = self.get_db_class() instance = cls(self.connection) self.db_connections.append(instance) return instance def get_db(self): if len(self.db_connections) < self.count: return self.create_new_connection() index = randrange(0, len(self.db_connections)) connection = self.db_connections[index] if (time.time() - connection.start_time) > self.lifetime: removed = self.db_connections.pop(index) removed.close() return self.create_new_connection() return self.db_connections[index] def get_db_class(self): raise NotImplementedError('The "get_db_class" method is not implemented') class Database(object): def __init__(self, connection): if not isinstance(connection, Connection): raise error.StormError('connection must be instance of storm.db.Connection') self.connection = connection self.is_connected = False self.start_time = time.time() ## Instruction: Make connection pool less smart ## Code After: import time from storm import error from tornado import gen class Connection(object): def __init__(self, host='localhost', port=None, database=None, user=None, password=None): self.host = host self.port = port self.database = database self.user = user self.password = password class ConnectionPool(object): def __init__(self, connection, count=10, lifetime=3600): self.connection = connection self.count = count self.lifetime = lifetime @gen.coroutine def get_db(self, callback=None): raise NotImplementedError('The "get_db" method is not implemented') def get_db_class(self): raise NotImplementedError('The "get_db_class" method is not implemented') class Database(object): def __init__(self, connection): if not isinstance(connection, Connection): raise error.StormError('connection must be instance of storm.db.Connection') self.connection = connection self.is_connected = False self.start_time = time.time()
- from random import randrange import time from storm import error + from tornado import gen class Connection(object): def __init__(self, host='localhost', port=None, database=None, user=None, password=None): self.host = host self.port = port self.database = database self.user = user self.password = password class ConnectionPool(object): def __init__(self, connection, count=10, lifetime=3600): self.connection = connection self.count = count self.lifetime = lifetime - self.db_connections = []; + @gen.coroutine + def get_db(self, callback=None): + raise NotImplementedError('The "get_db" method is not implemented') - def create_new_connection(self): - cls = self.get_db_class() - instance = cls(self.connection) - self.db_connections.append(instance) - return instance - - def get_db(self): - if len(self.db_connections) < self.count: - return self.create_new_connection() - - index = randrange(0, len(self.db_connections)) - connection = self.db_connections[index] - if (time.time() - connection.start_time) > self.lifetime: - removed = self.db_connections.pop(index) - removed.close() - return self.create_new_connection() - - return self.db_connections[index] def get_db_class(self): raise NotImplementedError('The "get_db_class" method is not implemented') class Database(object): def __init__(self, connection): if not isinstance(connection, Connection): raise error.StormError('connection must be instance of storm.db.Connection') self.connection = connection self.is_connected = False self.start_time = time.time()
98405875fd8ec682caa04244a900e6ce9eac9acb
pavement.py
pavement.py
import sys from paver.easy import task, needs, path, sh, cmdopts, options from paver.setuputils import setup, install_distutils_tasks from distutils.extension import Extension from distutils.dep_util import newer sys.path.insert(0, path('.').abspath()) import version setup(name='microdrop-plugin-manager', version=version.getVersion(), description='Microdrop plugin manager.', keywords='', author='Christian Fobel', author_email='christian@fobel.net', url='https://github.com/wheeler-microfluidics/mpm', license='LGPLv2.1', packages=['mpm', ], install_requires=['configobj', 'path-helpers', 'pip-helpers>=0.6', 'progressbar2', 'pyyaml', 'si-prefix>=0.4.post3'], # Install data listed in `MANIFEST.in` include_package_data=True, entry_points = {'console_scripts': ['mpm = mpm.bin:main']}) @task @needs('generate_setup', 'minilib', 'setuptools.command.sdist') def sdist(): """Overrides sdist to make sure that our setup.py is generated.""" pass
import platform import sys from paver.easy import task, needs, path, sh, cmdopts, options from paver.setuputils import setup, install_distutils_tasks from distutils.extension import Extension from distutils.dep_util import newer sys.path.insert(0, path('.').abspath()) import version install_requires = ['configobj', 'path-helpers', 'pip-helpers>=0.6', 'progressbar2', 'pyyaml', 'si-prefix>=0.4.post3'] if platform.system() == 'Windows': install_requires += ['pywin32'] setup(name='microdrop-plugin-manager', version=version.getVersion(), description='Microdrop plugin manager.', keywords='', author='Christian Fobel', author_email='christian@fobel.net', url='https://github.com/wheeler-microfluidics/mpm', license='LGPLv2.1', packages=['mpm', ], install_requires=install_requires, # Install data listed in `MANIFEST.in` include_package_data=True, entry_points = {'console_scripts': ['mpm = mpm.bin:main']}) @task @needs('generate_setup', 'minilib', 'setuptools.command.sdist') def sdist(): """Overrides sdist to make sure that our setup.py is generated.""" pass
Add pywin32 as Windows required package
[FIX] Add pywin32 as Windows required package
Python
bsd-3-clause
wheeler-microfluidics/mpm
+ import platform import sys from paver.easy import task, needs, path, sh, cmdopts, options from paver.setuputils import setup, install_distutils_tasks from distutils.extension import Extension from distutils.dep_util import newer sys.path.insert(0, path('.').abspath()) import version + + install_requires = ['configobj', 'path-helpers', 'pip-helpers>=0.6', + 'progressbar2', 'pyyaml', 'si-prefix>=0.4.post3'] + + if platform.system() == 'Windows': + install_requires += ['pywin32'] + setup(name='microdrop-plugin-manager', version=version.getVersion(), description='Microdrop plugin manager.', keywords='', author='Christian Fobel', author_email='christian@fobel.net', url='https://github.com/wheeler-microfluidics/mpm', license='LGPLv2.1', packages=['mpm', ], + install_requires=install_requires, - install_requires=['configobj', 'path-helpers', 'pip-helpers>=0.6', - 'progressbar2', 'pyyaml', 'si-prefix>=0.4.post3'], # Install data listed in `MANIFEST.in` include_package_data=True, entry_points = {'console_scripts': ['mpm = mpm.bin:main']}) @task @needs('generate_setup', 'minilib', 'setuptools.command.sdist') def sdist(): """Overrides sdist to make sure that our setup.py is generated.""" pass
Add pywin32 as Windows required package
## Code Before: import sys from paver.easy import task, needs, path, sh, cmdopts, options from paver.setuputils import setup, install_distutils_tasks from distutils.extension import Extension from distutils.dep_util import newer sys.path.insert(0, path('.').abspath()) import version setup(name='microdrop-plugin-manager', version=version.getVersion(), description='Microdrop plugin manager.', keywords='', author='Christian Fobel', author_email='christian@fobel.net', url='https://github.com/wheeler-microfluidics/mpm', license='LGPLv2.1', packages=['mpm', ], install_requires=['configobj', 'path-helpers', 'pip-helpers>=0.6', 'progressbar2', 'pyyaml', 'si-prefix>=0.4.post3'], # Install data listed in `MANIFEST.in` include_package_data=True, entry_points = {'console_scripts': ['mpm = mpm.bin:main']}) @task @needs('generate_setup', 'minilib', 'setuptools.command.sdist') def sdist(): """Overrides sdist to make sure that our setup.py is generated.""" pass ## Instruction: Add pywin32 as Windows required package ## Code After: import platform import sys from paver.easy import task, needs, path, sh, cmdopts, options from paver.setuputils import setup, install_distutils_tasks from distutils.extension import Extension from distutils.dep_util import newer sys.path.insert(0, path('.').abspath()) import version install_requires = ['configobj', 'path-helpers', 'pip-helpers>=0.6', 'progressbar2', 'pyyaml', 'si-prefix>=0.4.post3'] if platform.system() == 'Windows': install_requires += ['pywin32'] setup(name='microdrop-plugin-manager', version=version.getVersion(), description='Microdrop plugin manager.', keywords='', author='Christian Fobel', author_email='christian@fobel.net', url='https://github.com/wheeler-microfluidics/mpm', license='LGPLv2.1', packages=['mpm', ], install_requires=install_requires, # Install data listed in `MANIFEST.in` include_package_data=True, entry_points = {'console_scripts': ['mpm = mpm.bin:main']}) @task @needs('generate_setup', 'minilib', 'setuptools.command.sdist') def sdist(): """Overrides sdist to make sure that our setup.py is generated.""" pass
+ import platform import sys from paver.easy import task, needs, path, sh, cmdopts, options from paver.setuputils import setup, install_distutils_tasks from distutils.extension import Extension from distutils.dep_util import newer sys.path.insert(0, path('.').abspath()) import version + + install_requires = ['configobj', 'path-helpers', 'pip-helpers>=0.6', + 'progressbar2', 'pyyaml', 'si-prefix>=0.4.post3'] + + if platform.system() == 'Windows': + install_requires += ['pywin32'] + setup(name='microdrop-plugin-manager', version=version.getVersion(), description='Microdrop plugin manager.', keywords='', author='Christian Fobel', author_email='christian@fobel.net', url='https://github.com/wheeler-microfluidics/mpm', license='LGPLv2.1', packages=['mpm', ], + install_requires=install_requires, - install_requires=['configobj', 'path-helpers', 'pip-helpers>=0.6', - 'progressbar2', 'pyyaml', 'si-prefix>=0.4.post3'], # Install data listed in `MANIFEST.in` include_package_data=True, entry_points = {'console_scripts': ['mpm = mpm.bin:main']}) @task @needs('generate_setup', 'minilib', 'setuptools.command.sdist') def sdist(): """Overrides sdist to make sure that our setup.py is generated.""" pass
dd42c1c1b1cd0cbe55c27cafe9d2db5466782bc4
server/users-microservice/src/api/users/userModel.py
server/users-microservice/src/api/users/userModel.py
from index import db class UserModel(db.Model): __tablename__ = 'User' id = db.Column(db.Integer, primary_key=True, nullable=False) name = db.Column(db.String(80), unique=True, nullable=False) fullname = db.Column(db.String(80), unique=True, nullable=False) initials = db.Column(db.String(10), unique=True, nullable=False) email = db.Column(db.String(255), unique=True, nullable=False) password = db.Column(db.String(80), unique=True, nullable=False) application = db.Column(db.String(80), unique=True, nullable=False) def __init__(self, name, fullname, initials, email, password, application): self.name = name self.fullname = fullname self.initials = initials self.email = email self.password = password self.application = application def __repr__(self): return self.name
from index import db, brcypt class UserModel(db.Model): __tablename__ = 'User' id = db.Column(db.Integer, primary_key=True, nullable=False) name = db.Column(db.String(80), unique=True, nullable=False) fullname = db.Column(db.String(80), unique=True, nullable=False) initials = db.Column(db.String(10), unique=True, nullable=False) email = db.Column(db.String(255), unique=True, nullable=False) password = db.Column(db.String(80), unique=True, nullable=False) application = db.Column(db.String(80), unique=True, nullable=False) def __init__(self, name, fullname, initials, email, password, application): self.name = name self.fullname = fullname self.initials = initials self.email = email self.application = application self.set_password(password) def __repr__(self): return self.name def set_password(self, password): self.password = bcrypt.generate_password_hash(password) def check_password(self, password): return bcrypt.check_password_hash(self.password, password)
Encrypt password before saving user
Encrypt password before saving user
Python
mit
Madmous/Trello-Clone,Madmous/madClones,Madmous/madClones,Madmous/madClones,Madmous/madClones,Madmous/Trello-Clone,Madmous/Trello-Clone
- from index import db + from index import db, brcypt class UserModel(db.Model): __tablename__ = 'User' id = db.Column(db.Integer, primary_key=True, nullable=False) name = db.Column(db.String(80), unique=True, nullable=False) fullname = db.Column(db.String(80), unique=True, nullable=False) initials = db.Column(db.String(10), unique=True, nullable=False) email = db.Column(db.String(255), unique=True, nullable=False) password = db.Column(db.String(80), unique=True, nullable=False) application = db.Column(db.String(80), unique=True, nullable=False) def __init__(self, name, fullname, initials, email, password, application): self.name = name self.fullname = fullname self.initials = initials self.email = email - self.password = password self.application = application + + self.set_password(password) def __repr__(self): return self.name + + def set_password(self, password): + self.password = bcrypt.generate_password_hash(password) + + def check_password(self, password): + return bcrypt.check_password_hash(self.password, password)
Encrypt password before saving user
## Code Before: from index import db class UserModel(db.Model): __tablename__ = 'User' id = db.Column(db.Integer, primary_key=True, nullable=False) name = db.Column(db.String(80), unique=True, nullable=False) fullname = db.Column(db.String(80), unique=True, nullable=False) initials = db.Column(db.String(10), unique=True, nullable=False) email = db.Column(db.String(255), unique=True, nullable=False) password = db.Column(db.String(80), unique=True, nullable=False) application = db.Column(db.String(80), unique=True, nullable=False) def __init__(self, name, fullname, initials, email, password, application): self.name = name self.fullname = fullname self.initials = initials self.email = email self.password = password self.application = application def __repr__(self): return self.name ## Instruction: Encrypt password before saving user ## Code After: from index import db, brcypt class UserModel(db.Model): __tablename__ = 'User' id = db.Column(db.Integer, primary_key=True, nullable=False) name = db.Column(db.String(80), unique=True, nullable=False) fullname = db.Column(db.String(80), unique=True, nullable=False) initials = db.Column(db.String(10), unique=True, nullable=False) email = db.Column(db.String(255), unique=True, nullable=False) password = db.Column(db.String(80), unique=True, nullable=False) application = db.Column(db.String(80), unique=True, nullable=False) def __init__(self, name, fullname, initials, email, password, application): self.name = name self.fullname = fullname self.initials = initials self.email = email self.application = application self.set_password(password) def __repr__(self): return self.name def set_password(self, password): self.password = bcrypt.generate_password_hash(password) def check_password(self, password): return bcrypt.check_password_hash(self.password, password)
- from index import db + from index import db, brcypt ? ++++++++ class UserModel(db.Model): __tablename__ = 'User' id = db.Column(db.Integer, primary_key=True, nullable=False) name = db.Column(db.String(80), unique=True, nullable=False) fullname = db.Column(db.String(80), unique=True, nullable=False) initials = db.Column(db.String(10), unique=True, nullable=False) email = db.Column(db.String(255), unique=True, nullable=False) password = db.Column(db.String(80), unique=True, nullable=False) application = db.Column(db.String(80), unique=True, nullable=False) def __init__(self, name, fullname, initials, email, password, application): self.name = name self.fullname = fullname self.initials = initials self.email = email - self.password = password self.application = application + + self.set_password(password) def __repr__(self): return self.name + + def set_password(self, password): + self.password = bcrypt.generate_password_hash(password) + + def check_password(self, password): + return bcrypt.check_password_hash(self.password, password)
983df9ceaebb42ca31b131f437362193070eb1db
paasta_tools/clusterman.py
paasta_tools/clusterman.py
import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except ImportError: # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml
import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except (ImportError, FileNotFoundError): # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml
Fix regression in manpages build
Fix regression in manpages build
Python
apache-2.0
Yelp/paasta,Yelp/paasta
import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') - except ImportError: + except (ImportError, FileNotFoundError): # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml
Fix regression in manpages build
## Code Before: import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except ImportError: # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml ## Instruction: Fix regression in manpages build ## Code After: import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') except (ImportError, FileNotFoundError): # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml
import staticconf CLUSTERMAN_YAML_FILE_PATH = '/nail/srv/configs/clusterman.yaml' CLUSTERMAN_METRICS_YAML_FILE_PATH = '/nail/srv/configs/clusterman_metrics.yaml' def get_clusterman_metrics(): try: import clusterman_metrics clusterman_yaml = CLUSTERMAN_YAML_FILE_PATH staticconf.YamlConfiguration(CLUSTERMAN_METRICS_YAML_FILE_PATH, namespace='clusterman_metrics') - except ImportError: + except (ImportError, FileNotFoundError): # our cluster autoscaler is not currently open source, sorry! clusterman_metrics = None clusterman_yaml = None return clusterman_metrics, clusterman_yaml
e1bfa7170d4cf6a78cd0f2ca9c3d5302e04323f5
utensils/forms.py
utensils/forms.py
from django import forms class SearchForm(forms.Form): search = forms.CharField( label='', required=False, widget=forms.widgets.TextInput())
from django import forms from django.utils.functional import curry class SearchForm(forms.Form): search = forms.CharField( label='', required=False, widget=forms.widgets.TextInput()) class UniqueModelFieldsMixin(object): """ Mixin that enforces unique fields on ModelForm forms. Must be left of ModelForm when defining the form class (see https://code.djangoproject.com/ticket/13075). unique_fields = ['name', 'username'] unique_fields = ['name', {'field': 'username', case_insensitive=True}] """ unique_fields = [] def __init__(self, *args, **kwargs): super(UniqueModelFieldsMixin, self).__init__(*args, **kwargs) def _make_validator(field, case_insensitive): model = self.Meta.model value = self.cleaned_data.get(field) case = 'i' if case_insensitive else '' qs = model.objects.filter( **{field + '__{}exact'.format(case): value}) if self.instance.pk: qs = qs.exclude(pk=self.instance.pk) if qs.exists(): raise forms.ValidationError( "That {} is not available.".format(field)) return value for field in self.unique_fields: if isinstance(field, dict): case_insensitive = field.get('case_insensitive', False) field_name = field['field'] else: field_name = field case_insensitive = False func_name = "clean_{}".format(field_name) setattr(self, func_name, curry(_make_validator, field_name, case_insensitive))
Add unique model fields form mixin.
Add unique model fields form mixin.
Python
mit
code-kitchen/django-utensils,code-kitchen/django-utensils,code-kitchen/django-utensils
from django import forms + from django.utils.functional import curry class SearchForm(forms.Form): search = forms.CharField( label='', required=False, widget=forms.widgets.TextInput()) + + class UniqueModelFieldsMixin(object): + """ + Mixin that enforces unique fields on ModelForm forms. + + Must be left of ModelForm when defining the form class (see + https://code.djangoproject.com/ticket/13075). + + unique_fields = ['name', 'username'] + unique_fields = ['name', {'field': 'username', case_insensitive=True}] + """ + unique_fields = [] + + def __init__(self, *args, **kwargs): + super(UniqueModelFieldsMixin, self).__init__(*args, **kwargs) + + def _make_validator(field, case_insensitive): + model = self.Meta.model + value = self.cleaned_data.get(field) + + case = 'i' if case_insensitive else '' + qs = model.objects.filter( + **{field + '__{}exact'.format(case): value}) + if self.instance.pk: + qs = qs.exclude(pk=self.instance.pk) + + if qs.exists(): + raise forms.ValidationError( + "That {} is not available.".format(field)) + return value + + for field in self.unique_fields: + if isinstance(field, dict): + case_insensitive = field.get('case_insensitive', False) + field_name = field['field'] + else: + field_name = field + case_insensitive = False + func_name = "clean_{}".format(field_name) + setattr(self, func_name, + curry(_make_validator, field_name, case_insensitive)) +
Add unique model fields form mixin.
## Code Before: from django import forms class SearchForm(forms.Form): search = forms.CharField( label='', required=False, widget=forms.widgets.TextInput()) ## Instruction: Add unique model fields form mixin. ## Code After: from django import forms from django.utils.functional import curry class SearchForm(forms.Form): search = forms.CharField( label='', required=False, widget=forms.widgets.TextInput()) class UniqueModelFieldsMixin(object): """ Mixin that enforces unique fields on ModelForm forms. Must be left of ModelForm when defining the form class (see https://code.djangoproject.com/ticket/13075). unique_fields = ['name', 'username'] unique_fields = ['name', {'field': 'username', case_insensitive=True}] """ unique_fields = [] def __init__(self, *args, **kwargs): super(UniqueModelFieldsMixin, self).__init__(*args, **kwargs) def _make_validator(field, case_insensitive): model = self.Meta.model value = self.cleaned_data.get(field) case = 'i' if case_insensitive else '' qs = model.objects.filter( **{field + '__{}exact'.format(case): value}) if self.instance.pk: qs = qs.exclude(pk=self.instance.pk) if qs.exists(): raise forms.ValidationError( "That {} is not available.".format(field)) return value for field in self.unique_fields: if isinstance(field, dict): case_insensitive = field.get('case_insensitive', False) field_name = field['field'] else: field_name = field case_insensitive = False func_name = "clean_{}".format(field_name) setattr(self, func_name, curry(_make_validator, field_name, case_insensitive))
from django import forms + from django.utils.functional import curry class SearchForm(forms.Form): search = forms.CharField( label='', required=False, widget=forms.widgets.TextInput()) + + + class UniqueModelFieldsMixin(object): + """ + Mixin that enforces unique fields on ModelForm forms. + + Must be left of ModelForm when defining the form class (see + https://code.djangoproject.com/ticket/13075). + + unique_fields = ['name', 'username'] + unique_fields = ['name', {'field': 'username', case_insensitive=True}] + """ + unique_fields = [] + + def __init__(self, *args, **kwargs): + super(UniqueModelFieldsMixin, self).__init__(*args, **kwargs) + + def _make_validator(field, case_insensitive): + model = self.Meta.model + value = self.cleaned_data.get(field) + + case = 'i' if case_insensitive else '' + qs = model.objects.filter( + **{field + '__{}exact'.format(case): value}) + if self.instance.pk: + qs = qs.exclude(pk=self.instance.pk) + + if qs.exists(): + raise forms.ValidationError( + "That {} is not available.".format(field)) + return value + + for field in self.unique_fields: + if isinstance(field, dict): + case_insensitive = field.get('case_insensitive', False) + field_name = field['field'] + else: + field_name = field + case_insensitive = False + func_name = "clean_{}".format(field_name) + setattr(self, func_name, + curry(_make_validator, field_name, case_insensitive))
2a986d7c0bab1612e96cace5ce54a188e22af2aa
services/wordpress.py
services/wordpress.py
import json import foauth class Wordpress(foauth.providers.OAuth2): # General info about the provider provider_url = 'https://www.wordpress.com/' favicon_url = 'http://s2.wp.com/i/favicon.ico' docs_url = 'http://developer.wordpress.com/docs/api/' # URLs to interact with the API authorize_url = 'https://public-api.wordpress.com/oauth2/authorize' access_token_url = 'https://public-api.wordpress.com/oauth2/token' api_domain = 'public-api.wordpress.com' available_permissions = [ (None, 'read and post to your blog'), ] def parse_token(self, content): data = json.loads(content) return data['access_token'], None
import json import foauth.providers class Wordpress(foauth.providers.OAuth2): # General info about the provider provider_url = 'https://www.wordpress.com/' favicon_url = 'http://s2.wp.com/i/favicon.ico' docs_url = 'http://developer.wordpress.com/docs/api/' # URLs to interact with the API authorize_url = 'https://public-api.wordpress.com/oauth2/authorize' access_token_url = 'https://public-api.wordpress.com/oauth2/token' api_domain = 'public-api.wordpress.com' available_permissions = [ (None, 'read and post to your blog'), ] def parse_token(self, content): data = json.loads(content) return data['access_token'], None
Fix the import for Wordpress
Fix the import for Wordpress
Python
bsd-3-clause
foauth/foauth.org,foauth/foauth.org,foauth/foauth.org
import json - import foauth + import foauth.providers class Wordpress(foauth.providers.OAuth2): # General info about the provider provider_url = 'https://www.wordpress.com/' favicon_url = 'http://s2.wp.com/i/favicon.ico' docs_url = 'http://developer.wordpress.com/docs/api/' # URLs to interact with the API authorize_url = 'https://public-api.wordpress.com/oauth2/authorize' access_token_url = 'https://public-api.wordpress.com/oauth2/token' api_domain = 'public-api.wordpress.com' available_permissions = [ (None, 'read and post to your blog'), ] def parse_token(self, content): data = json.loads(content) return data['access_token'], None
Fix the import for Wordpress
## Code Before: import json import foauth class Wordpress(foauth.providers.OAuth2): # General info about the provider provider_url = 'https://www.wordpress.com/' favicon_url = 'http://s2.wp.com/i/favicon.ico' docs_url = 'http://developer.wordpress.com/docs/api/' # URLs to interact with the API authorize_url = 'https://public-api.wordpress.com/oauth2/authorize' access_token_url = 'https://public-api.wordpress.com/oauth2/token' api_domain = 'public-api.wordpress.com' available_permissions = [ (None, 'read and post to your blog'), ] def parse_token(self, content): data = json.loads(content) return data['access_token'], None ## Instruction: Fix the import for Wordpress ## Code After: import json import foauth.providers class Wordpress(foauth.providers.OAuth2): # General info about the provider provider_url = 'https://www.wordpress.com/' favicon_url = 'http://s2.wp.com/i/favicon.ico' docs_url = 'http://developer.wordpress.com/docs/api/' # URLs to interact with the API authorize_url = 'https://public-api.wordpress.com/oauth2/authorize' access_token_url = 'https://public-api.wordpress.com/oauth2/token' api_domain = 'public-api.wordpress.com' available_permissions = [ (None, 'read and post to your blog'), ] def parse_token(self, content): data = json.loads(content) return data['access_token'], None
import json - import foauth + import foauth.providers class Wordpress(foauth.providers.OAuth2): # General info about the provider provider_url = 'https://www.wordpress.com/' favicon_url = 'http://s2.wp.com/i/favicon.ico' docs_url = 'http://developer.wordpress.com/docs/api/' # URLs to interact with the API authorize_url = 'https://public-api.wordpress.com/oauth2/authorize' access_token_url = 'https://public-api.wordpress.com/oauth2/token' api_domain = 'public-api.wordpress.com' available_permissions = [ (None, 'read and post to your blog'), ] def parse_token(self, content): data = json.loads(content) return data['access_token'], None
074520d7de93db9b83d8d20dd03640146609eeb2
critical_critiques/submission/forms.py
critical_critiques/submission/forms.py
from django.forms import ModelForm from .models import Submission class SubmissionForm(ModelForm): class Meta: model = Submission fields = ('url',)
from urlparse import urlparse from django import forms from .models import Submission class SubmissionForm(forms.ModelForm): class Meta: model = Submission fields = ('url',) def clean_url(self): url = self.cleaned_data['url'] parsed_url = urlparse(url) if not (parsed_url.scheme == 'https'): raise forms.ValidationError("Must be a HTTPS URL") if parsed_url.params or parsed_url.query or parsed_url.fragment: self._raise_url_error() domain = parsed_url.netloc path = parsed_url.path.split('/') if domain == "github.com": return self._clean_pull_request_url(url, path) if domain == "gist.github.com": return self._clean_gist_url(url, path) else: self._raise_url_error() # Valid Gist: https://gist.github.com/rmeritz/2863145 def _clean_gist_url(self, url, path): if not (self._is_valid_url_length(3, path) and self._path_has_id(path, 2)): self._raise_url_error() return url # Valid Pull Request: https://github.com/basho/webmachine/pull/143 def _clean_pull_request_url(self, url, path): if not (self._is_valid_url_length(5, path) and self._path_has_id(path, 4) and (path[3] == 'pull')): self._raise_url_error() return url def _is_valid_url_length(self, length, path): return (((len(path) == length) or (len(path) == (length + 1) and path[length] == '')) and (path[0] == '')) def _path_has_id(self, path, index): return path[index].isdigit() def _raise_url_error(self): raise forms.ValidationError("Must be a valid Github Pull Request URL")
Validate that the submission URLs are for GitHub
Validate that the submission URLs are for GitHub
Python
mit
team-stroller/critical_critiques,team-stroller/critical_critiques,team-stroller/critical_critiques
+ from urlparse import urlparse + - from django.forms import ModelForm + from django import forms from .models import Submission - class SubmissionForm(ModelForm): + class SubmissionForm(forms.ModelForm): class Meta: model = Submission fields = ('url',) + def clean_url(self): + url = self.cleaned_data['url'] + parsed_url = urlparse(url) + if not (parsed_url.scheme == 'https'): + raise forms.ValidationError("Must be a HTTPS URL") + if parsed_url.params or parsed_url.query or parsed_url.fragment: + self._raise_url_error() + domain = parsed_url.netloc + path = parsed_url.path.split('/') + if domain == "github.com": + return self._clean_pull_request_url(url, path) + if domain == "gist.github.com": + return self._clean_gist_url(url, path) + else: + self._raise_url_error() + + # Valid Gist: https://gist.github.com/rmeritz/2863145 + def _clean_gist_url(self, url, path): + if not (self._is_valid_url_length(3, path) + and self._path_has_id(path, 2)): + self._raise_url_error() + return url + + # Valid Pull Request: https://github.com/basho/webmachine/pull/143 + def _clean_pull_request_url(self, url, path): + if not (self._is_valid_url_length(5, path) and + self._path_has_id(path, 4) and + (path[3] == 'pull')): + self._raise_url_error() + return url + + def _is_valid_url_length(self, length, path): + return (((len(path) == length) or + (len(path) == (length + 1) and path[length] == '')) and + (path[0] == '')) + + def _path_has_id(self, path, index): + return path[index].isdigit() + + def _raise_url_error(self): + raise forms.ValidationError("Must be a valid Github Pull Request URL") +
Validate that the submission URLs are for GitHub
## Code Before: from django.forms import ModelForm from .models import Submission class SubmissionForm(ModelForm): class Meta: model = Submission fields = ('url',) ## Instruction: Validate that the submission URLs are for GitHub ## Code After: from urlparse import urlparse from django import forms from .models import Submission class SubmissionForm(forms.ModelForm): class Meta: model = Submission fields = ('url',) def clean_url(self): url = self.cleaned_data['url'] parsed_url = urlparse(url) if not (parsed_url.scheme == 'https'): raise forms.ValidationError("Must be a HTTPS URL") if parsed_url.params or parsed_url.query or parsed_url.fragment: self._raise_url_error() domain = parsed_url.netloc path = parsed_url.path.split('/') if domain == "github.com": return self._clean_pull_request_url(url, path) if domain == "gist.github.com": return self._clean_gist_url(url, path) else: self._raise_url_error() # Valid Gist: https://gist.github.com/rmeritz/2863145 def _clean_gist_url(self, url, path): if not (self._is_valid_url_length(3, path) and self._path_has_id(path, 2)): self._raise_url_error() return url # Valid Pull Request: https://github.com/basho/webmachine/pull/143 def _clean_pull_request_url(self, url, path): if not (self._is_valid_url_length(5, path) and self._path_has_id(path, 4) and (path[3] == 'pull')): self._raise_url_error() return url def _is_valid_url_length(self, length, path): return (((len(path) == length) or (len(path) == (length + 1) and path[length] == '')) and (path[0] == '')) def _path_has_id(self, path, index): return path[index].isdigit() def _raise_url_error(self): raise forms.ValidationError("Must be a valid Github Pull Request URL")
+ from urlparse import urlparse + - from django.forms import ModelForm ? ------ ^^^^^^ + from django import forms ? ^ + from .models import Submission - class SubmissionForm(ModelForm): + class SubmissionForm(forms.ModelForm): ? ++++++ class Meta: model = Submission fields = ('url',) + + def clean_url(self): + url = self.cleaned_data['url'] + parsed_url = urlparse(url) + if not (parsed_url.scheme == 'https'): + raise forms.ValidationError("Must be a HTTPS URL") + if parsed_url.params or parsed_url.query or parsed_url.fragment: + self._raise_url_error() + domain = parsed_url.netloc + path = parsed_url.path.split('/') + if domain == "github.com": + return self._clean_pull_request_url(url, path) + if domain == "gist.github.com": + return self._clean_gist_url(url, path) + else: + self._raise_url_error() + + # Valid Gist: https://gist.github.com/rmeritz/2863145 + def _clean_gist_url(self, url, path): + if not (self._is_valid_url_length(3, path) + and self._path_has_id(path, 2)): + self._raise_url_error() + return url + + # Valid Pull Request: https://github.com/basho/webmachine/pull/143 + def _clean_pull_request_url(self, url, path): + if not (self._is_valid_url_length(5, path) and + self._path_has_id(path, 4) and + (path[3] == 'pull')): + self._raise_url_error() + return url + + def _is_valid_url_length(self, length, path): + return (((len(path) == length) or + (len(path) == (length + 1) and path[length] == '')) and + (path[0] == '')) + + def _path_has_id(self, path, index): + return path[index].isdigit() + + def _raise_url_error(self): + raise forms.ValidationError("Must be a valid Github Pull Request URL")
7d8f291dea725c28e4d904a3195fde46a3418925
parafermions/tests/test_peschel_emery.py
parafermions/tests/test_peschel_emery.py
import unittest import numpy as np import parafermions as pf class Test(unittest.TestCase): def test_pe_degeneracy(self): # should initialise with all zeros N, l = 8, 0.2 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) assert(np.sum(d[1:11:2]-d[:11:2]) < 1e-10) N, l = 8, 1.0 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) assert((d[1]-d[0]) < 1e-15) assert(np.sum(d[1:11:2]-d[:11:2]) > 1e-2)
import unittest import numpy as np import parafermions as pf class Test(unittest.TestCase): def test_pe_degeneracy(self): # should initialise with all zeros N, l = 8, 0.0 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) # check that all eigenvalues are degenerate assert(np.sum(d[1:10:2]-d[:10:2]) < 1e-10) N, l = 8, 1.0 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) # check only the ground state eigenvalues are degenerate assert((d[1]-d[0]) < 1e-15) assert(np.sum(d[1:10:2]-d[:10:2]) > 1e-2)
Update slicing so that array sizes match
Update slicing so that array sizes match
Python
bsd-2-clause
nmoran/pf_resonances
import unittest import numpy as np import parafermions as pf class Test(unittest.TestCase): def test_pe_degeneracy(self): # should initialise with all zeros - N, l = 8, 0.2 + N, l = 8, 0.0 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) + # check that all eigenvalues are degenerate - assert(np.sum(d[1:11:2]-d[:11:2]) < 1e-10) + assert(np.sum(d[1:10:2]-d[:10:2]) < 1e-10) N, l = 8, 1.0 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) + # check only the ground state eigenvalues are degenerate assert((d[1]-d[0]) < 1e-15) - assert(np.sum(d[1:11:2]-d[:11:2]) > 1e-2) + assert(np.sum(d[1:10:2]-d[:10:2]) > 1e-2)
Update slicing so that array sizes match
## Code Before: import unittest import numpy as np import parafermions as pf class Test(unittest.TestCase): def test_pe_degeneracy(self): # should initialise with all zeros N, l = 8, 0.2 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) assert(np.sum(d[1:11:2]-d[:11:2]) < 1e-10) N, l = 8, 1.0 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) assert((d[1]-d[0]) < 1e-15) assert(np.sum(d[1:11:2]-d[:11:2]) > 1e-2) ## Instruction: Update slicing so that array sizes match ## Code After: import unittest import numpy as np import parafermions as pf class Test(unittest.TestCase): def test_pe_degeneracy(self): # should initialise with all zeros N, l = 8, 0.0 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) # check that all eigenvalues are degenerate assert(np.sum(d[1:10:2]-d[:10:2]) < 1e-10) N, l = 8, 1.0 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) # check only the ground state eigenvalues are degenerate assert((d[1]-d[0]) < 1e-15) assert(np.sum(d[1:10:2]-d[:10:2]) > 1e-2)
import unittest import numpy as np import parafermions as pf class Test(unittest.TestCase): def test_pe_degeneracy(self): # should initialise with all zeros - N, l = 8, 0.2 ? ^ + N, l = 8, 0.0 ? ^ pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) + # check that all eigenvalues are degenerate - assert(np.sum(d[1:11:2]-d[:11:2]) < 1e-10) ? ^ ^ + assert(np.sum(d[1:10:2]-d[:10:2]) < 1e-10) ? ^ ^ N, l = 8, 1.0 pe = pf.PeschelEmerySpinHalf(N, l, dtype=np.dtype('float64')) d, v = pe.Diagonalise(k=100) + # check only the ground state eigenvalues are degenerate assert((d[1]-d[0]) < 1e-15) - assert(np.sum(d[1:11:2]-d[:11:2]) > 1e-2) ? ^ ^ + assert(np.sum(d[1:10:2]-d[:10:2]) > 1e-2) ? ^ ^
f733300f622a4ffc1f0179c90590d543dc37113e
weber_utils/pagination.py
weber_utils/pagination.py
import functools from flask import jsonify, request from flask.ext.sqlalchemy import Pagination from .request_utils import dictify_model, error_abort def paginate_query(query, default_page_size=100, renderer=dictify_model): try: page_size = int(request.args.get("page_size", default_page_size)) page = int(request.args.get("page", 1)) except ValueError: error_abort(httplib.BAD_REQUEST, "Invalid integer value") num_objects = query.count() return { "metadata": { "total_num_objects": num_objects, "total_num_pages": _ceil_div(num_objects, page_size) or 1, "page": page, }, "result": [renderer(obj) for obj in query.offset((page-1)*page_size).limit(page_size)], } def _ceil_div(value, divisor): returned = float(value) / divisor if int(returned) != returned: return int(returned) + 1 return int(returned) def paginated_view(func): @functools.wraps(func) def new_func(*args, **kwargs): returned = func(*args, **kwargs) return jsonify(paginate_query(returned)) return new_func
import functools from flask import jsonify, request from flask.ext.sqlalchemy import Pagination from .request_utils import dictify_model, error_abort def paginate_query(query, default_page_size=100, renderer=dictify_model): try: page_size = int(request.args.get("page_size", default_page_size)) page = int(request.args.get("page", 1)) except ValueError: error_abort(httplib.BAD_REQUEST, "Invalid integer value") num_objects = query.count() return { "metadata": { "total_num_objects": num_objects, "total_num_pages": _ceil_div(num_objects, page_size) or 1, "page": page, }, "result": [renderer(obj) for obj in query.offset((page-1)*page_size).limit(page_size)], } def _ceil_div(value, divisor): returned = float(value) / divisor if int(returned) != returned: return int(returned) + 1 return int(returned) def paginated_view(func=None, renderer=dictify_model): if func is None: return functools.partial(paginated_view, renderer=renderer) @functools.wraps(func) def new_func(*args, **kwargs): returned = func(*args, **kwargs) return jsonify(paginate_query(returned, renderer=renderer)) return new_func
Allow renderer argument to paginated_view decorator
Allow renderer argument to paginated_view decorator
Python
bsd-3-clause
vmalloc/weber-utils
import functools from flask import jsonify, request from flask.ext.sqlalchemy import Pagination from .request_utils import dictify_model, error_abort def paginate_query(query, default_page_size=100, renderer=dictify_model): try: page_size = int(request.args.get("page_size", default_page_size)) page = int(request.args.get("page", 1)) except ValueError: error_abort(httplib.BAD_REQUEST, "Invalid integer value") num_objects = query.count() return { "metadata": { "total_num_objects": num_objects, "total_num_pages": _ceil_div(num_objects, page_size) or 1, "page": page, }, "result": [renderer(obj) for obj in query.offset((page-1)*page_size).limit(page_size)], } def _ceil_div(value, divisor): returned = float(value) / divisor if int(returned) != returned: return int(returned) + 1 return int(returned) - def paginated_view(func): + def paginated_view(func=None, renderer=dictify_model): + if func is None: + return functools.partial(paginated_view, renderer=renderer) @functools.wraps(func) def new_func(*args, **kwargs): returned = func(*args, **kwargs) - return jsonify(paginate_query(returned)) + return jsonify(paginate_query(returned, renderer=renderer)) return new_func
Allow renderer argument to paginated_view decorator
## Code Before: import functools from flask import jsonify, request from flask.ext.sqlalchemy import Pagination from .request_utils import dictify_model, error_abort def paginate_query(query, default_page_size=100, renderer=dictify_model): try: page_size = int(request.args.get("page_size", default_page_size)) page = int(request.args.get("page", 1)) except ValueError: error_abort(httplib.BAD_REQUEST, "Invalid integer value") num_objects = query.count() return { "metadata": { "total_num_objects": num_objects, "total_num_pages": _ceil_div(num_objects, page_size) or 1, "page": page, }, "result": [renderer(obj) for obj in query.offset((page-1)*page_size).limit(page_size)], } def _ceil_div(value, divisor): returned = float(value) / divisor if int(returned) != returned: return int(returned) + 1 return int(returned) def paginated_view(func): @functools.wraps(func) def new_func(*args, **kwargs): returned = func(*args, **kwargs) return jsonify(paginate_query(returned)) return new_func ## Instruction: Allow renderer argument to paginated_view decorator ## Code After: import functools from flask import jsonify, request from flask.ext.sqlalchemy import Pagination from .request_utils import dictify_model, error_abort def paginate_query(query, default_page_size=100, renderer=dictify_model): try: page_size = int(request.args.get("page_size", default_page_size)) page = int(request.args.get("page", 1)) except ValueError: error_abort(httplib.BAD_REQUEST, "Invalid integer value") num_objects = query.count() return { "metadata": { "total_num_objects": num_objects, "total_num_pages": _ceil_div(num_objects, page_size) or 1, "page": page, }, "result": [renderer(obj) for obj in query.offset((page-1)*page_size).limit(page_size)], } def _ceil_div(value, divisor): returned = float(value) / divisor if int(returned) != returned: return int(returned) + 1 return int(returned) def paginated_view(func=None, renderer=dictify_model): if func is None: return functools.partial(paginated_view, renderer=renderer) @functools.wraps(func) def new_func(*args, **kwargs): returned = func(*args, **kwargs) return jsonify(paginate_query(returned, renderer=renderer)) return new_func
import functools from flask import jsonify, request from flask.ext.sqlalchemy import Pagination from .request_utils import dictify_model, error_abort def paginate_query(query, default_page_size=100, renderer=dictify_model): try: page_size = int(request.args.get("page_size", default_page_size)) page = int(request.args.get("page", 1)) except ValueError: error_abort(httplib.BAD_REQUEST, "Invalid integer value") num_objects = query.count() return { "metadata": { "total_num_objects": num_objects, "total_num_pages": _ceil_div(num_objects, page_size) or 1, "page": page, }, "result": [renderer(obj) for obj in query.offset((page-1)*page_size).limit(page_size)], } def _ceil_div(value, divisor): returned = float(value) / divisor if int(returned) != returned: return int(returned) + 1 return int(returned) - def paginated_view(func): + def paginated_view(func=None, renderer=dictify_model): + if func is None: + return functools.partial(paginated_view, renderer=renderer) @functools.wraps(func) def new_func(*args, **kwargs): returned = func(*args, **kwargs) - return jsonify(paginate_query(returned)) + return jsonify(paginate_query(returned, renderer=renderer)) ? +++++++++++++++++++ return new_func
8989258dab574cff0bc8001f1d59232983d15f68
grammpy/Grammars/PrettyApiGrammar.py
grammpy/Grammars/PrettyApiGrammar.py
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar class PrettyApiGrammar(Grammar): def __init__(self, terminals=None, nonterminals=None, rules=None, start_symbol=None): if isinstance(terminals, str): temp = [] for ch in terminals: temp.append(ch) terminals = temp super().__init__(terminals=terminals, nonterminals=nonterminals, rules=rules, start_symbol=start_symbol)
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar class PrettyApiGrammar(Grammar): def __init__(self, terminals=None, nonterminals=None, rules=None, start_symbol=None): if isinstance(terminals, str): temp = [] for ch in terminals: temp.append(ch) terminals = temp super().__init__(terminals=terminals, nonterminals=nonterminals, rules=rules, start_symbol=start_symbol) def __copy__(self): return PrettyApiGrammar(terminals=(t.s for t in self.terms()), nonterminals=self.nonterms(), rules=self.rules(), start_symbol=self.start_get())
Add __copy__ method to grammar
Add __copy__ method to grammar
Python
mit
PatrikValkovic/grammpy
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar class PrettyApiGrammar(Grammar): def __init__(self, terminals=None, nonterminals=None, rules=None, start_symbol=None): if isinstance(terminals, str): temp = [] for ch in terminals: temp.append(ch) terminals = temp super().__init__(terminals=terminals, nonterminals=nonterminals, rules=rules, start_symbol=start_symbol) + def __copy__(self): + return PrettyApiGrammar(terminals=(t.s for t in self.terms()), + nonterminals=self.nonterms(), + rules=self.rules(), + start_symbol=self.start_get())
Add __copy__ method to grammar
## Code Before: from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar class PrettyApiGrammar(Grammar): def __init__(self, terminals=None, nonterminals=None, rules=None, start_symbol=None): if isinstance(terminals, str): temp = [] for ch in terminals: temp.append(ch) terminals = temp super().__init__(terminals=terminals, nonterminals=nonterminals, rules=rules, start_symbol=start_symbol) ## Instruction: Add __copy__ method to grammar ## Code After: from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar class PrettyApiGrammar(Grammar): def __init__(self, terminals=None, nonterminals=None, rules=None, start_symbol=None): if isinstance(terminals, str): temp = [] for ch in terminals: temp.append(ch) terminals = temp super().__init__(terminals=terminals, nonterminals=nonterminals, rules=rules, start_symbol=start_symbol) def __copy__(self): return PrettyApiGrammar(terminals=(t.s for t in self.terms()), nonterminals=self.nonterms(), rules=self.rules(), start_symbol=self.start_get())
from .MultipleRulesGrammar import MultipleRulesGrammar as Grammar class PrettyApiGrammar(Grammar): def __init__(self, terminals=None, nonterminals=None, rules=None, start_symbol=None): if isinstance(terminals, str): temp = [] for ch in terminals: temp.append(ch) terminals = temp super().__init__(terminals=terminals, nonterminals=nonterminals, rules=rules, start_symbol=start_symbol) + def __copy__(self): + return PrettyApiGrammar(terminals=(t.s for t in self.terms()), + nonterminals=self.nonterms(), + rules=self.rules(), + start_symbol=self.start_get())
695ee95faf0ae80f0c69bf47e881af22ab0f00cd
l10n_it_esigibilita_iva/models/account.py
l10n_it_esigibilita_iva/models/account.py
from odoo import models, fields class AccountTax(models.Model): _inherit = 'account.tax' payability = fields.Selection([ ('I', 'Immediate payability'), ('D', 'Deferred payability'), ('S', 'Split payment'), ], string="VAT payability")
from odoo import models, fields class AccountTax(models.Model): _inherit = 'account.tax' payability = fields.Selection([ ('I', 'VAT payable immediately'), ('D', 'unrealized VAT'), ('S', 'split payments'), ], string="VAT payability")
Use correct english terms, from APPENDIX A -TECHNICAL SPECIFICATIONS
Use correct english terms, from APPENDIX A -TECHNICAL SPECIFICATIONS
Python
agpl-3.0
dcorio/l10n-italy,OCA/l10n-italy,OCA/l10n-italy,dcorio/l10n-italy,dcorio/l10n-italy,OCA/l10n-italy
from odoo import models, fields class AccountTax(models.Model): _inherit = 'account.tax' payability = fields.Selection([ - ('I', 'Immediate payability'), - ('D', 'Deferred payability'), + ('I', 'VAT payable immediately'), + ('D', 'unrealized VAT'), - ('S', 'Split payment'), + ('S', 'split payments'), ], string="VAT payability")
Use correct english terms, from APPENDIX A -TECHNICAL SPECIFICATIONS
## Code Before: from odoo import models, fields class AccountTax(models.Model): _inherit = 'account.tax' payability = fields.Selection([ ('I', 'Immediate payability'), ('D', 'Deferred payability'), ('S', 'Split payment'), ], string="VAT payability") ## Instruction: Use correct english terms, from APPENDIX A -TECHNICAL SPECIFICATIONS ## Code After: from odoo import models, fields class AccountTax(models.Model): _inherit = 'account.tax' payability = fields.Selection([ ('I', 'VAT payable immediately'), ('D', 'unrealized VAT'), ('S', 'split payments'), ], string="VAT payability")
from odoo import models, fields class AccountTax(models.Model): _inherit = 'account.tax' payability = fields.Selection([ - ('I', 'Immediate payability'), - ('D', 'Deferred payability'), + ('I', 'VAT payable immediately'), + ('D', 'unrealized VAT'), - ('S', 'Split payment'), ? ^ + ('S', 'split payments'), ? ^ + ], string="VAT payability")
30dbda17bfa3b52dc2aace6eba6b8c1e4b3f7542
robot-name/robot_name.py
robot-name/robot_name.py
import string import random class Robot(): """Robot facory settings""" def __init__(self): self.name = ""
import string import random class Robot(): """Robot facory settings""" def __init__(self): self.name = "" def factory_name(self): char = ''.join(random.SystemRandom().choice(string.ascii_uppercase) for _ in range(2)) num = ''.join(random.SystemRandom().choice(string.digits) for _ in range(3)) self.name = char + num return self.name R1 = Robot() print(R1.factory_name())
Add methord to generate unique robot name
Add methord to generate unique robot name
Python
mit
amalshehu/exercism-python
import string import random class Robot(): """Robot facory settings""" def __init__(self): self.name = "" - + def factory_name(self): + char = ''.join(random.SystemRandom().choice(string.ascii_uppercase) for _ in range(2)) + num = ''.join(random.SystemRandom().choice(string.digits) for _ in range(3)) + self.name = char + num + return self.name + + R1 = Robot() + print(R1.factory_name()) +
Add methord to generate unique robot name
## Code Before: import string import random class Robot(): """Robot facory settings""" def __init__(self): self.name = "" ## Instruction: Add methord to generate unique robot name ## Code After: import string import random class Robot(): """Robot facory settings""" def __init__(self): self.name = "" def factory_name(self): char = ''.join(random.SystemRandom().choice(string.ascii_uppercase) for _ in range(2)) num = ''.join(random.SystemRandom().choice(string.digits) for _ in range(3)) self.name = char + num return self.name R1 = Robot() print(R1.factory_name())
import string import random class Robot(): """Robot facory settings""" def __init__(self): self.name = "" - + def factory_name(self): + + char = ''.join(random.SystemRandom().choice(string.ascii_uppercase) for _ in range(2)) + num = ''.join(random.SystemRandom().choice(string.digits) for _ in range(3)) + self.name = char + num + return self.name + + R1 = Robot() + print(R1.factory_name())
fffe7392cb486f7218fc8afd2d42769660a1f558
tests/test_main.py
tests/test_main.py
from subprocess import check_call from pytest import raises from csft import __main__ as main def test_call(): check_call(['python', '-m', 'csft', '.']) def test_main(): main.main(argv=['.']) with raises(SystemExit): main.main(argv=[])
from subprocess import check_call from pytest import raises from csft import __main__ as main def test_call(): check_call(['python', '-m', 'csft', '.']) def test_main(): main.main(argv=['.']) with raises(SystemExit): main.main(argv=[]) with raises(TypeError): main.main(argv=['path/is/not/a/directory'])
Test when path not is a directory.
Test when path not is a directory.
Python
mit
yanqd0/csft
from subprocess import check_call from pytest import raises from csft import __main__ as main def test_call(): check_call(['python', '-m', 'csft', '.']) def test_main(): main.main(argv=['.']) with raises(SystemExit): main.main(argv=[]) + with raises(TypeError): + main.main(argv=['path/is/not/a/directory']) +
Test when path not is a directory.
## Code Before: from subprocess import check_call from pytest import raises from csft import __main__ as main def test_call(): check_call(['python', '-m', 'csft', '.']) def test_main(): main.main(argv=['.']) with raises(SystemExit): main.main(argv=[]) ## Instruction: Test when path not is a directory. ## Code After: from subprocess import check_call from pytest import raises from csft import __main__ as main def test_call(): check_call(['python', '-m', 'csft', '.']) def test_main(): main.main(argv=['.']) with raises(SystemExit): main.main(argv=[]) with raises(TypeError): main.main(argv=['path/is/not/a/directory'])
from subprocess import check_call from pytest import raises from csft import __main__ as main def test_call(): check_call(['python', '-m', 'csft', '.']) def test_main(): main.main(argv=['.']) with raises(SystemExit): main.main(argv=[]) + + with raises(TypeError): + main.main(argv=['path/is/not/a/directory'])
0574705dcbc473805aee35b482a41bdef060b0c9
setup.py
setup.py
from distutils.core import setup import py2pack with open('README') as f: README = f.read() setup( name = py2pack.__name__, version = py2pack.__version__, license = "GPLv2", description = py2pack.__doc__, long_description = README, author = py2pack.__author__.rsplit(' ', 1)[0], author_email = py2pack.__author__.rsplit(' ', 1)[1][1:-1], url = 'http://github.com/saschpe/py2pack', scripts = ['scripts/py2pack'], packages = ['py2pack'], package_data = {'py2pack': ['templates/*']}, #data_files = [('doc/py2pack', ['AUTHORS', 'LICENSE', 'README'])], requires = ['argparse', 'Jinja2'], classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Code Generators', 'Topic :: Software Development :: Pre-processors', ], )
from distutils.core import setup import py2pack setup( name = py2pack.__name__, version = py2pack.__version__, license = "GPLv2", description = py2pack.__doc__, long_description = open('README').read(), author = py2pack.__author__.rsplit(' ', 1)[0], author_email = py2pack.__author__.rsplit(' ', 1)[1][1:-1], url = 'http://github.com/saschpe/py2pack', scripts = ['scripts/py2pack'], packages = ['py2pack'], package_data = {'py2pack': ['templates/*']}, #data_files = [('doc/py2pack', ['AUTHORS', 'LICENSE', 'README'])], requires = ['argparse', 'Jinja2'], classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Code Generators', 'Topic :: Software Development :: Pre-processors', ], )
Load README file traditionally, with-statement is not supported by older Python releases.
Load README file traditionally, with-statement is not supported by older Python releases.
Python
apache-2.0
saschpe/py2pack,toabctl/py2pack
from distutils.core import setup import py2pack - - with open('README') as f: - README = f.read() setup( name = py2pack.__name__, version = py2pack.__version__, license = "GPLv2", description = py2pack.__doc__, - long_description = README, + long_description = open('README').read(), author = py2pack.__author__.rsplit(' ', 1)[0], author_email = py2pack.__author__.rsplit(' ', 1)[1][1:-1], url = 'http://github.com/saschpe/py2pack', scripts = ['scripts/py2pack'], packages = ['py2pack'], package_data = {'py2pack': ['templates/*']}, #data_files = [('doc/py2pack', ['AUTHORS', 'LICENSE', 'README'])], requires = ['argparse', 'Jinja2'], classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Code Generators', 'Topic :: Software Development :: Pre-processors', ], )
Load README file traditionally, with-statement is not supported by older Python releases.
## Code Before: from distutils.core import setup import py2pack with open('README') as f: README = f.read() setup( name = py2pack.__name__, version = py2pack.__version__, license = "GPLv2", description = py2pack.__doc__, long_description = README, author = py2pack.__author__.rsplit(' ', 1)[0], author_email = py2pack.__author__.rsplit(' ', 1)[1][1:-1], url = 'http://github.com/saschpe/py2pack', scripts = ['scripts/py2pack'], packages = ['py2pack'], package_data = {'py2pack': ['templates/*']}, #data_files = [('doc/py2pack', ['AUTHORS', 'LICENSE', 'README'])], requires = ['argparse', 'Jinja2'], classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Code Generators', 'Topic :: Software Development :: Pre-processors', ], ) ## Instruction: Load README file traditionally, with-statement is not supported by older Python releases. ## Code After: from distutils.core import setup import py2pack setup( name = py2pack.__name__, version = py2pack.__version__, license = "GPLv2", description = py2pack.__doc__, long_description = open('README').read(), author = py2pack.__author__.rsplit(' ', 1)[0], author_email = py2pack.__author__.rsplit(' ', 1)[1][1:-1], url = 'http://github.com/saschpe/py2pack', scripts = ['scripts/py2pack'], packages = ['py2pack'], package_data = {'py2pack': ['templates/*']}, #data_files = [('doc/py2pack', ['AUTHORS', 'LICENSE', 'README'])], requires = ['argparse', 'Jinja2'], classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Code Generators', 'Topic :: Software Development :: Pre-processors', ], )
from distutils.core import setup import py2pack - - with open('README') as f: - README = f.read() setup( name = py2pack.__name__, version = py2pack.__version__, license = "GPLv2", description = py2pack.__doc__, - long_description = README, + long_description = open('README').read(), ? ++++++ +++++++++ author = py2pack.__author__.rsplit(' ', 1)[0], author_email = py2pack.__author__.rsplit(' ', 1)[1][1:-1], url = 'http://github.com/saschpe/py2pack', scripts = ['scripts/py2pack'], packages = ['py2pack'], package_data = {'py2pack': ['templates/*']}, #data_files = [('doc/py2pack', ['AUTHORS', 'LICENSE', 'README'])], requires = ['argparse', 'Jinja2'], classifiers = [ 'Development Status :: 4 - Beta', 'Environment :: Console', 'Intended Audience :: End Users/Desktop', 'Intended Audience :: Developers', 'Intended Audience :: System Administrators', 'License :: OSI Approved :: GNU General Public License (GPL)', 'Operating System :: MacOS :: MacOS X', 'Operating System :: Microsoft :: Windows', 'Operating System :: POSIX', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Software Development :: Code Generators', 'Topic :: Software Development :: Pre-processors', ], )
3b768fdc642471446092a08446ec8f2ab08281c3
clean.py
clean.py
import GutterColor.settings as settings class Clean: """Clean up the cache and generated icons""" def __init__(self, view): pass
import GutterColor.settings as settings from os import walk, remove, path, listdir from shutil import rmtree from threading import Thread class Clean(Thread): """Clean up the cache and generated icons""" def __init__(self, files): Thread.__init__(self) self.files = files def run(self): self.remove_folders() self.remove_files() def folder_ids(self, name): """Return all the open folder ids""" name = name.split('/')[-1] return int(name) if not name == 'icons' else None def file_ids(self, name): """Return all file ids""" name = name.split('/')[-1] return int(name) if not name == 'icons' else None def remove_folders(self): """Remove all the icon folders which are not currently open""" # Get all the folder ids folders = list(filter(None, map(self.folder_ids, [x[0] for x in walk(settings.ICON_PATH)]))) # Delete the folders for folder in folders: if folder not in self.files: rmtree(path.join(settings.ICON_PATH, str(folder))) def remove_files(self): """Remove all the cached files which are not currently open""" files = [ f for f in listdir(settings.CACHE_PATH) if path.isfile(path.join(settings.CACHE_PATH,f)) ] for f in files: if f == '.keep': pass if int(f) not in self.files: remove(path.join(settings.CACHE_PATH, f))
Add Clean class to remove files/folders.
Add Clean class to remove files/folders.
Python
mit
ggordan/GutterColor,ggordan/GutterColor
import GutterColor.settings as settings + from os import walk, remove, path, listdir + from shutil import rmtree + from threading import Thread - class Clean: + class Clean(Thread): """Clean up the cache and generated icons""" - def __init__(self, view): + def __init__(self, files): - pass + Thread.__init__(self) + self.files = files + def run(self): + self.remove_folders() + self.remove_files() + + + def folder_ids(self, name): + """Return all the open folder ids""" + name = name.split('/')[-1] + return int(name) if not name == 'icons' else None + + + def file_ids(self, name): + """Return all file ids""" + name = name.split('/')[-1] + return int(name) if not name == 'icons' else None + + + def remove_folders(self): + """Remove all the icon folders which are not currently open""" + # Get all the folder ids + folders = list(filter(None, map(self.folder_ids, [x[0] for x in walk(settings.ICON_PATH)]))) + # Delete the folders + for folder in folders: + if folder not in self.files: + rmtree(path.join(settings.ICON_PATH, str(folder))) + + + def remove_files(self): + """Remove all the cached files which are not currently open""" + files = [ f for f in listdir(settings.CACHE_PATH) if path.isfile(path.join(settings.CACHE_PATH,f)) ] + for f in files: + if f == '.keep': pass + if int(f) not in self.files: + remove(path.join(settings.CACHE_PATH, f)) +
Add Clean class to remove files/folders.
## Code Before: import GutterColor.settings as settings class Clean: """Clean up the cache and generated icons""" def __init__(self, view): pass ## Instruction: Add Clean class to remove files/folders. ## Code After: import GutterColor.settings as settings from os import walk, remove, path, listdir from shutil import rmtree from threading import Thread class Clean(Thread): """Clean up the cache and generated icons""" def __init__(self, files): Thread.__init__(self) self.files = files def run(self): self.remove_folders() self.remove_files() def folder_ids(self, name): """Return all the open folder ids""" name = name.split('/')[-1] return int(name) if not name == 'icons' else None def file_ids(self, name): """Return all file ids""" name = name.split('/')[-1] return int(name) if not name == 'icons' else None def remove_folders(self): """Remove all the icon folders which are not currently open""" # Get all the folder ids folders = list(filter(None, map(self.folder_ids, [x[0] for x in walk(settings.ICON_PATH)]))) # Delete the folders for folder in folders: if folder not in self.files: rmtree(path.join(settings.ICON_PATH, str(folder))) def remove_files(self): """Remove all the cached files which are not currently open""" files = [ f for f in listdir(settings.CACHE_PATH) if path.isfile(path.join(settings.CACHE_PATH,f)) ] for f in files: if f == '.keep': pass if int(f) not in self.files: remove(path.join(settings.CACHE_PATH, f))
import GutterColor.settings as settings + from os import walk, remove, path, listdir + from shutil import rmtree + from threading import Thread - class Clean: + class Clean(Thread): ? ++++++++ """Clean up the cache and generated icons""" - def __init__(self, view): ? ^ ^ + def __init__(self, files): ? ^ + ^ - pass + Thread.__init__(self) + self.files = files + + def run(self): + self.remove_folders() + self.remove_files() + + + def folder_ids(self, name): + """Return all the open folder ids""" + name = name.split('/')[-1] + return int(name) if not name == 'icons' else None + + + def file_ids(self, name): + """Return all file ids""" + name = name.split('/')[-1] + return int(name) if not name == 'icons' else None + + + def remove_folders(self): + """Remove all the icon folders which are not currently open""" + # Get all the folder ids + folders = list(filter(None, map(self.folder_ids, [x[0] for x in walk(settings.ICON_PATH)]))) + # Delete the folders + for folder in folders: + if folder not in self.files: + rmtree(path.join(settings.ICON_PATH, str(folder))) + + + def remove_files(self): + """Remove all the cached files which are not currently open""" + files = [ f for f in listdir(settings.CACHE_PATH) if path.isfile(path.join(settings.CACHE_PATH,f)) ] + for f in files: + if f == '.keep': pass + if int(f) not in self.files: + remove(path.join(settings.CACHE_PATH, f))
e92fa763729ce68e86da3664ae1a1ed37e3200a5
ynr/apps/uk_results/serializers.py
ynr/apps/uk_results/serializers.py
from __future__ import unicode_literals from rest_framework import serializers from candidates.serializers import MembershipSerializer from .models import CandidateResult, ResultSet class CandidateResultSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CandidateResult fields = ( 'id', 'url', 'membership', 'result_set', 'num_ballots', 'is_winner', ) membership = MembershipSerializer(read_only=True) # result_set = ResultSetSerializer(read_only=True) class ResultSetSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ResultSet fields = ( 'id', 'url', 'candidate_results', 'ip_address', 'num_turnout_reported', 'num_spoilt_ballots', # 'post_result', 'user', 'user_id', ) # post_result = PostResultSerializer() user = serializers.ReadOnlyField(source='user.username') user_id = serializers.ReadOnlyField(source='user.id') candidate_results = CandidateResultSerializer(many=True, read_only=True)
from __future__ import unicode_literals from rest_framework import serializers from candidates.serializers import MembershipSerializer from .models import CandidateResult, ResultSet class CandidateResultSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CandidateResult fields = ( 'id', 'url', 'membership', 'result_set', 'num_ballots', 'is_winner', ) membership = MembershipSerializer(read_only=True) # result_set = ResultSetSerializer(read_only=True) class ResultSetSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ResultSet fields = ( 'id', 'url', 'candidate_results', 'ip_address', 'num_turnout_reported', 'num_spoilt_ballots', 'user', 'user_id', 'ballot_paper_id' ) # post_result = PostResultSerializer() user = serializers.ReadOnlyField(source='user.username') ballot_paper_id = serializers.ReadOnlyField( source='post_election.ballot_paper_id') user_id = serializers.ReadOnlyField(source='user.id') candidate_results = CandidateResultSerializer(many=True, read_only=True)
Add ballot paper ID to API
Add ballot paper ID to API
Python
agpl-3.0
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
from __future__ import unicode_literals from rest_framework import serializers from candidates.serializers import MembershipSerializer from .models import CandidateResult, ResultSet class CandidateResultSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CandidateResult fields = ( 'id', 'url', 'membership', 'result_set', 'num_ballots', 'is_winner', ) membership = MembershipSerializer(read_only=True) # result_set = ResultSetSerializer(read_only=True) class ResultSetSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ResultSet fields = ( + 'id', - 'id', 'url', + 'url', 'candidate_results', 'ip_address', - 'num_turnout_reported', 'num_spoilt_ballots', + 'num_turnout_reported', - # 'post_result', + 'num_spoilt_ballots', + 'user', - 'user', 'user_id', + 'user_id', + 'ballot_paper_id' ) # post_result = PostResultSerializer() user = serializers.ReadOnlyField(source='user.username') + ballot_paper_id = serializers.ReadOnlyField( + source='post_election.ballot_paper_id') user_id = serializers.ReadOnlyField(source='user.id') candidate_results = CandidateResultSerializer(many=True, read_only=True)
Add ballot paper ID to API
## Code Before: from __future__ import unicode_literals from rest_framework import serializers from candidates.serializers import MembershipSerializer from .models import CandidateResult, ResultSet class CandidateResultSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CandidateResult fields = ( 'id', 'url', 'membership', 'result_set', 'num_ballots', 'is_winner', ) membership = MembershipSerializer(read_only=True) # result_set = ResultSetSerializer(read_only=True) class ResultSetSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ResultSet fields = ( 'id', 'url', 'candidate_results', 'ip_address', 'num_turnout_reported', 'num_spoilt_ballots', # 'post_result', 'user', 'user_id', ) # post_result = PostResultSerializer() user = serializers.ReadOnlyField(source='user.username') user_id = serializers.ReadOnlyField(source='user.id') candidate_results = CandidateResultSerializer(many=True, read_only=True) ## Instruction: Add ballot paper ID to API ## Code After: from __future__ import unicode_literals from rest_framework import serializers from candidates.serializers import MembershipSerializer from .models import CandidateResult, ResultSet class CandidateResultSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CandidateResult fields = ( 'id', 'url', 'membership', 'result_set', 'num_ballots', 'is_winner', ) membership = MembershipSerializer(read_only=True) # result_set = ResultSetSerializer(read_only=True) class ResultSetSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ResultSet fields = ( 'id', 'url', 'candidate_results', 'ip_address', 'num_turnout_reported', 'num_spoilt_ballots', 'user', 'user_id', 'ballot_paper_id' ) # post_result = PostResultSerializer() user = serializers.ReadOnlyField(source='user.username') ballot_paper_id = serializers.ReadOnlyField( source='post_election.ballot_paper_id') user_id = serializers.ReadOnlyField(source='user.id') candidate_results = CandidateResultSerializer(many=True, read_only=True)
from __future__ import unicode_literals from rest_framework import serializers from candidates.serializers import MembershipSerializer from .models import CandidateResult, ResultSet class CandidateResultSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = CandidateResult fields = ( 'id', 'url', 'membership', 'result_set', 'num_ballots', 'is_winner', ) membership = MembershipSerializer(read_only=True) # result_set = ResultSetSerializer(read_only=True) class ResultSetSerializer(serializers.HyperlinkedModelSerializer): class Meta: model = ResultSet fields = ( + 'id', - 'id', 'url', ? ------ + 'url', 'candidate_results', 'ip_address', - 'num_turnout_reported', 'num_spoilt_ballots', ? ---------------------- + 'num_turnout_reported', - # 'post_result', + 'num_spoilt_ballots', + 'user', - 'user', 'user_id', ? -------- + 'user_id', + 'ballot_paper_id' ) # post_result = PostResultSerializer() user = serializers.ReadOnlyField(source='user.username') + ballot_paper_id = serializers.ReadOnlyField( + source='post_election.ballot_paper_id') user_id = serializers.ReadOnlyField(source='user.id') candidate_results = CandidateResultSerializer(many=True, read_only=True)
351bfe236f183c069314f5df7d3c4b8f9d8699b4
final/problem6.py
final/problem6.py
class Person(object): def __init__(self, name): self.name = name def say(self, stuff): return self.name + ' says: ' + stuff def __str__(self): return self.name class Lecturer(Person): def lecture(self, stuff): return 'I believe that ' + Person.say(self, stuff) class Professor(Lecturer): def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) class ArrogantProfessor(Professor): def lecture(self, stuff): return 'It is obvious that ' + Person.say(self, stuff) def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) e = Person('eric') le = Lecturer('eric') pe = Professor('eric') ae = ArrogantProfessor('eric') e.say('the sky is blue') le.say('the sky is blue') le.lecture('the sky is blue') pe.say('the sky is blue') pe.lecture('the sky is blue') print(ae.say('the sky is blue')) print(ae.lecture('the sky is blue'))
class Person(object): def __init__(self, name): self.name = name def say(self, stuff): return self.name + ' says: ' + stuff def __str__(self): return self.name class Lecturer(Person): def lecture(self, stuff): return 'I believe that ' + Person.say(self, stuff) class Professor(Lecturer): def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) class ArrogantProfessor(Professor): def lecture(self, stuff): return 'It is obvious that ' + Lecturer.lecture(self, stuff) def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) e = Person('eric') le = Lecturer('eric') pe = Professor('eric') ae = ArrogantProfessor('eric') e.say('the sky is blue') le.say('the sky is blue') le.lecture('the sky is blue') pe.say('the sky is blue') pe.lecture('the sky is blue') print(ae.say('the sky is blue')) print(ae.lecture('the sky is blue'))
Modify lecture method in ArrogantProfessor class using inheritance
Modify lecture method in ArrogantProfessor class using inheritance
Python
mit
Kunal57/MIT_6.00.1x
class Person(object): def __init__(self, name): self.name = name def say(self, stuff): return self.name + ' says: ' + stuff def __str__(self): return self.name class Lecturer(Person): def lecture(self, stuff): return 'I believe that ' + Person.say(self, stuff) class Professor(Lecturer): def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) class ArrogantProfessor(Professor): def lecture(self, stuff): - return 'It is obvious that ' + Person.say(self, stuff) + return 'It is obvious that ' + Lecturer.lecture(self, stuff) def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) e = Person('eric') le = Lecturer('eric') pe = Professor('eric') ae = ArrogantProfessor('eric') e.say('the sky is blue') le.say('the sky is blue') le.lecture('the sky is blue') pe.say('the sky is blue') pe.lecture('the sky is blue') print(ae.say('the sky is blue')) print(ae.lecture('the sky is blue'))
Modify lecture method in ArrogantProfessor class using inheritance
## Code Before: class Person(object): def __init__(self, name): self.name = name def say(self, stuff): return self.name + ' says: ' + stuff def __str__(self): return self.name class Lecturer(Person): def lecture(self, stuff): return 'I believe that ' + Person.say(self, stuff) class Professor(Lecturer): def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) class ArrogantProfessor(Professor): def lecture(self, stuff): return 'It is obvious that ' + Person.say(self, stuff) def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) e = Person('eric') le = Lecturer('eric') pe = Professor('eric') ae = ArrogantProfessor('eric') e.say('the sky is blue') le.say('the sky is blue') le.lecture('the sky is blue') pe.say('the sky is blue') pe.lecture('the sky is blue') print(ae.say('the sky is blue')) print(ae.lecture('the sky is blue')) ## Instruction: Modify lecture method in ArrogantProfessor class using inheritance ## Code After: class Person(object): def __init__(self, name): self.name = name def say(self, stuff): return self.name + ' says: ' + stuff def __str__(self): return self.name class Lecturer(Person): def lecture(self, stuff): return 'I believe that ' + Person.say(self, stuff) class Professor(Lecturer): def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) class ArrogantProfessor(Professor): def lecture(self, stuff): return 'It is obvious that ' + Lecturer.lecture(self, stuff) def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) e = Person('eric') le = Lecturer('eric') pe = Professor('eric') ae = ArrogantProfessor('eric') e.say('the sky is blue') le.say('the sky is blue') le.lecture('the sky is blue') pe.say('the sky is blue') pe.lecture('the sky is blue') print(ae.say('the sky is blue')) print(ae.lecture('the sky is blue'))
class Person(object): def __init__(self, name): self.name = name def say(self, stuff): return self.name + ' says: ' + stuff def __str__(self): return self.name class Lecturer(Person): def lecture(self, stuff): return 'I believe that ' + Person.say(self, stuff) class Professor(Lecturer): def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) class ArrogantProfessor(Professor): def lecture(self, stuff): - return 'It is obvious that ' + Person.say(self, stuff) ? ^ --- ^^^ + return 'It is obvious that ' + Lecturer.lecture(self, stuff) ? ^^^^^^ ^^^^^^^ def say(self, stuff): return self.name + ' says: ' + self.lecture(stuff) e = Person('eric') le = Lecturer('eric') pe = Professor('eric') ae = ArrogantProfessor('eric') e.say('the sky is blue') le.say('the sky is blue') le.lecture('the sky is blue') pe.say('the sky is blue') pe.lecture('the sky is blue') print(ae.say('the sky is blue')) print(ae.lecture('the sky is blue'))
38b2bccc4146226d698f5abd1bed1107fe3bbe68
canon.py
canon.py
from melopy import * m = Melopy('canon', 50) melody = [] for start in ['d4', 'a3', 'b3m', 'f#3m', 'g3', 'd3', 'g3', 'a3']: if start.endswith('m'): scale = generate_minor_triad(start[:-1]) else: scale = generate_major_triad(start) for note in scale: melody.append(note) m.add_melody(melody, 0.2) m.add_note('e4', 0.2) m.render()
from melopy import * m = Melopy('canon', 50) melody = [] for start in ['d4', 'a3', 'bm3', 'f#m3', 'g3', 'd3', 'g3', 'a3']: if start.endswith('m'): scale = m.generate_minor_triad(start[:-1]) else: scale = m.generate_major_triad(start) for note in scale: melody.append(note) m.add_melody(melody, 0.2) m.add_note('e4', 0.2) m.render()
Revert "Added "add_rest(length)" method. Changed iterate and generate functions to be outside of the class."
Revert "Added "add_rest(length)" method. Changed iterate and generate functions to be outside of the class." This reverts commit 672069e8f8f7ded4537362707378f32cccde1ae6.
Python
mit
juliowaissman/Melopy,jdan/Melopy
from melopy import * m = Melopy('canon', 50) melody = [] - for start in ['d4', 'a3', 'b3m', 'f#3m', 'g3', 'd3', 'g3', 'a3']: + for start in ['d4', 'a3', 'bm3', 'f#m3', 'g3', 'd3', 'g3', 'a3']: if start.endswith('m'): - scale = generate_minor_triad(start[:-1]) + scale = m.generate_minor_triad(start[:-1]) else: - scale = generate_major_triad(start) + scale = m.generate_major_triad(start) for note in scale: melody.append(note) - + m.add_melody(melody, 0.2) m.add_note('e4', 0.2) m.render()
Revert "Added "add_rest(length)" method. Changed iterate and generate functions to be outside of the class."
## Code Before: from melopy import * m = Melopy('canon', 50) melody = [] for start in ['d4', 'a3', 'b3m', 'f#3m', 'g3', 'd3', 'g3', 'a3']: if start.endswith('m'): scale = generate_minor_triad(start[:-1]) else: scale = generate_major_triad(start) for note in scale: melody.append(note) m.add_melody(melody, 0.2) m.add_note('e4', 0.2) m.render() ## Instruction: Revert "Added "add_rest(length)" method. Changed iterate and generate functions to be outside of the class." ## Code After: from melopy import * m = Melopy('canon', 50) melody = [] for start in ['d4', 'a3', 'bm3', 'f#m3', 'g3', 'd3', 'g3', 'a3']: if start.endswith('m'): scale = m.generate_minor_triad(start[:-1]) else: scale = m.generate_major_triad(start) for note in scale: melody.append(note) m.add_melody(melody, 0.2) m.add_note('e4', 0.2) m.render()
from melopy import * m = Melopy('canon', 50) melody = [] - for start in ['d4', 'a3', 'b3m', 'f#3m', 'g3', 'd3', 'g3', 'a3']: ? - - + for start in ['d4', 'a3', 'bm3', 'f#m3', 'g3', 'd3', 'g3', 'a3']: ? + + if start.endswith('m'): - scale = generate_minor_triad(start[:-1]) + scale = m.generate_minor_triad(start[:-1]) ? ++ else: - scale = generate_major_triad(start) + scale = m.generate_major_triad(start) ? ++ for note in scale: melody.append(note) - + m.add_melody(melody, 0.2) m.add_note('e4', 0.2) m.render()
aeef2c319ea5c7d59a0bdf69a5fbe5dc8a1ab1bc
wagtailnews/feeds.py
wagtailnews/feeds.py
from django.contrib.syndication.views import Feed from django.utils import timezone class LatestEnteriesFeed(Feed): description = "Latest news" def items(self): now = timezone.now() NewsItem = self.news_index.get_newsitem_model() newsitem_list = NewsItem.objects.live().order_by('-date').filter( newsindex=self.news_index, date__lte=now)[:20] return newsitem_list def item_link(self, item): return item.url() def __init__(self, news_index): super(LatestEnteriesFeed, self).__init__() self.news_index = news_index self.title = news_index.title self.link = news_index.url def item_pubdate(self, item): return item.date
from django.contrib.syndication.views import Feed from django.utils import timezone class LatestEnteriesFeed(Feed): def items(self): now = timezone.now() NewsItem = self.news_index.get_newsitem_model() newsitem_list = NewsItem.objects.live().order_by('-date').filter( newsindex=self.news_index, date__lte=now)[:20] return newsitem_list def item_link(self, item): return item.full_url() def item_guid(self, item): return item.full_url() item_guid_is_permalink = True def item_pubdate(self, item): return item.date def __init__(self, news_index): super(LatestEnteriesFeed, self).__init__() self.news_index = news_index self.title = news_index.title self.description = news_index.title self.link = news_index.full_url self.feed_url = self.link + news_index.reverse_subpage('feed')
Add some extra item methods / parameters to LatestEntriesFeed
Add some extra item methods / parameters to LatestEntriesFeed
Python
bsd-2-clause
takeflight/wagtailnews,takeflight/wagtailnews,takeflight/wagtailnews,takeflight/wagtailnews
from django.contrib.syndication.views import Feed from django.utils import timezone class LatestEnteriesFeed(Feed): - description = "Latest news" def items(self): now = timezone.now() NewsItem = self.news_index.get_newsitem_model() newsitem_list = NewsItem.objects.live().order_by('-date').filter( newsindex=self.news_index, date__lte=now)[:20] return newsitem_list def item_link(self, item): - return item.url() + return item.full_url() + + def item_guid(self, item): + return item.full_url() + + item_guid_is_permalink = True + + def item_pubdate(self, item): + return item.date def __init__(self, news_index): super(LatestEnteriesFeed, self).__init__() self.news_index = news_index + self.title = news_index.title - self.link = news_index.url + self.description = news_index.title - def item_pubdate(self, item): - return item.date + self.link = news_index.full_url + self.feed_url = self.link + news_index.reverse_subpage('feed')
Add some extra item methods / parameters to LatestEntriesFeed
## Code Before: from django.contrib.syndication.views import Feed from django.utils import timezone class LatestEnteriesFeed(Feed): description = "Latest news" def items(self): now = timezone.now() NewsItem = self.news_index.get_newsitem_model() newsitem_list = NewsItem.objects.live().order_by('-date').filter( newsindex=self.news_index, date__lte=now)[:20] return newsitem_list def item_link(self, item): return item.url() def __init__(self, news_index): super(LatestEnteriesFeed, self).__init__() self.news_index = news_index self.title = news_index.title self.link = news_index.url def item_pubdate(self, item): return item.date ## Instruction: Add some extra item methods / parameters to LatestEntriesFeed ## Code After: from django.contrib.syndication.views import Feed from django.utils import timezone class LatestEnteriesFeed(Feed): def items(self): now = timezone.now() NewsItem = self.news_index.get_newsitem_model() newsitem_list = NewsItem.objects.live().order_by('-date').filter( newsindex=self.news_index, date__lte=now)[:20] return newsitem_list def item_link(self, item): return item.full_url() def item_guid(self, item): return item.full_url() item_guid_is_permalink = True def item_pubdate(self, item): return item.date def __init__(self, news_index): super(LatestEnteriesFeed, self).__init__() self.news_index = news_index self.title = news_index.title self.description = news_index.title self.link = news_index.full_url self.feed_url = self.link + news_index.reverse_subpage('feed')
from django.contrib.syndication.views import Feed from django.utils import timezone class LatestEnteriesFeed(Feed): - description = "Latest news" def items(self): now = timezone.now() NewsItem = self.news_index.get_newsitem_model() newsitem_list = NewsItem.objects.live().order_by('-date').filter( newsindex=self.news_index, date__lte=now)[:20] return newsitem_list def item_link(self, item): - return item.url() + return item.full_url() ? +++++ + + def item_guid(self, item): + return item.full_url() + + item_guid_is_permalink = True + + def item_pubdate(self, item): + return item.date def __init__(self, news_index): super(LatestEnteriesFeed, self).__init__() self.news_index = news_index + self.title = news_index.title - self.link = news_index.url ? ^ - ^^ + self.description = news_index.title ? ^^^^^ ++++ ^^^ + - def item_pubdate(self, item): - return item.date + self.link = news_index.full_url + self.feed_url = self.link + news_index.reverse_subpage('feed')
782c1b8379d38f99de413398919aa797af0df645
plot_s_curve.py
plot_s_curve.py
import matplotlib.pyplot as plt from numpy import array, log import sys x = [] y = [] infile = open(sys.argv[1]) for line in infile: data = line.replace('\n','').split() print(data) try : x.append(float(data[0])) y.append(float(data[1])) except ValueError: pass #x = array(x) #y = array(y) figManager = plt.get_current_fig_manager() figManager.window.showMaximized() #plt.plot(log(x),log(y)) plt.plot(x,y,"o") plt.ylabel('$\log T$') plt.xlabel('$\log \Sigma$') plt.grid() plt.show()
import matplotlib.pyplot as plt from numpy import array, log import sys import os import matplotlib.animation as animation fig = plt.figure() inpath = sys.argv[1] if os.path.isfile(inpath): print('Visiting {}'.format(inpath)) filenames = [inpath] else: _filenames = os.listdir(inpath) _filenames.sort() filesnames = [inpath + '/' + fname for fname in _filesnames if '_tot.dat' in fname] print('Visiting all files of {}'.format(inpath)) axline, = plt.plot(0, 0, 'o') def draw_once(filename): x = [] y = [] if not 'tot.dat' in filename: return ([0], [0]) else: print('Visiting {}'.format(filename)) outfile = filename.replace('.dat', '.png') for line in open(filename): data = line.replace('\n', '').split() try : print (data) xData = float(data[0]) yData = float(data[1]) x.append(xData) y.append(yData) except ValueError: pass axline.set_xdata(x) axline.set_ydata(y) return axline, def init(): print('Initialisation') plt.ylabel('$\log T$') plt.xlabel('$\log \Sigma$') plt.xlim(1.8, 4) plt.ylim(6, 8) plt.grid() if len(filenames) > 1: ani = animation.FuncAnimation(fig, draw_once, filenames, init_func=init, interval=10) else: init() draw_once(filenames[0]) plt.show() # x, y = draw_once(filenames[2]) # plt.plot(x, y, 'o')
Use animation if dirname is provided
Use animation if dirname is provided
Python
mit
M2-AAIS/BAD
import matplotlib.pyplot as plt from numpy import array, log import sys + import os - x = [] + import matplotlib.animation as animation - y = [] + fig = plt.figure() - infile = open(sys.argv[1]) + inpath = sys.argv[1] - for line in infile: - data = line.replace('\n','').split() - print(data) - try : - x.append(float(data[0])) - y.append(float(data[1])) - except ValueError: - pass + if os.path.isfile(inpath): + print('Visiting {}'.format(inpath)) + filenames = [inpath] + else: + _filenames = os.listdir(inpath) + _filenames.sort() + filesnames = [inpath + '/' + fname for fname in _filesnames if '_tot.dat' in fname] + + print('Visiting all files of {}'.format(inpath)) + axline, = plt.plot(0, 0, 'o') - #x = array(x) - #y = array(y) - figManager = plt.get_current_fig_manager() - figManager.window.showMaximized() - #plt.plot(log(x),log(y)) - plt.plot(x,y,"o") + def draw_once(filename): + x = [] + y = [] + if not 'tot.dat' in filename: + return ([0], [0]) + else: + print('Visiting {}'.format(filename)) + outfile = filename.replace('.dat', '.png') + + for line in open(filename): + data = line.replace('\n', '').split() + try : + print (data) + xData = float(data[0]) + yData = float(data[1]) + x.append(xData) + y.append(yData) + except ValueError: + pass + axline.set_xdata(x) + axline.set_ydata(y) - plt.ylabel('$\log T$') - plt.xlabel('$\log \Sigma$') - plt.grid() - plt.show() + return axline, + + def init(): + print('Initialisation') + plt.ylabel('$\log T$') + plt.xlabel('$\log \Sigma$') + plt.xlim(1.8, 4) + plt.ylim(6, 8) + plt.grid() + + if len(filenames) > 1: + ani = animation.FuncAnimation(fig, draw_once, filenames, init_func=init, interval=10) + else: + init() + draw_once(filenames[0]) + plt.show() + # x, y = draw_once(filenames[2]) + # plt.plot(x, y, 'o') + +
Use animation if dirname is provided
## Code Before: import matplotlib.pyplot as plt from numpy import array, log import sys x = [] y = [] infile = open(sys.argv[1]) for line in infile: data = line.replace('\n','').split() print(data) try : x.append(float(data[0])) y.append(float(data[1])) except ValueError: pass #x = array(x) #y = array(y) figManager = plt.get_current_fig_manager() figManager.window.showMaximized() #plt.plot(log(x),log(y)) plt.plot(x,y,"o") plt.ylabel('$\log T$') plt.xlabel('$\log \Sigma$') plt.grid() plt.show() ## Instruction: Use animation if dirname is provided ## Code After: import matplotlib.pyplot as plt from numpy import array, log import sys import os import matplotlib.animation as animation fig = plt.figure() inpath = sys.argv[1] if os.path.isfile(inpath): print('Visiting {}'.format(inpath)) filenames = [inpath] else: _filenames = os.listdir(inpath) _filenames.sort() filesnames = [inpath + '/' + fname for fname in _filesnames if '_tot.dat' in fname] print('Visiting all files of {}'.format(inpath)) axline, = plt.plot(0, 0, 'o') def draw_once(filename): x = [] y = [] if not 'tot.dat' in filename: return ([0], [0]) else: print('Visiting {}'.format(filename)) outfile = filename.replace('.dat', '.png') for line in open(filename): data = line.replace('\n', '').split() try : print (data) xData = float(data[0]) yData = float(data[1]) x.append(xData) y.append(yData) except ValueError: pass axline.set_xdata(x) axline.set_ydata(y) return axline, def init(): print('Initialisation') plt.ylabel('$\log T$') plt.xlabel('$\log \Sigma$') plt.xlim(1.8, 4) plt.ylim(6, 8) plt.grid() if len(filenames) > 1: ani = animation.FuncAnimation(fig, draw_once, filenames, init_func=init, interval=10) else: init() draw_once(filenames[0]) plt.show() # x, y = draw_once(filenames[2]) # plt.plot(x, y, 'o')
import matplotlib.pyplot as plt from numpy import array, log import sys + import os - x = [] + import matplotlib.animation as animation - y = [] + fig = plt.figure() - infile = open(sys.argv[1]) + inpath = sys.argv[1] - for line in infile: - data = line.replace('\n','').split() - print(data) - try : - x.append(float(data[0])) - y.append(float(data[1])) - except ValueError: - pass + if os.path.isfile(inpath): + print('Visiting {}'.format(inpath)) + filenames = [inpath] + else: + _filenames = os.listdir(inpath) + _filenames.sort() + filesnames = [inpath + '/' + fname for fname in _filesnames if '_tot.dat' in fname] + + print('Visiting all files of {}'.format(inpath)) + axline, = plt.plot(0, 0, 'o') - #x = array(x) - #y = array(y) - figManager = plt.get_current_fig_manager() - figManager.window.showMaximized() - #plt.plot(log(x),log(y)) - plt.plot(x,y,"o") + def draw_once(filename): + x = [] + y = [] + if not 'tot.dat' in filename: + return ([0], [0]) + else: + print('Visiting {}'.format(filename)) + outfile = filename.replace('.dat', '.png') + + for line in open(filename): + data = line.replace('\n', '').split() + try : + print (data) + xData = float(data[0]) + yData = float(data[1]) + x.append(xData) + y.append(yData) + except ValueError: + pass + axline.set_xdata(x) + axline.set_ydata(y) + + return axline, + + def init(): + print('Initialisation') - plt.ylabel('$\log T$') + plt.ylabel('$\log T$') ? ++++ - plt.xlabel('$\log \Sigma$') + plt.xlabel('$\log \Sigma$') ? ++++ + plt.xlim(1.8, 4) + plt.ylim(6, 8) - plt.grid() + plt.grid() ? ++++ + + if len(filenames) > 1: + ani = animation.FuncAnimation(fig, draw_once, filenames, init_func=init, interval=10) + else: + init() + draw_once(filenames[0]) - plt.show() + plt.show() ? ++++ + # x, y = draw_once(filenames[2]) + # plt.plot(x, y, 'o') +
7cc76c2716ce54882b7eced67f4435acd100cd83
example/src/hello-world/hello-world.py
example/src/hello-world/hello-world.py
import sys sys.path.append('../../../') import numpy as np import cv2 import cvui cvui.random_number_generator(1, 2) # Create a black image img = np.zeros((512,512,3), np.uint8) cv2.namedWindow('Window') # Draw a diagonal blue line with thickness of 5 px cv2.line(img,(0,0),(511,511),(255,0,0),5) cv2.rectangle(img,(384,0),(510,128),(0,255,0),3) cv2.circle(img,(447,63), 63, (0,0,255), -1) font = cv2.FONT_HERSHEY_SIMPLEX cv2.putText(img,'OpenCV',(10,500), font, 4,(255,255,255),2,cv2.LINE_AA) cv2.imshow('Window', img) k = cv2.waitKey(0) if k == 27: # wait for ESC key to exit cv2.destroyAllWindows()
import sys sys.path.append('../../../') import numpy as np import cv2 import cvui cvui.random_number_generator(1, 2) # Create a black image img = np.zeros((512,512,3), np.uint8) cv2.namedWindow('Window') # Change background color img[:] = (49, 52, 49) # Draw a diagonal blue line with thickness of 5 px cv2.line(img,(0,0),(511,511),(255,0,0),5) cv2.rectangle(img,(384,0),(510,128),(0,255,0),3) cv2.circle(img,(447,63), 63, (0,0,255), -1) font = cv2.FONT_HERSHEY_SIMPLEX cv2.putText(img,'OpenCV',(10,500), font, 4,(255,255,255),2,cv2.LINE_AA) cv2.imshow('Window', img) k = cv2.waitKey(0) if k == 27: # wait for ESC key to exit cv2.destroyAllWindows()
Add a nice background color
Add a nice background color
Python
mit
Dovyski/cvui,Dovyski/cvui,Dovyski/cvui
import sys sys.path.append('../../../') import numpy as np import cv2 import cvui cvui.random_number_generator(1, 2) # Create a black image img = np.zeros((512,512,3), np.uint8) cv2.namedWindow('Window') + + # Change background color + img[:] = (49, 52, 49) # Draw a diagonal blue line with thickness of 5 px cv2.line(img,(0,0),(511,511),(255,0,0),5) cv2.rectangle(img,(384,0),(510,128),(0,255,0),3) cv2.circle(img,(447,63), 63, (0,0,255), -1) font = cv2.FONT_HERSHEY_SIMPLEX cv2.putText(img,'OpenCV',(10,500), font, 4,(255,255,255),2,cv2.LINE_AA) cv2.imshow('Window', img) k = cv2.waitKey(0) if k == 27: # wait for ESC key to exit cv2.destroyAllWindows()
Add a nice background color
## Code Before: import sys sys.path.append('../../../') import numpy as np import cv2 import cvui cvui.random_number_generator(1, 2) # Create a black image img = np.zeros((512,512,3), np.uint8) cv2.namedWindow('Window') # Draw a diagonal blue line with thickness of 5 px cv2.line(img,(0,0),(511,511),(255,0,0),5) cv2.rectangle(img,(384,0),(510,128),(0,255,0),3) cv2.circle(img,(447,63), 63, (0,0,255), -1) font = cv2.FONT_HERSHEY_SIMPLEX cv2.putText(img,'OpenCV',(10,500), font, 4,(255,255,255),2,cv2.LINE_AA) cv2.imshow('Window', img) k = cv2.waitKey(0) if k == 27: # wait for ESC key to exit cv2.destroyAllWindows() ## Instruction: Add a nice background color ## Code After: import sys sys.path.append('../../../') import numpy as np import cv2 import cvui cvui.random_number_generator(1, 2) # Create a black image img = np.zeros((512,512,3), np.uint8) cv2.namedWindow('Window') # Change background color img[:] = (49, 52, 49) # Draw a diagonal blue line with thickness of 5 px cv2.line(img,(0,0),(511,511),(255,0,0),5) cv2.rectangle(img,(384,0),(510,128),(0,255,0),3) cv2.circle(img,(447,63), 63, (0,0,255), -1) font = cv2.FONT_HERSHEY_SIMPLEX cv2.putText(img,'OpenCV',(10,500), font, 4,(255,255,255),2,cv2.LINE_AA) cv2.imshow('Window', img) k = cv2.waitKey(0) if k == 27: # wait for ESC key to exit cv2.destroyAllWindows()
import sys sys.path.append('../../../') import numpy as np import cv2 import cvui cvui.random_number_generator(1, 2) # Create a black image img = np.zeros((512,512,3), np.uint8) cv2.namedWindow('Window') + + # Change background color + img[:] = (49, 52, 49) # Draw a diagonal blue line with thickness of 5 px cv2.line(img,(0,0),(511,511),(255,0,0),5) cv2.rectangle(img,(384,0),(510,128),(0,255,0),3) cv2.circle(img,(447,63), 63, (0,0,255), -1) font = cv2.FONT_HERSHEY_SIMPLEX cv2.putText(img,'OpenCV',(10,500), font, 4,(255,255,255),2,cv2.LINE_AA) cv2.imshow('Window', img) k = cv2.waitKey(0) if k == 27: # wait for ESC key to exit cv2.destroyAllWindows()
c1e17f9501fb9afc69f9fba288fa9e4cfac262e2
tviit/models.py
tviit/models.py
from __future__ import unicode_literals from django.conf import settings import uuid from django.db import models class Tviit(models.Model): uuid = models.CharField(unique=True, max_length=40, default=uuid.uuid4().int, editable=False) sender = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, verbose_name="Tviit sender", ) content = models.TextField(max_length=160) created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) reply = models.ForeignKey("self", null=True, blank=True) class Meta: ordering = ('created',)
from __future__ import unicode_literals from django.conf import settings from django.db import models from django.utils.deconstruct import deconstructible from django.dispatch import receiver from django.forms import ModelForm import uuid, os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) @deconstructible class PathAndRename(object): def __init__(self, sub_path): self.path = sub_path def __call__(self, instance, filename): ext = filename.split('.')[-1] # set filename as random string filename = '{}.{}'.format(uuid.uuid4().hex, ext) # return the whole path to the file return os.path.join(self.path, filename) path_and_rename = PathAndRename("attachments") class Tviit(models.Model): uuid = models.CharField(unique=True, max_length=40, default=uuid.uuid4().int, editable=False) sender = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, verbose_name="Tviit sender", ) content = models.TextField(max_length=160) created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) image = models.ImageField(upload_to=path_and_rename, null=True, blank=True) reply = models.ForeignKey("self", null=True, blank=True) class Meta: ordering = ('created',) def __str__(self): return '%s - %s' % (self.created, self.sender.username) class TviitForm(ModelForm): class Meta: model = Tviit fields = ['content', 'image'] class EditTviitForm(ModelForm): #attachments = MultiFileField(required=False, max_num=12, max_file_size=1024 * 1024 * 500) class Meta: model = Tviit fields = ['content', 'image']
Add image into Tviit Model Add PathAndRename function to rename image path Add TviitForm
Add image into Tviit Model Add PathAndRename function to rename image path Add TviitForm
Python
mit
DeWaster/Tviserrys,DeWaster/Tviserrys
from __future__ import unicode_literals from django.conf import settings + from django.db import models + from django.utils.deconstruct import deconstructible + from django.dispatch import receiver + from django.forms import ModelForm - import uuid + import uuid, os + + BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + + @deconstructible + class PathAndRename(object): + + def __init__(self, sub_path): + self.path = sub_path + + def __call__(self, instance, filename): + ext = filename.split('.')[-1] + # set filename as random string + filename = '{}.{}'.format(uuid.uuid4().hex, ext) + # return the whole path to the file + return os.path.join(self.path, filename) + + path_and_rename = PathAndRename("attachments") - from django.db import models + class Tviit(models.Model): - class Tviit(models.Model): uuid = models.CharField(unique=True, max_length=40, default=uuid.uuid4().int, editable=False) sender = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, verbose_name="Tviit sender", ) content = models.TextField(max_length=160) created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) + image = models.ImageField(upload_to=path_and_rename, null=True, blank=True) reply = models.ForeignKey("self", null=True, blank=True) - class Meta: ordering = ('created',) + + def __str__(self): + return '%s - %s' % (self.created, self.sender.username) + + + + + + class TviitForm(ModelForm): + + class Meta: + model = Tviit + fields = ['content', 'image'] + + + class EditTviitForm(ModelForm): + #attachments = MultiFileField(required=False, max_num=12, max_file_size=1024 * 1024 * 500) + + class Meta: + model = Tviit + fields = ['content', 'image']
Add image into Tviit Model Add PathAndRename function to rename image path Add TviitForm
## Code Before: from __future__ import unicode_literals from django.conf import settings import uuid from django.db import models class Tviit(models.Model): uuid = models.CharField(unique=True, max_length=40, default=uuid.uuid4().int, editable=False) sender = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, verbose_name="Tviit sender", ) content = models.TextField(max_length=160) created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) reply = models.ForeignKey("self", null=True, blank=True) class Meta: ordering = ('created',) ## Instruction: Add image into Tviit Model Add PathAndRename function to rename image path Add TviitForm ## Code After: from __future__ import unicode_literals from django.conf import settings from django.db import models from django.utils.deconstruct import deconstructible from django.dispatch import receiver from django.forms import ModelForm import uuid, os BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) @deconstructible class PathAndRename(object): def __init__(self, sub_path): self.path = sub_path def __call__(self, instance, filename): ext = filename.split('.')[-1] # set filename as random string filename = '{}.{}'.format(uuid.uuid4().hex, ext) # return the whole path to the file return os.path.join(self.path, filename) path_and_rename = PathAndRename("attachments") class Tviit(models.Model): uuid = models.CharField(unique=True, max_length=40, default=uuid.uuid4().int, editable=False) sender = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, verbose_name="Tviit sender", ) content = models.TextField(max_length=160) created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) image = models.ImageField(upload_to=path_and_rename, null=True, blank=True) reply = models.ForeignKey("self", null=True, blank=True) class Meta: ordering = ('created',) def __str__(self): return '%s - %s' % (self.created, self.sender.username) class TviitForm(ModelForm): class Meta: model = Tviit fields = ['content', 'image'] class EditTviitForm(ModelForm): #attachments = MultiFileField(required=False, max_num=12, max_file_size=1024 * 1024 * 500) class Meta: model = Tviit fields = ['content', 'image']
from __future__ import unicode_literals from django.conf import settings + from django.db import models + from django.utils.deconstruct import deconstructible + from django.dispatch import receiver + from django.forms import ModelForm - import uuid + import uuid, os ? ++++ + + BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + + @deconstructible + class PathAndRename(object): + + def __init__(self, sub_path): + self.path = sub_path + + def __call__(self, instance, filename): + ext = filename.split('.')[-1] + # set filename as random string + filename = '{}.{}'.format(uuid.uuid4().hex, ext) + # return the whole path to the file + return os.path.join(self.path, filename) + + path_and_rename = PathAndRename("attachments") - from django.db import models + class Tviit(models.Model): - class Tviit(models.Model): uuid = models.CharField(unique=True, max_length=40, default=uuid.uuid4().int, editable=False) sender = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, verbose_name="Tviit sender", ) content = models.TextField(max_length=160) created = models.DateTimeField(auto_now_add=True) modified = models.DateTimeField(auto_now=True) + image = models.ImageField(upload_to=path_and_rename, null=True, blank=True) reply = models.ForeignKey("self", null=True, blank=True) - class Meta: ordering = ('created',) + + def __str__(self): + return '%s - %s' % (self.created, self.sender.username) + + + + + + class TviitForm(ModelForm): + + class Meta: + model = Tviit + fields = ['content', 'image'] + + + class EditTviitForm(ModelForm): + #attachments = MultiFileField(required=False, max_num=12, max_file_size=1024 * 1024 * 500) + + class Meta: + model = Tviit + fields = ['content', 'image']
5e50f8127a48a08d66bdc9d8aec28064b33ad864
game.py
game.py
import datetime

import map_loader


class Game(object):
    def __init__(self, name=name, players=players, map=None):
        """ Initialize a new game. """
        self.name = name,
        self.players = players,  # List of player usernames
        self.status = 'Waiting',
        self.raw_state = self.generate_clean_state(),  # JSON object
        self.created = datetime.datetime.now(),

        map = 'default' if map is None else map
        self.map = map.read_map_file(map)

    def generate_clean_state(self):
        """ Generates a blank game state JSON object. """
        return '{}'

    def load_from_state(self):
        """ Load game attributes from raw game state. """
        pass

    def serialize(self):
        """ Turn game into a serialized game state for storage. """
        pass

    def update(self):
        """ Execute a round. """
        pass
import datetime
import json

import map_loader


class GAME_STATUS(object):
    """ Game status constants. """
    lobby = 'waiting for players'
    waiting = 'waiting for moves'
    playing = 'playing'
    cancelled = 'cancelled'
    complete = 'complete'


class Game(object):
    def __init__(self, name=name, players=players, map='default'):
        """ Initialize a new game. """
        self.name = name,
        self.players = players,  # List of player usernames
        self.status = GAME_STATUS.lobby,
        self.created = datetime.datetime.now(),

        # These attributes are persisted in the raw_state, not DB properties
        self.map = map.read_map_file(map)
        self.current_turn = 0
        self.max_turns = 0

        self.raw_state = self.serialize(),  # JSON state (a DB property)

    def load_state_from_json(self):
        """ Load game attributes from raw game state. """
        state = json.loads(self.raw_state)
        self.map = state['map']
        self.current_turn, self.max_turns = state['turn']

    def serialize_state(self):
        """ Turn game state into a serialized game state for storage. """
        state = {
            'map': self.map,
            'turn': [self.current_turn, self.max_turns],
        }
        return json.dumps(state)

    def update(self):
        """ Execute a round. """
        self.current_turn += 1
        if self.current_turn == self.max_turns:
            self.status = GAME_STATUS.complete
Add some state related methods to Game
Add some state related methods to Game
Python
mit
supermitch/mech-ai,supermitch/mech-ai,supermitch/mech-ai
import datetime + import json import map_loader + class GAME_STATUS(object): + """ Game status constants. """ + lobby = 'waiting for players' + waiting = 'waiting for moves' + playing = 'playing' + cancelled = 'cancelled' + complete = 'complete' + + class Game(object): - def __init__(self, name=name, players=players, map=None): + def __init__(self, name=name, players=players, map='default'): """ Initialize a new game. """ - self.name = name, self.players = players, # List of player usernames + self.status = GAME_STATUS.lobby, - self.status = 'Waiting', - self.raw_state = self.generate_clean_state(), # JSON object self.created = datetime.datetime.now(), - map = 'default' if map is None else map + # These attributes are persisted in the raw_state, not DB properties self.map = map.read_map_file(map) + self.current_turn = 0 + self.max_turns = 0 + self.raw_state = self.serialize(), # JSON state (a DB property) - def generate_clean_state(self): - """ Generates a blank game state JSON object. """ - return '{}' - def load_from_state(self): + def load_state_from_json(self): """ Load game attributes from raw game state. """ - pass + state = json.loads(self.raw_state) + self.map = state['map'] + self.current_turn, self.max_turns = state['turn'] - def serialize(self): + def serialize_state(self): - """ Turn game into a serialized game state for storage. """ + """ Turn game state into a serialized game state for storage. """ + state = { + 'map': self.map, + 'turn': [self.current_turn, self.max_turns], - pass + } + return json.dumps(state) def update(self): """ Execute a round. """ - pass + self.current_turn += 1 + if self.current_turn == self.max_turns: + self.status = GAME_STATUS.complete
Add some state related methods to Game
## Code Before: import datetime import map_loader class Game(object): def __init__(self, name=name, players=players, map=None): """ Initialize a new game. """ self.name = name, self.players = players, # List of player usernames self.status = 'Waiting', self.raw_state = self.generate_clean_state(), # JSON object self.created = datetime.datetime.now(), map = 'default' if map is None else map self.map = map.read_map_file(map) def generate_clean_state(self): """ Generates a blank game state JSON object. """ return '{}' def load_from_state(self): """ Load game attributes from raw game state. """ pass def serialize(self): """ Turn game into a serialized game state for storage. """ pass def update(self): """ Execute a round. """ pass ## Instruction: Add some state related methods to Game ## Code After: import datetime import json import map_loader class GAME_STATUS(object): """ Game status constants. """ lobby = 'waiting for players' waiting = 'waiting for moves' playing = 'playing' cancelled = 'cancelled' complete = 'complete' class Game(object): def __init__(self, name=name, players=players, map='default'): """ Initialize a new game. """ self.name = name, self.players = players, # List of player usernames self.status = GAME_STATUS.lobby, self.created = datetime.datetime.now(), # These attributes are persisted in the raw_state, not DB properties self.map = map.read_map_file(map) self.current_turn = 0 self.max_turns = 0 self.raw_state = self.serialize(), # JSON state (a DB property) def load_state_from_json(self): """ Load game attributes from raw game state. """ state = json.loads(self.raw_state) self.map = state['map'] self.current_turn, self.max_turns = state['turn'] def serialize_state(self): """ Turn game state into a serialized game state for storage. """ state = { 'map': self.map, 'turn': [self.current_turn, self.max_turns], } return json.dumps(state) def update(self): """ Execute a round. """ self.current_turn += 1 if self.current_turn == self.max_turns: self.status = GAME_STATUS.complete
import datetime + import json import map_loader + class GAME_STATUS(object): + """ Game status constants. """ + lobby = 'waiting for players' + waiting = 'waiting for moves' + playing = 'playing' + cancelled = 'cancelled' + complete = 'complete' + + class Game(object): - def __init__(self, name=name, players=players, map=None): ? ^^^ + def __init__(self, name=name, players=players, map='default'): ? ^^ ++++++ """ Initialize a new game. """ - self.name = name, self.players = players, # List of player usernames + self.status = GAME_STATUS.lobby, - self.status = 'Waiting', - self.raw_state = self.generate_clean_state(), # JSON object self.created = datetime.datetime.now(), - map = 'default' if map is None else map + # These attributes are persisted in the raw_state, not DB properties self.map = map.read_map_file(map) + self.current_turn = 0 + self.max_turns = 0 + self.raw_state = self.serialize(), # JSON state (a DB property) - def generate_clean_state(self): - """ Generates a blank game state JSON object. """ - return '{}' - def load_from_state(self): ? ^^^^ + def load_state_from_json(self): ? ++++++ + ^^ """ Load game attributes from raw game state. """ - pass + state = json.loads(self.raw_state) + self.map = state['map'] + self.current_turn, self.max_turns = state['turn'] - def serialize(self): + def serialize_state(self): ? ++++++ - """ Turn game into a serialized game state for storage. """ + """ Turn game state into a serialized game state for storage. """ ? ++++++ + state = { + 'map': self.map, + 'turn': [self.current_turn, self.max_turns], - pass ? ^^^^ + } ? ^ + return json.dumps(state) def update(self): """ Execute a round. """ - pass + self.current_turn += 1 + if self.current_turn == self.max_turns: + self.status = GAME_STATUS.complete
51f0cd392cab5737cb444b9eef8700fcd4713ea0
tests/recipes_test.py
tests/recipes_test.py
"""Runs simulation tests and lint on the recipes.""" import os import subprocess ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) def recipes_py(*args): subprocess.check_call([ os.path.join(ROOT_DIR, 'recipes', 'recipes.py'), '--use-bootstrap'] + list(args)) recipes_py('test', 'run') recipes_py('lint')
"""Runs simulation tests and lint on the recipes.""" import os import subprocess ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) def recipes_py(*args): subprocess.check_call([ os.path.join(ROOT_DIR, 'recipes', 'recipes.py') ] + list(args)) recipes_py('test', 'run') recipes_py('lint')
Remove deprecated (ignored) --use-bootstrap flag
[recipes] Remove deprecated (ignored) --use-bootstrap flag

This has been ignored for a long time, and I'd like to get rid of it.

R=jchinlee@chromium.org, ef4933a197ef7b4b3f55f1bec4942aead3637a2a@chromium.org

Change-Id: I240d59cc10a2882041ac2c8abfeb1894237516a6
Reviewed-on: https://chromium-review.googlesource.com/c/1407425
Reviewed-by: Nodir Turakulov <ef4933a197ef7b4b3f55f1bec4942aead3637a2a@chromium.org>
Commit-Queue: Robbie Iannucci <40f3d43a28ebae3cb819288542e1c84d73d962d5@chromium.org>
Python
bsd-3-clause
CoherentLabs/depot_tools,CoherentLabs/depot_tools
"""Runs simulation tests and lint on the recipes.""" import os import subprocess ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) def recipes_py(*args): subprocess.check_call([ - os.path.join(ROOT_DIR, 'recipes', 'recipes.py'), + os.path.join(ROOT_DIR, 'recipes', 'recipes.py') - '--use-bootstrap'] + list(args)) + ] + list(args)) recipes_py('test', 'run') recipes_py('lint')
Remove deprecated (ignored) --use-bootstrap flag
## Code Before: """Runs simulation tests and lint on the recipes.""" import os import subprocess ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) def recipes_py(*args): subprocess.check_call([ os.path.join(ROOT_DIR, 'recipes', 'recipes.py'), '--use-bootstrap'] + list(args)) recipes_py('test', 'run') recipes_py('lint') ## Instruction: Remove deprecated (ignored) --use-bootstrap flag ## Code After: """Runs simulation tests and lint on the recipes.""" import os import subprocess ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) def recipes_py(*args): subprocess.check_call([ os.path.join(ROOT_DIR, 'recipes', 'recipes.py') ] + list(args)) recipes_py('test', 'run') recipes_py('lint')
"""Runs simulation tests and lint on the recipes.""" import os import subprocess ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) def recipes_py(*args): subprocess.check_call([ - os.path.join(ROOT_DIR, 'recipes', 'recipes.py'), ? - + os.path.join(ROOT_DIR, 'recipes', 'recipes.py') - '--use-bootstrap'] + list(args)) + ] + list(args)) recipes_py('test', 'run') recipes_py('lint')
8ea3350c6944946b60732308c912dc240952237c
project/settings_production.py
project/settings_production.py
from .settings import *

# Update SITE infos to use the common port 80 to publish the webapp
SITE_FIXED = {
    'name': "Recalbox Manager",
    'ip': None,  # If 'None' find the ip automatically. Use a string to define another ip/hostname
    'port': None,  # If 'None' no port is added to hostname, so the server have to be reachable from port 80
}

# Production path to the Recalbox logs file
RECALBOX_LOGFILE_PATH = "/recalbox/share/system/logs"

# Use packaged assets
ASSETS_PACKAGED = True
from .settings import *

# Update SITE infos to use the common port 80 to publish the webapp
SITE_FIXED = {
    'name': "Recalbox Manager",
    'ip': None,  # If 'None' find the ip automatically. Use a string to define another ip/hostname
    'port': None,  # If 'None' no port is added to hostname, so the server have to be reachable from port 80
}

# Production path to the Recalbox logs file
RECALBOX_LOGFILE_PATH = "/root/recalbox.log"

# Use packaged assets
ASSETS_PACKAGED = True
Revert "Set the right recalbox.log path"
Revert "Set the right recalbox.log path"
Python
mit
recalbox/recalbox-manager,recalbox/recalbox-manager,recalbox/recalbox-manager,sveetch/recalbox-manager,sveetch/recalbox-manager,sveetch/recalbox-manager,sveetch/recalbox-manager,recalbox/recalbox-manager,sveetch/recalbox-manager,recalbox/recalbox-manager
from .settings import * # Update SITE infos to use the common port 80 to publish the webapp SITE_FIXED = { 'name': "Recalbox Manager", 'ip': None, # If 'None' find the ip automatically. Use a string to define another ip/hostname 'port': None, # If 'None' no port is added to hostname, so the server have to be reachable from port 80 } # Production path to the Recalbox logs file - RECALBOX_LOGFILE_PATH = "/recalbox/share/system/logs" + RECALBOX_LOGFILE_PATH = "/root/recalbox.log" # Use packaged assets ASSETS_PACKAGED = True
Revert "Set the right recalbox.log path"
## Code Before: from .settings import * # Update SITE infos to use the common port 80 to publish the webapp SITE_FIXED = { 'name': "Recalbox Manager", 'ip': None, # If 'None' find the ip automatically. Use a string to define another ip/hostname 'port': None, # If 'None' no port is added to hostname, so the server have to be reachable from port 80 } # Production path to the Recalbox logs file RECALBOX_LOGFILE_PATH = "/recalbox/share/system/logs" # Use packaged assets ASSETS_PACKAGED = True ## Instruction: Revert "Set the right recalbox.log path" ## Code After: from .settings import * # Update SITE infos to use the common port 80 to publish the webapp SITE_FIXED = { 'name': "Recalbox Manager", 'ip': None, # If 'None' find the ip automatically. Use a string to define another ip/hostname 'port': None, # If 'None' no port is added to hostname, so the server have to be reachable from port 80 } # Production path to the Recalbox logs file RECALBOX_LOGFILE_PATH = "/root/recalbox.log" # Use packaged assets ASSETS_PACKAGED = True
from .settings import * # Update SITE infos to use the common port 80 to publish the webapp SITE_FIXED = { 'name': "Recalbox Manager", 'ip': None, # If 'None' find the ip automatically. Use a string to define another ip/hostname 'port': None, # If 'None' no port is added to hostname, so the server have to be reachable from port 80 } # Production path to the Recalbox logs file - RECALBOX_LOGFILE_PATH = "/recalbox/share/system/logs" ? ^^^^^^^^^^^^^^ - + RECALBOX_LOGFILE_PATH = "/root/recalbox.log" ? +++++ ^ # Use packaged assets ASSETS_PACKAGED = True
76ec25090ece865d67f63c07c32aff7cebf105c1
ynr/apps/people/migrations/0034_get_birth_year.py
ynr/apps/people/migrations/0034_get_birth_year.py
from django.db import migrations


def get_birth_year(apps, schema_editor):
    Person = apps.get_model("people", "Person")
    for person in Person.objects.all():
        birth_year = person.birth_date.split("-")[0]
        person.birth_date = birth_year
        person.save()


class Migration(migrations.Migration):

    dependencies = [("people", "0033_auto_20210928_1007")]

    operations = [
        migrations.RunPython(get_birth_year, migrations.RunPython.noop)
    ]
from django.db import migrations


def get_birth_year(apps, schema_editor):
    Person = apps.get_model("people", "Person")
    for person in Person.objects.exclude(birth_date="").iterator():
        birth_year = person.birth_date.split("-")[0]
        person.birth_date = birth_year
        person.save()


class Migration(migrations.Migration):

    dependencies = [("people", "0033_auto_20210928_1007")]

    operations = [
        migrations.RunPython(get_birth_year, migrations.RunPython.noop)
    ]
Improve performance of birth date data migration
Improve performance of birth date data migration
Python
agpl-3.0
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
from django.db import migrations def get_birth_year(apps, schema_editor): Person = apps.get_model("people", "Person") - for person in Person.objects.all(): + for person in Person.objects.exclude(birth_date="").iterator(): birth_year = person.birth_date.split("-")[0] person.birth_date = birth_year person.save() class Migration(migrations.Migration): dependencies = [("people", "0033_auto_20210928_1007")] operations = [ migrations.RunPython(get_birth_year, migrations.RunPython.noop) ]
Improve performance of birth date data migration
## Code Before: from django.db import migrations def get_birth_year(apps, schema_editor): Person = apps.get_model("people", "Person") for person in Person.objects.all(): birth_year = person.birth_date.split("-")[0] person.birth_date = birth_year person.save() class Migration(migrations.Migration): dependencies = [("people", "0033_auto_20210928_1007")] operations = [ migrations.RunPython(get_birth_year, migrations.RunPython.noop) ] ## Instruction: Improve performance of birth date data migration ## Code After: from django.db import migrations def get_birth_year(apps, schema_editor): Person = apps.get_model("people", "Person") for person in Person.objects.exclude(birth_date="").iterator(): birth_year = person.birth_date.split("-")[0] person.birth_date = birth_year person.save() class Migration(migrations.Migration): dependencies = [("people", "0033_auto_20210928_1007")] operations = [ migrations.RunPython(get_birth_year, migrations.RunPython.noop) ]
from django.db import migrations def get_birth_year(apps, schema_editor): Person = apps.get_model("people", "Person") - for person in Person.objects.all(): + for person in Person.objects.exclude(birth_date="").iterator(): birth_year = person.birth_date.split("-")[0] person.birth_date = birth_year person.save() class Migration(migrations.Migration): dependencies = [("people", "0033_auto_20210928_1007")] operations = [ migrations.RunPython(get_birth_year, migrations.RunPython.noop) ]
6d03be0c21ac21e9d5ed826c5fe4991fa01743cb
main.py
main.py
import Tool
import time
from Tieba import Tieba


def main():
    print("Local Time:", time.asctime(time.localtime()))
    # Read Cookies
    cookies = Tool.load_cookies_path(".")
    for cookie in cookies:
        # Login
        user = Tieba(cookie)
        # List Likes
        print(user.get_likes())
        # Sign
        print(user.username, "Signing")
        for name in user.get_likes():
            if user.sign_Wap(name):
                time.sleep(10)


main()
import Tool
import time
from Tieba import Tieba


def main():
    print("Local Time:", time.asctime(time.localtime()))
    # Read Cookies
    cookies = Tool.load_cookies_path(".")
    for cookie in cookies:
        # Login
        user = Tieba(cookie)
        # List Likes
        print(user.get_likes())
        # Sign
        print(user.username, "Signing")
        for name in user.get_likes():
            if user.sign_Wap(name):
                time.sleep(10)


main()
Change newline character to LF
Change newline character to LF
Python
apache-2.0
jiangzc/TiebaSign
- import Tool + import Tool - import time + import time - from Tieba import Tieba + from Tieba import Tieba - - - def main(): - print("Local Time:", time.asctime(time.localtime())) - # Read Cookies - cookies = Tool.load_cookies_path(".") - for cookie in cookies: - # Login - user = Tieba(cookie) - # List Likes - print(user.get_likes()) - # Sign - print(user.username, "Signing") - for name in user.get_likes(): - if user.sign_Wap(name): - time.sleep(10) - - main() + + def main(): + print("Local Time:", time.asctime(time.localtime())) + # Read Cookies + cookies = Tool.load_cookies_path(".") + for cookie in cookies: + # Login + user = Tieba(cookie) + # List Likes + print(user.get_likes()) + # Sign + print(user.username, "Signing") + for name in user.get_likes(): + if user.sign_Wap(name): + time.sleep(10) + + main() +
Change newline character to LF
## Code Before: import Tool import time from Tieba import Tieba def main(): print("Local Time:", time.asctime(time.localtime())) # Read Cookies cookies = Tool.load_cookies_path(".") for cookie in cookies: # Login user = Tieba(cookie) # List Likes print(user.get_likes()) # Sign print(user.username, "Signing") for name in user.get_likes(): if user.sign_Wap(name): time.sleep(10) main() ## Instruction: Change newline character to LF ## Code After: import Tool import time from Tieba import Tieba def main(): print("Local Time:", time.asctime(time.localtime())) # Read Cookies cookies = Tool.load_cookies_path(".") for cookie in cookies: # Login user = Tieba(cookie) # List Likes print(user.get_likes()) # Sign print(user.username, "Signing") for name in user.get_likes(): if user.sign_Wap(name): time.sleep(10) main()
import Tool import time from Tieba import Tieba def main(): print("Local Time:", time.asctime(time.localtime())) # Read Cookies cookies = Tool.load_cookies_path(".") for cookie in cookies: # Login user = Tieba(cookie) # List Likes print(user.get_likes()) # Sign print(user.username, "Signing") for name in user.get_likes(): if user.sign_Wap(name): time.sleep(10) main()
60870a3e471637d44da32f3aac74064e4ca60208
pyplot.py
pyplot.py
import argparse

import argcomplete

import plotter


def parse_arguments():
    """Argument Parser, providing available scripts"""
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(
        title = 'plotter',
        description = 'available plotting scripts'
    )

    module_subparser = {}
    for module_str in plotter.__all__:
        module = __import__('.'.join(('plotter', module_str)), fromlist=module_str)
        module_subparser[module_str] = subparsers.add_parser(
            module_str, parents=[module.get_parser(add_help=False)],
            help=module.__doc__.split('\n', 1)[0]
        )

    configure = subparsers.add_parser('configure', help='configure this script.')

    argcomplete.autocomplete(parser)
    args = parser.parse_args()
    return args


if __name__ == '__main__':
    args = parse_arguments()
    from plotter.plotn import main
    main(args)
import argparse

import argcomplete

import plotter


def parse_arguments():
    """Argument Parser, providing available scripts"""
    parser = argparse.ArgumentParser()
    subparsers = parser.add_subparsers(
        title = 'plotter',
        description = 'available plotting scripts',
        dest='used_subparser',
    )

    module_subparser = {}
    for module_str in plotter.__all__:
        module = __import__('plotter.' + module_str, fromlist=module_str)
        module_subparser[module_str] = subparsers.add_parser(
            module_str, parents=[module.get_parser(add_help=False)],
            help=module.__doc__.split('\n', 1)[0]
        )
        module_subparser[module_str].set_defaults(run=module.main)

    configure = subparsers.add_parser('configure', help='configure this script.')

    argcomplete.autocomplete(parser)
    args = parser.parse_args()
    return args


if __name__ == '__main__':
    args = parse_arguments()
    args.run(args)
Use `set_defaults` of subparser to launch scripts
Use `set_defaults` of subparser to launch scripts
Python
mit
DerWeh/pyplot
import argparse import argcomplete import plotter def parse_arguments(): """Argument Parser, providing available scripts""" parser = argparse.ArgumentParser() subparsers = parser.add_subparsers( title = 'plotter', - description = 'available plotting scripts' + description = 'available plotting scripts', + dest='used_subparser', ) module_subparser = {} for module_str in plotter.__all__: - module = __import__('.'.join(('plotter', module_str)), fromlist=module_str) + module = __import__('plotter.' + module_str, fromlist=module_str) module_subparser[module_str] = subparsers.add_parser( module_str, parents=[module.get_parser(add_help=False)], help=module.__doc__.split('\n', 1)[0] ) + module_subparser[module_str].set_defaults(run=module.main) configure = subparsers.add_parser('configure', help='configure this script.') argcomplete.autocomplete(parser) args = parser.parse_args() return args if __name__ == '__main__': args = parse_arguments() - from plotter.plotn import main - main(args) + args.run(args)
Use `set_defaults` of subparser to launch scripts
## Code Before: import argparse import argcomplete import plotter def parse_arguments(): """Argument Parser, providing available scripts""" parser = argparse.ArgumentParser() subparsers = parser.add_subparsers( title = 'plotter', description = 'available plotting scripts' ) module_subparser = {} for module_str in plotter.__all__: module = __import__('.'.join(('plotter', module_str)), fromlist=module_str) module_subparser[module_str] = subparsers.add_parser( module_str, parents=[module.get_parser(add_help=False)], help=module.__doc__.split('\n', 1)[0] ) configure = subparsers.add_parser('configure', help='configure this script.') argcomplete.autocomplete(parser) args = parser.parse_args() return args if __name__ == '__main__': args = parse_arguments() from plotter.plotn import main main(args) ## Instruction: Use `set_defaults` of subparser to launch scripts ## Code After: import argparse import argcomplete import plotter def parse_arguments(): """Argument Parser, providing available scripts""" parser = argparse.ArgumentParser() subparsers = parser.add_subparsers( title = 'plotter', description = 'available plotting scripts', dest='used_subparser', ) module_subparser = {} for module_str in plotter.__all__: module = __import__('plotter.' + module_str, fromlist=module_str) module_subparser[module_str] = subparsers.add_parser( module_str, parents=[module.get_parser(add_help=False)], help=module.__doc__.split('\n', 1)[0] ) module_subparser[module_str].set_defaults(run=module.main) configure = subparsers.add_parser('configure', help='configure this script.') argcomplete.autocomplete(parser) args = parser.parse_args() return args if __name__ == '__main__': args = parse_arguments() args.run(args)
import argparse import argcomplete import plotter def parse_arguments(): """Argument Parser, providing available scripts""" parser = argparse.ArgumentParser() subparsers = parser.add_subparsers( title = 'plotter', - description = 'available plotting scripts' + description = 'available plotting scripts', ? + + dest='used_subparser', ) module_subparser = {} for module_str in plotter.__all__: - module = __import__('.'.join(('plotter', module_str)), fromlist=module_str) ? ---------- ^ -- + module = __import__('plotter.' + module_str, fromlist=module_str) ? + ^^ module_subparser[module_str] = subparsers.add_parser( module_str, parents=[module.get_parser(add_help=False)], help=module.__doc__.split('\n', 1)[0] ) + module_subparser[module_str].set_defaults(run=module.main) configure = subparsers.add_parser('configure', help='configure this script.') argcomplete.autocomplete(parser) args = parser.parse_args() return args if __name__ == '__main__': args = parse_arguments() - from plotter.plotn import main - main(args) ? - ^ + args.run(args) ? ^^^^^^
b123001ea0d4fb475184727c39eafd5b46cc0964
shopit_app/urls.py
shopit_app/urls.py
from django.conf import settings
from django.conf.urls import include, patterns, url

from rest_framework_nested import routers

from shopit_app.views import IndexView
from authentication_app.views import AccountViewSet, LoginView

router = routers.SimpleRouter()
router.register(r'accounts', AccountViewSet)

urlpatterns = patterns('',
    # API endpoints
    url(r'^api/v1/', include(router.urls)),
    url(r'^api/v1/auth/login/$', LoginView.as_view(), name='login'),
    url('^.*$', IndexView.as_view(), name='index'),
)
from django.conf import settings
from django.conf.urls import include, patterns, url

from rest_framework_nested import routers

from shopit_app.views import IndexView
from authentication_app.views import AccountViewSet, LoginView, LogoutView

router = routers.SimpleRouter()
router.register(r'accounts', AccountViewSet)

urlpatterns = patterns('',
    # API endpoints
    url(r'^api/v1/', include(router.urls)),
    url(r'^api/v1/auth/logout/$', LogoutView.as_view(), name='logout'),
    url(r'^api/v1/auth/login/$', LoginView.as_view(), name='login'),
    url('^.*$', IndexView.as_view(), name='index'),
)
Add the endpoint for the logout.
Add the endpoint for the logout.
Python
mit
mvpgomes/shopit-app,mvpgomes/shopit-app,mvpgomes/shopit-app,mvpgomes/shopit-app
from django.conf import settings from django.conf.urls import include, patterns, url from rest_framework_nested import routers from shopit_app.views import IndexView - from authentication_app.views import AccountViewSet, LoginView + from authentication_app.views import AccountViewSet, LoginView, LogoutView router = routers.SimpleRouter() router.register(r'accounts', AccountViewSet) urlpatterns = patterns('', # API endpoints url(r'^api/v1/', include(router.urls)), + url(r'^api/v1/auth/logout/$', LogoutView.as_view(), name='logout'), url(r'^api/v1/auth/login/$', LoginView.as_view(), name='login'), url('^.*$', IndexView.as_view(), name='index'), )
Add the endpoint for the logout.
## Code Before: from django.conf import settings from django.conf.urls import include, patterns, url from rest_framework_nested import routers from shopit_app.views import IndexView from authentication_app.views import AccountViewSet, LoginView router = routers.SimpleRouter() router.register(r'accounts', AccountViewSet) urlpatterns = patterns('', # API endpoints url(r'^api/v1/', include(router.urls)), url(r'^api/v1/auth/login/$', LoginView.as_view(), name='login'), url('^.*$', IndexView.as_view(), name='index'), ) ## Instruction: Add the endpoint for the logout. ## Code After: from django.conf import settings from django.conf.urls import include, patterns, url from rest_framework_nested import routers from shopit_app.views import IndexView from authentication_app.views import AccountViewSet, LoginView, LogoutView router = routers.SimpleRouter() router.register(r'accounts', AccountViewSet) urlpatterns = patterns('', # API endpoints url(r'^api/v1/', include(router.urls)), url(r'^api/v1/auth/logout/$', LogoutView.as_view(), name='logout'), url(r'^api/v1/auth/login/$', LoginView.as_view(), name='login'), url('^.*$', IndexView.as_view(), name='index'), )
from django.conf import settings from django.conf.urls import include, patterns, url from rest_framework_nested import routers from shopit_app.views import IndexView - from authentication_app.views import AccountViewSet, LoginView + from authentication_app.views import AccountViewSet, LoginView, LogoutView ? ++++++++++++ router = routers.SimpleRouter() router.register(r'accounts', AccountViewSet) urlpatterns = patterns('', # API endpoints url(r'^api/v1/', include(router.urls)), + url(r'^api/v1/auth/logout/$', LogoutView.as_view(), name='logout'), url(r'^api/v1/auth/login/$', LoginView.as_view(), name='login'), url('^.*$', IndexView.as_view(), name='index'), )
ccafafbd51422979ed93ed197135bf03b7d0be81
opps/images/__init__.py
opps/images/__init__.py
from django.utils.translation import ugettext_lazy as _

from django.conf import settings

trans_app_label = _('Image')

settings.INSTALLED_APPS += ('thumbor',)
from django.utils.translation import ugettext_lazy as _

from django.conf import settings

trans_app_label = _('Image')
Remove thumbor use on init image, thumbor not django application
Remove thumbor use on init image, thumbor not django application
Python
mit
YACOWS/opps,opps/opps,jeanmask/opps,jeanmask/opps,jeanmask/opps,opps/opps,opps/opps,YACOWS/opps,YACOWS/opps,YACOWS/opps,williamroot/opps,williamroot/opps,williamroot/opps,williamroot/opps,jeanmask/opps,opps/opps
from django.utils.translation import ugettext_lazy as _ from django.conf import settings trans_app_label = _('Image') - settings.INSTALLED_APPS += ('thumbor',)
Remove thumbor use on init image, thumbor not django application
## Code Before: from django.utils.translation import ugettext_lazy as _ from django.conf import settings trans_app_label = _('Image') settings.INSTALLED_APPS += ('thumbor',) ## Instruction: Remove thumbor use on init image, thumbor not django application ## Code After: from django.utils.translation import ugettext_lazy as _ from django.conf import settings trans_app_label = _('Image')
from django.utils.translation import ugettext_lazy as _ from django.conf import settings trans_app_label = _('Image') - settings.INSTALLED_APPS += ('thumbor',)
fa862fdd6be62eb4e79d0dfcef60471aecd46981
rest_framework_push_notifications/tests/test_serializers.py
rest_framework_push_notifications/tests/test_serializers.py
from django.test import TestCase

from .. import serializers


class TestAPNSDeviceSerializer(TestCase):
    def test_fields(self):
        expected = {'url', 'name', 'device_id', 'registration_id', 'active'}
        fields = serializers.APNSDevice().fields.keys()
        self.assertEqual(expected, set(fields))

    def test_registration_id(self):
        serializer = serializers.APNSDevice(data={'registration_id': 'test_id'})
        self.assertTrue(serializer.is_valid())

    def test_registration_id_required(self):
        serializer = serializers.APNSDevice(data={})
        self.assertFalse(serializer.is_valid())
from django.test import TestCase

from .. import serializers


class TestAPNSDeviceSerializer(TestCase):
    def test_fields(self):
        expected = {'url', 'name', 'device_id', 'registration_id', 'active'}
        fields = serializers.APNSDevice().fields.keys()
        self.assertEqual(expected, set(fields))

    def test_registration_id(self):
        serializer = serializers.APNSDevice(data={'registration_id': 'test_id'})
        self.assertTrue(serializer.is_valid())

    def test_registration_id_required(self):
        serializer = serializers.APNSDevice(data={})
        self.assertFalse(serializer.is_valid())
        self.assertIn('registration_id', serializer.errors)
Check for registration_id in serializer errors
Check for registration_id in serializer errors
Python
bsd-2-clause
incuna/rest-framework-push-notifications
from django.test import TestCase from .. import serializers class TestAPNSDeviceSerializer(TestCase): def test_fields(self): expected = {'url', 'name', 'device_id', 'registration_id', 'active'} fields = serializers.APNSDevice().fields.keys() self.assertEqual(expected, set(fields)) def test_registration_id(self): serializer = serializers.APNSDevice(data={'registration_id': 'test_id'}) self.assertTrue(serializer.is_valid()) def test_registration_id_required(self): serializer = serializers.APNSDevice(data={}) self.assertFalse(serializer.is_valid()) + self.assertIn('registration_id', serializer.errors)
Check for registration_id in serializer errors
## Code Before: from django.test import TestCase from .. import serializers class TestAPNSDeviceSerializer(TestCase): def test_fields(self): expected = {'url', 'name', 'device_id', 'registration_id', 'active'} fields = serializers.APNSDevice().fields.keys() self.assertEqual(expected, set(fields)) def test_registration_id(self): serializer = serializers.APNSDevice(data={'registration_id': 'test_id'}) self.assertTrue(serializer.is_valid()) def test_registration_id_required(self): serializer = serializers.APNSDevice(data={}) self.assertFalse(serializer.is_valid()) ## Instruction: Check for registration_id in serializer errors ## Code After: from django.test import TestCase from .. import serializers class TestAPNSDeviceSerializer(TestCase): def test_fields(self): expected = {'url', 'name', 'device_id', 'registration_id', 'active'} fields = serializers.APNSDevice().fields.keys() self.assertEqual(expected, set(fields)) def test_registration_id(self): serializer = serializers.APNSDevice(data={'registration_id': 'test_id'}) self.assertTrue(serializer.is_valid()) def test_registration_id_required(self): serializer = serializers.APNSDevice(data={}) self.assertFalse(serializer.is_valid()) self.assertIn('registration_id', serializer.errors)
from django.test import TestCase from .. import serializers class TestAPNSDeviceSerializer(TestCase): def test_fields(self): expected = {'url', 'name', 'device_id', 'registration_id', 'active'} fields = serializers.APNSDevice().fields.keys() self.assertEqual(expected, set(fields)) def test_registration_id(self): serializer = serializers.APNSDevice(data={'registration_id': 'test_id'}) self.assertTrue(serializer.is_valid()) def test_registration_id_required(self): serializer = serializers.APNSDevice(data={}) self.assertFalse(serializer.is_valid()) + self.assertIn('registration_id', serializer.errors)
75a4733d059f6aad758f93a9c6e4878093afd184
test-messages.py
test-messages.py
import random
import time

import mosquitto

timestamp = int(time.time())

broker = '127.0.0.1'
port = 1883

element = 'home'
areas = ['front', 'back', 'kitchen', 'basement', 'living']
entrances = ['door', 'window']
states = ['true', 'false']

print 'Messages are published on topic %s/#... -> CTRL + C to shutdown' \
    % element

while True:
    area = random.choice(areas)
    if (area in ['basement', 'living']):
        topic = element + '/' + area + '/temp'
        message = random.randrange(0, 30, 1)
    else:
        topic = element + '/' + area + '/' + random.choice(entrances)
        message = random.choice(states)

    client = mosquitto.Mosquitto("mqtt-panel-test")
    client.connect(broker)
    client.publish(topic, message)
    time.sleep(2)
import random
import time

import paho.mqtt.client as mqtt

timestamp = int(time.time())

broker = '127.0.0.1'
port = 1883

element = 'home'
areas = ['front', 'back', 'kitchen', 'basement', 'living']
entrances = ['door', 'window']
states = ['true', 'false']

print('Messages are published on topic %s/#... -> CTRL + C to shutdown' \
    % element)

while True:
    area = random.choice(areas)
    if (area in ['basement', 'living']):
        topic = element + '/' + area + '/temp'
        message = random.randrange(0, 30, 1)
    else:
        topic = element + '/' + area + '/' + random.choice(entrances)
        message = random.choice(states)

    mqttclient = mqtt.Client("mqtt-panel-test")
    mqttclient.connect(broker, port=int(port))
    mqttclient.publish(topic, message)
    time.sleep(2)
Switch to paho-mqtt and make ready for py3
Switch to paho-mqtt and make ready for py3
Python
mit
fabaff/mqtt-panel,fabaff/mqtt-panel,fabaff/mqtt-panel
import random import time - import mosquitto + import paho.mqtt.client as mqtt timestamp = int(time.time()) broker = '127.0.0.1' port = 1883 element = 'home' areas = ['front', 'back', 'kitchen', 'basement', 'living'] entrances = ['door', 'window'] states = ['true', 'false'] - print 'Messages are published on topic %s/#... -> CTRL + C to shutdown' \ + print('Messages are published on topic %s/#... -> CTRL + C to shutdown' \ - % element + % element) while True: area = random.choice(areas) if (area in ['basement', 'living']): topic = element + '/' + area + '/temp' message = random.randrange(0, 30, 1) else: topic = element + '/' + area + '/' + random.choice(entrances) message = random.choice(states) - client = mosquitto.Mosquitto("mqtt-panel-test") + mqttclient = mqtt.Client("mqtt-panel-test") - client.connect(broker) + mqttclient.connect(broker, port=int(port)) - client.publish(topic, message) + mqttclient.publish(topic, message) time.sleep(2) +
Switch to paho-mqtt and make ready for py3
## Code Before: import random import time import mosquitto timestamp = int(time.time()) broker = '127.0.0.1' port = 1883 element = 'home' areas = ['front', 'back', 'kitchen', 'basement', 'living'] entrances = ['door', 'window'] states = ['true', 'false'] print 'Messages are published on topic %s/#... -> CTRL + C to shutdown' \ % element while True: area = random.choice(areas) if (area in ['basement', 'living']): topic = element + '/' + area + '/temp' message = random.randrange(0, 30, 1) else: topic = element + '/' + area + '/' + random.choice(entrances) message = random.choice(states) client = mosquitto.Mosquitto("mqtt-panel-test") client.connect(broker) client.publish(topic, message) time.sleep(2) ## Instruction: Switch to paho-mqtt and make ready for py3 ## Code After: import random import time import paho.mqtt.client as mqtt timestamp = int(time.time()) broker = '127.0.0.1' port = 1883 element = 'home' areas = ['front', 'back', 'kitchen', 'basement', 'living'] entrances = ['door', 'window'] states = ['true', 'false'] print('Messages are published on topic %s/#... -> CTRL + C to shutdown' \ % element) while True: area = random.choice(areas) if (area in ['basement', 'living']): topic = element + '/' + area + '/temp' message = random.randrange(0, 30, 1) else: topic = element + '/' + area + '/' + random.choice(entrances) message = random.choice(states) mqttclient = mqtt.Client("mqtt-panel-test") mqttclient.connect(broker, port=int(port)) mqttclient.publish(topic, message) time.sleep(2)
import random import time - import mosquitto + import paho.mqtt.client as mqtt timestamp = int(time.time()) broker = '127.0.0.1' port = 1883 element = 'home' areas = ['front', 'back', 'kitchen', 'basement', 'living'] entrances = ['door', 'window'] states = ['true', 'false'] - print 'Messages are published on topic %s/#... -> CTRL + C to shutdown' \ ? ^ + print('Messages are published on topic %s/#... -> CTRL + C to shutdown' \ ? ^ - % element + % element) ? + while True: area = random.choice(areas) if (area in ['basement', 'living']): topic = element + '/' + area + '/temp' message = random.randrange(0, 30, 1) else: topic = element + '/' + area + '/' + random.choice(entrances) message = random.choice(states) - client = mosquitto.Mosquitto("mqtt-panel-test") ? -- -- - ^^^^^ -- + mqttclient = mqtt.Client("mqtt-panel-test") ? ++++ ^^ ++ - client.connect(broker) + mqttclient.connect(broker, port=int(port)) - client.publish(topic, message) + mqttclient.publish(topic, message) ? ++++ time.sleep(2) +
0f7ebf148ab3f88fc983e60f689a9c740ae64e47
outgoing_mail.py
outgoing_mail.py
from google.appengine.api import mail
from google.appengine.ext.webapp import template
import os

from_address = 'admin@' + os.environ['APPLICATION_ID'] + '.appspotmail.com'


def send(to, template_name, values):
    path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name)
    message = mail.EmailMessage(sender=from_address, to=to)
    message.subject = template.render(path + '.subject', values)
    message.body = template.render(path + '.body', values)
    message.send()
from google.appengine.api import mail
from google.appengine.ext.webapp import template
import os

from_address = 'EventBot <admin@' + os.environ['APPLICATION_ID'] + '.appspotmail.com>'


def send(to, template_name, values):
    path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name)
    message = mail.EmailMessage(sender=from_address, to=to)
    message.subject = template.render(path + '.subject', values)
    message.body = template.render(path + '.body', values)
    message.send()
Add display name for from address
Add display name for from address
Python
mit
eentzel/myeventbot,eentzel/myeventbot,eentzel/myeventbot,eentzel/myeventbot,eentzel/myeventbot
from google.appengine.api import mail from google.appengine.ext.webapp import template import os - from_address = 'admin@' + os.environ['APPLICATION_ID'] + '.appspotmail.com' + from_address = 'EventBot <admin@' + os.environ['APPLICATION_ID'] + '.appspotmail.com>' def send(to, template_name, values): path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name) message = mail.EmailMessage(sender=from_address, to=to) message.subject = template.render(path + '.subject', values) message.body = template.render(path + '.body', values) message.send()
Add display name for from address
## Code Before: from google.appengine.api import mail from google.appengine.ext.webapp import template import os from_address = 'admin@' + os.environ['APPLICATION_ID'] + '.appspotmail.com' def send(to, template_name, values): path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name) message = mail.EmailMessage(sender=from_address, to=to) message.subject = template.render(path + '.subject', values) message.body = template.render(path + '.body', values) message.send() ## Instruction: Add display name for from address ## Code After: from google.appengine.api import mail from google.appengine.ext.webapp import template import os from_address = 'EventBot <admin@' + os.environ['APPLICATION_ID'] + '.appspotmail.com>' def send(to, template_name, values): path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name) message = mail.EmailMessage(sender=from_address, to=to) message.subject = template.render(path + '.subject', values) message.body = template.render(path + '.body', values) message.send()
from google.appengine.api import mail from google.appengine.ext.webapp import template import os - from_address = 'admin@' + os.environ['APPLICATION_ID'] + '.appspotmail.com' + from_address = 'EventBot <admin@' + os.environ['APPLICATION_ID'] + '.appspotmail.com>' ? ++++++++++ + def send(to, template_name, values): path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name) message = mail.EmailMessage(sender=from_address, to=to) message.subject = template.render(path + '.subject', values) message.body = template.render(path + '.body', values) message.send()
345056a7a6a801013cdc340f0f9cd8b4f5d48173
convert-bookmarks.py
convert-bookmarks.py
from argparse import ArgumentParser
from bs4 import BeautifulSoup
from datetime import datetime, timezone
from bson import json_util
import json


parser = ArgumentParser(description='Convert Netscape bookmarks to JSON')
parser.add_argument(dest='filenames', metavar='filename', nargs='+')
parser.add_argument('-t', '--tag', metavar='tag', dest='tags',
                    action='append', help='add tag to bookmarks, repeat \
                    for multiple tags')
parser.add_argument('-m', '--mongodb', action='store_true', dest='mongo',
                    help='output in mongodb import format')
args = parser.parse_args()

for filename in args.filenames:
    soup = BeautifulSoup(open(filename, encoding='utf8'), "html5lib")
    for link in soup.find_all('a'):
        bookmark = {}

        # url and title
        bookmark['url'] = link.get('href')
        bookmark['title'] = link.string.strip() if link.string\
            else bookmark['url']

        # add date
        secs = link.get('add_date')
        date = datetime.fromtimestamp(int(secs), tz=timezone.utc)
        bookmark['add_date'] = date

        # tags
        tags = link.get('tags')
        bookmark['tags'] = tags.split(',') if tags else []
        if args.tags:
            bookmark['tags'] += args.tags

        # comment
        sibling = link.parent.next_sibling
        bookmark['comment'] = \
            sibling.string.strip() if sibling and sibling.name == 'dd' \
                else ''

        # make json
        if args.mongo:
            print(json_util.dumps(bookmark, sort_keys=False, indent=4))
        else:
            print(json.dumps(bookmark, sort_keys=False, indent=4))
from argparse import ArgumentParser
from bs4 import BeautifulSoup
import json

parser = ArgumentParser(description='Convert Netscape bookmarks to JSON')
parser.add_argument(dest='filenames', metavar='filename', nargs='+')
parser.add_argument('-t', '--tag', metavar='tag', dest='tags',
                    action='append', help='add tag to bookmarks, repeat \
                    for multiple tags')
args = parser.parse_args()

for filename in args.filenames:
    soup = BeautifulSoup(open(filename, encoding='utf8'), "html5lib")
    for link in soup.find_all('a'):
        bookmark = {}

        # url and title
        bookmark['url'] = link.get('href')
        bookmark['title'] = link.string.strip() if link.string\
            else bookmark['url']

        # tags
        tags = link.get('tags')
        bookmark['tags'] = tags.split(',') if tags else []
        if args.tags:
            bookmark['tags'] += args.tags

        # comment
        sibling = link.parent.next_sibling
        bookmark['comment'] = \
            sibling.string.strip() if sibling and sibling.name == 'dd' \
            else ''

        print(json.dumps(bookmark, sort_keys=False, indent=4))
Remove bson, datetime, and mongo
Remove bson, datetime, and mongo

Current BSON fails to work
datetime can't be serialized by json_util
mongodb is not needed; just use JSON
Python
mit
jhh/netscape-bookmark-converter
from argparse import ArgumentParser from bs4 import BeautifulSoup - from datetime import datetime, timezone - from bson import json_util import json - parser = ArgumentParser(description='Convert Netscape bookmarks to JSON') parser.add_argument(dest='filenames', metavar='filename', nargs='+') parser.add_argument('-t', '--tag', metavar='tag', dest='tags', action='append', help='add tag to bookmarks, repeat \ for multiple tags') - parser.add_argument('-m', '--mongodb', action='store_true', dest='mongo', - help='output in mongodb import format') args = parser.parse_args() for filename in args.filenames: soup = BeautifulSoup(open(filename, encoding='utf8'), "html5lib") for link in soup.find_all('a'): bookmark = {} # url and title bookmark['url'] = link.get('href') bookmark['title'] = link.string.strip() if link.string\ else bookmark['url'] - # add date - secs = link.get('add_date') - date = datetime.fromtimestamp(int(secs), tz=timezone.utc) - bookmark['add_date'] = date # tags tags = link.get('tags') bookmark['tags'] = tags.split(',') if tags else [] if args.tags: bookmark['tags'] += args.tags # comment sibling = link.parent.next_sibling bookmark['comment'] = \ sibling.string.strip() if sibling and sibling.name == 'dd' \ - else '' + else '' - # make json - if args.mongo: - print(json_util.dumps(bookmark, sort_keys=False, indent=4)) - else: - print(json.dumps(bookmark, sort_keys=False, indent=4)) + print(json.dumps(bookmark, sort_keys=False, indent=4))
Remove bson, datetime, and mongo
## Code Before: from argparse import ArgumentParser from bs4 import BeautifulSoup from datetime import datetime, timezone from bson import json_util import json parser = ArgumentParser(description='Convert Netscape bookmarks to JSON') parser.add_argument(dest='filenames', metavar='filename', nargs='+') parser.add_argument('-t', '--tag', metavar='tag', dest='tags', action='append', help='add tag to bookmarks, repeat \ for multiple tags') parser.add_argument('-m', '--mongodb', action='store_true', dest='mongo', help='output in mongodb import format') args = parser.parse_args() for filename in args.filenames: soup = BeautifulSoup(open(filename, encoding='utf8'), "html5lib") for link in soup.find_all('a'): bookmark = {} # url and title bookmark['url'] = link.get('href') bookmark['title'] = link.string.strip() if link.string\ else bookmark['url'] # add date secs = link.get('add_date') date = datetime.fromtimestamp(int(secs), tz=timezone.utc) bookmark['add_date'] = date # tags tags = link.get('tags') bookmark['tags'] = tags.split(',') if tags else [] if args.tags: bookmark['tags'] += args.tags # comment sibling = link.parent.next_sibling bookmark['comment'] = \ sibling.string.strip() if sibling and sibling.name == 'dd' \ else '' # make json if args.mongo: print(json_util.dumps(bookmark, sort_keys=False, indent=4)) else: print(json.dumps(bookmark, sort_keys=False, indent=4)) ## Instruction: Remove bson, datetime, and mongo ## Code After: from argparse import ArgumentParser from bs4 import BeautifulSoup import json parser = ArgumentParser(description='Convert Netscape bookmarks to JSON') parser.add_argument(dest='filenames', metavar='filename', nargs='+') parser.add_argument('-t', '--tag', metavar='tag', dest='tags', action='append', help='add tag to bookmarks, repeat \ for multiple tags') args = parser.parse_args() for filename in args.filenames: soup = BeautifulSoup(open(filename, encoding='utf8'), "html5lib") for link in soup.find_all('a'): bookmark = {} # url and title bookmark['url'] = link.get('href') bookmark['title'] = link.string.strip() if link.string\ else bookmark['url'] # tags tags = link.get('tags') bookmark['tags'] = tags.split(',') if tags else [] if args.tags: bookmark['tags'] += args.tags # comment sibling = link.parent.next_sibling bookmark['comment'] = \ sibling.string.strip() if sibling and sibling.name == 'dd' \ else '' print(json.dumps(bookmark, sort_keys=False, indent=4))
from argparse import ArgumentParser from bs4 import BeautifulSoup - from datetime import datetime, timezone - from bson import json_util import json - parser = ArgumentParser(description='Convert Netscape bookmarks to JSON') parser.add_argument(dest='filenames', metavar='filename', nargs='+') parser.add_argument('-t', '--tag', metavar='tag', dest='tags', action='append', help='add tag to bookmarks, repeat \ for multiple tags') - parser.add_argument('-m', '--mongodb', action='store_true', dest='mongo', - help='output in mongodb import format') args = parser.parse_args() for filename in args.filenames: soup = BeautifulSoup(open(filename, encoding='utf8'), "html5lib") for link in soup.find_all('a'): bookmark = {} # url and title bookmark['url'] = link.get('href') bookmark['title'] = link.string.strip() if link.string\ else bookmark['url'] - # add date - secs = link.get('add_date') - date = datetime.fromtimestamp(int(secs), tz=timezone.utc) - bookmark['add_date'] = date # tags tags = link.get('tags') bookmark['tags'] = tags.split(',') if tags else [] if args.tags: bookmark['tags'] += args.tags # comment sibling = link.parent.next_sibling bookmark['comment'] = \ sibling.string.strip() if sibling and sibling.name == 'dd' \ - else '' ? ---- + else '' - # make json - if args.mongo: - print(json_util.dumps(bookmark, sort_keys=False, indent=4)) - else: - print(json.dumps(bookmark, sort_keys=False, indent=4)) ? ---- + print(json.dumps(bookmark, sort_keys=False, indent=4))
aa359661c31df53885e19f5acb2e0171b6f87398
recipe_scrapers/innit.py
recipe_scrapers/innit.py
from ._abstract import AbstractScraper

"""
Note that innit hosts recipes for several companies.
I found it while looking at centralmarket.com
"""


class Innit(AbstractScraper):
    @classmethod
    def host(self, domain="com"):
        return f"innit.{domain}"
from ._abstract import AbstractScraper

"""
Note that innit hosts recipes for several companies.
I found it while looking at centralmarket.com
"""


class Innit(AbstractScraper):
    @classmethod
    def host(self, domain="com"):
        return f"innit.{domain}"

    def title(self):
        return self.schema.title()

    def total_time(self):
        return self.schema.total_time()

    def yields(self):
        return self.schema.yields()

    def image(self):
        return self.schema.image()

    def ingredients(self):
        return self.schema.ingredients()

    def instructions(self):
        return self.schema.instructions()

    def ratings(self):
        return self.schema.ratings()
Add wrapper methods for clarity.
Add wrapper methods for clarity.
Python
mit
hhursev/recipe-scraper
from ._abstract import AbstractScraper """ Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com """ class Innit(AbstractScraper): @classmethod def host(self, domain="com"): return f"innit.{domain}" + def title(self): + return self.schema.title() + + def total_time(self): + return self.schema.total_time() + + def yields(self): + return self.schema.yields() + + def image(self): + return self.schema.image() + + def ingredients(self): + return self.schema.ingredients() + + def instructions(self): + return self.schema.instructions() + + def ratings(self): + return self.schema.ratings()
Add wrapper methods for clarity.
## Code Before: from ._abstract import AbstractScraper """ Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com """ class Innit(AbstractScraper): @classmethod def host(self, domain="com"): return f"innit.{domain}" ## Instruction: Add wrapper methods for clarity. ## Code After: from ._abstract import AbstractScraper """ Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com """ class Innit(AbstractScraper): @classmethod def host(self, domain="com"): return f"innit.{domain}" def title(self): return self.schema.title() def total_time(self): return self.schema.total_time() def yields(self): return self.schema.yields() def image(self): return self.schema.image() def ingredients(self): return self.schema.ingredients() def instructions(self): return self.schema.instructions() def ratings(self): return self.schema.ratings()
from ._abstract import AbstractScraper """ Note that innit hosts recipes for several companies. I found it while looking at centralmarket.com """ class Innit(AbstractScraper): @classmethod def host(self, domain="com"): return f"innit.{domain}" + + def title(self): + return self.schema.title() + + def total_time(self): + return self.schema.total_time() + + def yields(self): + return self.schema.yields() + + def image(self): + return self.schema.image() + + def ingredients(self): + return self.schema.ingredients() + + def instructions(self): + return self.schema.instructions() + + def ratings(self): + return self.schema.ratings()
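A generic sketch of the thin-wrapper pattern this commit applies: every public method simply forwards to the underlying schema object. StubSchema and SchemaBackedScraper are invented stand-ins, not part of the recipe_scrapers API.

class StubSchema:
    """Stand-in for the schema.org data the real scraper parses."""
    def title(self):
        return 'Example Recipe'

    def yields(self):
        return '4 servings'


class SchemaBackedScraper:
    """Each public method forwards to the underlying schema object."""
    def __init__(self, schema):
        self.schema = schema

    def title(self):
        return self.schema.title()

    def yields(self):
        return self.schema.yields()


scraper = SchemaBackedScraper(StubSchema())
print(scraper.title())   # Example Recipe
print(scraper.yields())  # 4 servings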
c272e73c0d3112425e0ba25c58448f7c1d492d11
api/src/SearchApi.py
api/src/SearchApi.py
from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, order="relevance" ) search_response = json.dumps(search_request.execute(), separators=[',',':']) return search_response if __name__ == "__main__": print youtubeSearch("paramore", 5)
from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, order="relevance" ) searchResponse = json.dumps(search_request.execute(), separators=[',',':']) searchData = json.loads(searchResponse) filteredData = filterData(searchData) filteredResponse = json.dumps(filteredData) return filteredResponse def filterData(original): filtered = [] for item in original["items"]: temp = {} temp["title"] = item["snippet"]["title"] temp["descriptipn"] = item["snippet"]["description"] temp["uploader"] = item["snippet"]["channelTitle"] temp["id"] = item["id"]["videoId"] filtered.append(temp) return filtered
Update search api filter out unwanted information
Update search api filter out unwanted information
Python
mit
jghibiki/mopey,jghibiki/mopey,jghibiki/mopey,jghibiki/mopey,jghibiki/mopey
from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, order="relevance" ) - search_response = json.dumps(search_request.execute(), separators=[',',':']) + searchResponse = json.dumps(search_request.execute(), separators=[',',':']) + searchData = json.loads(searchResponse) - return search_response + filteredData = filterData(searchData) + filteredResponse = json.dumps(filteredData) - if __name__ == "__main__": - print youtubeSearch("paramore", 5) + return filteredResponse + + def filterData(original): + filtered = [] + + for item in original["items"]: + temp = {} + temp["title"] = item["snippet"]["title"] + temp["descriptipn"] = item["snippet"]["description"] + temp["uploader"] = item["snippet"]["channelTitle"] + temp["id"] = item["id"]["videoId"] + filtered.append(temp) + return filtered + +
Update search api filter out unwanted information
## Code Before: from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, order="relevance" ) search_response = json.dumps(search_request.execute(), separators=[',',':']) return search_response if __name__ == "__main__": print youtubeSearch("paramore", 5) ## Instruction: Update search api filter out unwanted information ## Code After: from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, order="relevance" ) searchResponse = json.dumps(search_request.execute(), separators=[',',':']) searchData = json.loads(searchResponse) filteredData = filterData(searchData) filteredResponse = json.dumps(filteredData) return filteredResponse def filterData(original): filtered = [] for item in original["items"]: temp = {} temp["title"] = item["snippet"]["title"] temp["descriptipn"] = item["snippet"]["description"] temp["uploader"] = item["snippet"]["channelTitle"] temp["id"] = item["id"]["videoId"] filtered.append(temp) return filtered
from apiclient.discovery import build import json # Set DEVELOPER_KEY to the API key value from the APIs & auth > Registered apps # tab of # https://cloud.google.com/console # Please ensure that you have enabled the YouTube Data API for your project. devKeyFile = open("search-api.key", "rb") DEVELOPER_KEY = devKeyFile.read() devKeyFile.close() YOUTUBE_API_SERVICE_NAME = "youtube" YOUTUBE_API_VERSION = "v3" def youtubeSearch(query, maxResults=50): youtube = build(YOUTUBE_API_SERVICE_NAME, YOUTUBE_API_VERSION, developerKey=DEVELOPER_KEY) # Call the search.list method to retrieve results matching the specified # query term. search_request = youtube.search().list( part="id,snippet", q=query, type="video", maxResults=maxResults, order="relevance" ) - search_response = json.dumps(search_request.execute(), separators=[',',':']) ? ^^ + searchResponse = json.dumps(search_request.execute(), separators=[',',':']) ? ^ + searchData = json.loads(searchResponse) - return search_response + filteredData = filterData(searchData) + filteredResponse = json.dumps(filteredData) + return filteredResponse - if __name__ == "__main__": - print youtubeSearch("paramore", 5) + def filterData(original): + filtered = [] + + for item in original["items"]: + temp = {} + temp["title"] = item["snippet"]["title"] + temp["descriptipn"] = item["snippet"]["description"] + temp["uploader"] = item["snippet"]["channelTitle"] + temp["id"] = item["id"]["videoId"] + filtered.append(temp) + + + return filtered +
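To show the filtering step in isolation, here is an offline sketch exercised against a fabricated response that contains only the keys the filter reads; no API key or network call is involved, and the payload values and the name filter_items are made up.

import json

# Fabricated response carrying only the keys the filter reads.
sample_response = {
    'items': [
        {
            'id': {'videoId': 'abc123'},
            'snippet': {
                'title': 'Some video',
                'description': 'A short description',
                'channelTitle': 'Some channel',
            },
        }
    ]
}

def filter_items(original):
    filtered = []
    for item in original['items']:
        filtered.append({
            'title': item['snippet']['title'],
            'description': item['snippet']['description'],
            'uploader': item['snippet']['channelTitle'],
            'id': item['id']['videoId'],
        })
    return filtered

print(json.dumps(filter_items(sample_response), indent=2))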
c5c92c852d27fb370e4efdc631caf38ebcfdd8ba
tests/GIR/test_query_select.py
tests/GIR/test_query_select.py
import sys import struct import unittest from test_000_config import TestConfig from test_001_connection import TestConnection from gi.repository import Midgard from gi.repository import GObject class TestQuerySelect(unittest.TestCase): def testSelectAdminPerson(self): mgd = TestConnection.openConnection() st = Midgard.QueryStorage(dbclass = "midgard_person") qs = Midgard.QuerySelect(connection = mgd, storage = st) qs.execute() objects = qs.list_objects() # Expect one person only self.assertEqual(len(objects), 1); def testSelectInvalidType(self): mgd = TestConnection.openConnection() st = Midgard.QueryStorage(dbclass = "NotExists") qs = Midgard.QuerySelect(connection = mgd, storage = st) # Check if we have GError self.assertRaises(GObject.GError, qs.execute) # Check if we have correct domain try: qs.execute() except GObject.GError as e: self.assertEqual(e.domain, "midgard-validation-error-quark") self.assertEqual(e.code, Midgard.ValidationError.TYPE_INVALID) def testOrder(self): mgd = TestConnection.openConnection() self.assertEqual("ok", "NOT IMPLEMENTED") def testInheritance(self): mgd = TestConnection.openConnection() qs = Midgard.QuerySelect(connection = mgd) self.assertIsInstance(qs, Midgard.QueryExecutor) if __name__ == "__main__": unittest.main()
import sys import struct import unittest from test_000_config import TestConfig from test_001_connection import TestConnection from gi.repository import Midgard from gi.repository import GObject class TestQuerySelect(unittest.TestCase): def setUp(self): self.mgd = TestConnection.openConnection() def tearDown(self): return def testSelectAdminPerson(self): st = Midgard.QueryStorage(dbclass = "midgard_person") qs = Midgard.QuerySelect(connection = self.mgd, storage = st) qs.execute() objects = qs.list_objects() # Expect one person only self.assertEqual(len(objects), 1); def testSelectInvalidType(self): st = Midgard.QueryStorage(dbclass = "NotExists") qs = Midgard.QuerySelect(connection = self.mgd, storage = st) # Check if we have GError self.assertRaises(GObject.GError, qs.execute) # Check if we have correct domain try: qs.execute() except GObject.GError as e: self.assertEqual(e.domain, "midgard-validation-error-quark") self.assertEqual(e.code, Midgard.ValidationError.TYPE_INVALID) def testOrder(self): self.assertEqual("ok", "NOT IMPLEMENTED") def testInheritance(self): qs = Midgard.QuerySelect(connection = self.mgd) self.assertIsInstance(qs, Midgard.QueryExecutor) if __name__ == "__main__": unittest.main()
Set MidgardConnection in setUp method
Set MidgardConnection in setUp method
Python
lgpl-2.1
piotras/midgard-core,piotras/midgard-core,midgardproject/midgard-core,midgardproject/midgard-core,midgardproject/midgard-core,midgardproject/midgard-core,piotras/midgard-core,piotras/midgard-core
import sys import struct import unittest from test_000_config import TestConfig from test_001_connection import TestConnection from gi.repository import Midgard from gi.repository import GObject class TestQuerySelect(unittest.TestCase): + def setUp(self): + self.mgd = TestConnection.openConnection() + + def tearDown(self): + return + def testSelectAdminPerson(self): - mgd = TestConnection.openConnection() st = Midgard.QueryStorage(dbclass = "midgard_person") - qs = Midgard.QuerySelect(connection = mgd, storage = st) + qs = Midgard.QuerySelect(connection = self.mgd, storage = st) qs.execute() objects = qs.list_objects() # Expect one person only self.assertEqual(len(objects), 1); - def testSelectInvalidType(self): + def testSelectInvalidType(self): - mgd = TestConnection.openConnection() st = Midgard.QueryStorage(dbclass = "NotExists") - qs = Midgard.QuerySelect(connection = mgd, storage = st) + qs = Midgard.QuerySelect(connection = self.mgd, storage = st) # Check if we have GError self.assertRaises(GObject.GError, qs.execute) # Check if we have correct domain try: qs.execute() except GObject.GError as e: self.assertEqual(e.domain, "midgard-validation-error-quark") self.assertEqual(e.code, Midgard.ValidationError.TYPE_INVALID) def testOrder(self): - mgd = TestConnection.openConnection() self.assertEqual("ok", "NOT IMPLEMENTED") def testInheritance(self): - mgd = TestConnection.openConnection() - qs = Midgard.QuerySelect(connection = mgd) + qs = Midgard.QuerySelect(connection = self.mgd) self.assertIsInstance(qs, Midgard.QueryExecutor) if __name__ == "__main__": unittest.main()
Set MidgardConnection in setUp method
## Code Before: import sys import struct import unittest from test_000_config import TestConfig from test_001_connection import TestConnection from gi.repository import Midgard from gi.repository import GObject class TestQuerySelect(unittest.TestCase): def testSelectAdminPerson(self): mgd = TestConnection.openConnection() st = Midgard.QueryStorage(dbclass = "midgard_person") qs = Midgard.QuerySelect(connection = mgd, storage = st) qs.execute() objects = qs.list_objects() # Expect one person only self.assertEqual(len(objects), 1); def testSelectInvalidType(self): mgd = TestConnection.openConnection() st = Midgard.QueryStorage(dbclass = "NotExists") qs = Midgard.QuerySelect(connection = mgd, storage = st) # Check if we have GError self.assertRaises(GObject.GError, qs.execute) # Check if we have correct domain try: qs.execute() except GObject.GError as e: self.assertEqual(e.domain, "midgard-validation-error-quark") self.assertEqual(e.code, Midgard.ValidationError.TYPE_INVALID) def testOrder(self): mgd = TestConnection.openConnection() self.assertEqual("ok", "NOT IMPLEMENTED") def testInheritance(self): mgd = TestConnection.openConnection() qs = Midgard.QuerySelect(connection = mgd) self.assertIsInstance(qs, Midgard.QueryExecutor) if __name__ == "__main__": unittest.main() ## Instruction: Set MidgardConnection in setUp method ## Code After: import sys import struct import unittest from test_000_config import TestConfig from test_001_connection import TestConnection from gi.repository import Midgard from gi.repository import GObject class TestQuerySelect(unittest.TestCase): def setUp(self): self.mgd = TestConnection.openConnection() def tearDown(self): return def testSelectAdminPerson(self): st = Midgard.QueryStorage(dbclass = "midgard_person") qs = Midgard.QuerySelect(connection = self.mgd, storage = st) qs.execute() objects = qs.list_objects() # Expect one person only self.assertEqual(len(objects), 1); def testSelectInvalidType(self): st = Midgard.QueryStorage(dbclass = "NotExists") qs = Midgard.QuerySelect(connection = self.mgd, storage = st) # Check if we have GError self.assertRaises(GObject.GError, qs.execute) # Check if we have correct domain try: qs.execute() except GObject.GError as e: self.assertEqual(e.domain, "midgard-validation-error-quark") self.assertEqual(e.code, Midgard.ValidationError.TYPE_INVALID) def testOrder(self): self.assertEqual("ok", "NOT IMPLEMENTED") def testInheritance(self): qs = Midgard.QuerySelect(connection = self.mgd) self.assertIsInstance(qs, Midgard.QueryExecutor) if __name__ == "__main__": unittest.main()
import sys import struct import unittest from test_000_config import TestConfig from test_001_connection import TestConnection from gi.repository import Midgard from gi.repository import GObject class TestQuerySelect(unittest.TestCase): + def setUp(self): + self.mgd = TestConnection.openConnection() + + def tearDown(self): + return + def testSelectAdminPerson(self): - mgd = TestConnection.openConnection() st = Midgard.QueryStorage(dbclass = "midgard_person") - qs = Midgard.QuerySelect(connection = mgd, storage = st) + qs = Midgard.QuerySelect(connection = self.mgd, storage = st) ? +++++ qs.execute() objects = qs.list_objects() # Expect one person only self.assertEqual(len(objects), 1); - def testSelectInvalidType(self): + def testSelectInvalidType(self): ? + - mgd = TestConnection.openConnection() st = Midgard.QueryStorage(dbclass = "NotExists") - qs = Midgard.QuerySelect(connection = mgd, storage = st) + qs = Midgard.QuerySelect(connection = self.mgd, storage = st) ? +++++ # Check if we have GError self.assertRaises(GObject.GError, qs.execute) # Check if we have correct domain try: qs.execute() except GObject.GError as e: self.assertEqual(e.domain, "midgard-validation-error-quark") self.assertEqual(e.code, Midgard.ValidationError.TYPE_INVALID) def testOrder(self): - mgd = TestConnection.openConnection() self.assertEqual("ok", "NOT IMPLEMENTED") def testInheritance(self): - mgd = TestConnection.openConnection() - qs = Midgard.QuerySelect(connection = mgd) + qs = Midgard.QuerySelect(connection = self.mgd) ? +++++ self.assertIsInstance(qs, Midgard.QueryExecutor) if __name__ == "__main__": unittest.main()
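The same refactor expressed as a minimal, runnable unittest example: shared fixture construction moves into setUp() so every test method reuses it. FakeConnection is an invented stand-in for the Midgard connection.

import unittest


class FakeConnection:
    """Invented stand-in for the shared connection object."""
    def list_people(self):
        return ['admin']


class SetUpExample(unittest.TestCase):
    def setUp(self):
        # Built once per test method, shared by all of them via self.
        self.conn = FakeConnection()

    def test_one_person(self):
        self.assertEqual(len(self.conn.list_people()), 1)


if __name__ == '__main__':
    unittest.main()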
41fccd9d5060f2b8dcedde2cb9ab3391b48df420
scripts/generate_input_syntax.py
scripts/generate_input_syntax.py
import sys, os # get the location of this script app_path = os.path.abspath(os.path.dirname(sys.argv[0])) # this script is actually in the scripts subdirectory, so go up a level app_path += '/..' # Set the name of the application here and moose directory relative to the application app_name = 'RAVEN' MOOSE_DIR = app_path + '/../moose' # See if MOOSE_DIR is already in the environment instead if os.environ.has_key("MOOSE_DIR"): MOOSE_DIR = os.environ['MOOSE_DIR'] sys.path.append(MOOSE_DIR + '/scripts/syntaxHTML') import genInputFileSyntaxHTML # this will automatically copy the documentation to the base directory # in a folder named syntax genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, MOOSE_DIR)
import sys, os # get the location of this script app_path = os.path.abspath(os.path.dirname(sys.argv[0])) # Set the name of the application here and moose directory relative to the application app_name = 'raven' MOOSE_DIR = os.path.abspath(os.path.join(app_path, '..', '..' 'moose')) FRAMEWORK_DIR = os.path.abspath(os.path.join(app_path, '..', '..', 'moose', 'framework')) #### See if MOOSE_DIR is already in the environment instead if os.environ.has_key("MOOSE_DIR"): MOOSE_DIR = os.environ['MOOSE_DIR'] FRAMEWORK_DIR = os.path.join(MOOSE_DIR, 'framework') if os.environ.has_key("FRAMEWORK_DIR"): FRAMEWORK_DIR = os.environ['FRAMEWORK_DIR'] sys.path.append(FRAMEWORK_DIR + '/scripts/syntaxHTML') import genInputFileSyntaxHTML # this will automatically copy the documentation to the base directory # in a folder named syntax genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, FRAMEWORK_DIR)
Update scripts to reflect new MOOSE_DIR definition
Update scripts to reflect new MOOSE_DIR definition r25009
Python
apache-2.0
idaholab/raven,idaholab/raven,idaholab/raven,idaholab/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,idaholab/raven,idaholab/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven,idaholab/raven,joshua-cogliati-inl/raven,joshua-cogliati-inl/raven
import sys, os # get the location of this script app_path = os.path.abspath(os.path.dirname(sys.argv[0])) - # this script is actually in the scripts subdirectory, so go up a level - app_path += '/..' # Set the name of the application here and moose directory relative to the application - app_name = 'RAVEN' - MOOSE_DIR = app_path + '/../moose' + app_name = 'raven' + + MOOSE_DIR = os.path.abspath(os.path.join(app_path, '..', '..' 'moose')) + FRAMEWORK_DIR = os.path.abspath(os.path.join(app_path, '..', '..', 'moose', 'framework')) - # See if MOOSE_DIR is already in the environment instead + #### See if MOOSE_DIR is already in the environment instead if os.environ.has_key("MOOSE_DIR"): MOOSE_DIR = os.environ['MOOSE_DIR'] + FRAMEWORK_DIR = os.path.join(MOOSE_DIR, 'framework') + if os.environ.has_key("FRAMEWORK_DIR"): + FRAMEWORK_DIR = os.environ['FRAMEWORK_DIR'] - sys.path.append(MOOSE_DIR + '/scripts/syntaxHTML') + sys.path.append(FRAMEWORK_DIR + '/scripts/syntaxHTML') import genInputFileSyntaxHTML # this will automatically copy the documentation to the base directory # in a folder named syntax - genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, MOOSE_DIR) + genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, FRAMEWORK_DIR)
Update scripts to reflect new MOOSE_DIR definition
## Code Before: import sys, os # get the location of this script app_path = os.path.abspath(os.path.dirname(sys.argv[0])) # this script is actually in the scripts subdirectory, so go up a level app_path += '/..' # Set the name of the application here and moose directory relative to the application app_name = 'RAVEN' MOOSE_DIR = app_path + '/../moose' # See if MOOSE_DIR is already in the environment instead if os.environ.has_key("MOOSE_DIR"): MOOSE_DIR = os.environ['MOOSE_DIR'] sys.path.append(MOOSE_DIR + '/scripts/syntaxHTML') import genInputFileSyntaxHTML # this will automatically copy the documentation to the base directory # in a folder named syntax genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, MOOSE_DIR) ## Instruction: Update scripts to reflect new MOOSE_DIR definition ## Code After: import sys, os # get the location of this script app_path = os.path.abspath(os.path.dirname(sys.argv[0])) # Set the name of the application here and moose directory relative to the application app_name = 'raven' MOOSE_DIR = os.path.abspath(os.path.join(app_path, '..', '..' 'moose')) FRAMEWORK_DIR = os.path.abspath(os.path.join(app_path, '..', '..', 'moose', 'framework')) #### See if MOOSE_DIR is already in the environment instead if os.environ.has_key("MOOSE_DIR"): MOOSE_DIR = os.environ['MOOSE_DIR'] FRAMEWORK_DIR = os.path.join(MOOSE_DIR, 'framework') if os.environ.has_key("FRAMEWORK_DIR"): FRAMEWORK_DIR = os.environ['FRAMEWORK_DIR'] sys.path.append(FRAMEWORK_DIR + '/scripts/syntaxHTML') import genInputFileSyntaxHTML # this will automatically copy the documentation to the base directory # in a folder named syntax genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, FRAMEWORK_DIR)
import sys, os # get the location of this script app_path = os.path.abspath(os.path.dirname(sys.argv[0])) - # this script is actually in the scripts subdirectory, so go up a level - app_path += '/..' # Set the name of the application here and moose directory relative to the application - app_name = 'RAVEN' - MOOSE_DIR = app_path + '/../moose' + app_name = 'raven' + + MOOSE_DIR = os.path.abspath(os.path.join(app_path, '..', '..' 'moose')) + FRAMEWORK_DIR = os.path.abspath(os.path.join(app_path, '..', '..', 'moose', 'framework')) - # See if MOOSE_DIR is already in the environment instead + #### See if MOOSE_DIR is already in the environment instead ? +++ if os.environ.has_key("MOOSE_DIR"): MOOSE_DIR = os.environ['MOOSE_DIR'] + FRAMEWORK_DIR = os.path.join(MOOSE_DIR, 'framework') + if os.environ.has_key("FRAMEWORK_DIR"): + FRAMEWORK_DIR = os.environ['FRAMEWORK_DIR'] - sys.path.append(MOOSE_DIR + '/scripts/syntaxHTML') ? ^^^ + sys.path.append(FRAMEWORK_DIR + '/scripts/syntaxHTML') ? +++ ++ ^^ import genInputFileSyntaxHTML # this will automatically copy the documentation to the base directory # in a folder named syntax - genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, MOOSE_DIR) ? ^^^ + genInputFileSyntaxHTML.generateHTML(app_name, app_path, sys.argv, FRAMEWORK_DIR) ? +++ ++ ^^
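A Python 3 sketch of the pattern the commit implements (script-relative default paths that environment variables may override); it uses os.environ.get() rather than the Python-2-only has_key() and passes every os.path.join() argument with an explicit comma. The directory layout is assumed for illustration.

import os
import sys

app_path = os.path.abspath(os.path.dirname(sys.argv[0]))

# Script-relative default, with a comma between every join() argument.
moose_dir = os.path.abspath(os.path.join(app_path, '..', '..', 'moose'))

# Environment variables take precedence when they are set.
moose_dir = os.environ.get('MOOSE_DIR', moose_dir)
framework_dir = os.environ.get('FRAMEWORK_DIR',
                               os.path.join(moose_dir, 'framework'))

print(moose_dir)
print(framework_dir)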
05b6eaf259117cc6254e2b13c5a02569713e6356
inbox/contacts/process_mail.py
inbox/contacts/process_mail.py
import uuid from inbox.models import Contact, MessageContactAssociation def update_contacts_from_message(db_session, message, account_id): with db_session.no_autoflush: for field in ('to_addr', 'from_addr', 'cc_addr', 'bcc_addr'): if getattr(message, field) is None: continue items = set(getattr(message, field)) for name, email_address in items: contact = db_session.query(Contact).filter( Contact.email_address == email_address).first() if contact is None: contact = Contact(name=name, email_address=email_address, account_id=account_id, source='local', provider_name='inbox', uid=uuid.uuid4().hex) message.contacts.append(MessageContactAssociation( contact=contact, field=field))
import uuid from inbox.models import Contact, MessageContactAssociation def update_contacts_from_message(db_session, message, account_id): with db_session.no_autoflush: for field in ('to_addr', 'from_addr', 'cc_addr', 'bcc_addr'): if getattr(message, field) is None: continue items = set(getattr(message, field)) for name, email_address in items: contact = db_session.query(Contact).filter( Contact.email_address == email_address, Contact.account_id == account_id).first() if contact is None: contact = Contact(name=name, email_address=email_address, account_id=account_id, source='local', provider_name='inbox', uid=uuid.uuid4().hex) message.contacts.append(MessageContactAssociation( contact=contact, field=field))
Fix filtering criterion when updating contacts from message.
Fix filtering criterion when updating contacts from message.
Python
agpl-3.0
gale320/sync-engine,closeio/nylas,wakermahmud/sync-engine,nylas/sync-engine,EthanBlackburn/sync-engine,jobscore/sync-engine,gale320/sync-engine,nylas/sync-engine,Eagles2F/sync-engine,PriviPK/privipk-sync-engine,Eagles2F/sync-engine,nylas/sync-engine,ErinCall/sync-engine,PriviPK/privipk-sync-engine,closeio/nylas,ErinCall/sync-engine,EthanBlackburn/sync-engine,Eagles2F/sync-engine,closeio/nylas,closeio/nylas,wakermahmud/sync-engine,Eagles2F/sync-engine,nylas/sync-engine,wakermahmud/sync-engine,jobscore/sync-engine,jobscore/sync-engine,Eagles2F/sync-engine,EthanBlackburn/sync-engine,EthanBlackburn/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,gale320/sync-engine,PriviPK/privipk-sync-engine,PriviPK/privipk-sync-engine,jobscore/sync-engine,PriviPK/privipk-sync-engine,gale320/sync-engine,EthanBlackburn/sync-engine,gale320/sync-engine,ErinCall/sync-engine
import uuid from inbox.models import Contact, MessageContactAssociation def update_contacts_from_message(db_session, message, account_id): with db_session.no_autoflush: for field in ('to_addr', 'from_addr', 'cc_addr', 'bcc_addr'): if getattr(message, field) is None: continue items = set(getattr(message, field)) for name, email_address in items: contact = db_session.query(Contact).filter( - Contact.email_address == email_address).first() + Contact.email_address == email_address, + Contact.account_id == account_id).first() if contact is None: contact = Contact(name=name, email_address=email_address, account_id=account_id, source='local', provider_name='inbox', uid=uuid.uuid4().hex) message.contacts.append(MessageContactAssociation( contact=contact, field=field))
Fix filtering criterion when updating contacts from message.
## Code Before: import uuid from inbox.models import Contact, MessageContactAssociation def update_contacts_from_message(db_session, message, account_id): with db_session.no_autoflush: for field in ('to_addr', 'from_addr', 'cc_addr', 'bcc_addr'): if getattr(message, field) is None: continue items = set(getattr(message, field)) for name, email_address in items: contact = db_session.query(Contact).filter( Contact.email_address == email_address).first() if contact is None: contact = Contact(name=name, email_address=email_address, account_id=account_id, source='local', provider_name='inbox', uid=uuid.uuid4().hex) message.contacts.append(MessageContactAssociation( contact=contact, field=field)) ## Instruction: Fix filtering criterion when updating contacts from message. ## Code After: import uuid from inbox.models import Contact, MessageContactAssociation def update_contacts_from_message(db_session, message, account_id): with db_session.no_autoflush: for field in ('to_addr', 'from_addr', 'cc_addr', 'bcc_addr'): if getattr(message, field) is None: continue items = set(getattr(message, field)) for name, email_address in items: contact = db_session.query(Contact).filter( Contact.email_address == email_address, Contact.account_id == account_id).first() if contact is None: contact = Contact(name=name, email_address=email_address, account_id=account_id, source='local', provider_name='inbox', uid=uuid.uuid4().hex) message.contacts.append(MessageContactAssociation( contact=contact, field=field))
import uuid from inbox.models import Contact, MessageContactAssociation def update_contacts_from_message(db_session, message, account_id): with db_session.no_autoflush: for field in ('to_addr', 'from_addr', 'cc_addr', 'bcc_addr'): if getattr(message, field) is None: continue items = set(getattr(message, field)) for name, email_address in items: contact = db_session.query(Contact).filter( - Contact.email_address == email_address).first() ? ^^^^^^^^^ + Contact.email_address == email_address, ? ^ + Contact.account_id == account_id).first() if contact is None: contact = Contact(name=name, email_address=email_address, account_id=account_id, source='local', provider_name='inbox', uid=uuid.uuid4().hex) message.contacts.append(MessageContactAssociation( contact=contact, field=field))
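Why the extra filter clause matters is easiest to see in a self-contained example: a get-or-create keyed on both email_address and account_id, using SQLAlchemy (1.4 or newer) with an in-memory SQLite database. The Contact model below is invented and is not the project's actual schema.

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()


class Contact(Base):
    __tablename__ = 'contact'
    id = Column(Integer, primary_key=True)
    account_id = Column(Integer, nullable=False)
    email_address = Column(String, nullable=False)
    name = Column(String)


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
Session = sessionmaker(bind=engine)


def get_or_create_contact(session, account_id, name, email_address):
    # Filtering on BOTH columns keeps contacts scoped per account.
    contact = session.query(Contact).filter(
        Contact.email_address == email_address,
        Contact.account_id == account_id).first()
    if contact is None:
        contact = Contact(account_id=account_id, name=name,
                          email_address=email_address)
        session.add(contact)
    return contact


session = Session()
get_or_create_contact(session, 1, 'Ann', 'ann@example.com')
get_or_create_contact(session, 2, 'Ann', 'ann@example.com')
session.commit()
print(session.query(Contact).count())  # 2, one row per account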
40bc1f50e7b0605522feb4ac86daebb9f785eb88
test/OLItest/globals.py
test/OLItest/globals.py
from cStringIO import StringIO default_conf=StringIO("""[general] #will be set automatically metadata = accounts = test ui = quiet [Account test] localrepository = Maildir remoterepository = IMAP [Repository Maildir] Type = Maildir # will be set automatically during tests localfolders = [Repository IMAP] type=IMAP folderfilter= lambda f: f.startswith('INBOX.OLItest') """)
from cStringIO import StringIO default_conf=StringIO("""[general] #will be set automatically metadata = accounts = test ui = quiet [Account test] localrepository = Maildir remoterepository = IMAP [Repository Maildir] Type = Maildir # will be set automatically during tests localfolders = [Repository IMAP] type=IMAP # Don't hammer the server with too many connection attempts: maxconnections=1 folderfilter= lambda f: f.startswith('INBOX.OLItest') """)
Use only 1 IMAP connection by default
tests: Use only 1 IMAP connection by default

We don't want to hammer IMAP servers for the test series too much to
avoid being locked out. We will need a few tests to test concurrent
connections, but by default one connection should be fine.

Signed-off-by: Sebastian Spaeth <98dcb2717ddae152d5b359c6ea97e4fe34a29d4c@SSpaeth.de>
Python
apache-2.0
frioux/offlineimap,frioux/offlineimap
from cStringIO import StringIO default_conf=StringIO("""[general] #will be set automatically metadata = accounts = test ui = quiet [Account test] localrepository = Maildir remoterepository = IMAP [Repository Maildir] Type = Maildir # will be set automatically during tests localfolders = [Repository IMAP] type=IMAP + # Don't hammer the server with too many connection attempts: + maxconnections=1 folderfilter= lambda f: f.startswith('INBOX.OLItest') """)
Use only 1 IMAP connection by default
## Code Before: from cStringIO import StringIO default_conf=StringIO("""[general] #will be set automatically metadata = accounts = test ui = quiet [Account test] localrepository = Maildir remoterepository = IMAP [Repository Maildir] Type = Maildir # will be set automatically during tests localfolders = [Repository IMAP] type=IMAP folderfilter= lambda f: f.startswith('INBOX.OLItest') """) ## Instruction: Use only 1 IMAP connection by default ## Code After: from cStringIO import StringIO default_conf=StringIO("""[general] #will be set automatically metadata = accounts = test ui = quiet [Account test] localrepository = Maildir remoterepository = IMAP [Repository Maildir] Type = Maildir # will be set automatically during tests localfolders = [Repository IMAP] type=IMAP # Don't hammer the server with too many connection attempts: maxconnections=1 folderfilter= lambda f: f.startswith('INBOX.OLItest') """)
from cStringIO import StringIO default_conf=StringIO("""[general] #will be set automatically metadata = accounts = test ui = quiet [Account test] localrepository = Maildir remoterepository = IMAP [Repository Maildir] Type = Maildir # will be set automatically during tests localfolders = [Repository IMAP] type=IMAP + # Don't hammer the server with too many connection attempts: + maxconnections=1 folderfilter= lambda f: f.startswith('INBOX.OLItest') """)
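A quick sketch of where the new option lands once such a config is parsed, using Python 3's configparser on a trimmed copy of the section; offlineimap itself (a Python 2 era codebase here) reads the file through its own configuration machinery.

import configparser

conf_text = """
[Repository IMAP]
type = IMAP
maxconnections = 1
folderfilter = lambda f: f.startswith('INBOX.OLItest')
"""

parser = configparser.ConfigParser()
parser.read_string(conf_text)
# The per-repository connection cap is now visible to whatever reads it.
print(parser.getint('Repository IMAP', 'maxconnections'))  # 1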
0da19042c74d2a85ef4652b36186a1ee6c4fc247
tilequeue/format/mvt.py
tilequeue/format/mvt.py
from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid from mapbox_vector_tile import encode as mvt_encode def encode(fp, feature_layers, coord, bounds_merc): tile = mvt_encode(feature_layers, quantize_bounds=bounds_merc, on_invalid_geometry=on_invalid_geometry_make_valid) fp.write(tile)
from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid from mapbox_vector_tile import encode as mvt_encode def encode(fp, feature_layers, coord, bounds_merc): tile = mvt_encode( feature_layers, quantize_bounds=bounds_merc, on_invalid_geometry=on_invalid_geometry_make_valid, round_fn=round, ) fp.write(tile)
Use round_fn to specify built-in round function
Use round_fn to specify built-in round function
Python
mit
mapzen/tilequeue,tilezen/tilequeue
from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid from mapbox_vector_tile import encode as mvt_encode def encode(fp, feature_layers, coord, bounds_merc): - tile = mvt_encode(feature_layers, quantize_bounds=bounds_merc, + tile = mvt_encode( + feature_layers, + quantize_bounds=bounds_merc, - on_invalid_geometry=on_invalid_geometry_make_valid) + on_invalid_geometry=on_invalid_geometry_make_valid, + round_fn=round, + ) fp.write(tile)
Use round_fn to specify built-in round function
## Code Before: from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid from mapbox_vector_tile import encode as mvt_encode def encode(fp, feature_layers, coord, bounds_merc): tile = mvt_encode(feature_layers, quantize_bounds=bounds_merc, on_invalid_geometry=on_invalid_geometry_make_valid) fp.write(tile) ## Instruction: Use round_fn to specify built-in round function ## Code After: from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid from mapbox_vector_tile import encode as mvt_encode def encode(fp, feature_layers, coord, bounds_merc): tile = mvt_encode( feature_layers, quantize_bounds=bounds_merc, on_invalid_geometry=on_invalid_geometry_make_valid, round_fn=round, ) fp.write(tile)
from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid from mapbox_vector_tile import encode as mvt_encode def encode(fp, feature_layers, coord, bounds_merc): - tile = mvt_encode(feature_layers, quantize_bounds=bounds_merc, + tile = mvt_encode( + feature_layers, + quantize_bounds=bounds_merc, - on_invalid_geometry=on_invalid_geometry_make_valid) ? -------------- ^ + on_invalid_geometry=on_invalid_geometry_make_valid, ? ^ + round_fn=round, + ) fp.write(tile)
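A general-purpose illustration of why pinning the rounding callable is worth doing: Python 3's built-in round() uses banker's rounding, so naming it explicitly (or supplying an alternative) makes coordinate snapping deterministic and documented. This is plain Python, not tilequeue or mapbox_vector_tile code.

import math

# Banker's rounding: ties go to the nearest even integer.
print(round(0.5), round(1.5), round(2.5))                           # 0 2 2

def round_half_up(value):
    # One possible alternative callable with "classic" tie-breaking.
    return math.floor(value + 0.5)

print(round_half_up(0.5), round_half_up(1.5), round_half_up(2.5))   # 1 2 3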
331f776eef9acd0509c7534040ef225869305d7f
tests/test_cookies.py
tests/test_cookies.py
def test_cookies_fixture(testdir): """Make sure that pytest accepts the `cookies` fixture.""" # create a temporary pytest test module testdir.makepyfile(""" def test_valid_fixture(cookies): assert hasattr(cookies, 'bake') assert callable(cookies.bake) assert cookies.error is None assert cookies.project is None """) # run pytest with the following cmd args result = testdir.runpytest('-v') # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ '*::test_valid_fixture PASSED', ]) # make sure that that we get a '0' exit code for the testsuite assert result.ret == 0 def test_help_message(testdir): result = testdir.runpytest( '--help', ) # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ 'cookies:', ])
def test_cookies_fixture(testdir): """Make sure that pytest accepts the `cookies` fixture.""" # create a temporary pytest test module testdir.makepyfile(""" def test_valid_fixture(cookies): assert hasattr(cookies, 'bake') assert callable(cookies.bake) assert cookies.exception is None assert cookies.exit_code == 0 assert cookies.project is None """) # run pytest with the following cmd args result = testdir.runpytest('-v') # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ '*::test_valid_fixture PASSED', ]) # make sure that that we get a '0' exit code for the testsuite assert result.ret == 0 def test_help_message(testdir): result = testdir.runpytest( '--help', ) # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ 'cookies:', ])
Update test for cookies fixture
Update test for cookies fixture
Python
mit
hackebrot/pytest-cookies
def test_cookies_fixture(testdir): """Make sure that pytest accepts the `cookies` fixture.""" # create a temporary pytest test module testdir.makepyfile(""" def test_valid_fixture(cookies): assert hasattr(cookies, 'bake') assert callable(cookies.bake) - assert cookies.error is None + assert cookies.exception is None + assert cookies.exit_code == 0 assert cookies.project is None """) # run pytest with the following cmd args result = testdir.runpytest('-v') # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ '*::test_valid_fixture PASSED', ]) # make sure that that we get a '0' exit code for the testsuite assert result.ret == 0 def test_help_message(testdir): result = testdir.runpytest( '--help', ) # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ 'cookies:', ])
Update test for cookies fixture
## Code Before: def test_cookies_fixture(testdir): """Make sure that pytest accepts the `cookies` fixture.""" # create a temporary pytest test module testdir.makepyfile(""" def test_valid_fixture(cookies): assert hasattr(cookies, 'bake') assert callable(cookies.bake) assert cookies.error is None assert cookies.project is None """) # run pytest with the following cmd args result = testdir.runpytest('-v') # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ '*::test_valid_fixture PASSED', ]) # make sure that that we get a '0' exit code for the testsuite assert result.ret == 0 def test_help_message(testdir): result = testdir.runpytest( '--help', ) # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ 'cookies:', ]) ## Instruction: Update test for cookies fixture ## Code After: def test_cookies_fixture(testdir): """Make sure that pytest accepts the `cookies` fixture.""" # create a temporary pytest test module testdir.makepyfile(""" def test_valid_fixture(cookies): assert hasattr(cookies, 'bake') assert callable(cookies.bake) assert cookies.exception is None assert cookies.exit_code == 0 assert cookies.project is None """) # run pytest with the following cmd args result = testdir.runpytest('-v') # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ '*::test_valid_fixture PASSED', ]) # make sure that that we get a '0' exit code for the testsuite assert result.ret == 0 def test_help_message(testdir): result = testdir.runpytest( '--help', ) # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ 'cookies:', ])
def test_cookies_fixture(testdir): """Make sure that pytest accepts the `cookies` fixture.""" # create a temporary pytest test module testdir.makepyfile(""" def test_valid_fixture(cookies): assert hasattr(cookies, 'bake') assert callable(cookies.bake) - assert cookies.error is None ? ^^ ^ + assert cookies.exception is None ? ^^^^^^ ^ + assert cookies.exit_code == 0 assert cookies.project is None """) # run pytest with the following cmd args result = testdir.runpytest('-v') # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ '*::test_valid_fixture PASSED', ]) # make sure that that we get a '0' exit code for the testsuite assert result.ret == 0 def test_help_message(testdir): result = testdir.runpytest( '--help', ) # fnmatch_lines does an assertion internally result.stdout.fnmatch_lines([ 'cookies:', ])
48987cb9b5417232280482c681d3e055c1dee9a4
snap7/bin/snap7-server.py
snap7/bin/snap7-server.py
import time import logging import snap7 def mainloop(): server = snap7.server.Server() size = 100 data = (snap7.types.wordlen_to_ctypes[snap7.types.S7WLByte] * size)() server.register_area(snap7.types.srvAreaDB, 1, data) server.start() while True: #logger.info("server: %s cpu: %s users: %s" % server.get_status()) while True: event = server.pick_event() if event: logger.info(server.event_text(event)) else: break time.sleep(1) def check_root(): """ check if uid of this process is root """ import os import platform if platform.system() == 'Windows': # We don't need root on Windows to use port 102 return True if os.getuid() == 0: return True root_msg = "it sucks, but you need to run this as root. The snap7 library is" \ " hardcoded run on port 102, which requires root privileges." if __name__ == '__main__': logging.basicConfig() logger = logging.getLogger() logger.setLevel(logging.INFO) if not check_root(): logging.error(root_msg) mainloop()
import time import logging import snap7 def mainloop(): server = snap7.server.Server() size = 100 data = (snap7.types.wordlen_to_ctypes[snap7.types.S7WLByte] * size)() server.register_area(snap7.types.srvAreaDB, 1, data) server.start() while True: #logger.info("server: %s cpu: %s users: %s" % server.get_status()) while True: event = server.pick_event() if event: logger.info(server.event_text(event)) else: break time.sleep(1) def check_root(): """ check if uid of this process is root """ import os import platform if platform.system() == 'Windows': # We don't need root on Windows to use port 102 return True if os.getuid() == 0: return True root_msg = "it sucks, but you need to run this as root. The snap7 library is" \ " hardcoded run on port 102, which requires root privileges." if __name__ == '__main__': import sys if len(sys.argv) > 1: snap7.common.load_library(sys.argv[1]) logging.basicConfig() logger = logging.getLogger() logger.setLevel(logging.INFO) if not check_root(): logging.error(root_msg) mainloop()
Add option to start server passing lib path
Add option to start server passing lib path
Python
mit
SimplyAutomationized/python-snap7,gijzelaerr/python-snap7,ellepdesk/python-snap7,SimplyAutomationized/python-snap7,ellepdesk/python-snap7
import time import logging import snap7 def mainloop(): server = snap7.server.Server() size = 100 data = (snap7.types.wordlen_to_ctypes[snap7.types.S7WLByte] * size)() server.register_area(snap7.types.srvAreaDB, 1, data) server.start() while True: #logger.info("server: %s cpu: %s users: %s" % server.get_status()) while True: event = server.pick_event() if event: logger.info(server.event_text(event)) else: break time.sleep(1) def check_root(): """ check if uid of this process is root """ import os import platform if platform.system() == 'Windows': # We don't need root on Windows to use port 102 return True if os.getuid() == 0: return True root_msg = "it sucks, but you need to run this as root. The snap7 library is" \ " hardcoded run on port 102, which requires root privileges." if __name__ == '__main__': + import sys + if len(sys.argv) > 1: + snap7.common.load_library(sys.argv[1]) logging.basicConfig() logger = logging.getLogger() logger.setLevel(logging.INFO) if not check_root(): logging.error(root_msg) mainloop()
Add option to start server passing lib path
## Code Before: import time import logging import snap7 def mainloop(): server = snap7.server.Server() size = 100 data = (snap7.types.wordlen_to_ctypes[snap7.types.S7WLByte] * size)() server.register_area(snap7.types.srvAreaDB, 1, data) server.start() while True: #logger.info("server: %s cpu: %s users: %s" % server.get_status()) while True: event = server.pick_event() if event: logger.info(server.event_text(event)) else: break time.sleep(1) def check_root(): """ check if uid of this process is root """ import os import platform if platform.system() == 'Windows': # We don't need root on Windows to use port 102 return True if os.getuid() == 0: return True root_msg = "it sucks, but you need to run this as root. The snap7 library is" \ " hardcoded run on port 102, which requires root privileges." if __name__ == '__main__': logging.basicConfig() logger = logging.getLogger() logger.setLevel(logging.INFO) if not check_root(): logging.error(root_msg) mainloop() ## Instruction: Add option to start server passing lib path ## Code After: import time import logging import snap7 def mainloop(): server = snap7.server.Server() size = 100 data = (snap7.types.wordlen_to_ctypes[snap7.types.S7WLByte] * size)() server.register_area(snap7.types.srvAreaDB, 1, data) server.start() while True: #logger.info("server: %s cpu: %s users: %s" % server.get_status()) while True: event = server.pick_event() if event: logger.info(server.event_text(event)) else: break time.sleep(1) def check_root(): """ check if uid of this process is root """ import os import platform if platform.system() == 'Windows': # We don't need root on Windows to use port 102 return True if os.getuid() == 0: return True root_msg = "it sucks, but you need to run this as root. The snap7 library is" \ " hardcoded run on port 102, which requires root privileges." if __name__ == '__main__': import sys if len(sys.argv) > 1: snap7.common.load_library(sys.argv[1]) logging.basicConfig() logger = logging.getLogger() logger.setLevel(logging.INFO) if not check_root(): logging.error(root_msg) mainloop()
import time import logging import snap7 def mainloop(): server = snap7.server.Server() size = 100 data = (snap7.types.wordlen_to_ctypes[snap7.types.S7WLByte] * size)() server.register_area(snap7.types.srvAreaDB, 1, data) server.start() while True: #logger.info("server: %s cpu: %s users: %s" % server.get_status()) while True: event = server.pick_event() if event: logger.info(server.event_text(event)) else: break time.sleep(1) def check_root(): """ check if uid of this process is root """ import os import platform if platform.system() == 'Windows': # We don't need root on Windows to use port 102 return True if os.getuid() == 0: return True root_msg = "it sucks, but you need to run this as root. The snap7 library is" \ " hardcoded run on port 102, which requires root privileges." if __name__ == '__main__': + import sys + if len(sys.argv) > 1: + snap7.common.load_library(sys.argv[1]) logging.basicConfig() logger = logging.getLogger() logger.setLevel(logging.INFO) if not check_root(): logging.error(root_msg) mainloop()
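An argparse variant of the optional library-path argument, sketched with an example path; load_library is only mentioned in the printed message, not called, so the snippet runs without snap7 installed.

import argparse

parser = argparse.ArgumentParser(description='demo server launcher')
parser.add_argument('libpath', nargs='?', default=None,
                    help='optional path to the snap7 shared library')

# Parse an example argv here so the snippet runs anywhere.
args = parser.parse_args(['/usr/local/lib/libsnap7.so'])
if args.libpath:
    print('would call snap7.common.load_library(%r)' % args.libpath)
else:
    print('would fall back to the default library lookup')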
fc3f64de95554a66e2ec64804acf9c6032dd7e7b
test/rsrc/convert_stub.py
test/rsrc/convert_stub.py
import sys def convert(in_file, out_file, tag): """Copy `in_file` to `out_file` and append the string `tag`. """ with open(out_file, 'wb') as out_f: with open(in_file, 'rb') as in_f: out_f.write(in_f.read()) out_f.write(tag) if __name__ == '__main__': convert(sys.argv[1], sys.argv[2], sys.argv[3])
import sys def convert(in_file, out_file, tag): """Copy `in_file` to `out_file` and append the string `tag`. """ # On Python 3, encode the tag argument as bytes. if not isinstance(tag, bytes): tag = tag.encode('utf8') with open(out_file, 'wb') as out_f: with open(in_file, 'rb') as in_f: out_f.write(in_f.read()) out_f.write(tag) if __name__ == '__main__': convert(sys.argv[1], sys.argv[2], sys.argv[3])
Convert stub: Python 3 compatibility
Convert stub: Python 3 compatibility Important for systems where `python` is 3.x, like Arch, even when beets itself is running on Python 2.
Python
mit
madmouser1/beets,jcoady9/beets,jcoady9/beets,xsteadfastx/beets,madmouser1/beets,diego-plan9/beets,Kraymer/beets,jackwilsdon/beets,sampsyo/beets,beetbox/beets,MyTunesFreeMusic/privacy-policy,shamangeorge/beets,pkess/beets,Kraymer/beets,mosesfistos1/beetbox,Kraymer/beets,sampsyo/beets,lengtche/beets,pkess/beets,xsteadfastx/beets,xsteadfastx/beets,shamangeorge/beets,ibmibmibm/beets,diego-plan9/beets,madmouser1/beets,mosesfistos1/beetbox,beetbox/beets,lengtche/beets,Kraymer/beets,SusannaMaria/beets,lengtche/beets,shamangeorge/beets,shamangeorge/beets,jcoady9/beets,pkess/beets,artemutin/beets,mosesfistos1/beetbox,beetbox/beets,SusannaMaria/beets,MyTunesFreeMusic/privacy-policy,jackwilsdon/beets,madmouser1/beets,MyTunesFreeMusic/privacy-policy,jackwilsdon/beets,jackwilsdon/beets,MyTunesFreeMusic/privacy-policy,diego-plan9/beets,SusannaMaria/beets,jcoady9/beets,artemutin/beets,artemutin/beets,ibmibmibm/beets,pkess/beets,ibmibmibm/beets,sampsyo/beets,xsteadfastx/beets,ibmibmibm/beets,beetbox/beets,lengtche/beets,mosesfistos1/beetbox,SusannaMaria/beets,artemutin/beets,diego-plan9/beets,sampsyo/beets
import sys def convert(in_file, out_file, tag): """Copy `in_file` to `out_file` and append the string `tag`. """ + # On Python 3, encode the tag argument as bytes. + if not isinstance(tag, bytes): + tag = tag.encode('utf8') + with open(out_file, 'wb') as out_f: with open(in_file, 'rb') as in_f: out_f.write(in_f.read()) out_f.write(tag) if __name__ == '__main__': convert(sys.argv[1], sys.argv[2], sys.argv[3])
Convert stub: Python 3 compatibility
## Code Before: import sys def convert(in_file, out_file, tag): """Copy `in_file` to `out_file` and append the string `tag`. """ with open(out_file, 'wb') as out_f: with open(in_file, 'rb') as in_f: out_f.write(in_f.read()) out_f.write(tag) if __name__ == '__main__': convert(sys.argv[1], sys.argv[2], sys.argv[3]) ## Instruction: Convert stub: Python 3 compatibility ## Code After: import sys def convert(in_file, out_file, tag): """Copy `in_file` to `out_file` and append the string `tag`. """ # On Python 3, encode the tag argument as bytes. if not isinstance(tag, bytes): tag = tag.encode('utf8') with open(out_file, 'wb') as out_f: with open(in_file, 'rb') as in_f: out_f.write(in_f.read()) out_f.write(tag) if __name__ == '__main__': convert(sys.argv[1], sys.argv[2], sys.argv[3])
import sys def convert(in_file, out_file, tag): """Copy `in_file` to `out_file` and append the string `tag`. """ + # On Python 3, encode the tag argument as bytes. + if not isinstance(tag, bytes): + tag = tag.encode('utf8') + with open(out_file, 'wb') as out_f: with open(in_file, 'rb') as in_f: out_f.write(in_f.read()) out_f.write(tag) if __name__ == '__main__': convert(sys.argv[1], sys.argv[2], sys.argv[3])
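The str/bytes coercion idiom the fix relies on, sketched against an io.BytesIO stand-in for a file opened in 'wb' mode so nothing is written to disk.

import io

def as_bytes(value, encoding='utf8'):
    # Leave bytes untouched, encode text; same guard as the fix above.
    return value if isinstance(value, bytes) else value.encode(encoding)

buf = io.BytesIO()               # stands in for a file opened with 'wb'
buf.write(as_bytes('converted '))
buf.write(as_bytes(b'already-bytes'))
print(buf.getvalue())            # b'converted already-bytes'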
19bae697bc6e017a97eef77d1425d1ccfbe27ff6
vprof/__main__.py
vprof/__main__.py
"""Visual profiler for Python.""" import argparse import functools import json import profile import stats_server import subprocess import sys _MODULE_DESC = 'Python visual profiler.' _HOST = 'localhost' _PORT = 8000 def main(): parser = argparse.ArgumentParser(description=_MODULE_DESC) parser.add_argument('source', metavar='src', nargs=1, help='Python program to profile.') args = parser.parse_args() sys.argv[:] = args.source print('Collecting profile stats...') program_info = profile.CProfile(args.source[0]).run() partial_handler = functools.partial( stats_server.StatsHandler, profile_json=json.dumps(program_info)) subprocess.call(['open', 'http://%s:%s' % (_HOST, _PORT)]) stats_server.start(_HOST, _PORT, partial_handler) if __name__ == "__main__": main()
"""Visual profiler for Python.""" import argparse import functools import json import profile import stats_server import subprocess import sys _MODULE_DESC = 'Python visual profiler.' _HOST = 'localhost' _PORT = 8000 _PROFILE_MAP = { 'c': profile.CProfile } def main(): parser = argparse.ArgumentParser(description=_MODULE_DESC) parser.add_argument('profilers', metavar='opts', help='Profilers configuration') parser.add_argument('source', metavar='src', nargs=1, help='Python program to profile.') args = parser.parse_args() sys.argv[:] = args.source program_name = args.source[0] if len(args.profilers) > len(set(args.profilers)): print('Profiler configuration is ambiguous. Remove duplicates.') sys.exit(1) for prof_option in args.profilers: if prof_option not in _PROFILE_MAP: print('Unrecognized option: %s' % prof_option) sys.exit(2) print('Collecting profile stats...') prof_option = args.profilers[0] profiler = _PROFILE_MAP[prof_option] program_info = profile.CProfile(args.source[0]).run() partial_handler = functools.partial( stats_server.StatsHandler, profile_json=json.dumps(program_info)) subprocess.call(['open', 'http://%s:%s' % (_HOST, _PORT)]) stats_server.start(_HOST, _PORT, partial_handler) if __name__ == "__main__": main()
Add profilers selection as CLI option.
Add profilers selection as CLI option.
Python
bsd-2-clause
nvdv/vprof,nvdv/vprof,nvdv/vprof
"""Visual profiler for Python.""" import argparse import functools import json import profile import stats_server import subprocess import sys _MODULE_DESC = 'Python visual profiler.' _HOST = 'localhost' _PORT = 8000 + _PROFILE_MAP = { + 'c': profile.CProfile + } def main(): parser = argparse.ArgumentParser(description=_MODULE_DESC) + parser.add_argument('profilers', metavar='opts', + help='Profilers configuration') parser.add_argument('source', metavar='src', nargs=1, help='Python program to profile.') args = parser.parse_args() + sys.argv[:] = args.source + program_name = args.source[0] + + if len(args.profilers) > len(set(args.profilers)): + print('Profiler configuration is ambiguous. Remove duplicates.') + sys.exit(1) + + for prof_option in args.profilers: + if prof_option not in _PROFILE_MAP: + print('Unrecognized option: %s' % prof_option) + sys.exit(2) print('Collecting profile stats...') + + prof_option = args.profilers[0] + profiler = _PROFILE_MAP[prof_option] program_info = profile.CProfile(args.source[0]).run() partial_handler = functools.partial( stats_server.StatsHandler, profile_json=json.dumps(program_info)) subprocess.call(['open', 'http://%s:%s' % (_HOST, _PORT)]) stats_server.start(_HOST, _PORT, partial_handler) if __name__ == "__main__": main()
Add profilers selection as CLI option.
## Code Before: """Visual profiler for Python.""" import argparse import functools import json import profile import stats_server import subprocess import sys _MODULE_DESC = 'Python visual profiler.' _HOST = 'localhost' _PORT = 8000 def main(): parser = argparse.ArgumentParser(description=_MODULE_DESC) parser.add_argument('source', metavar='src', nargs=1, help='Python program to profile.') args = parser.parse_args() sys.argv[:] = args.source print('Collecting profile stats...') program_info = profile.CProfile(args.source[0]).run() partial_handler = functools.partial( stats_server.StatsHandler, profile_json=json.dumps(program_info)) subprocess.call(['open', 'http://%s:%s' % (_HOST, _PORT)]) stats_server.start(_HOST, _PORT, partial_handler) if __name__ == "__main__": main() ## Instruction: Add profilers selection as CLI option. ## Code After: """Visual profiler for Python.""" import argparse import functools import json import profile import stats_server import subprocess import sys _MODULE_DESC = 'Python visual profiler.' _HOST = 'localhost' _PORT = 8000 _PROFILE_MAP = { 'c': profile.CProfile } def main(): parser = argparse.ArgumentParser(description=_MODULE_DESC) parser.add_argument('profilers', metavar='opts', help='Profilers configuration') parser.add_argument('source', metavar='src', nargs=1, help='Python program to profile.') args = parser.parse_args() sys.argv[:] = args.source program_name = args.source[0] if len(args.profilers) > len(set(args.profilers)): print('Profiler configuration is ambiguous. Remove duplicates.') sys.exit(1) for prof_option in args.profilers: if prof_option not in _PROFILE_MAP: print('Unrecognized option: %s' % prof_option) sys.exit(2) print('Collecting profile stats...') prof_option = args.profilers[0] profiler = _PROFILE_MAP[prof_option] program_info = profile.CProfile(args.source[0]).run() partial_handler = functools.partial( stats_server.StatsHandler, profile_json=json.dumps(program_info)) subprocess.call(['open', 'http://%s:%s' % (_HOST, _PORT)]) stats_server.start(_HOST, _PORT, partial_handler) if __name__ == "__main__": main()
"""Visual profiler for Python.""" import argparse import functools import json import profile import stats_server import subprocess import sys _MODULE_DESC = 'Python visual profiler.' _HOST = 'localhost' _PORT = 8000 + _PROFILE_MAP = { + 'c': profile.CProfile + } def main(): parser = argparse.ArgumentParser(description=_MODULE_DESC) + parser.add_argument('profilers', metavar='opts', + help='Profilers configuration') parser.add_argument('source', metavar='src', nargs=1, help='Python program to profile.') args = parser.parse_args() + sys.argv[:] = args.source + program_name = args.source[0] + + if len(args.profilers) > len(set(args.profilers)): + print('Profiler configuration is ambiguous. Remove duplicates.') + sys.exit(1) + + for prof_option in args.profilers: + if prof_option not in _PROFILE_MAP: + print('Unrecognized option: %s' % prof_option) + sys.exit(2) print('Collecting profile stats...') + + prof_option = args.profilers[0] + profiler = _PROFILE_MAP[prof_option] program_info = profile.CProfile(args.source[0]).run() partial_handler = functools.partial( stats_server.StatsHandler, profile_json=json.dumps(program_info)) subprocess.call(['open', 'http://%s:%s' % (_HOST, _PORT)]) stats_server.start(_HOST, _PORT, partial_handler) if __name__ == "__main__": main()