commit stringlengths 40 40 | old_file stringlengths 4 106 | new_file stringlengths 4 106 | old_contents stringlengths 10 2.94k | new_contents stringlengths 21 2.95k | subject stringlengths 16 444 | message stringlengths 17 2.63k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 7 43k | ndiff stringlengths 52 3.31k | instruction stringlengths 16 444 | content stringlengths 133 4.32k | diff stringlengths 49 3.61k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
59d7c19f26d2907413e5ee4cb86cbd534e89135b | examples/livestream_datalogger.py | examples/livestream_datalogger.py | from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time, logging, traceback
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(1)
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, use_sd=False, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, d = i.datalogger_get_samples(timeout=5)
print("Received samples %d to %d from channel %d" % (idx, idx + len(d) - 1, ch))
except NoDataException as e:
# This will be raised if we try and get samples but the session has finished.
print(e)
except Exception as e:
print(traceback.format_exc())
finally:
i.datalogger_stop()
m.close()
| from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
# 10Hz sample rate. The datalogger is actually just a mode of the Oscilloscope
# instrument in ROLL mode.
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
# Stop a previous session, if any, then start a new single-channel log in real
# time over the network.
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, samples = i.datalogger_get_samples()
print("Received samples %d to %d from channel %d" % (idx, idx + len(samples) - 1, ch))
except NoDataException:
print("Finished")
finally:
i.datalogger_stop()
m.close()
| Simplify and clean the livestream data logger | PM-133: Simplify and clean the livestream data logger
| Python | mit | liquidinstruments/pymoku | from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
+ import time
- import time, logging, traceback
-
- logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
- logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
+ # 10Hz sample rate. The datalogger is actually just a mode of the Oscilloscope
+ # instrument in ROLL mode.
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
- time.sleep(1)
+ # Stop a previous session, if any, then start a new single-channel log in real
+ # time over the network.
i.datalogger_stop()
-
- i.datalogger_start(start=0, duration=100, use_sd=False, ch1=True, ch2=False, filetype='net')
+ i.datalogger_start(start=0, duration=100, ch1=True, ch2=False, filetype='net')
while True:
- ch, idx, d = i.datalogger_get_samples(timeout=5)
+ ch, idx, samples = i.datalogger_get_samples()
- print("Received samples %d to %d from channel %d" % (idx, idx + len(d) - 1, ch))
+ print("Received samples %d to %d from channel %d" % (idx, idx + len(samples) - 1, ch))
- except NoDataException as e:
+ except NoDataException:
+ print("Finished")
- # This will be raised if we try and get samples but the session has finished.
- print(e)
- except Exception as e:
- print(traceback.format_exc())
finally:
i.datalogger_stop()
m.close()
| Simplify and clean the livestream data logger | ## Code Before:
from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time, logging, traceback
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(1)
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, use_sd=False, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, d = i.datalogger_get_samples(timeout=5)
print("Received samples %d to %d from channel %d" % (idx, idx + len(d) - 1, ch))
except NoDataException as e:
# This will be raised if we try and get samples but the session has finished.
print(e)
except Exception as e:
print(traceback.format_exc())
finally:
i.datalogger_stop()
m.close()
## Instruction:
Simplify and clean the livestream data logger
## Code After:
from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
import time
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
# 10Hz sample rate. The datalogger is actually just a mode of the Oscilloscope
# instrument in ROLL mode.
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
# Stop a previous session, if any, then start a new single-channel log in real
# time over the network.
i.datalogger_stop()
i.datalogger_start(start=0, duration=100, ch1=True, ch2=False, filetype='net')
while True:
ch, idx, samples = i.datalogger_get_samples()
print("Received samples %d to %d from channel %d" % (idx, idx + len(samples) - 1, ch))
except NoDataException:
print("Finished")
finally:
i.datalogger_stop()
m.close()
| from pymoku import Moku, MokuException, NoDataException
from pymoku.instruments import *
+ import time
- import time, logging, traceback
-
- logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
- logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
+ # 10Hz sample rate. The datalogger is actually just a mode of the Oscilloscope
+ # instrument in ROLL mode.
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
- time.sleep(1)
+ # Stop a previous session, if any, then start a new single-channel log in real
+ # time over the network.
i.datalogger_stop()
-
- i.datalogger_start(start=0, duration=100, use_sd=False, ch1=True, ch2=False, filetype='net')
? --------------
+ i.datalogger_start(start=0, duration=100, ch1=True, ch2=False, filetype='net')
while True:
- ch, idx, d = i.datalogger_get_samples(timeout=5)
? ^ ---------
+ ch, idx, samples = i.datalogger_get_samples()
? ^^^^^^^
- print("Received samples %d to %d from channel %d" % (idx, idx + len(d) - 1, ch))
? ^
+ print("Received samples %d to %d from channel %d" % (idx, idx + len(samples) - 1, ch))
? ^^^^^^^
- except NoDataException as e:
? -----
+ except NoDataException:
+ print("Finished")
- # This will be raised if we try and get samples but the session has finished.
- print(e)
- except Exception as e:
- print(traceback.format_exc())
finally:
i.datalogger_stop()
m.close() |
037e15f383c326f1f4e7de59bc3ec3520ac6ce40 | pystachio/__init__.py | pystachio/__init__.py | __author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
| __author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
import sys
if sys.version_info < (2, 6, 5):
raise ImportError("pystachio requires Python >= 2.6.5")
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
| Add check for minimum Python version | Add check for minimum Python version
| Python | mit | wickman/pystachio | __author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
+
+ import sys
+ if sys.version_info < (2, 6, 5):
+ raise ImportError("pystachio requires Python >= 2.6.5")
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
| Add check for minimum Python version | ## Code Before:
__author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
## Instruction:
Add check for minimum Python version
## Code After:
__author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
import sys
if sys.version_info < (2, 6, 5):
raise ImportError("pystachio requires Python >= 2.6.5")
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct)
| __author__ = 'Brian Wickman'
__version__ = '0.5.2'
__license__ = 'MIT'
+
+ import sys
+ if sys.version_info < (2, 6, 5):
+ raise ImportError("pystachio requires Python >= 2.6.5")
from pystachio.typing import (
Type,
TypeCheck,
TypeFactory)
from pystachio.base import Environment
from pystachio.parsing import MustacheParser
from pystachio.naming import Namable, Ref
from pystachio.basic import (
Float,
Integer,
String)
from pystachio.container import (
List,
Map)
from pystachio.composite import (
Default,
Empty,
Provided,
Required,
Struct) |
4e4262f3d9cde4394d08681c517fcec4e2e9a336 | shellpython/tests/test_helpers.py | shellpython/tests/test_helpers.py | import unittest
import tempfile
import os
from shellpython.helpers import Dir
class TestDirectory(unittest.TestCase):
def test_relative_dirs(self):
cur_dir = os.path.split(__file__)[0]
with Dir(os.path.join(cur_dir, 'data')):
self.assertEqual(os.path.join(cur_dir, 'data'), os.getcwd())
with Dir(os.path.join('locator')):
self.assertEqual(os.path.join(cur_dir, 'data', 'locator'), os.getcwd())
def test_absolute_dirs(self):
with Dir(tempfile.gettempdir()):
self.assertEqual(tempfile.gettempdir(), os.getcwd())
| import unittest
import tempfile
import os
from os import path
from shellpython.helpers import Dir
class TestDirectory(unittest.TestCase):
def test_relative_dirs(self):
cur_dir = path.dirname(path.abspath(__file__))
with Dir(path.join(cur_dir, 'data')):
self.assertEqual(path.join(cur_dir, 'data'), os.getcwd())
with Dir(path.join('locator')):
self.assertEqual(path.join(cur_dir, 'data', 'locator'), os.getcwd())
def test_absolute_dirs(self):
with Dir(tempfile.gettempdir()):
self.assertEqual(tempfile.gettempdir(), os.getcwd())
| Fix directory tests, __file__ may return relative path and now it is taken into consideration | Fix directory tests, __file__ may return relative path and now it is
taken into consideration
| Python | bsd-3-clause | lamerman/shellpy | import unittest
import tempfile
import os
+ from os import path
from shellpython.helpers import Dir
class TestDirectory(unittest.TestCase):
def test_relative_dirs(self):
- cur_dir = os.path.split(__file__)[0]
+ cur_dir = path.dirname(path.abspath(__file__))
- with Dir(os.path.join(cur_dir, 'data')):
+ with Dir(path.join(cur_dir, 'data')):
- self.assertEqual(os.path.join(cur_dir, 'data'), os.getcwd())
+ self.assertEqual(path.join(cur_dir, 'data'), os.getcwd())
- with Dir(os.path.join('locator')):
+ with Dir(path.join('locator')):
- self.assertEqual(os.path.join(cur_dir, 'data', 'locator'), os.getcwd())
+ self.assertEqual(path.join(cur_dir, 'data', 'locator'), os.getcwd())
def test_absolute_dirs(self):
with Dir(tempfile.gettempdir()):
self.assertEqual(tempfile.gettempdir(), os.getcwd())
| Fix directory tests, __file__ may return relative path and now it is taken into consideration | ## Code Before:
import unittest
import tempfile
import os
from shellpython.helpers import Dir
class TestDirectory(unittest.TestCase):
def test_relative_dirs(self):
cur_dir = os.path.split(__file__)[0]
with Dir(os.path.join(cur_dir, 'data')):
self.assertEqual(os.path.join(cur_dir, 'data'), os.getcwd())
with Dir(os.path.join('locator')):
self.assertEqual(os.path.join(cur_dir, 'data', 'locator'), os.getcwd())
def test_absolute_dirs(self):
with Dir(tempfile.gettempdir()):
self.assertEqual(tempfile.gettempdir(), os.getcwd())
## Instruction:
Fix directory tests, __file__ may return relative path and now it is taken into consideration
## Code After:
import unittest
import tempfile
import os
from os import path
from shellpython.helpers import Dir
class TestDirectory(unittest.TestCase):
def test_relative_dirs(self):
cur_dir = path.dirname(path.abspath(__file__))
with Dir(path.join(cur_dir, 'data')):
self.assertEqual(path.join(cur_dir, 'data'), os.getcwd())
with Dir(path.join('locator')):
self.assertEqual(path.join(cur_dir, 'data', 'locator'), os.getcwd())
def test_absolute_dirs(self):
with Dir(tempfile.gettempdir()):
self.assertEqual(tempfile.gettempdir(), os.getcwd())
| import unittest
import tempfile
import os
+ from os import path
from shellpython.helpers import Dir
class TestDirectory(unittest.TestCase):
def test_relative_dirs(self):
- cur_dir = os.path.split(__file__)[0]
+ cur_dir = path.dirname(path.abspath(__file__))
- with Dir(os.path.join(cur_dir, 'data')):
? ---
+ with Dir(path.join(cur_dir, 'data')):
- self.assertEqual(os.path.join(cur_dir, 'data'), os.getcwd())
? ---
+ self.assertEqual(path.join(cur_dir, 'data'), os.getcwd())
- with Dir(os.path.join('locator')):
? ---
+ with Dir(path.join('locator')):
- self.assertEqual(os.path.join(cur_dir, 'data', 'locator'), os.getcwd())
? ---
+ self.assertEqual(path.join(cur_dir, 'data', 'locator'), os.getcwd())
def test_absolute_dirs(self):
with Dir(tempfile.gettempdir()):
self.assertEqual(tempfile.gettempdir(), os.getcwd()) |
281208f9ecfa3f5f5028df75fff86f1cdb752487 | jasylibrary.py | jasylibrary.py | import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
@share
def build(regenerate = False):
""" Build static website """
konstrukteur.Konstrukteur.build(regenerate)
| import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
outputPath = folder #os.path.join(profile.getDestinationPath(), folder)
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
session.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
| Add support for part loading | Add support for part loading
| Python | mit | fastner/konstrukteur,fastner/konstrukteur,fastner/konstrukteur | import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
+ import jasy.asset.Manager
+
@share
- def build(regenerate = False):
+ def build(profile, regenerate = False):
""" Build static website """
- konstrukteur.Konstrukteur.build(regenerate)
+ def getPartUrl(part, type):
+ folder = ""
+ if type == "css":
+ folder = profile.getCssFolder()
+ outputPath = folder #os.path.join(profile.getDestinationPath(), folder)
+ filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
+ return filename
+ session.addCommand("part.url", getPartUrl, "url")
+
+ for permutation in profile.permutate():
+ konstrukteur.Konstrukteur.build(regenerate, profile)
+ | Add support for part loading | ## Code Before:
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
@share
def build(regenerate = False):
""" Build static website """
konstrukteur.Konstrukteur.build(regenerate)
## Instruction:
Add support for part loading
## Code After:
import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
import jasy.asset.Manager
@share
def build(profile, regenerate = False):
""" Build static website """
def getPartUrl(part, type):
folder = ""
if type == "css":
folder = profile.getCssFolder()
outputPath = folder #os.path.join(profile.getDestinationPath(), folder)
filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
return filename
session.addCommand("part.url", getPartUrl, "url")
for permutation in profile.permutate():
konstrukteur.Konstrukteur.build(regenerate, profile)
| import sys, os.path, inspect
filename = inspect.getframeinfo(inspect.currentframe()).filename
path = os.path.dirname(os.path.abspath(filename))
sys.path.append(path)
import konstrukteur.Konstrukteur
+ import jasy.asset.Manager
+
@share
- def build(regenerate = False):
+ def build(profile, regenerate = False):
? +++++++++
""" Build static website """
+ def getPartUrl(part, type):
+ folder = ""
+ if type == "css":
+ folder = profile.getCssFolder()
+ outputPath = folder #os.path.join(profile.getDestinationPath(), folder)
+ filename = profile.expandFileName("%s/%s-{{id}}.%s" % (outputPath, part, type))
+ return filename
+
+ session.addCommand("part.url", getPartUrl, "url")
+
+ for permutation in profile.permutate():
- konstrukteur.Konstrukteur.build(regenerate)
+ konstrukteur.Konstrukteur.build(regenerate, profile)
? + +++++++++
|
9c68a69eb5bf6e7ffab8b7538797c74b05a7c70b | src/zeit/content/article/edit/browser/tests/test_header.py | src/zeit/content/article/edit/browser/tests/test_header.py | import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.waitForVisible('css=.fieldname-header_layout')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
| import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.type('id=options-template.header_layout', '\t')
s.waitForSelectedValue('id=options-template.header_layout', '')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
| Test needs to wait until the header values are updated after it changed the template | FIX: Test needs to wait until the header values are updated after it changed the template
I'm not sure how this previously has ever passed, to be honest.
| Python | bsd-3-clause | ZeitOnline/zeit.content.article,ZeitOnline/zeit.content.article,ZeitOnline/zeit.content.article | import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
- s.waitForVisible('css=.fieldname-header_layout')
+ s.type('id=options-template.header_layout', '\t')
+ s.waitForSelectedValue('id=options-template.header_layout', '')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
| Test needs to wait until the header values are updated after it changed the template | ## Code Before:
import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.waitForVisible('css=.fieldname-header_layout')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
## Instruction:
Test needs to wait until the header values are updated after it changed the template
## Code After:
import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
s.type('id=options-template.header_layout', '\t')
s.waitForSelectedValue('id=options-template.header_layout', '')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1)
| import zeit.content.article.edit.browser.testing
class HeaderModules(zeit.content.article.edit.browser.testing.EditorTestCase):
def test_can_create_module_by_drag_and_drop(self):
s = self.selenium
self.add_article()
# Select header that allows header module
s.click('css=#edit-form-misc .edit-bar .fold-link')
s.select('id=options-template.template', 'Kolumne')
- s.waitForVisible('css=.fieldname-header_layout')
+ s.type('id=options-template.header_layout', '\t')
+ s.waitForSelectedValue('id=options-template.header_layout', '')
s.select('id=options-template.header_layout', 'Standard')
s.type('id=options-template.header_layout', '\t')
s.pause(500)
block = 'quiz'
# copy&paste from self.create_block()
s.waitForNotVisible('css=.message')
s.click('link=Struktur')
s.click('link=Header')
s.waitForElementPresent('css=#header-modules .module')
block_sel = '.block.type-{0}'.format(block)
count = s.getCssCount('css={0}'.format(block_sel))
s.dragAndDropToObject(
'css=#header-modules .module[cms\\:block_type={0}]'.format(block),
'css=#editable-header > .landing-zone', '10,10')
s.waitForCssCount('css={0}'.format(block_sel), count + 1) |
6f6f6e183b574f8505b53ddb7651c8766992b953 | pywikibot/families/lingualibre_family.py | pywikibot/families/lingualibre_family.py | """Family module for Lingua Libre."""
#
# (C) Pywikibot team, 2021
#
# Distributed under the terms of the MIT license.
#
from pywikibot import family
# The Lingua Libre family
class Family(family.WikimediaFamily):
"""Family class for Lingua Libre.
*New in version 6.5.*
"""
name = 'lingualibre'
langs = {
'lingualibre': 'lingualibre.org'
}
interwiki_forward = 'wikipedia'
def scriptpath(self, code):
"""Return the script path for this family."""
return ''
| """Family module for Lingua Libre."""
#
# (C) Pywikibot team, 2021
#
# Distributed under the terms of the MIT license.
#
from pywikibot import family
# The Lingua Libre family
class Family(family.WikimediaFamily):
"""Family class for Lingua Libre.
*New in version 6.5.*
"""
name = 'lingualibre'
langs = {
'lingualibre': 'lingualibre.org'
}
interwiki_forward = 'wikipedia'
def scriptpath(self, code):
"""Return the script path for this family."""
return ''
def interface(self, code):
"""Return 'DataSite'."""
return 'DataSite'
| Allow to request for item on Lingua Libre | Allow to request for item on Lingua Libre
Bug: T286303
Change-Id: Ic0d8824d1bf326f2182fbb64d0cc2ed77f82fd4c
| Python | mit | wikimedia/pywikibot-core,wikimedia/pywikibot-core | """Family module for Lingua Libre."""
#
# (C) Pywikibot team, 2021
#
# Distributed under the terms of the MIT license.
#
from pywikibot import family
# The Lingua Libre family
class Family(family.WikimediaFamily):
"""Family class for Lingua Libre.
*New in version 6.5.*
"""
name = 'lingualibre'
langs = {
'lingualibre': 'lingualibre.org'
}
interwiki_forward = 'wikipedia'
def scriptpath(self, code):
"""Return the script path for this family."""
return ''
+ def interface(self, code):
+ """Return 'DataSite'."""
+ return 'DataSite'
+ | Allow to request for item on Lingua Libre | ## Code Before:
"""Family module for Lingua Libre."""
#
# (C) Pywikibot team, 2021
#
# Distributed under the terms of the MIT license.
#
from pywikibot import family
# The Lingua Libre family
class Family(family.WikimediaFamily):
"""Family class for Lingua Libre.
*New in version 6.5.*
"""
name = 'lingualibre'
langs = {
'lingualibre': 'lingualibre.org'
}
interwiki_forward = 'wikipedia'
def scriptpath(self, code):
"""Return the script path for this family."""
return ''
## Instruction:
Allow to request for item on Lingua Libre
## Code After:
"""Family module for Lingua Libre."""
#
# (C) Pywikibot team, 2021
#
# Distributed under the terms of the MIT license.
#
from pywikibot import family
# The Lingua Libre family
class Family(family.WikimediaFamily):
"""Family class for Lingua Libre.
*New in version 6.5.*
"""
name = 'lingualibre'
langs = {
'lingualibre': 'lingualibre.org'
}
interwiki_forward = 'wikipedia'
def scriptpath(self, code):
"""Return the script path for this family."""
return ''
def interface(self, code):
"""Return 'DataSite'."""
return 'DataSite'
| """Family module for Lingua Libre."""
#
# (C) Pywikibot team, 2021
#
# Distributed under the terms of the MIT license.
#
from pywikibot import family
# The Lingua Libre family
class Family(family.WikimediaFamily):
"""Family class for Lingua Libre.
*New in version 6.5.*
"""
name = 'lingualibre'
langs = {
'lingualibre': 'lingualibre.org'
}
interwiki_forward = 'wikipedia'
def scriptpath(self, code):
"""Return the script path for this family."""
return ''
+
+ def interface(self, code):
+ """Return 'DataSite'."""
+ return 'DataSite' |
bc9488b6954c172d903521df9f00c7ff71243fff | tests.py | tests.py |
from __future__ import print_function
import sys
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
|
from __future__ import print_function
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
| Fix F401 error (module imported but unused) | Fix F401 error (module imported but unused)
[ci skip] | Python | mit | le717/linescan.py |
from __future__ import print_function
- import sys
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
| Fix F401 error (module imported but unused) | ## Code Before:
from __future__ import print_function
import sys
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
## Instruction:
Fix F401 error (module imported but unused)
## Code After:
from __future__ import print_function
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse)
|
from __future__ import print_function
- import sys
import os
import linescan
testFile = os.path.join("test", "testfile.txt")
if __name__ == "__main__":
line = linescan.scan(testFile, 5, "utf_8")
print(line)
lines = linescan.scanlines(testFile, 8, 12, "cp1252")
print(lines)
thisshouldbefalse = linescan.scan(testFile, 55)
print(thisshouldbefalse) |
5fc699b89eae0c41923a813ac48281729c4d80b8 | orderable_inlines/inlines.py | orderable_inlines/inlines.py | from django.contrib.admin import StackedInline, TabularInline
from django.template.defaultfilters import slugify
class OrderableInlineMixin(object):
class Media:
js = (
'js/jquery.browser.min.js',
'js/orderable-inline-jquery-ui.js',
'js/orderable-inline.js',
)
css = {
'all': [
'css/orderable-inline.css'
]
}
def get_fieldsets(self, request, obj=None):
if self.declared_fieldsets:
return self.declared_fieldsets
form = self.get_formset(request, obj, fields=None).form
fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
return [
(None, {
'fields': fields,
'classes': self.fieldset_css_classes + ['orderable-field-%s' % self.orderable_field]
})
]
class OrderableStackedInline(OrderableInlineMixin, StackedInline):
fieldset_css_classes = ['orderable-stacked']
class OrderableTabularInline(OrderableInlineMixin, TabularInline):
fieldset_css_classes = ['orderable-tabular']
template = 'orderable_inlines/edit_inline/tabular.html'
| from django.contrib.admin import StackedInline, TabularInline
from django.template.defaultfilters import slugify
class OrderableInlineMixin(object):
class Media:
js = (
'js/jquery.browser.min.js',
'js/orderable-inline-jquery-ui.js',
'js/orderable-inline.js',
)
css = {
'all': [
'css/orderable-inline.css'
]
}
def get_fieldsets(self, request, obj=None):
form = self.get_formset(request, obj, fields=None).form
fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
return [
(None, {
'fields': fields,
'classes': self.fieldset_css_classes + ['orderable-field-%s' % self.orderable_field]
})
]
class OrderableStackedInline(OrderableInlineMixin, StackedInline):
fieldset_css_classes = ['orderable-stacked']
class OrderableTabularInline(OrderableInlineMixin, TabularInline):
fieldset_css_classes = ['orderable-tabular']
template = 'orderable_inlines/edit_inline/tabular.html'
| Make this hack compatible with Django 1.9 | Make this hack compatible with Django 1.9
| Python | bsd-2-clause | frx0119/django-orderable-inlines,frx0119/django-orderable-inlines | from django.contrib.admin import StackedInline, TabularInline
from django.template.defaultfilters import slugify
class OrderableInlineMixin(object):
class Media:
js = (
'js/jquery.browser.min.js',
'js/orderable-inline-jquery-ui.js',
'js/orderable-inline.js',
)
css = {
'all': [
'css/orderable-inline.css'
]
}
def get_fieldsets(self, request, obj=None):
- if self.declared_fieldsets:
- return self.declared_fieldsets
form = self.get_formset(request, obj, fields=None).form
fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
return [
(None, {
'fields': fields,
'classes': self.fieldset_css_classes + ['orderable-field-%s' % self.orderable_field]
})
]
class OrderableStackedInline(OrderableInlineMixin, StackedInline):
fieldset_css_classes = ['orderable-stacked']
class OrderableTabularInline(OrderableInlineMixin, TabularInline):
fieldset_css_classes = ['orderable-tabular']
template = 'orderable_inlines/edit_inline/tabular.html'
| Make this hack compatible with Django 1.9 | ## Code Before:
from django.contrib.admin import StackedInline, TabularInline
from django.template.defaultfilters import slugify
class OrderableInlineMixin(object):
class Media:
js = (
'js/jquery.browser.min.js',
'js/orderable-inline-jquery-ui.js',
'js/orderable-inline.js',
)
css = {
'all': [
'css/orderable-inline.css'
]
}
def get_fieldsets(self, request, obj=None):
if self.declared_fieldsets:
return self.declared_fieldsets
form = self.get_formset(request, obj, fields=None).form
fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
return [
(None, {
'fields': fields,
'classes': self.fieldset_css_classes + ['orderable-field-%s' % self.orderable_field]
})
]
class OrderableStackedInline(OrderableInlineMixin, StackedInline):
fieldset_css_classes = ['orderable-stacked']
class OrderableTabularInline(OrderableInlineMixin, TabularInline):
fieldset_css_classes = ['orderable-tabular']
template = 'orderable_inlines/edit_inline/tabular.html'
## Instruction:
Make this hack compatible with Django 1.9
## Code After:
from django.contrib.admin import StackedInline, TabularInline
from django.template.defaultfilters import slugify
class OrderableInlineMixin(object):
class Media:
js = (
'js/jquery.browser.min.js',
'js/orderable-inline-jquery-ui.js',
'js/orderable-inline.js',
)
css = {
'all': [
'css/orderable-inline.css'
]
}
def get_fieldsets(self, request, obj=None):
form = self.get_formset(request, obj, fields=None).form
fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
return [
(None, {
'fields': fields,
'classes': self.fieldset_css_classes + ['orderable-field-%s' % self.orderable_field]
})
]
class OrderableStackedInline(OrderableInlineMixin, StackedInline):
fieldset_css_classes = ['orderable-stacked']
class OrderableTabularInline(OrderableInlineMixin, TabularInline):
fieldset_css_classes = ['orderable-tabular']
template = 'orderable_inlines/edit_inline/tabular.html'
| from django.contrib.admin import StackedInline, TabularInline
from django.template.defaultfilters import slugify
class OrderableInlineMixin(object):
class Media:
js = (
'js/jquery.browser.min.js',
'js/orderable-inline-jquery-ui.js',
'js/orderable-inline.js',
)
css = {
'all': [
'css/orderable-inline.css'
]
}
def get_fieldsets(self, request, obj=None):
- if self.declared_fieldsets:
- return self.declared_fieldsets
form = self.get_formset(request, obj, fields=None).form
fields = list(form.base_fields) + list(self.get_readonly_fields(request, obj))
return [
(None, {
'fields': fields,
'classes': self.fieldset_css_classes + ['orderable-field-%s' % self.orderable_field]
})
]
class OrderableStackedInline(OrderableInlineMixin, StackedInline):
fieldset_css_classes = ['orderable-stacked']
class OrderableTabularInline(OrderableInlineMixin, TabularInline):
fieldset_css_classes = ['orderable-tabular']
template = 'orderable_inlines/edit_inline/tabular.html'
|
6153952ca9794ccb1dd5d76696aa2d4881a665c1 | tests/core/migrations/0004_bookwithchapters.py | tests/core/migrations/0004_bookwithchapters.py | from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
class PostgresOnlyCreateModel(migrations.CreateModel):
def database_forwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_forwards(app_label, schema_editor, from_state, to_state)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_backwards(app_label, schema_editor, from_state, to_state)
class Migration(migrations.Migration):
dependencies = [
('core', '0003_withfloatfield'),
]
operations = [
PostgresOnlyCreateModel(
name='BookWithChapters',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Book name')),
('chapters',
django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), default=list,
size=None)),
],
),
]
| from __future__ import unicode_literals
from django import VERSION
from django.db import migrations, models
if VERSION >= (1, 8):
from django.contrib.postgres.fields import ArrayField
chapters_field = ArrayField(base_field=models.CharField(max_length=100), default=list, size=None)
else:
chapters_field = models.Field() # Dummy field
class PostgresOnlyCreateModel(migrations.CreateModel):
def database_forwards(self, app_label, schema_editor, from_state, to_state):
if VERSION >= (1, 8) and schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_forwards(app_label, schema_editor, from_state, to_state)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
if VERSION >= (1, 8) and schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_backwards(app_label, schema_editor, from_state, to_state)
class Migration(migrations.Migration):
dependencies = [
('core', '0003_withfloatfield'),
]
operations = [
PostgresOnlyCreateModel(
name='BookWithChapters',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Book name')),
('chapters', chapters_field)
],
),
]
| Add version check for importing django.contrib.postgres.fields.ArrayField | Add version check for importing django.contrib.postgres.fields.ArrayField
| Python | bsd-2-clause | daniell/django-import-export,jnns/django-import-export,django-import-export/django-import-export,bmihelac/django-import-export,copperleaftech/django-import-export,brillgen/django-import-export,PetrDlouhy/django-import-export,daniell/django-import-export,daniell/django-import-export,PetrDlouhy/django-import-export,PetrDlouhy/django-import-export,jnns/django-import-export,brillgen/django-import-export,daniell/django-import-export,copperleaftech/django-import-export,jnns/django-import-export,brillgen/django-import-export,brillgen/django-import-export,copperleaftech/django-import-export,bmihelac/django-import-export,PetrDlouhy/django-import-export,django-import-export/django-import-export,django-import-export/django-import-export,bmihelac/django-import-export,bmihelac/django-import-export,jnns/django-import-export,django-import-export/django-import-export,copperleaftech/django-import-export | from __future__ import unicode_literals
- import django.contrib.postgres.fields
+ from django import VERSION
from django.db import migrations, models
+ if VERSION >= (1, 8):
+ from django.contrib.postgres.fields import ArrayField
+ chapters_field = ArrayField(base_field=models.CharField(max_length=100), default=list, size=None)
+ else:
+ chapters_field = models.Field() # Dummy field
class PostgresOnlyCreateModel(migrations.CreateModel):
def database_forwards(self, app_label, schema_editor, from_state, to_state):
- if schema_editor.connection.vendor.startswith("postgres"):
+ if VERSION >= (1, 8) and schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_forwards(app_label, schema_editor, from_state, to_state)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
- if schema_editor.connection.vendor.startswith("postgres"):
+ if VERSION >= (1, 8) and schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_backwards(app_label, schema_editor, from_state, to_state)
class Migration(migrations.Migration):
dependencies = [
('core', '0003_withfloatfield'),
]
operations = [
PostgresOnlyCreateModel(
name='BookWithChapters',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Book name')),
- ('chapters',
+ ('chapters', chapters_field)
- django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), default=list,
- size=None)),
],
),
]
| Add version check for importing django.contrib.postgres.fields.ArrayField | ## Code Before:
from __future__ import unicode_literals
import django.contrib.postgres.fields
from django.db import migrations, models
class PostgresOnlyCreateModel(migrations.CreateModel):
def database_forwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_forwards(app_label, schema_editor, from_state, to_state)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
if schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_backwards(app_label, schema_editor, from_state, to_state)
class Migration(migrations.Migration):
dependencies = [
('core', '0003_withfloatfield'),
]
operations = [
PostgresOnlyCreateModel(
name='BookWithChapters',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Book name')),
('chapters',
django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), default=list,
size=None)),
],
),
]
## Instruction:
Add version check for importing django.contrib.postgres.fields.ArrayField
## Code After:
from __future__ import unicode_literals
from django import VERSION
from django.db import migrations, models
if VERSION >= (1, 8):
from django.contrib.postgres.fields import ArrayField
chapters_field = ArrayField(base_field=models.CharField(max_length=100), default=list, size=None)
else:
chapters_field = models.Field() # Dummy field
class PostgresOnlyCreateModel(migrations.CreateModel):
def database_forwards(self, app_label, schema_editor, from_state, to_state):
if VERSION >= (1, 8) and schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_forwards(app_label, schema_editor, from_state, to_state)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
if VERSION >= (1, 8) and schema_editor.connection.vendor.startswith("postgres"):
super(PostgresOnlyCreateModel, self).database_backwards(app_label, schema_editor, from_state, to_state)
class Migration(migrations.Migration):
dependencies = [
('core', '0003_withfloatfield'),
]
operations = [
PostgresOnlyCreateModel(
name='BookWithChapters',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Book name')),
('chapters', chapters_field)
],
),
]
| from __future__ import unicode_literals
- import django.contrib.postgres.fields
+ from django import VERSION
from django.db import migrations, models
+ if VERSION >= (1, 8):
+ from django.contrib.postgres.fields import ArrayField
+ chapters_field = ArrayField(base_field=models.CharField(max_length=100), default=list, size=None)
+ else:
+ chapters_field = models.Field() # Dummy field
class PostgresOnlyCreateModel(migrations.CreateModel):
def database_forwards(self, app_label, schema_editor, from_state, to_state):
- if schema_editor.connection.vendor.startswith("postgres"):
+ if VERSION >= (1, 8) and schema_editor.connection.vendor.startswith("postgres"):
? ++++++++++++++++++++++
super(PostgresOnlyCreateModel, self).database_forwards(app_label, schema_editor, from_state, to_state)
def database_backwards(self, app_label, schema_editor, from_state, to_state):
- if schema_editor.connection.vendor.startswith("postgres"):
+ if VERSION >= (1, 8) and schema_editor.connection.vendor.startswith("postgres"):
? ++++++++++++++++++++++
super(PostgresOnlyCreateModel, self).database_backwards(app_label, schema_editor, from_state, to_state)
class Migration(migrations.Migration):
dependencies = [
('core', '0003_withfloatfield'),
]
operations = [
PostgresOnlyCreateModel(
name='BookWithChapters',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Book name')),
- ('chapters',
+ ('chapters', chapters_field)
? ++++++++++++++++
- django.contrib.postgres.fields.ArrayField(base_field=models.CharField(max_length=100), default=list,
- size=None)),
],
),
] |
fb8cfa8eb7d088ebe11075bff42bea54c97e9c18 | hermes/views.py | hermes/views.py | from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
def get_queryset(self):
return self.model.objects.order_by('created_on')
class CategoryPostListView(PostListView):
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
| from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
def get_queryset(self):
return self.model.objects.order_by('created_on')
class CategoryPostListView(PostListView):
slug = None
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
| Add slug variable to pass in the URL | Add slug variable to pass in the URL | Python | mit | emilian/django-hermes | from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
def get_queryset(self):
return self.model.objects.order_by('created_on')
class CategoryPostListView(PostListView):
+ slug = None
+
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
| Add slug variable to pass in the URL | ## Code Before:
from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
def get_queryset(self):
return self.model.objects.order_by('created_on')
class CategoryPostListView(PostListView):
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
## Instruction:
Add slug variable to pass in the URL
## Code After:
from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
def get_queryset(self):
return self.model.objects.order_by('created_on')
class CategoryPostListView(PostListView):
slug = None
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html"
| from django.views.generic import ListView, DetailView
from .models import Post
class PostListView(ListView):
context_object_name = 'posts'
model = Post
template_name = 'hermes/post_list.html'
def get_queryset(self):
return self.model.objects.order_by('created_on')
class CategoryPostListView(PostListView):
+ slug = None
+
def get_queryset(self):
category_slug = self.kwargs.get('slug', '')
return self.model.objects.in_category(category_slug)
class ArchivePostListView(PostListView):
def get_queryset(self):
year = self.kwargs.get('year', None)
month = self.kwargs.get('month', None)
day = self.kwargs.get('day', None)
return self.model.objects.created_on(year=year, month=month, day=day)
class PostDetail(DetailView):
context_object_name = 'post'
model = Post
template_name = "hermes/post_detail.html" |
a8a088828eee6938c56ce6f2aaecc7e776a8cb23 | swift/obj/dedupe/fp_index.py | swift/obj/dedupe/fp_index.py | __author__ = 'mjwtom'
import sqlite3
import unittest
class fp_index:
def __init__(self, name):
if name.endswith('.db'):
self.name = name
else:
self.name = name + '.db'
self.conn = sqlite3.connect(name)
self.c = self.conn.cursor()
self.c.execute('''CREATE TABLE IF NOT EXISTS fp_index (key text, value text)''')
def insert(self, key, value):
data = (key, value)
self.c.execute('INSERT INTO fp_index VALUES (?, ?)', data)
self.conn.commit()
def lookup(self, key):
data = (key,)
self.c.execute('SELECT value FROM fp_index WHERE key=?', data)
return self.c.fetchone()
def testinsert():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
fp.insert(str, str)
def testselect():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
c = fp.lookup(str)
for row in c:
print row
if __name__ == '__main__':
unittest.main()
| __author__ = 'mjwtom'
import sqlite3
import unittest
class Fp_Index(object):
def __init__(self, name):
if name.endswith('.db'):
self.name = name
else:
self.name = name + '.db'
self.conn = sqlite3.connect(name)
self.c = self.conn.cursor()
self.c.execute('''CREATE TABLE IF NOT EXISTS fp_index (key text, value text)''')
def insert(self, key, value):
data = (key, value)
self.c.execute('INSERT INTO fp_index VALUES (?, ?)', data)
self.conn.commit()
def lookup(self, key):
data = (key,)
self.c.execute('SELECT value FROM fp_index WHERE key=?', data)
return self.c.fetchone()
'''
def testinsert():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
fp.insert(str, str)
def testselect():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
c = fp.lookup(str)
for row in c:
print row
if __name__ == '__main__':
unittest.main()
'''
| Use database to detect the duplication. But the md5 value does not match. Need to add some code here | Use database to detect the duplication. But the md5 value does not match. Need to add some code here
| Python | apache-2.0 | mjwtom/swift,mjwtom/swift | __author__ = 'mjwtom'
import sqlite3
import unittest
- class fp_index:
+ class Fp_Index(object):
def __init__(self, name):
if name.endswith('.db'):
self.name = name
else:
self.name = name + '.db'
self.conn = sqlite3.connect(name)
self.c = self.conn.cursor()
self.c.execute('''CREATE TABLE IF NOT EXISTS fp_index (key text, value text)''')
def insert(self, key, value):
data = (key, value)
self.c.execute('INSERT INTO fp_index VALUES (?, ?)', data)
self.conn.commit()
def lookup(self, key):
data = (key,)
self.c.execute('SELECT value FROM fp_index WHERE key=?', data)
return self.c.fetchone()
-
+ '''
def testinsert():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
fp.insert(str, str)
def testselect():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
c = fp.lookup(str)
for row in c:
print row
if __name__ == '__main__':
unittest.main()
+ '''
| Use database to detect the duplication. But the md5 value does not match. Need to add some code here | ## Code Before:
__author__ = 'mjwtom'
import sqlite3
import unittest
class fp_index:
def __init__(self, name):
if name.endswith('.db'):
self.name = name
else:
self.name = name + '.db'
self.conn = sqlite3.connect(name)
self.c = self.conn.cursor()
self.c.execute('''CREATE TABLE IF NOT EXISTS fp_index (key text, value text)''')
def insert(self, key, value):
data = (key, value)
self.c.execute('INSERT INTO fp_index VALUES (?, ?)', data)
self.conn.commit()
def lookup(self, key):
data = (key,)
self.c.execute('SELECT value FROM fp_index WHERE key=?', data)
return self.c.fetchone()
def testinsert():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
fp.insert(str, str)
def testselect():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
c = fp.lookup(str)
for row in c:
print row
if __name__ == '__main__':
unittest.main()
## Instruction:
Use database to detect the duplication. But the md5 value does not match. Need to add some code here
## Code After:
__author__ = 'mjwtom'
import sqlite3
import unittest
class Fp_Index(object):
def __init__(self, name):
if name.endswith('.db'):
self.name = name
else:
self.name = name + '.db'
self.conn = sqlite3.connect(name)
self.c = self.conn.cursor()
self.c.execute('''CREATE TABLE IF NOT EXISTS fp_index (key text, value text)''')
def insert(self, key, value):
data = (key, value)
self.c.execute('INSERT INTO fp_index VALUES (?, ?)', data)
self.conn.commit()
def lookup(self, key):
data = (key,)
self.c.execute('SELECT value FROM fp_index WHERE key=?', data)
return self.c.fetchone()
'''
def testinsert():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
fp.insert(str, str)
def testselect():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
c = fp.lookup(str)
for row in c:
print row
if __name__ == '__main__':
unittest.main()
'''
| __author__ = 'mjwtom'
import sqlite3
import unittest
- class fp_index:
+ class Fp_Index(object):
def __init__(self, name):
if name.endswith('.db'):
self.name = name
else:
self.name = name + '.db'
self.conn = sqlite3.connect(name)
self.c = self.conn.cursor()
self.c.execute('''CREATE TABLE IF NOT EXISTS fp_index (key text, value text)''')
def insert(self, key, value):
data = (key, value)
self.c.execute('INSERT INTO fp_index VALUES (?, ?)', data)
self.conn.commit()
def lookup(self, key):
data = (key,)
self.c.execute('SELECT value FROM fp_index WHERE key=?', data)
return self.c.fetchone()
-
+ '''
def testinsert():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
fp.insert(str, str)
def testselect():
fp = fp_index('/home/mjwtom/mydb.db')
for i in range(0, 100):
str = i.__str__()
c = fp.lookup(str)
for row in c:
print row
if __name__ == '__main__':
unittest.main()
+ ''' |
4a032ece106d4b3b3764420197453afd33475bf6 | donut/modules/permissions/helpers.py | donut/modules/permissions/helpers.py | import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
if {'user_id': user_id} in holders:
return True
return False
| import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
holders = [row['user_id'] for row in holders]
if user_id in holders:
return True
return False
| Fix failing test and make lint | Fix failing test and make lint
| Python | mit | ASCIT/donut-python,ASCIT/donut,ASCIT/donut,ASCIT/donut-python,ASCIT/donut | import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
+ holders = [row['user_id'] for row in holders]
- if {'user_id': user_id} in holders:
+ if user_id in holders:
return True
return False
| Fix failing test and make lint | ## Code Before:
import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
if {'user_id': user_id} in holders:
return True
return False
## Instruction:
Fix failing test and make lint
## Code After:
import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
holders = [row['user_id'] for row in holders]
if user_id in holders:
return True
return False
| import flask
from donut.modules.groups import helpers as groups
def has_permission(user_id, permission_id):
'''
Returns True if [user_id] holds a position that directly
or indirectly (through a position relation) grants
them [permission_id]. Otherwise returns False.
'''
if not (isinstance(user_id, int) and isinstance(permission_id, int)):
return False
# get all position id's with this permission
query = '''SELECT pos_id FROM position_permissions WHERE permission_id = %s'''
with flask.g.pymysql_db.cursor() as cursor:
cursor.execute(query, permission_id)
result = cursor.fetchall()
pos_ids = [row['pos_id'] for row in result]
for pos_id in pos_ids:
holders = groups.get_position_holders(pos_id)
+ holders = [row['user_id'] for row in holders]
- if {'user_id': user_id} in holders:
? -- -----------
+ if user_id in holders:
return True
return False |
7206d68648c91790ac4fa14a3074c77c97c01636 | mopidy/backends/base/__init__.py | mopidy/backends/base/__init__.py | import logging
from .current_playlist import CurrentPlaylistController
from .library import LibraryController, BaseLibraryProvider
from .playback import PlaybackController, BasePlaybackProvider
from .stored_playlists import (StoredPlaylistsController,
BaseStoredPlaylistsProvider)
logger = logging.getLogger('mopidy.backends.base')
class Backend(object):
#: The current playlist controller. An instance of
#: :class:`mopidy.backends.base.CurrentPlaylistController`.
current_playlist = None
#: The library controller. An instance of
# :class:`mopidy.backends.base.LibraryController`.
library = None
#: The sound mixer. An instance of :class:`mopidy.mixers.BaseMixer`.
mixer = None
#: The playback controller. An instance of
#: :class:`mopidy.backends.base.PlaybackController`.
playback = None
#: The stored playlists controller. An instance of
#: :class:`mopidy.backends.base.StoredPlaylistsController`.
stored_playlists = None
#: List of URI prefixes this backend can handle.
uri_handlers = []
| import logging
from .current_playlist import CurrentPlaylistController
from .library import LibraryController, BaseLibraryProvider
from .playback import PlaybackController, BasePlaybackProvider
from .stored_playlists import (StoredPlaylistsController,
BaseStoredPlaylistsProvider)
logger = logging.getLogger('mopidy.backends.base')
class Backend(object):
#: The current playlist controller. An instance of
#: :class:`mopidy.backends.base.CurrentPlaylistController`.
current_playlist = None
#: The library controller. An instance of
# :class:`mopidy.backends.base.LibraryController`.
library = None
#: The playback controller. An instance of
#: :class:`mopidy.backends.base.PlaybackController`.
playback = None
#: The stored playlists controller. An instance of
#: :class:`mopidy.backends.base.StoredPlaylistsController`.
stored_playlists = None
#: List of URI prefixes this backend can handle.
uri_handlers = []
| Remove mixer from the Backend API as it is independent | Remove mixer from the Backend API as it is independent
| Python | apache-2.0 | adamcik/mopidy,vrs01/mopidy,pacificIT/mopidy,jmarsik/mopidy,jcass77/mopidy,glogiotatidis/mopidy,kingosticks/mopidy,ZenithDK/mopidy,rawdlite/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,tkem/mopidy,kingosticks/mopidy,jmarsik/mopidy,SuperStarPL/mopidy,bencevans/mopidy,diandiankan/mopidy,quartz55/mopidy,glogiotatidis/mopidy,quartz55/mopidy,priestd09/mopidy,pacificIT/mopidy,SuperStarPL/mopidy,bacontext/mopidy,rawdlite/mopidy,mopidy/mopidy,bencevans/mopidy,pacificIT/mopidy,jodal/mopidy,diandiankan/mopidy,mopidy/mopidy,abarisain/mopidy,tkem/mopidy,SuperStarPL/mopidy,abarisain/mopidy,jmarsik/mopidy,woutervanwijk/mopidy,bacontext/mopidy,adamcik/mopidy,swak/mopidy,ZenithDK/mopidy,quartz55/mopidy,hkariti/mopidy,vrs01/mopidy,ali/mopidy,vrs01/mopidy,woutervanwijk/mopidy,ali/mopidy,jodal/mopidy,dbrgn/mopidy,jmarsik/mopidy,jcass77/mopidy,ali/mopidy,jcass77/mopidy,liamw9534/mopidy,pacificIT/mopidy,hkariti/mopidy,glogiotatidis/mopidy,dbrgn/mopidy,swak/mopidy,adamcik/mopidy,priestd09/mopidy,dbrgn/mopidy,mokieyue/mopidy,kingosticks/mopidy,tkem/mopidy,liamw9534/mopidy,rawdlite/mopidy,quartz55/mopidy,priestd09/mopidy,vrs01/mopidy,ali/mopidy,mokieyue/mopidy,bencevans/mopidy,bencevans/mopidy,mokieyue/mopidy,diandiankan/mopidy,bacontext/mopidy,jodal/mopidy,mopidy/mopidy,hkariti/mopidy,dbrgn/mopidy,ZenithDK/mopidy,tkem/mopidy,swak/mopidy,bacontext/mopidy,swak/mopidy,mokieyue/mopidy,rawdlite/mopidy,diandiankan/mopidy,hkariti/mopidy,SuperStarPL/mopidy | import logging
from .current_playlist import CurrentPlaylistController
from .library import LibraryController, BaseLibraryProvider
from .playback import PlaybackController, BasePlaybackProvider
from .stored_playlists import (StoredPlaylistsController,
BaseStoredPlaylistsProvider)
logger = logging.getLogger('mopidy.backends.base')
class Backend(object):
#: The current playlist controller. An instance of
#: :class:`mopidy.backends.base.CurrentPlaylistController`.
current_playlist = None
#: The library controller. An instance of
# :class:`mopidy.backends.base.LibraryController`.
library = None
- #: The sound mixer. An instance of :class:`mopidy.mixers.BaseMixer`.
- mixer = None
-
#: The playback controller. An instance of
#: :class:`mopidy.backends.base.PlaybackController`.
playback = None
#: The stored playlists controller. An instance of
#: :class:`mopidy.backends.base.StoredPlaylistsController`.
stored_playlists = None
#: List of URI prefixes this backend can handle.
uri_handlers = []
| Remove mixer from the Backend API as it is independent | ## Code Before:
import logging
from .current_playlist import CurrentPlaylistController
from .library import LibraryController, BaseLibraryProvider
from .playback import PlaybackController, BasePlaybackProvider
from .stored_playlists import (StoredPlaylistsController,
BaseStoredPlaylistsProvider)
logger = logging.getLogger('mopidy.backends.base')
class Backend(object):
#: The current playlist controller. An instance of
#: :class:`mopidy.backends.base.CurrentPlaylistController`.
current_playlist = None
#: The library controller. An instance of
# :class:`mopidy.backends.base.LibraryController`.
library = None
#: The sound mixer. An instance of :class:`mopidy.mixers.BaseMixer`.
mixer = None
#: The playback controller. An instance of
#: :class:`mopidy.backends.base.PlaybackController`.
playback = None
#: The stored playlists controller. An instance of
#: :class:`mopidy.backends.base.StoredPlaylistsController`.
stored_playlists = None
#: List of URI prefixes this backend can handle.
uri_handlers = []
## Instruction:
Remove mixer from the Backend API as it is independent
## Code After:
import logging
from .current_playlist import CurrentPlaylistController
from .library import LibraryController, BaseLibraryProvider
from .playback import PlaybackController, BasePlaybackProvider
from .stored_playlists import (StoredPlaylistsController,
BaseStoredPlaylistsProvider)
logger = logging.getLogger('mopidy.backends.base')
class Backend(object):
#: The current playlist controller. An instance of
#: :class:`mopidy.backends.base.CurrentPlaylistController`.
current_playlist = None
#: The library controller. An instance of
# :class:`mopidy.backends.base.LibraryController`.
library = None
#: The playback controller. An instance of
#: :class:`mopidy.backends.base.PlaybackController`.
playback = None
#: The stored playlists controller. An instance of
#: :class:`mopidy.backends.base.StoredPlaylistsController`.
stored_playlists = None
#: List of URI prefixes this backend can handle.
uri_handlers = []
| import logging
from .current_playlist import CurrentPlaylistController
from .library import LibraryController, BaseLibraryProvider
from .playback import PlaybackController, BasePlaybackProvider
from .stored_playlists import (StoredPlaylistsController,
BaseStoredPlaylistsProvider)
logger = logging.getLogger('mopidy.backends.base')
class Backend(object):
#: The current playlist controller. An instance of
#: :class:`mopidy.backends.base.CurrentPlaylistController`.
current_playlist = None
#: The library controller. An instance of
# :class:`mopidy.backends.base.LibraryController`.
library = None
- #: The sound mixer. An instance of :class:`mopidy.mixers.BaseMixer`.
- mixer = None
-
#: The playback controller. An instance of
#: :class:`mopidy.backends.base.PlaybackController`.
playback = None
#: The stored playlists controller. An instance of
#: :class:`mopidy.backends.base.StoredPlaylistsController`.
stored_playlists = None
#: List of URI prefixes this backend can handle.
uri_handlers = [] |
7ea131b0c906c8da66f050e5833ded02f8acb495 | user_messages/managers.py | user_messages/managers.py | from django.db.models import Manager
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
return self.create(thread=thread, sender=from_user, content=content)
| from django.db.models import Manager
from user_messages.signals import message_sent
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
msg = self.create(thread=thread, sender=from_user, content=content)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
| Send a signal on each message that is sent to allow for external customization. | Send a signal on each message that is sent to allow for external customization.
| Python | mit | arthur-wsw/pinax-messages,eldarion/user_messages,eldarion/user_messages,pinax/pinax-messages,pinax/pinax-messages,arthur-wsw/pinax-messages | from django.db.models import Manager
+
+ from user_messages.signals import message_sent
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
+ message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
- return self.create(thread=thread, sender=from_user, content=content)
+ msg = self.create(thread=thread, sender=from_user, content=content)
+ message_sent.send(sender=self.model, message=msg, thread=thread)
+ return msg
| Send a signal on each message that is sent to allow for external customization. | ## Code Before:
from django.db.models import Manager
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
return self.create(thread=thread, sender=from_user, content=content)
## Instruction:
Send a signal on each message that is sent to allow for external customization.
## Code After:
from django.db.models import Manager
from user_messages.signals import message_sent
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
msg = self.create(thread=thread, sender=from_user, content=content)
message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
| from django.db.models import Manager
+
+ from user_messages.signals import message_sent
class ThreadManager(Manager):
def inbox(self, user):
return self.filter(userthread__user=user, userthread__deleted=False)
def unread(self, user):
return self.filter(userthread__user=user, userthread__deleted=False, userthread__unread=True)
class MessageManager(Manager):
def new_reply(self, thread, user, content):
msg = self.create(thread=thread, sender=user, content=content)
thread.userthread_set.exclude(user=user).update(deleted=False, unread=True)
+ message_sent.send(sender=self.model, message=msg, thread=thread)
return msg
def new_message(self, from_user, to_users, subject, content):
from user_messages.models import Thread
thread = Thread.objects.create(subject=subject)
for user in to_users:
thread.userthread_set.create(user=user, deleted=False, unread=True)
thread.userthread_set.create(user=from_user, deleted=True, unread=False)
- return self.create(thread=thread, sender=from_user, content=content)
? ^^^^^^
+ msg = self.create(thread=thread, sender=from_user, content=content)
? ^^^^^
+ message_sent.send(sender=self.model, message=msg, thread=thread)
+ return msg |
7b90d75f260e76baf8b57840d96bb36b62e2c56c | __init__.py | __init__.py |
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
#bikeshed.update.update(path=dataPath)
#bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
print subprocess.check_output("git add .", shell=True)
print subprocess.check_output("git push", shell=True)
if __name__ == "__main__":
main() |
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
bikeshed.update.update(path=dataPath)
bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
subprocess.check_call("git add data", shell=True)
subprocess.check_call("git commit -m 'update data'", shell=True)
subprocess.check_call("git push", shell=True)
if __name__ == "__main__":
main()
| Update script with proper git-ing. | Update script with proper git-ing.
| Python | mit | tabatkins/bikeshed-data |
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
- #bikeshed.update.update(path=dataPath)
+ bikeshed.update.update(path=dataPath)
- #bikeshed.update.createManifest(path=dataPath)
+ bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
- print subprocess.check_output("git add .", shell=True)
+ subprocess.check_call("git add data", shell=True)
+ subprocess.check_call("git commit -m 'update data'", shell=True)
- print subprocess.check_output("git push", shell=True)
+ subprocess.check_call("git push", shell=True)
if __name__ == "__main__":
- main()
+ main()
+ | Update script with proper git-ing. | ## Code Before:
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
#bikeshed.update.update(path=dataPath)
#bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
print subprocess.check_output("git add .", shell=True)
print subprocess.check_output("git push", shell=True)
if __name__ == "__main__":
main()
## Instruction:
Update script with proper git-ing.
## Code After:
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
bikeshed.update.update(path=dataPath)
bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
subprocess.check_call("git add data", shell=True)
subprocess.check_call("git commit -m 'update data'", shell=True)
subprocess.check_call("git push", shell=True)
if __name__ == "__main__":
main()
|
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
- #bikeshed.update.update(path=dataPath)
? -
+ bikeshed.update.update(path=dataPath)
- #bikeshed.update.createManifest(path=dataPath)
? -
+ bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
- print subprocess.check_output("git add .", shell=True)
? ------ ^^^^^^ ^
+ subprocess.check_call("git add data", shell=True)
? ^^^^ ^^^^
+ subprocess.check_call("git commit -m 'update data'", shell=True)
- print subprocess.check_output("git push", shell=True)
? ------ ^^^^^^
+ subprocess.check_call("git push", shell=True)
? ^^^^
if __name__ == "__main__":
- main()
+ main() |
e5ed3e877e24d943096fa5e48c1f8c9bc30c3160 | flask_annex/__init__.py | flask_annex/__init__.py | from .base import AnnexBase
__all__ = ('Annex',)
# -----------------------------------------------------------------------------
def get_annex_class(storage):
if storage == 'file':
from .file import FileAnnex
return FileAnnex
else:
raise ValueError("unsupported storage {}".format(storage))
# -----------------------------------------------------------------------------
class Annex(AnnexBase):
def __init__(self, storage, **kwargs):
annex_class = get_annex_class(storage)
# Proxy the actual implementation to prevent use of storage-specific
# attributes when using the generic annex.
self._impl = annex_class(**kwargs)
def save_file(self, key, filename):
return self._impl.save_file(key, filename)
def send_file(self, key, **options):
return self._impl.send_file(key, **options)
| from .base import AnnexBase
from . import utils
__all__ = ('Annex',)
# -----------------------------------------------------------------------------
def get_annex_class(storage):
if storage == 'file':
from .file import FileAnnex
return FileAnnex
else:
raise ValueError("unsupported storage {}".format(storage))
# -----------------------------------------------------------------------------
class Annex(AnnexBase):
def __init__(self, storage, **kwargs):
annex_class = get_annex_class(storage)
# Proxy the actual implementation to prevent use of storage-specific
# attributes when using the generic annex.
self._impl = annex_class(**kwargs)
@classmethod
def from_env(cls, namespace):
storage = utils.get_config_from_env(namespace)['storage']
# Use storage-specific env namespace when configuring a generic annex,
# to avoid having unrecognized extra keys when changing storage.
storage_namespace = '{}_{}'.format(namespace, storage.upper())
storage_config = utils.get_config_from_env(storage_namespace)
return cls(storage, **storage_config)
def save_file(self, key, filename):
return self._impl.save_file(key, filename)
def send_file(self, key, **options):
return self._impl.send_file(key, **options)
| Use storage sub-namespace for generic annex | Use storage sub-namespace for generic annex
| Python | mit | 4Catalyzer/flask-annex,taion/flask-annex | from .base import AnnexBase
+ from . import utils
__all__ = ('Annex',)
# -----------------------------------------------------------------------------
def get_annex_class(storage):
if storage == 'file':
from .file import FileAnnex
return FileAnnex
else:
raise ValueError("unsupported storage {}".format(storage))
# -----------------------------------------------------------------------------
class Annex(AnnexBase):
def __init__(self, storage, **kwargs):
annex_class = get_annex_class(storage)
# Proxy the actual implementation to prevent use of storage-specific
# attributes when using the generic annex.
self._impl = annex_class(**kwargs)
+ @classmethod
+ def from_env(cls, namespace):
+ storage = utils.get_config_from_env(namespace)['storage']
+
+ # Use storage-specific env namespace when configuring a generic annex,
+ # to avoid having unrecognized extra keys when changing storage.
+ storage_namespace = '{}_{}'.format(namespace, storage.upper())
+ storage_config = utils.get_config_from_env(storage_namespace)
+
+ return cls(storage, **storage_config)
+
def save_file(self, key, filename):
return self._impl.save_file(key, filename)
def send_file(self, key, **options):
return self._impl.send_file(key, **options)
| Use storage sub-namespace for generic annex | ## Code Before:
from .base import AnnexBase
__all__ = ('Annex',)
# -----------------------------------------------------------------------------
def get_annex_class(storage):
if storage == 'file':
from .file import FileAnnex
return FileAnnex
else:
raise ValueError("unsupported storage {}".format(storage))
# -----------------------------------------------------------------------------
class Annex(AnnexBase):
def __init__(self, storage, **kwargs):
annex_class = get_annex_class(storage)
# Proxy the actual implementation to prevent use of storage-specific
# attributes when using the generic annex.
self._impl = annex_class(**kwargs)
def save_file(self, key, filename):
return self._impl.save_file(key, filename)
def send_file(self, key, **options):
return self._impl.send_file(key, **options)
## Instruction:
Use storage sub-namespace for generic annex
## Code After:
from .base import AnnexBase
from . import utils
__all__ = ('Annex',)
# -----------------------------------------------------------------------------
def get_annex_class(storage):
if storage == 'file':
from .file import FileAnnex
return FileAnnex
else:
raise ValueError("unsupported storage {}".format(storage))
# -----------------------------------------------------------------------------
class Annex(AnnexBase):
def __init__(self, storage, **kwargs):
annex_class = get_annex_class(storage)
# Proxy the actual implementation to prevent use of storage-specific
# attributes when using the generic annex.
self._impl = annex_class(**kwargs)
@classmethod
def from_env(cls, namespace):
storage = utils.get_config_from_env(namespace)['storage']
# Use storage-specific env namespace when configuring a generic annex,
# to avoid having unrecognized extra keys when changing storage.
storage_namespace = '{}_{}'.format(namespace, storage.upper())
storage_config = utils.get_config_from_env(storage_namespace)
return cls(storage, **storage_config)
def save_file(self, key, filename):
return self._impl.save_file(key, filename)
def send_file(self, key, **options):
return self._impl.send_file(key, **options)
| from .base import AnnexBase
+ from . import utils
__all__ = ('Annex',)
# -----------------------------------------------------------------------------
def get_annex_class(storage):
if storage == 'file':
from .file import FileAnnex
return FileAnnex
else:
raise ValueError("unsupported storage {}".format(storage))
# -----------------------------------------------------------------------------
class Annex(AnnexBase):
def __init__(self, storage, **kwargs):
annex_class = get_annex_class(storage)
# Proxy the actual implementation to prevent use of storage-specific
# attributes when using the generic annex.
self._impl = annex_class(**kwargs)
+ @classmethod
+ def from_env(cls, namespace):
+ storage = utils.get_config_from_env(namespace)['storage']
+
+ # Use storage-specific env namespace when configuring a generic annex,
+ # to avoid having unrecognized extra keys when changing storage.
+ storage_namespace = '{}_{}'.format(namespace, storage.upper())
+ storage_config = utils.get_config_from_env(storage_namespace)
+
+ return cls(storage, **storage_config)
+
def save_file(self, key, filename):
return self._impl.save_file(key, filename)
def send_file(self, key, **options):
return self._impl.send_file(key, **options) |
60173acbecf1239872411b2ca0dd9eb75b543843 | tests/sentry/web/frontend/test_organization_stats.py | tests/sentry/web/frontend/test_organization_stats.py | from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationStatsPermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationStatsPermissionTest, self).setUp()
self.path = reverse('sentry-organization-stats', args=[self.organization.slug])
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
def test_org_member_cannot_load(self):
self.assert_org_member_cannot_access(self.path)
def test_org_admin_can_load(self):
self.assert_org_admin_can_access(self.path)
class OrganizationStatsTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team_1 = self.create_team(name='foo', organization=organization)
team_2 = self.create_team(name='bar', organization=organization)
path = reverse('sentry-organization-stats', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-stats.html')
assert resp.context['organization'] == organization
| from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationStatsPermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationStatsPermissionTest, self).setUp()
self.path = reverse('sentry-organization-stats', args=[self.organization.slug])
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
def test_org_member_can_load(self):
self.assert_org_member_can_access(self.path)
class OrganizationStatsTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team_1 = self.create_team(name='foo', organization=organization)
team_2 = self.create_team(name='bar', organization=organization)
path = reverse('sentry-organization-stats', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-stats.html')
assert resp.context['organization'] == organization
| Correct permission tests for organization stats | Correct permission tests for organization stats
| Python | bsd-3-clause | looker/sentry,alexm92/sentry,gg7/sentry,zenefits/sentry,vperron/sentry,ifduyue/sentry,imankulov/sentry,JamesMura/sentry,daevaorn/sentry,mitsuhiko/sentry,JackDanger/sentry,ewdurbin/sentry,BuildingLink/sentry,daevaorn/sentry,kevinlondon/sentry,songyi199111/sentry,TedaLIEz/sentry,kevinlondon/sentry,wujuguang/sentry,mitsuhiko/sentry,argonemyth/sentry,hongliang5623/sentry,ifduyue/sentry,kevinlondon/sentry,JamesMura/sentry,nicholasserra/sentry,boneyao/sentry,ewdurbin/sentry,TedaLIEz/sentry,vperron/sentry,looker/sentry,ifduyue/sentry,1tush/sentry,BuildingLink/sentry,Kryz/sentry,kevinastone/sentry,gencer/sentry,mvaled/sentry,looker/sentry,boneyao/sentry,jean/sentry,JTCunning/sentry,wong2/sentry,songyi199111/sentry,ngonzalvez/sentry,imankulov/sentry,felixbuenemann/sentry,TedaLIEz/sentry,jean/sentry,JackDanger/sentry,jean/sentry,jean/sentry,ngonzalvez/sentry,BuildingLink/sentry,zenefits/sentry,ewdurbin/sentry,daevaorn/sentry,fuziontech/sentry,JackDanger/sentry,argonemyth/sentry,hongliang5623/sentry,mvaled/sentry,JTCunning/sentry,nicholasserra/sentry,ifduyue/sentry,fotinakis/sentry,korealerts1/sentry,boneyao/sentry,kevinastone/sentry,Natim/sentry,beeftornado/sentry,drcapulet/sentry,gg7/sentry,gencer/sentry,llonchj/sentry,Kryz/sentry,drcapulet/sentry,llonchj/sentry,BayanGroup/sentry,korealerts1/sentry,fotinakis/sentry,vperron/sentry,BayanGroup/sentry,fuziontech/sentry,looker/sentry,drcapulet/sentry,felixbuenemann/sentry,fotinakis/sentry,wong2/sentry,zenefits/sentry,beeftornado/sentry,mvaled/sentry,Natim/sentry,beeftornado/sentry,Kryz/sentry,imankulov/sentry,pauloschilling/sentry,BuildingLink/sentry,gencer/sentry,mvaled/sentry,fuziontech/sentry,alexm92/sentry,Natim/sentry,1tush/sentry,kevinastone/sentry,korealerts1/sentry,JamesMura/sentry,BuildingLink/sentry,ngonzalvez/sentry,pauloschilling/sentry,songyi199111/sentry,wong2/sentry,JamesMura/sentry,zenefits/sentry,mvaled/sentry,wujuguang/sentry,fotinakis/sentry,gencer/sentry,hongliang5623/sentry,gencer/se
ntry,daevaorn/sentry,pauloschilling/sentry,nicholasserra/sentry,BayanGroup/sentry,jean/sentry,wujuguang/sentry,1tush/sentry,mvaled/sentry,llonchj/sentry,alexm92/sentry,JamesMura/sentry,argonemyth/sentry,zenefits/sentry,gg7/sentry,ifduyue/sentry,felixbuenemann/sentry,looker/sentry,JTCunning/sentry | from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationStatsPermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationStatsPermissionTest, self).setUp()
self.path = reverse('sentry-organization-stats', args=[self.organization.slug])
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
- def test_org_member_cannot_load(self):
+ def test_org_member_can_load(self):
- self.assert_org_member_cannot_access(self.path)
+ self.assert_org_member_can_access(self.path)
-
- def test_org_admin_can_load(self):
- self.assert_org_admin_can_access(self.path)
class OrganizationStatsTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team_1 = self.create_team(name='foo', organization=organization)
team_2 = self.create_team(name='bar', organization=organization)
path = reverse('sentry-organization-stats', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-stats.html')
assert resp.context['organization'] == organization
| Correct permission tests for organization stats | ## Code Before:
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationStatsPermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationStatsPermissionTest, self).setUp()
self.path = reverse('sentry-organization-stats', args=[self.organization.slug])
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
def test_org_member_cannot_load(self):
self.assert_org_member_cannot_access(self.path)
def test_org_admin_can_load(self):
self.assert_org_admin_can_access(self.path)
class OrganizationStatsTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team_1 = self.create_team(name='foo', organization=organization)
team_2 = self.create_team(name='bar', organization=organization)
path = reverse('sentry-organization-stats', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-stats.html')
assert resp.context['organization'] == organization
## Instruction:
Correct permission tests for organization stats
## Code After:
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationStatsPermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationStatsPermissionTest, self).setUp()
self.path = reverse('sentry-organization-stats', args=[self.organization.slug])
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
def test_org_member_can_load(self):
self.assert_org_member_can_access(self.path)
class OrganizationStatsTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team_1 = self.create_team(name='foo', organization=organization)
team_2 = self.create_team(name='bar', organization=organization)
path = reverse('sentry-organization-stats', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-stats.html')
assert resp.context['organization'] == organization
| from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.testutils import TestCase, PermissionTestCase
class OrganizationStatsPermissionTest(PermissionTestCase):
def setUp(self):
super(OrganizationStatsPermissionTest, self).setUp()
self.path = reverse('sentry-organization-stats', args=[self.organization.slug])
def test_teamless_admin_cannot_load(self):
self.assert_teamless_admin_cannot_access(self.path)
- def test_org_member_cannot_load(self):
? ---
+ def test_org_member_can_load(self):
- self.assert_org_member_cannot_access(self.path)
? ---
+ self.assert_org_member_can_access(self.path)
-
- def test_org_admin_can_load(self):
- self.assert_org_admin_can_access(self.path)
class OrganizationStatsTest(TestCase):
def test_renders_with_context(self):
organization = self.create_organization(name='foo', owner=self.user)
team_1 = self.create_team(name='foo', organization=organization)
team_2 = self.create_team(name='bar', organization=organization)
path = reverse('sentry-organization-stats', args=[organization.slug])
self.login_as(self.user)
resp = self.client.get(path)
assert resp.status_code == 200
self.assertTemplateUsed(resp, 'sentry/organization-stats.html')
assert resp.context['organization'] == organization |
56d3db6aae71c88ff8b55bb1d173abc025be7e8c | jacquard/tests/test_cli.py | jacquard/tests/test_cli.py | import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
| import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
def test_run_write_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['set-default', 'foo', '"bar"'], config=config)
assert output.getvalue() == ''
assert config.storage.data == {'defaults': '{"foo": "bar"}'}
| Add test of a write command | Add test of a write command
| Python | mit | prophile/jacquard,prophile/jacquard | import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
+
+ def test_run_write_command():
+ config = unittest.mock.Mock()
+ config.storage = DummyStore('', data={})
+
+ output = io.StringIO()
+ with contextlib.redirect_stdout(output):
+ main(['set-default', 'foo', '"bar"'], config=config)
+
+ assert output.getvalue() == ''
+
+ assert config.storage.data == {'defaults': '{"foo": "bar"}'}
+ | Add test of a write command | ## Code Before:
import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
## Instruction:
Add test of a write command
## Code After:
import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
def test_run_write_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['set-default', 'foo', '"bar"'], config=config)
assert output.getvalue() == ''
assert config.storage.data == {'defaults': '{"foo": "bar"}'}
| import io
import unittest.mock
import contextlib
import textwrap
from jacquard.cli import main
from jacquard.storage.dummy import DummyStore
def test_smoke_cli_help():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['--help'])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_help_message_when_given_no_subcommand():
try:
output = io.StringIO()
with contextlib.redirect_stdout(output):
main([])
except SystemExit:
pass
assert output.getvalue().startswith("usage: ")
def test_run_basic_command():
config = unittest.mock.Mock()
config.storage = DummyStore('', data={
'foo': 'bar',
})
output = io.StringIO()
with contextlib.redirect_stdout(output):
main(['storage-dump'], config=config)
assert output.getvalue().strip() == textwrap.dedent("""
foo
===
'bar'
"""
).strip()
+
+
+ def test_run_write_command():
+ config = unittest.mock.Mock()
+ config.storage = DummyStore('', data={})
+
+ output = io.StringIO()
+ with contextlib.redirect_stdout(output):
+ main(['set-default', 'foo', '"bar"'], config=config)
+
+ assert output.getvalue() == ''
+
+ assert config.storage.data == {'defaults': '{"foo": "bar"}'} |
cdae77dee9888d6d6094566747650bf80d631f03 | station.py | station.py | """Creates the station class"""
#import ask_user from ask_user
#import int_check from int_check
#import reasonable_check from reasonable_check
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
ask_user(prompt, lower_range, upper_range): function to get input, maybe it should live
somewhere else?
"""
def __init__(self, capacity, escalators, train_wait, travelors_arriving, travelors_departing):
self.capacity = user.says("Enter the max capacity of the station between" lower "and" upper)
self.escalators = user.says("Enter the number of escalators in the station between" lower "and" upper)
self.train_wait = user.says("Enter the wait time between trains in seconds between" lower "and" upper)
self.travelors_arriving = user.says("How many people just exited the train? between" lower "and" upper)
self.travelors_departing = user.says("How many people are waiting for the train? between" lower "and" upper)
| """Creates the station class"""
#import request_integer_in_range from request_integer_in_range
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
request_integer_in_range : requests an integer in a range
"""
def __init__(self, capacity, escalators, train_wait, travelors_arriving, travelors_departing):
self.capacity = request_integer_in_range("Enter the station capacity between 10 and 10000: ", 10, 10000)
self.escalators = request_integer_in_range("Enter an odd number of escalators between 1 and 7: ", 1, 7)
self.train_wait = request_integer_in_range("Enter the wait time between trains in seconds between 60 and 1800 ", 60, 1800)
self.travelors_arriving = request_integer_in_range("Enter the number of people exiting the train between 1 and 500: ", 1, 500)
self.travelors_departing = request_integer_in_range("Enter the number of people waiting for the train between 1 and 500: ", 1, 500)
| Integrate integer test function into instantiation | Integrate integer test function into instantiation
Ref #23 | Python | mit | ForestPride/rail-problem | """Creates the station class"""
+ #import request_integer_in_range from request_integer_in_range
+
- #import ask_user from ask_user
- #import int_check from int_check
- #import reasonable_check from reasonable_check
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
+ request_integer_in_range : requests an integer in a range
+
- total_station_pop: calculates total station population
- ask_user(prompt, lower_range, upper_range): function to get input, maybe it should live
- somewhere else?
"""
def __init__(self, capacity, escalators, train_wait, travelors_arriving, travelors_departing):
- self.capacity = user.says("Enter the max capacity of the station between" lower "and" upper)
- self.escalators = user.says("Enter the number of escalators in the station between" lower "and" upper)
+ self.capacity = request_integer_in_range("Enter the station capacity between 10 and 10000: ", 10, 10000)
+ self.escalators = request_integer_in_range("Enter an odd number of escalators between 1 and 7: ", 1, 7)
- self.train_wait = user.says("Enter the wait time between trains in seconds between" lower "and" upper)
+ self.train_wait = request_integer_in_range("Enter the wait time between trains in seconds between 60 and 1800 ", 60, 1800)
- self.travelors_arriving = user.says("How many people just exited the train? between" lower "and" upper)
- self.travelors_departing = user.says("How many people are waiting for the train? between" lower "and" upper)
+ self.travelors_arriving = request_integer_in_range("Enter the number of people exiting the train between 1 and 500: ", 1, 500)
+ self.travelors_departing = request_integer_in_range("Enter the number of people waiting for the train between 1 and 500: ", 1, 500)
| Integrate integer test function into instantiation | ## Code Before:
"""Creates the station class"""
#import ask_user from ask_user
#import int_check from int_check
#import reasonable_check from reasonable_check
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
ask_user(prompt, lower_range, upper_range): function to get input, maybe it should live
somewhere else?
"""
def __init__(self, capacity, escalators, train_wait, travelors_arriving, travelors_departing):
self.capacity = user.says("Enter the max capacity of the station between" lower "and" upper)
self.escalators = user.says("Enter the number of escalators in the station between" lower "and" upper)
self.train_wait = user.says("Enter the wait time between trains in seconds between" lower "and" upper)
self.travelors_arriving = user.says("How many people just exited the train? between" lower "and" upper)
self.travelors_departing = user.says("How many people are waiting for the train? between" lower "and" upper)
## Instruction:
Integrate integer test function into instantiation
## Code After:
"""Creates the station class"""
#import request_integer_in_range from request_integer_in_range
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
request_integer_in_range : requests an integer in a range
"""
def __init__(self, capacity, escalators, train_wait, travelors_arriving, travelors_departing):
self.capacity = request_integer_in_range("Enter the station capacity between 10 and 10000: ", 10, 10000)
self.escalators = request_integer_in_range("Enter an odd number of escalators between 1 and 7: ", 1, 7)
self.train_wait = request_integer_in_range("Enter the wait time between trains in seconds between 60 and 1800 ", 60, 1800)
self.travelors_arriving = request_integer_in_range("Enter the number of people exiting the train between 1 and 500: ", 1, 500)
self.travelors_departing = request_integer_in_range("Enter the number of people waiting for the train between 1 and 500: ", 1, 500)
| """Creates the station class"""
+ #import request_integer_in_range from request_integer_in_range
+
- #import ask_user from ask_user
- #import int_check from int_check
- #import reasonable_check from reasonable_check
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
+ request_integer_in_range : requests an integer in a range
+
- total_station_pop: calculates total station population
- ask_user(prompt, lower_range, upper_range): function to get input, maybe it should live
- somewhere else?
"""
def __init__(self, capacity, escalators, train_wait, travelors_arriving, travelors_departing):
- self.capacity = user.says("Enter the max capacity of the station between" lower "and" upper)
- self.escalators = user.says("Enter the number of escalators in the station between" lower "and" upper)
+ self.capacity = request_integer_in_range("Enter the station capacity between 10 and 10000: ", 10, 10000)
+ self.escalators = request_integer_in_range("Enter an odd number of escalators between 1 and 7: ", 1, 7)
- self.train_wait = user.says("Enter the wait time between trains in seconds between" lower "and" upper)
? ^^ ^^ - ^^^^^ - ^^^^^
+ self.train_wait = request_integer_in_range("Enter the wait time between trains in seconds between 60 and 1800 ", 60, 1800)
? +++ + +++++++ ^^^^^ ^^^ ^^ ++++++ + ^^^^^^^^
- self.travelors_arriving = user.says("How many people just exited the train? between" lower "and" upper)
- self.travelors_departing = user.says("How many people are waiting for the train? between" lower "and" upper)
+ self.travelors_arriving = request_integer_in_range("Enter the number of people exiting the train between 1 and 500: ", 1, 500)
+ self.travelors_departing = request_integer_in_range("Enter the number of people waiting for the train between 1 and 500: ", 1, 500) |
0ebac1925b3d4b32188a6f2c9e40760b21d933ce | backend/uclapi/dashboard/app_helpers.py | backend/uclapi/dashboard/app_helpers.py | from binascii import hexlify
import os
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
| from binascii import hexlify
from random import choice
import os
import string
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
def generate_app_client_id():
client_id = ''.join(random.choice(string.digits, k=16))
client_id += "."
client_id += ''.join(random.choice(string.digits, k=16))
return client_id
def generate_app_client_secret():
client_secret = ''.join(random.choice(string.ascii_lowercase + string.digits, k=64))
return client_secret | Add helpers to the dashboard code to generate OAuth keys | Add helpers to the dashboard code to generate OAuth keys
| Python | mit | uclapi/uclapi,uclapi/uclapi,uclapi/uclapi,uclapi/uclapi | from binascii import hexlify
+ from random import choice
+
import os
+ import string
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
+ def generate_app_client_id():
+ client_id = ''.join(random.choice(string.digits, k=16))
+ client_id += "."
+ client_id += ''.join(random.choice(string.digits, k=16))
+
+ return client_id
+
+ def generate_app_client_secret():
+ client_secret = ''.join(random.choice(string.ascii_lowercase + string.digits, k=64))
+
+ return client_secret | Add helpers to the dashboard code to generate OAuth keys | ## Code Before:
from binascii import hexlify
import os
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
## Instruction:
Add helpers to the dashboard code to generate OAuth keys
## Code After:
from binascii import hexlify
from random import choice
import os
import string
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
def generate_app_client_id():
client_id = ''.join(random.choice(string.digits, k=16))
client_id += "."
client_id += ''.join(random.choice(string.digits, k=16))
return client_id
def generate_app_client_secret():
client_secret = ''.join(random.choice(string.ascii_lowercase + string.digits, k=64))
return client_secret | from binascii import hexlify
+ from random import choice
+
import os
+ import string
def generate_api_token():
key = hexlify(os.urandom(30)).decode()
dashes_key = ""
for idx, char in enumerate(key):
if idx % 15 == 0 and idx != len(key)-1:
dashes_key += "-"
else:
dashes_key += char
final = "uclapi" + dashes_key
return final
def generate_app_id():
key = hexlify(os.urandom(5)).decode()
final = "A" + key
return final
+
+ def generate_app_client_id():
+ client_id = ''.join(random.choice(string.digits, k=16))
+ client_id += "."
+ client_id += ''.join(random.choice(string.digits, k=16))
+
+ return client_id
+
+ def generate_app_client_secret():
+ client_secret = ''.join(random.choice(string.ascii_lowercase + string.digits, k=64))
+
+ return client_secret |
5054e882194adae4b76681e78c45d41ae2c2f0f7 | pymatgen/util/sequence.py | pymatgen/util/sequence.py |
import math
def get_chunks(sequence, size=1):
"""
Args:
sequence ():
size ():
Returns:
"""
chunks = int(math.ceil(len(sequence) / float(size)))
return [sequence[i * size : (i + 1) * size] for i in range(chunks)]
class PBarSafe:
"""
Progress bar.
"""
def __init__(self, total):
"""
Args:
total (): Total value.
"""
self.total = total
self.done = 0
self.report()
def update(self, amount):
"""
Update progress bar by amount.
Args:
amount (float):
"""
self.done += amount
self.report()
def report(self):
"""
Print progress.
"""
print("{} of {} done {:.1%}".format(self.done, self.total, self.done / self.total))
try:
# noinspection PyUnresolvedReferences
if get_ipython().__class__.__name__ == "ZMQInteractiveShell": # type: ignore
from tqdm import tqdm_notebook as PBar
else: # likely 'TerminalInteractiveShell'
from tqdm import tqdm as PBar
except NameError:
try:
from tqdm import tqdm as PBar
except ImportError:
PBar = PBarSafe
except ImportError:
PBar = PBarSafe
|
import math
def get_chunks(sequence, size=1):
"""
Args:
sequence ():
size ():
Returns:
"""
chunks = int(math.ceil(len(sequence) / float(size)))
return [sequence[i * size : (i + 1) * size] for i in range(chunks)]
class PBarSafe:
"""
Progress bar.
"""
def __init__(self, total, **kwargs):
"""
Args:
total (): Total value.
"""
self.total = total
self.done = 0
self.report()
def update(self, amount):
"""
Update progress bar by amount.
Args:
amount (float):
"""
self.done += amount
self.report()
def report(self):
"""
Print progress.
"""
print("{} of {} done {:.1%}".format(self.done, self.total, self.done / self.total))
try:
# noinspection PyUnresolvedReferences
if get_ipython().__class__.__name__ == "ZMQInteractiveShell": # type: ignore
from tqdm import tqdm_notebook as PBar
else: # likely 'TerminalInteractiveShell'
from tqdm import tqdm as PBar
except NameError:
try:
from tqdm import tqdm as PBar
except ImportError:
PBar = PBarSafe
except ImportError:
PBar = PBarSafe
| Allow `PBar` to accept any kwargs (e.g. those used by `tqdm`) | Allow `PBar` to accept any kwargs (e.g. those used by `tqdm`)
| Python | mit | gVallverdu/pymatgen,vorwerkc/pymatgen,vorwerkc/pymatgen,davidwaroquiers/pymatgen,gVallverdu/pymatgen,gVallverdu/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,gVallverdu/pymatgen,davidwaroquiers/pymatgen,fraricci/pymatgen,davidwaroquiers/pymatgen,davidwaroquiers/pymatgen,fraricci/pymatgen,vorwerkc/pymatgen,fraricci/pymatgen |
import math
def get_chunks(sequence, size=1):
"""
Args:
sequence ():
size ():
Returns:
"""
chunks = int(math.ceil(len(sequence) / float(size)))
return [sequence[i * size : (i + 1) * size] for i in range(chunks)]
class PBarSafe:
"""
Progress bar.
"""
- def __init__(self, total):
+ def __init__(self, total, **kwargs):
"""
Args:
total (): Total value.
"""
self.total = total
self.done = 0
self.report()
def update(self, amount):
"""
Update progress bar by amount.
Args:
amount (float):
"""
self.done += amount
self.report()
def report(self):
"""
Print progress.
"""
print("{} of {} done {:.1%}".format(self.done, self.total, self.done / self.total))
try:
# noinspection PyUnresolvedReferences
if get_ipython().__class__.__name__ == "ZMQInteractiveShell": # type: ignore
from tqdm import tqdm_notebook as PBar
else: # likely 'TerminalInteractiveShell'
from tqdm import tqdm as PBar
except NameError:
try:
from tqdm import tqdm as PBar
except ImportError:
PBar = PBarSafe
except ImportError:
PBar = PBarSafe
| Allow `PBar` to accept any kwargs (e.g. those used by `tqdm`) | ## Code Before:
import math
def get_chunks(sequence, size=1):
"""
Args:
sequence ():
size ():
Returns:
"""
chunks = int(math.ceil(len(sequence) / float(size)))
return [sequence[i * size : (i + 1) * size] for i in range(chunks)]
class PBarSafe:
"""
Progress bar.
"""
def __init__(self, total):
"""
Args:
total (): Total value.
"""
self.total = total
self.done = 0
self.report()
def update(self, amount):
"""
Update progress bar by amount.
Args:
amount (float):
"""
self.done += amount
self.report()
def report(self):
"""
Print progress.
"""
print("{} of {} done {:.1%}".format(self.done, self.total, self.done / self.total))
try:
# noinspection PyUnresolvedReferences
if get_ipython().__class__.__name__ == "ZMQInteractiveShell": # type: ignore
from tqdm import tqdm_notebook as PBar
else: # likely 'TerminalInteractiveShell'
from tqdm import tqdm as PBar
except NameError:
try:
from tqdm import tqdm as PBar
except ImportError:
PBar = PBarSafe
except ImportError:
PBar = PBarSafe
## Instruction:
Allow `PBar` to accept any kwargs (e.g. those used by `tqdm`)
## Code After:
import math
def get_chunks(sequence, size=1):
"""
Args:
sequence ():
size ():
Returns:
"""
chunks = int(math.ceil(len(sequence) / float(size)))
return [sequence[i * size : (i + 1) * size] for i in range(chunks)]
class PBarSafe:
"""
Progress bar.
"""
def __init__(self, total, **kwargs):
"""
Args:
total (): Total value.
"""
self.total = total
self.done = 0
self.report()
def update(self, amount):
"""
Update progress bar by amount.
Args:
amount (float):
"""
self.done += amount
self.report()
def report(self):
"""
Print progress.
"""
print("{} of {} done {:.1%}".format(self.done, self.total, self.done / self.total))
try:
# noinspection PyUnresolvedReferences
if get_ipython().__class__.__name__ == "ZMQInteractiveShell": # type: ignore
from tqdm import tqdm_notebook as PBar
else: # likely 'TerminalInteractiveShell'
from tqdm import tqdm as PBar
except NameError:
try:
from tqdm import tqdm as PBar
except ImportError:
PBar = PBarSafe
except ImportError:
PBar = PBarSafe
|
import math
def get_chunks(sequence, size=1):
"""
Args:
sequence ():
size ():
Returns:
"""
chunks = int(math.ceil(len(sequence) / float(size)))
return [sequence[i * size : (i + 1) * size] for i in range(chunks)]
class PBarSafe:
"""
Progress bar.
"""
- def __init__(self, total):
+ def __init__(self, total, **kwargs):
? ++++++++++
"""
Args:
total (): Total value.
"""
self.total = total
self.done = 0
self.report()
def update(self, amount):
"""
Update progress bar by amount.
Args:
amount (float):
"""
self.done += amount
self.report()
def report(self):
"""
Print progress.
"""
print("{} of {} done {:.1%}".format(self.done, self.total, self.done / self.total))
try:
# noinspection PyUnresolvedReferences
if get_ipython().__class__.__name__ == "ZMQInteractiveShell": # type: ignore
from tqdm import tqdm_notebook as PBar
else: # likely 'TerminalInteractiveShell'
from tqdm import tqdm as PBar
except NameError:
try:
from tqdm import tqdm as PBar
except ImportError:
PBar = PBarSafe
except ImportError:
PBar = PBarSafe |
cddc9b20855147541859976229e1dc34a611de26 | twitterfunctions.py | twitterfunctions.py |
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(tweet)
|
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(status=tweet)
| Change the api.update_status() call to explicitly state the 'status' message. | Change the api.update_status() call to explicitly state the 'status' message.
- A recent version of Tweepy required it to be explicit, no harm in always being so
| Python | agpl-3.0 | pattonwebz/ScheduledTweetBot |
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
-
+
- # Authorize with consumer credentials and get an access token
+ # Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
-
+
# get an authenticated instance of the API class
api = tweepy.API(auth)
-
+
# return API object 'api'
return api
-
+
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
- api.update_status(tweet)
+ api.update_status(status=tweet)
- | Change the api.update_status() call to explicitly state the 'status' message. | ## Code Before:
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(tweet)
## Instruction:
Change the api.update_status() call to explicitly state the 'status' message.
## Code After:
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
# Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# get an authenticated instance of the API class
api = tweepy.API(auth)
# return API object 'api'
return api
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
api.update_status(status=tweet)
|
import tweepy
def authenticatetwitter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET):
# Authenticate with Twitter using keys and secrets and return
# an 'api' object
-
+
- # Authorize with consumer credentials and get an access token
? -
+ # Authorize with consumer credentials and get an access token
# with access credentials
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
-
+
# get an authenticated instance of the API class
api = tweepy.API(auth)
-
+
# return API object 'api'
return api
-
+
def sendtweet(api, tweet):
# Send 'tweet' using Tweepy API function
- api.update_status(tweet)
+ api.update_status(status=tweet)
? +++++++
- |
a0f09e23dd19f0cf223034f9b787a4f038cd995d | testsuite/driver/my_typing.py | testsuite/driver/my_typing.py |
try:
from typing import *
import typing
except:
# The backwards compatibility stubs must live in another module lest
# mypy complains.
from typing_stubs import * # type: ignore
####################################################
# Backwards compatibility shims
#
# N.B. mypy appears to typecheck as though the "then" clause of if structures
# is taken. We exploit this below.
# TextIO is missing on some older Pythons.
if 'TextIO' in globals():
TextIO = typing.TextIO
else:
TextIO = None # type: ignore
####################################################
# Testsuite-specific types
WayName = NewType("WayName", str)
TestName = NewType("TestName", str)
OutputNormalizer = Callable[[str], str]
IssueNumber = NewType("IssueNumber", int)
# Used by perf_notes
GitHash = NewType("GitHash", str)
GitRef = NewType("GitRef", str)
TestEnv = NewType("TestEnv", str)
MetricName = NewType("MetricName", str) |
try:
from typing import *
import typing
except:
# The backwards compatibility stubs must live in another module lest
# mypy complains.
from typing_stubs import * # type: ignore
####################################################
# Backwards compatibility shims
#
# N.B. mypy appears to typecheck as though the "then" clause of if structures
# is taken. We exploit this below.
# TextIO is missing on some older Pythons.
if 'TextIO' not in globals():
try:
TextIO = typing.TextIO
except ImportError:
TextIO = None # type: ignore
else:
TextIO = None # type: ignore
####################################################
# Testsuite-specific types
WayName = NewType("WayName", str)
TestName = NewType("TestName", str)
OutputNormalizer = Callable[[str], str]
IssueNumber = NewType("IssueNumber", int)
# Used by perf_notes
GitHash = NewType("GitHash", str)
GitRef = NewType("GitRef", str)
TestEnv = NewType("TestEnv", str)
MetricName = NewType("MetricName", str) | Simplify Python <3.5 fallback for TextIO | testsuite: Simplify Python <3.5 fallback for TextIO
(cherry picked from commit d092d8598694c23bc07cdcc504dff52fa5f33be1)
| Python | bsd-3-clause | sdiehl/ghc,sdiehl/ghc,sdiehl/ghc,sdiehl/ghc,sdiehl/ghc,sdiehl/ghc,sdiehl/ghc |
try:
from typing import *
import typing
except:
# The backwards compatibility stubs must live in another module lest
# mypy complains.
from typing_stubs import * # type: ignore
####################################################
# Backwards compatibility shims
#
# N.B. mypy appears to typecheck as though the "then" clause of if structures
# is taken. We exploit this below.
# TextIO is missing on some older Pythons.
- if 'TextIO' in globals():
+ if 'TextIO' not in globals():
+ try:
- TextIO = typing.TextIO
+ TextIO = typing.TextIO
+ except ImportError:
+ TextIO = None # type: ignore
else:
TextIO = None # type: ignore
####################################################
# Testsuite-specific types
WayName = NewType("WayName", str)
TestName = NewType("TestName", str)
OutputNormalizer = Callable[[str], str]
IssueNumber = NewType("IssueNumber", int)
# Used by perf_notes
GitHash = NewType("GitHash", str)
GitRef = NewType("GitRef", str)
TestEnv = NewType("TestEnv", str)
MetricName = NewType("MetricName", str) | Simplify Python <3.5 fallback for TextIO | ## Code Before:
# Pull in the standard ``typing`` module when the interpreter provides it;
# otherwise fall back to local stubs so the driver still imports.
try:
    from typing import *
    import typing
except:
    # The backwards compatibility stubs must live in another module lest
    # mypy complains.
    from typing_stubs import * # type: ignore
####################################################
# Backwards compatibility shims
#
# N.B. mypy appears to typecheck as though the "then" clause of if structures
# is taken. We exploit this below.
# TextIO is missing on some older Pythons.
if 'TextIO' in globals():
    TextIO = typing.TextIO
else:
    TextIO = None # type: ignore
####################################################
# Testsuite-specific types
#
# NewType wrappers are zero-cost at runtime but give mypy distinct types,
# so the different kinds of identifier strings cannot be mixed up.
WayName = NewType("WayName", str)
TestName = NewType("TestName", str)
OutputNormalizer = Callable[[str], str]
IssueNumber = NewType("IssueNumber", int)
# Used by perf_notes
GitHash = NewType("GitHash", str)
GitRef = NewType("GitRef", str)
TestEnv = NewType("TestEnv", str)
MetricName = NewType("MetricName", str)
## Instruction:
Simplify Python <3.5 fallback for TextIO
## Code After:
# Pull in the standard ``typing`` module when the interpreter provides it;
# otherwise fall back to local stubs so the driver still imports.
try:
    from typing import *
    import typing
except:
    # The backwards compatibility stubs must live in another module lest
    # mypy complains.
    from typing_stubs import * # type: ignore
####################################################
# Backwards compatibility shims
#
# N.B. mypy appears to typecheck as though the "then" clause of if structures
# is taken. We exploit this below.
# TextIO is missing on some older Pythons.
if 'TextIO' not in globals():
    try:
        TextIO = typing.TextIO
    except ImportError:
        TextIO = None # type: ignore
else:
    # NOTE(review): this branch rebinds an already-present TextIO to None;
    # presumably it only exists to satisfy mypy per the N.B. above - confirm.
    TextIO = None # type: ignore
####################################################
# Testsuite-specific types
#
# NewType wrappers are zero-cost at runtime but give mypy distinct types,
# so the different kinds of identifier strings cannot be mixed up.
WayName = NewType("WayName", str)
TestName = NewType("TestName", str)
OutputNormalizer = Callable[[str], str]
IssueNumber = NewType("IssueNumber", int)
# Used by perf_notes
GitHash = NewType("GitHash", str)
GitRef = NewType("GitRef", str)
TestEnv = NewType("TestEnv", str)
MetricName = NewType("MetricName", str)
try:
from typing import *
import typing
except:
# The backwards compatibility stubs must live in another module lest
# mypy complains.
from typing_stubs import * # type: ignore
####################################################
# Backwards compatibility shims
#
# N.B. mypy appears to typecheck as though the "then" clause of if structures
# is taken. We exploit this below.
# TextIO is missing on some older Pythons.
- if 'TextIO' in globals():
+ if 'TextIO' not in globals():
? ++++
+ try:
- TextIO = typing.TextIO
+ TextIO = typing.TextIO
? ++++
+ except ImportError:
+ TextIO = None # type: ignore
else:
TextIO = None # type: ignore
####################################################
# Testsuite-specific types
WayName = NewType("WayName", str)
TestName = NewType("TestName", str)
OutputNormalizer = Callable[[str], str]
IssueNumber = NewType("IssueNumber", int)
# Used by perf_notes
GitHash = NewType("GitHash", str)
GitRef = NewType("GitRef", str)
TestEnv = NewType("TestEnv", str)
MetricName = NewType("MetricName", str) |
d1be59a87fce8e20d698c4d1f6a272c21834a1c3 | providers/popularity/kickasstorrents.py | providers/popularity/kickasstorrents.py | from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 1
def get_popular(self):
names = []
for page in range(Provider.PAGES_TO_FETCH):
url = "https://kat.cr/usearch/category%%3Ahighres-movies/%s/" % page
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
| from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
PAGES_TO_FETCH = 3
def get_popular(self):
names = []
base = "https://kickasstorrents.to/highres-movies/"
# New mirrors can be found at https://thekickasstorrents.com/
for page in range(Provider.PAGES_TO_FETCH):
if page == 0:
url = base
else:
url = base + "%s/" % (page + 1)
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
| Fix Kickasstorrents by using one of many mirrors. | Fix Kickasstorrents by using one of many mirrors.
| Python | mit | EmilStenstrom/nephele | from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
- PAGES_TO_FETCH = 1
+ PAGES_TO_FETCH = 3
def get_popular(self):
names = []
+ base = "https://kickasstorrents.to/highres-movies/"
+ # New mirrors can be found at https://thekickasstorrents.com/
+
for page in range(Provider.PAGES_TO_FETCH):
- url = "https://kat.cr/usearch/category%%3Ahighres-movies/%s/" % page
+ if page == 0:
+ url = base
+ else:
+ url = base + "%s/" % (page + 1)
+
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies
| Fix Kickasstorrents by using one of many mirrors. | ## Code Before:
from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
    # Scrapes kat.cr's high-res movie listing for popular torrent names.
    PAGES_TO_FETCH = 1  # number of listing pages to scrape per run
    def get_popular(self):
        """Return movie records parsed from the scraped torrent names."""
        names = []
        for page in range(Provider.PAGES_TO_FETCH):
            # %% keeps a literal '%' ("%3A" is a URL-encoded ':') while
            # %s is substituted with the page number.
            url = "https://kat.cr/usearch/category%%3Ahighres-movies/%s/" % page
            names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
        movies = [torrent_to_movie(name) for name in names]
        # drop entries whose torrent names did not map cleanly to movies
        movies = remove_bad_torrent_matches(movies)
        return movies
## Instruction:
Fix Kickasstorrents by using one of many mirrors.
## Code After:
from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
    # Scrapes a Kickass Torrents mirror's high-res movie listing.
    PAGES_TO_FETCH = 3  # number of listing pages to scrape per run
    def get_popular(self):
        """Return movie records parsed from the scraped torrent names."""
        names = []
        base = "https://kickasstorrents.to/highres-movies/"
        # New mirrors can be found at https://thekickasstorrents.com/
        for page in range(Provider.PAGES_TO_FETCH):
            # page 1 lives at the bare listing URL; later pages append "2/", "3/", ...
            if page == 0:
                url = base
            else:
                url = base + "%s/" % (page + 1)
            names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
        movies = [torrent_to_movie(name) for name in names]
        # drop entries whose torrent names did not map cleanly to movies
        movies = remove_bad_torrent_matches(movies)
        return movies
| from providers.popularity.provider import PopularityProvider
from utils.torrent_util import torrent_to_movie, remove_bad_torrent_matches
IDENTIFIER = "kickasstorrents"
class Provider(PopularityProvider):
- PAGES_TO_FETCH = 1
? ^
+ PAGES_TO_FETCH = 3
? ^
def get_popular(self):
names = []
+ base = "https://kickasstorrents.to/highres-movies/"
+ # New mirrors can be found at https://thekickasstorrents.com/
+
for page in range(Provider.PAGES_TO_FETCH):
- url = "https://kat.cr/usearch/category%%3Ahighres-movies/%s/" % page
+ if page == 0:
+ url = base
+ else:
+ url = base + "%s/" % (page + 1)
+
names += self.parse_html(url, "#mainSearchTable .data .cellMainLink", cache=False)
movies = [torrent_to_movie(name) for name in names]
movies = remove_bad_torrent_matches(movies)
return movies |
935a44b454d83452e302130114c1f40d934bf2ed | setup.py | setup.py |
from distutils.core import setup
setup(name = "f.lux indicator applet",
version = "1.1.8",
description = "f.lux indicator applet - better lighting for your computer",
author = "Kilian Valkhof, Michael and Lorna Herf, Josh Winters",
author_email = "kilian@kilianvalkhof.com",
url = "http://www.stereopsis.com/flux/",
license = "MIT license",
package_dir = {'fluxgui' : 'src/fluxgui'},
packages = ["fluxgui",],
package_data = {"fluxgui" : ["*.glade"] },
data_files=[('share/icons/hicolor/scalable/apps', ['fluxgui.svg', 'fluxgui-light.svg', 'fluxgui-dark.svg']),
('share/applications', ['desktop/fluxgui.desktop']),
('bin', ['xflux']),],
scripts = ["fluxgui"],
long_description = """f.lux indicator applet is an indicator applet to
control xflux, an application that makes the color of your computer's
display adapt to the time of day, warm at nights and like sunlight during
the day""",
)
|
from distutils.core import setup
data_files=[('share/icons/hicolor/scalable/apps', ['fluxgui.svg', 'fluxgui-light.svg', 'fluxgui-dark.svg']),
('share/applications', ['desktop/fluxgui.desktop'])]
import os
if os.path.exists("xflux"):
data_files.append( ('bin', ['xflux']) )
setup(name = "f.lux indicator applet",
version = "1.1.8",
description = "f.lux indicator applet - better lighting for your computer",
author = "Kilian Valkhof, Michael and Lorna Herf, Josh Winters",
author_email = "kilian@kilianvalkhof.com",
url = "http://www.stereopsis.com/flux/",
license = "MIT license",
package_dir = {'fluxgui' : 'src/fluxgui'},
packages = ["fluxgui",],
package_data = {"fluxgui" : ["*.glade"] },
data_files = data_files,
scripts = ["fluxgui"],
long_description = """f.lux indicator applet is an indicator applet to
control xflux, an application that makes the color of your computer's
display adapt to the time of day, warm at nights and like sunlight during
the day""",
)
| Drop xflux binary from debian package | Drop xflux binary from debian package
| Python | mit | NHellFire/f.lux-indicator-applet,esmail/f.lux-indicator-applet |
from distutils.core import setup
+
+ data_files=[('share/icons/hicolor/scalable/apps', ['fluxgui.svg', 'fluxgui-light.svg', 'fluxgui-dark.svg']),
+ ('share/applications', ['desktop/fluxgui.desktop'])]
+
+ import os
+ if os.path.exists("xflux"):
+ data_files.append( ('bin', ['xflux']) )
setup(name = "f.lux indicator applet",
version = "1.1.8",
description = "f.lux indicator applet - better lighting for your computer",
author = "Kilian Valkhof, Michael and Lorna Herf, Josh Winters",
author_email = "kilian@kilianvalkhof.com",
url = "http://www.stereopsis.com/flux/",
license = "MIT license",
package_dir = {'fluxgui' : 'src/fluxgui'},
packages = ["fluxgui",],
package_data = {"fluxgui" : ["*.glade"] },
+ data_files = data_files,
- data_files=[('share/icons/hicolor/scalable/apps', ['fluxgui.svg', 'fluxgui-light.svg', 'fluxgui-dark.svg']),
- ('share/applications', ['desktop/fluxgui.desktop']),
- ('bin', ['xflux']),],
scripts = ["fluxgui"],
long_description = """f.lux indicator applet is an indicator applet to
control xflux, an application that makes the color of your computer's
display adapt to the time of day, warm at nights and like sunlight during
the day""",
)
| Drop xflux binary from debian package | ## Code Before:
from distutils.core import setup
setup(name = "f.lux indicator applet",
version = "1.1.8",
description = "f.lux indicator applet - better lighting for your computer",
author = "Kilian Valkhof, Michael and Lorna Herf, Josh Winters",
author_email = "kilian@kilianvalkhof.com",
url = "http://www.stereopsis.com/flux/",
license = "MIT license",
package_dir = {'fluxgui' : 'src/fluxgui'},
packages = ["fluxgui",],
package_data = {"fluxgui" : ["*.glade"] },
data_files=[('share/icons/hicolor/scalable/apps', ['fluxgui.svg', 'fluxgui-light.svg', 'fluxgui-dark.svg']),
('share/applications', ['desktop/fluxgui.desktop']),
('bin', ['xflux']),],
scripts = ["fluxgui"],
long_description = """f.lux indicator applet is an indicator applet to
control xflux, an application that makes the color of your computer's
display adapt to the time of day, warm at nights and like sunlight during
the day""",
)
## Instruction:
Drop xflux binary from debian package
## Code After:
from distutils.core import setup
# Data files that are always installed: scalable icons and the .desktop launcher.
data_files=[('share/icons/hicolor/scalable/apps', ['fluxgui.svg', 'fluxgui-light.svg', 'fluxgui-dark.svg']),
            ('share/applications', ['desktop/fluxgui.desktop'])]
import os
# Only ship the prebuilt xflux binary when it exists in the source tree
# (it is deliberately absent from some builds, e.g. the Debian package).
if os.path.exists("xflux"):
    data_files.append( ('bin', ['xflux']) )
# distutils manifest for the f.lux indicator applet GUI.
setup(name = "f.lux indicator applet",
      version = "1.1.8",
      description = "f.lux indicator applet - better lighting for your computer",
      author = "Kilian Valkhof, Michael and Lorna Herf, Josh Winters",
      author_email = "kilian@kilianvalkhof.com",
      url = "http://www.stereopsis.com/flux/",
      license = "MIT license",
      package_dir = {'fluxgui' : 'src/fluxgui'},
      packages = ["fluxgui",],
      package_data = {"fluxgui" : ["*.glade"] },
      data_files = data_files,
      scripts = ["fluxgui"],
      long_description = """f.lux indicator applet is an indicator applet to
      control xflux, an application that makes the color of your computer's
      display adapt to the time of day, warm at nights and like sunlight during
      the day""",
      )
|
from distutils.core import setup
+
+ data_files=[('share/icons/hicolor/scalable/apps', ['fluxgui.svg', 'fluxgui-light.svg', 'fluxgui-dark.svg']),
+ ('share/applications', ['desktop/fluxgui.desktop'])]
+
+ import os
+ if os.path.exists("xflux"):
+ data_files.append( ('bin', ['xflux']) )
setup(name = "f.lux indicator applet",
version = "1.1.8",
description = "f.lux indicator applet - better lighting for your computer",
author = "Kilian Valkhof, Michael and Lorna Herf, Josh Winters",
author_email = "kilian@kilianvalkhof.com",
url = "http://www.stereopsis.com/flux/",
license = "MIT license",
package_dir = {'fluxgui' : 'src/fluxgui'},
packages = ["fluxgui",],
package_data = {"fluxgui" : ["*.glade"] },
+ data_files = data_files,
- data_files=[('share/icons/hicolor/scalable/apps', ['fluxgui.svg', 'fluxgui-light.svg', 'fluxgui-dark.svg']),
- ('share/applications', ['desktop/fluxgui.desktop']),
- ('bin', ['xflux']),],
scripts = ["fluxgui"],
long_description = """f.lux indicator applet is an indicator applet to
control xflux, an application that makes the color of your computer's
display adapt to the time of day, warm at nights and like sunlight during
the day""",
)
|
8521d53b20e8f4874a82188b792dd1d4d0ecf419 | djangae/db/backends/appengine/parsers/version_18.py | djangae/db/backends/appengine/parsers/version_18.py | from django.db.models.expressions import Star
from django.db.models.sql.datastructures import EmptyResultSet
from .version_19 import Parser as BaseParser
class Parser(BaseParser):
def _prepare_for_transformation(self):
from django.db.models.sql.where import EmptyWhere
if isinstance(self.django_query.where, EmptyWhere):
# Empty where means return nothing!
raise EmptyResultSet()
def _determine_query_kind(self):
query = self.django_query
if query.annotations:
if "__count" in query.annotations:
field = query.annotations["__count"].input_field
if isinstance(field, Star) or field.value == "*":
return "COUNT"
return "SELECT"
| from django.db.models.expressions import Star
from django.db.models.sql.datastructures import EmptyResultSet
from django.db.models.sql.where import SubqueryConstraint
from .version_19 import Parser as BaseParser
class Parser(BaseParser):
def _prepare_for_transformation(self):
from django.db.models.sql.where import EmptyWhere
if isinstance(self.django_query.where, EmptyWhere):
# Empty where means return nothing!
raise EmptyResultSet()
def _where_node_leaf_callback(self, node, negated, new_parent, connection, model, compiler):
if not isinstance(node, SubqueryConstraint):
# Only do this test if it's not a subquery, the parent method deals with that
if not hasattr(node, "lhs") and not hasattr(node, "rhs"):
# Empty Q() object - basically an empty where node that needs nothing doing to it
return
return super(Parser, self)._where_node_leaf_callback(node, negated, new_parent, connection, model, compiler)
def _determine_query_kind(self):
query = self.django_query
if query.annotations:
if "__count" in query.annotations:
field = query.annotations["__count"].input_field
if isinstance(field, Star) or field.value == "*":
return "COUNT"
return "SELECT"
| Fix empty Q() filters on Django 1.8 | Fix empty Q() filters on Django 1.8
| Python | bsd-3-clause | grzes/djangae,grzes/djangae,potatolondon/djangae,grzes/djangae,potatolondon/djangae | from django.db.models.expressions import Star
from django.db.models.sql.datastructures import EmptyResultSet
+ from django.db.models.sql.where import SubqueryConstraint
from .version_19 import Parser as BaseParser
class Parser(BaseParser):
def _prepare_for_transformation(self):
from django.db.models.sql.where import EmptyWhere
if isinstance(self.django_query.where, EmptyWhere):
# Empty where means return nothing!
raise EmptyResultSet()
+ def _where_node_leaf_callback(self, node, negated, new_parent, connection, model, compiler):
+ if not isinstance(node, SubqueryConstraint):
+ # Only do this test if it's not a subquery, the parent method deals with that
+ if not hasattr(node, "lhs") and not hasattr(node, "rhs"):
+ # Empty Q() object - basically an empty where node that needs nothing doing to it
+ return
+
+ return super(Parser, self)._where_node_leaf_callback(node, negated, new_parent, connection, model, compiler)
+
def _determine_query_kind(self):
query = self.django_query
if query.annotations:
if "__count" in query.annotations:
field = query.annotations["__count"].input_field
if isinstance(field, Star) or field.value == "*":
return "COUNT"
return "SELECT"
| Fix empty Q() filters on Django 1.8 | ## Code Before:
from django.db.models.expressions import Star
from django.db.models.sql.datastructures import EmptyResultSet
from .version_19 import Parser as BaseParser
class Parser(BaseParser):
def _prepare_for_transformation(self):
from django.db.models.sql.where import EmptyWhere
if isinstance(self.django_query.where, EmptyWhere):
# Empty where means return nothing!
raise EmptyResultSet()
def _determine_query_kind(self):
query = self.django_query
if query.annotations:
if "__count" in query.annotations:
field = query.annotations["__count"].input_field
if isinstance(field, Star) or field.value == "*":
return "COUNT"
return "SELECT"
## Instruction:
Fix empty Q() filters on Django 1.8
## Code After:
from django.db.models.expressions import Star
from django.db.models.sql.datastructures import EmptyResultSet
from django.db.models.sql.where import SubqueryConstraint
from .version_19 import Parser as BaseParser
class Parser(BaseParser):
def _prepare_for_transformation(self):
from django.db.models.sql.where import EmptyWhere
if isinstance(self.django_query.where, EmptyWhere):
# Empty where means return nothing!
raise EmptyResultSet()
def _where_node_leaf_callback(self, node, negated, new_parent, connection, model, compiler):
if not isinstance(node, SubqueryConstraint):
# Only do this test if it's not a subquery, the parent method deals with that
if not hasattr(node, "lhs") and not hasattr(node, "rhs"):
# Empty Q() object - basically an empty where node that needs nothing doing to it
return
return super(Parser, self)._where_node_leaf_callback(node, negated, new_parent, connection, model, compiler)
def _determine_query_kind(self):
query = self.django_query
if query.annotations:
if "__count" in query.annotations:
field = query.annotations["__count"].input_field
if isinstance(field, Star) or field.value == "*":
return "COUNT"
return "SELECT"
| from django.db.models.expressions import Star
from django.db.models.sql.datastructures import EmptyResultSet
+ from django.db.models.sql.where import SubqueryConstraint
from .version_19 import Parser as BaseParser
class Parser(BaseParser):
def _prepare_for_transformation(self):
from django.db.models.sql.where import EmptyWhere
if isinstance(self.django_query.where, EmptyWhere):
# Empty where means return nothing!
raise EmptyResultSet()
+ def _where_node_leaf_callback(self, node, negated, new_parent, connection, model, compiler):
+ if not isinstance(node, SubqueryConstraint):
+ # Only do this test if it's not a subquery, the parent method deals with that
+ if not hasattr(node, "lhs") and not hasattr(node, "rhs"):
+ # Empty Q() object - basically an empty where node that needs nothing doing to it
+ return
+
+ return super(Parser, self)._where_node_leaf_callback(node, negated, new_parent, connection, model, compiler)
+
def _determine_query_kind(self):
query = self.django_query
if query.annotations:
if "__count" in query.annotations:
field = query.annotations["__count"].input_field
if isinstance(field, Star) or field.value == "*":
return "COUNT"
return "SELECT" |
7c2a75906338e0670d0f75b4e06fc9ae775f3142 | custom/opm/migrations/0001_drop_old_fluff_tables.py | custom/opm/migrations/0001_drop_old_fluff_tables.py | from __future__ import unicode_literals
import logging
from sqlalchemy import Table, MetaData
from corehq.db import connection_manager
from django.db import migrations
def drop_tables(apps, schema_editor):
# show SQL commands
logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
metadata = MetaData(bind=connection_manager.get_engine())
for table_name in [
'fluff_OPMHierarchyFluff',
'fluff_OpmCaseFluff',
'fluff_OpmFormFluff',
'fluff_OpmHealthStatusAllInfoFluff',
'fluff_VhndAvailabilityFluff',
]:
Table(table_name, metadata).drop(checkfirst=True)
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.RunPython(drop_tables),
]
| from __future__ import unicode_literals
import logging
from sqlalchemy import Table, MetaData
from corehq.db import connection_manager
from corehq.util.decorators import change_log_level
from django.db import migrations
@change_log_level('sqlalchemy.engine', logging.INFO) # show SQL commands
def drop_tables(apps, schema_editor):
metadata = MetaData(bind=connection_manager.get_engine())
for table_name in [
'fluff_OPMHierarchyFluff',
'fluff_OpmCaseFluff',
'fluff_OpmFormFluff',
'fluff_OpmHealthStatusAllInfoFluff',
'fluff_VhndAvailabilityFluff',
]:
Table(table_name, metadata).drop(checkfirst=True)
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.RunPython(drop_tables),
]
| Make sure log level gets reset afterwards | Make sure log level gets reset afterwards
| Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq | from __future__ import unicode_literals
import logging
from sqlalchemy import Table, MetaData
from corehq.db import connection_manager
+ from corehq.util.decorators import change_log_level
from django.db import migrations
+ @change_log_level('sqlalchemy.engine', logging.INFO) # show SQL commands
def drop_tables(apps, schema_editor):
- # show SQL commands
- logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
-
metadata = MetaData(bind=connection_manager.get_engine())
for table_name in [
'fluff_OPMHierarchyFluff',
'fluff_OpmCaseFluff',
'fluff_OpmFormFluff',
'fluff_OpmHealthStatusAllInfoFluff',
'fluff_VhndAvailabilityFluff',
]:
Table(table_name, metadata).drop(checkfirst=True)
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.RunPython(drop_tables),
]
| Make sure log level gets reset afterwards | ## Code Before:
from __future__ import unicode_literals
import logging
from sqlalchemy import Table, MetaData
from corehq.db import connection_manager
from django.db import migrations
def drop_tables(apps, schema_editor):
    """Drop the obsolete OPM fluff tables, ignoring any that are absent."""
    # show SQL commands
    # NOTE(review): the raised log level is never restored afterwards,
    # so SQL echoing stays on for the rest of the process.
    logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
    metadata = MetaData(bind=connection_manager.get_engine())
    for table_name in [
        'fluff_OPMHierarchyFluff',
        'fluff_OpmCaseFluff',
        'fluff_OpmFormFluff',
        'fluff_OpmHealthStatusAllInfoFluff',
        'fluff_VhndAvailabilityFluff',
    ]:
        # checkfirst=True turns the DROP into a no-op if the table is gone
        Table(table_name, metadata).drop(checkfirst=True)
class Migration(migrations.Migration):
    # Initial (0001) migration for this app, so there are no dependencies.
    dependencies = [
    ]
    operations = [
        # Runs the raw-SQLAlchemy table drops defined above.
        migrations.RunPython(drop_tables),
    ]
## Instruction:
Make sure log level gets reset afterwards
## Code After:
from __future__ import unicode_literals
import logging
from sqlalchemy import Table, MetaData
from corehq.db import connection_manager
from corehq.util.decorators import change_log_level
from django.db import migrations
@change_log_level('sqlalchemy.engine', logging.INFO) # show SQL commands
def drop_tables(apps, schema_editor):
    """Drop the obsolete OPM fluff tables, ignoring any that are absent.

    The decorator raises the SQLAlchemy engine log level only for the
    duration of this call, so SQL echoing does not leak past the migration.
    """
    metadata = MetaData(bind=connection_manager.get_engine())
    for table_name in [
        'fluff_OPMHierarchyFluff',
        'fluff_OpmCaseFluff',
        'fluff_OpmFormFluff',
        'fluff_OpmHealthStatusAllInfoFluff',
        'fluff_VhndAvailabilityFluff',
    ]:
        # checkfirst=True turns the DROP into a no-op if the table is gone
        Table(table_name, metadata).drop(checkfirst=True)
class Migration(migrations.Migration):
    # Initial (0001) migration for this app, so there are no dependencies.
    dependencies = [
    ]
    operations = [
        # Runs the raw-SQLAlchemy table drops defined above.
        migrations.RunPython(drop_tables),
    ]
| from __future__ import unicode_literals
import logging
from sqlalchemy import Table, MetaData
from corehq.db import connection_manager
+ from corehq.util.decorators import change_log_level
from django.db import migrations
+ @change_log_level('sqlalchemy.engine', logging.INFO) # show SQL commands
def drop_tables(apps, schema_editor):
- # show SQL commands
- logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
-
metadata = MetaData(bind=connection_manager.get_engine())
for table_name in [
'fluff_OPMHierarchyFluff',
'fluff_OpmCaseFluff',
'fluff_OpmFormFluff',
'fluff_OpmHealthStatusAllInfoFluff',
'fluff_VhndAvailabilityFluff',
]:
Table(table_name, metadata).drop(checkfirst=True)
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.RunPython(drop_tables),
] |
09c24ac93b6e697b48c52b614fe92f7978fe2320 | linter.py | linter.py |
from SublimeLinter.lint import Linter
import sublime
class Iverilog(Linter):
# http://www.sublimelinter.com/en/stable/linter_attributes.html
name = "iverilog"
cmd = "iverilog ${args}"
tempfile_suffix = "verilog"
multiline = True
on_stderr = None
# fmt: off
defaults = {
"selector": "source.verilog | source.systemverilog",
"-t": "null",
"-g": 2012,
"-I +": [],
"-y +": [],
}
# fmt: on
# there is a ":" in the filepath under Windows like C:\DIR\FILE
if sublime.platform() == "windows":
filepath_regex = r"[^:]+:[^:]+"
else:
filepath_regex = r"[^:]+"
# what kind of messages should be caught?
regex = (
r"(?P<file>{0}):(?P<line>\d+):\s*"
r"(?:(?:(?P<warning>warning)|(?P<error>error)):)?\s*"
r"(?P<message>.*)".format(filepath_regex)
)
|
from SublimeLinter.lint import Linter
import sublime
class Iverilog(Linter):
# http://www.sublimelinter.com/en/stable/linter_attributes.html
name = "iverilog"
cmd = "iverilog ${args}"
tempfile_suffix = "verilog"
multiline = True
on_stderr = None
# fmt: off
defaults = {
"selector": "source.verilog | source.systemverilog",
# @see https://iverilog.fandom.com/wiki/Iverilog_Flags
"-t": "null",
"-g": 2012,
"-I +": [],
"-y +": [],
}
# fmt: on
# there is a ":" in the filepath under Windows like C:\DIR\FILE
if sublime.platform() == "windows":
filepath_regex = r"[^:]+:[^:]+"
else:
filepath_regex = r"[^:]+"
# what kind of messages should be caught?
regex = (
r"(?P<file>{0}):(?P<line>\d+):\s*"
r"(?:(?:(?P<warning>warning)|(?P<error>error)):)?\s*"
r"(?P<message>.*)".format(filepath_regex)
)
| Add iverilog flags reference URL | Add iverilog flags reference URL
Signed-off-by: Jack Cherng <159f0f32a62cc912ca55f89bb5e06807cf019bc7@gmail.com>
| Python | mit | jfcherng/SublimeLinter-contrib-iverilog,jfcherng/SublimeLinter-contrib-iverilog |
from SublimeLinter.lint import Linter
import sublime
class Iverilog(Linter):
# http://www.sublimelinter.com/en/stable/linter_attributes.html
name = "iverilog"
cmd = "iverilog ${args}"
tempfile_suffix = "verilog"
multiline = True
on_stderr = None
# fmt: off
defaults = {
"selector": "source.verilog | source.systemverilog",
+ # @see https://iverilog.fandom.com/wiki/Iverilog_Flags
"-t": "null",
"-g": 2012,
"-I +": [],
"-y +": [],
}
# fmt: on
# there is a ":" in the filepath under Windows like C:\DIR\FILE
if sublime.platform() == "windows":
filepath_regex = r"[^:]+:[^:]+"
else:
filepath_regex = r"[^:]+"
# what kind of messages should be caught?
regex = (
r"(?P<file>{0}):(?P<line>\d+):\s*"
r"(?:(?:(?P<warning>warning)|(?P<error>error)):)?\s*"
r"(?P<message>.*)".format(filepath_regex)
)
| Add iverilog flags reference URL | ## Code Before:
from SublimeLinter.lint import Linter
import sublime
class Iverilog(Linter):
# http://www.sublimelinter.com/en/stable/linter_attributes.html
name = "iverilog"
cmd = "iverilog ${args}"
tempfile_suffix = "verilog"
multiline = True
on_stderr = None
# fmt: off
defaults = {
"selector": "source.verilog | source.systemverilog",
"-t": "null",
"-g": 2012,
"-I +": [],
"-y +": [],
}
# fmt: on
# there is a ":" in the filepath under Windows like C:\DIR\FILE
if sublime.platform() == "windows":
filepath_regex = r"[^:]+:[^:]+"
else:
filepath_regex = r"[^:]+"
# what kind of messages should be caught?
regex = (
r"(?P<file>{0}):(?P<line>\d+):\s*"
r"(?:(?:(?P<warning>warning)|(?P<error>error)):)?\s*"
r"(?P<message>.*)".format(filepath_regex)
)
## Instruction:
Add iverilog flags reference URL
## Code After:
from SublimeLinter.lint import Linter
import sublime
class Iverilog(Linter):
# http://www.sublimelinter.com/en/stable/linter_attributes.html
name = "iverilog"
cmd = "iverilog ${args}"
tempfile_suffix = "verilog"
multiline = True
on_stderr = None
# fmt: off
defaults = {
"selector": "source.verilog | source.systemverilog",
# @see https://iverilog.fandom.com/wiki/Iverilog_Flags
"-t": "null",
"-g": 2012,
"-I +": [],
"-y +": [],
}
# fmt: on
# there is a ":" in the filepath under Windows like C:\DIR\FILE
if sublime.platform() == "windows":
filepath_regex = r"[^:]+:[^:]+"
else:
filepath_regex = r"[^:]+"
# what kind of messages should be caught?
regex = (
r"(?P<file>{0}):(?P<line>\d+):\s*"
r"(?:(?:(?P<warning>warning)|(?P<error>error)):)?\s*"
r"(?P<message>.*)".format(filepath_regex)
)
|
from SublimeLinter.lint import Linter
import sublime
class Iverilog(Linter):
# http://www.sublimelinter.com/en/stable/linter_attributes.html
name = "iverilog"
cmd = "iverilog ${args}"
tempfile_suffix = "verilog"
multiline = True
on_stderr = None
# fmt: off
defaults = {
"selector": "source.verilog | source.systemverilog",
+ # @see https://iverilog.fandom.com/wiki/Iverilog_Flags
"-t": "null",
"-g": 2012,
"-I +": [],
"-y +": [],
}
# fmt: on
# there is a ":" in the filepath under Windows like C:\DIR\FILE
if sublime.platform() == "windows":
filepath_regex = r"[^:]+:[^:]+"
else:
filepath_regex = r"[^:]+"
# what kind of messages should be caught?
regex = (
r"(?P<file>{0}):(?P<line>\d+):\s*"
r"(?:(?:(?P<warning>warning)|(?P<error>error)):)?\s*"
r"(?P<message>.*)".format(filepath_regex)
) |
aff77b144c1a1895c9e8c0ca2d4e79451525901c | terminus/models/trunk.py | terminus/models/trunk.py |
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
self.add_lane(-2)
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self)
|
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
self.add_lane(-2, reversed=True)
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self)
| Make Trunks have opposite directions in the included lanes | Make Trunks have opposite directions in the included lanes
| Python | apache-2.0 | ekumenlabs/terminus,ekumenlabs/terminus |
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
- self.add_lane(-2)
+ self.add_lane(-2, reversed=True)
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self)
| Make Trunks have opposite directions in the included lanes | ## Code Before:
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
self.add_lane(-2)
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self)
## Instruction:
Make Trunks have opposite directions in the included lanes
## Code After:
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
self.add_lane(-2, reversed=True)
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self)
|
from road import Road
class Trunk(Road):
def __init__(self, name=None):
super(Trunk, self).__init__(name)
self.add_lane(2)
- self.add_lane(-2)
+ self.add_lane(-2, reversed=True)
? +++++++++++++++
def accept(self, generator):
generator.start_trunk(self)
for lane in self.lanes():
lane.accept(generator)
generator.end_trunk(self) |
355a3a34b9a264734c1f5f2ec365a5873f000b77 | open_skin_as_project.py | open_skin_as_project.py | import os
import subprocess
import sublime
import sublime_plugin
from .path.skin_path_provider import get_cached_skin_path
class RainmeterOpenSkinAsProjectCommand(sublime_plugin.ApplicationCommand):
def run(self):
skins_path = get_cached_skin_path()
skins = os.listdir(skins_path)
sublime.active_window().show_quick_panel(skins, self.on_skin_selected, 0, 0, None)
def on_skin_selected(self, selected_skin_id):
skins_path = get_cached_skin_path()
skins = os.listdir(skins_path)
selected_skin = skins[selected_skin_id]
selected_skin_path = os.path.join(skins_path, selected_skin)
# to open a folder in new window, just create a new process with the folder as argument
st_path = sublime.executable_path()
subprocess.Popen([
st_path,
selected_skin_path
])
| import os
import subprocess
import sublime
import sublime_plugin
from .path.skin_path_provider import get_cached_skin_path
class RainmeterOpenSkinAsProjectCommand(sublime_plugin.ApplicationCommand):
def run(self):
skins_path = get_cached_skin_path()
skins = os.listdir(skins_path)
sublime.active_window().show_quick_panel(skins, self.on_skin_selected, 0, 0, None)
def on_skin_selected(self, selected_skin_id):
if selected_skin_id == -1:
return
skins_path = get_cached_skin_path()
skins = os.listdir(skins_path)
selected_skin = skins[selected_skin_id]
selected_skin_path = os.path.join(skins_path, selected_skin)
# to open a folder in new window, just create a new process with the folder as argument
st_path = sublime.executable_path()
subprocess.Popen([
st_path,
selected_skin_path
])
| Handle in case user cancels open skin as project command | Handle in case user cancels open skin as project command
| Python | mit | thatsIch/sublime-rainmeter | import os
import subprocess
import sublime
import sublime_plugin
from .path.skin_path_provider import get_cached_skin_path
class RainmeterOpenSkinAsProjectCommand(sublime_plugin.ApplicationCommand):
def run(self):
skins_path = get_cached_skin_path()
skins = os.listdir(skins_path)
sublime.active_window().show_quick_panel(skins, self.on_skin_selected, 0, 0, None)
def on_skin_selected(self, selected_skin_id):
+ if selected_skin_id == -1:
+ return
+
skins_path = get_cached_skin_path()
skins = os.listdir(skins_path)
selected_skin = skins[selected_skin_id]
selected_skin_path = os.path.join(skins_path, selected_skin)
# to open a folder in new window, just create a new process with the folder as argument
st_path = sublime.executable_path()
subprocess.Popen([
st_path,
selected_skin_path
])
| Handle in case user cancels open skin as project command | ## Code Before:
import os
import subprocess
import sublime
import sublime_plugin
from .path.skin_path_provider import get_cached_skin_path
class RainmeterOpenSkinAsProjectCommand(sublime_plugin.ApplicationCommand):
def run(self):
skins_path = get_cached_skin_path()
skins = os.listdir(skins_path)
sublime.active_window().show_quick_panel(skins, self.on_skin_selected, 0, 0, None)
def on_skin_selected(self, selected_skin_id):
skins_path = get_cached_skin_path()
skins = os.listdir(skins_path)
selected_skin = skins[selected_skin_id]
selected_skin_path = os.path.join(skins_path, selected_skin)
# to open a folder in new window, just create a new process with the folder as argument
st_path = sublime.executable_path()
subprocess.Popen([
st_path,
selected_skin_path
])
## Instruction:
Handle in case user cancels open skin as project command
## Code After:
import os
import subprocess
import sublime
import sublime_plugin
from .path.skin_path_provider import get_cached_skin_path
class RainmeterOpenSkinAsProjectCommand(sublime_plugin.ApplicationCommand):
def run(self):
skins_path = get_cached_skin_path()
skins = os.listdir(skins_path)
sublime.active_window().show_quick_panel(skins, self.on_skin_selected, 0, 0, None)
def on_skin_selected(self, selected_skin_id):
if selected_skin_id == -1:
return
skins_path = get_cached_skin_path()
skins = os.listdir(skins_path)
selected_skin = skins[selected_skin_id]
selected_skin_path = os.path.join(skins_path, selected_skin)
# to open a folder in new window, just create a new process with the folder as argument
st_path = sublime.executable_path()
subprocess.Popen([
st_path,
selected_skin_path
])
| import os
import subprocess
import sublime
import sublime_plugin
from .path.skin_path_provider import get_cached_skin_path
class RainmeterOpenSkinAsProjectCommand(sublime_plugin.ApplicationCommand):
def run(self):
skins_path = get_cached_skin_path()
skins = os.listdir(skins_path)
sublime.active_window().show_quick_panel(skins, self.on_skin_selected, 0, 0, None)
def on_skin_selected(self, selected_skin_id):
+ if selected_skin_id == -1:
+ return
+
skins_path = get_cached_skin_path()
skins = os.listdir(skins_path)
selected_skin = skins[selected_skin_id]
selected_skin_path = os.path.join(skins_path, selected_skin)
# to open a folder in new window, just create a new process with the folder as argument
st_path = sublime.executable_path()
subprocess.Popen([
st_path,
selected_skin_path
]) |
ab7a8335bae22bae6f729fc9805810c0c8925703 | isitbullshit/__init__.py | isitbullshit/__init__.py |
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
|
__author__ = "Sergey Arkhipov <serge@aerialsounds.org>"
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
| Add myself to the module | Add myself to the module
| Python | mit | 9seconds/isitbullshit |
+ __author__ = "Sergey Arkhipov <serge@aerialsounds.org>"
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
| Add myself to the module | ## Code Before:
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
## Instruction:
Add myself to the module
## Code After:
__author__ = "Sergey Arkhipov <serge@aerialsounds.org>"
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin
|
+ __author__ = "Sergey Arkhipov <serge@aerialsounds.org>"
__version__ = 0, 1, 1
from .core import isitbullshit, raise_for_problem, WHATEVER # NOQA
from .exceptions import ItIsBullshitError # NOQA
from .testcase_mixin import IsItBullshitMixin # NOQA
# silence for pyflakes
assert isitbullshit
assert raise_for_problem
assert WHATEVER
assert ItIsBullshitError
assert IsItBullshitMixin |
255ef7b16258c67586d14e6c8d8d531a3553cd3e | bot/games/tests/test_game_queryset.py | bot/games/tests/test_game_queryset.py | from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
| from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
def test_get_by_name_distinct(self):
bf1 = Game.objects.create(name='Battlefield 1')
Game.objects.create(name='Battlefield™ 1 Open Beta', alias_for=bf1)
Game.objects.create(name='Battlefield™ 1', alias_for=bf1)
game = Game.objects.get_by_name('Battlefield 1')
self.assertEqual(bf1, game)
| Add extra test for regression | Add extra test for regression
| Python | mit | sergei-maertens/discord-bot,sergei-maertens/discord-bot,sergei-maertens/discord-bot | from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
+ def test_get_by_name_distinct(self):
+
+ bf1 = Game.objects.create(name='Battlefield 1')
+ Game.objects.create(name='Battlefield™ 1 Open Beta', alias_for=bf1)
+ Game.objects.create(name='Battlefield™ 1', alias_for=bf1)
+
+ game = Game.objects.get_by_name('Battlefield 1')
+ self.assertEqual(bf1, game)
+ | Add extra test for regression | ## Code Before:
from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
## Instruction:
Add extra test for regression
## Code After:
from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
def test_get_by_name_distinct(self):
bf1 = Game.objects.create(name='Battlefield 1')
Game.objects.create(name='Battlefield™ 1 Open Beta', alias_for=bf1)
Game.objects.create(name='Battlefield™ 1', alias_for=bf1)
game = Game.objects.get_by_name('Battlefield 1')
self.assertEqual(bf1, game)
| from django.test import TestCase
from ..models import Game
class QuerySetTests(TestCase):
def test_get_by_name(self):
gta_v = Game.objects.create(name='GTA V')
Game.objects.create(name='Grand Theft Auto V', alias_for=gta_v)
game = Game.objects.get_by_name('gta V')
self.assertEqual(game, gta_v)
game2 = Game.objects.get_by_name('Grand Theft Auto V')
self.assertEqual(game2, gta_v)
# non-existing game should be created
overwatch = Game.objects.get_by_name('Overwatch')
self.assertIsNotNone(overwatch.pk)
+
+ def test_get_by_name_distinct(self):
+
+ bf1 = Game.objects.create(name='Battlefield 1')
+ Game.objects.create(name='Battlefield™ 1 Open Beta', alias_for=bf1)
+ Game.objects.create(name='Battlefield™ 1', alias_for=bf1)
+
+ game = Game.objects.get_by_name('Battlefield 1')
+ self.assertEqual(bf1, game) |
221d672368f8989508aaf5b36f6a4f9f5bd5425a | winthrop/books/migrations/0008_add-digital-edition.py | winthrop/books/migrations/0008_add-digital-edition.py | from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('djiffy', '0002_add-digital-edition'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
field=models.ForeignKey(blank=True, null=True, default=None, help_text='Digitized edition of this book, if available', on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
preserve_default=False,
),
]
| from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('djiffy', '0001_initial'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
field=models.ForeignKey(blank=True, help_text='Digitized edition of this book, if available', null=True, on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
),
]
| Fix migration so it works with actual existing djiffy migrations | Fix migration so it works with actual existing djiffy migrations
| Python | apache-2.0 | Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django | from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
- ('djiffy', '0002_add-digital-edition'),
+ ('djiffy', '0001_initial'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
- field=models.ForeignKey(blank=True, null=True, default=None, help_text='Digitized edition of this book, if available', on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
+ field=models.ForeignKey(blank=True, help_text='Digitized edition of this book, if available', null=True, on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
- preserve_default=False,
),
]
| Fix migration so it works with actual existing djiffy migrations | ## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('djiffy', '0002_add-digital-edition'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
field=models.ForeignKey(blank=True, null=True, default=None, help_text='Digitized edition of this book, if available', on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
preserve_default=False,
),
]
## Instruction:
Fix migration so it works with actual existing djiffy migrations
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('djiffy', '0001_initial'),
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
field=models.ForeignKey(blank=True, help_text='Digitized edition of this book, if available', null=True, on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
),
]
| from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
- ('djiffy', '0002_add-digital-edition'),
? ^ ----- ^^^^^^^^ ^^
+ ('djiffy', '0001_initial'),
? ^ ^ ^^
('books', '0007_title-length'),
]
operations = [
migrations.AlterModelOptions(
name='book',
options={'ordering': ['title']},
),
migrations.AddField(
model_name='book',
name='digital_edition',
- field=models.ForeignKey(blank=True, null=True, default=None, help_text='Digitized edition of this book, if available', on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
? -------------------------
+ field=models.ForeignKey(blank=True, help_text='Digitized edition of this book, if available', null=True, on_delete=django.db.models.deletion.CASCADE, to='djiffy.Manifest'),
? +++++++++++
- preserve_default=False,
),
] |
3a37211f09c000f0fcb41ca076cb98b90bfae030 | eb_sqs/urls.py | eb_sqs/urls.py | from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from eb_sqs.views import process_task
app_name = 'eb_sqs'
urlpatterns = [
url(r'^process$', process_task, name='process_task'),
]
| from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from eb_sqs.views import process_task
urlpatterns = [
url(r'^process$', process_task, name='process_task'),
]
| Remove unnecessary global variable assignment | Remove unnecessary global variable assignment
| Python | mit | cuda-networks/django-eb-sqs,cuda-networks/django-eb-sqs | from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from eb_sqs.views import process_task
- app_name = 'eb_sqs'
urlpatterns = [
url(r'^process$', process_task, name='process_task'),
]
| Remove unnecessary global variable assignment | ## Code Before:
from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from eb_sqs.views import process_task
app_name = 'eb_sqs'
urlpatterns = [
url(r'^process$', process_task, name='process_task'),
]
## Instruction:
Remove unnecessary global variable assignment
## Code After:
from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from eb_sqs.views import process_task
urlpatterns = [
url(r'^process$', process_task, name='process_task'),
]
| from __future__ import absolute_import, unicode_literals
from django.conf.urls import url
from eb_sqs.views import process_task
- app_name = 'eb_sqs'
urlpatterns = [
url(r'^process$', process_task, name='process_task'),
] |
b740490e49b775809cb99b4cf30e3b7cf259d8f6 | superdesk/io/__init__.py | superdesk/io/__init__.py | """Superdesk IO"""
from abc import ABCMeta, abstractmethod
import superdesk
import logging
from superdesk.celery_app import celery
providers = {}
allowed_providers = []
logger = logging.getLogger(__name__)
from .commands.update_ingest import UpdateIngest
from .commands.add_provider import AddProvider # NOQA
def init_app(app):
from .ingest_provider_model import IngestProviderResource
from superdesk.services import BaseService
import superdesk
endpoint_name = 'ingest_providers'
service = BaseService(endpoint_name, backend=superdesk.get_backend())
IngestProviderResource(endpoint_name, app=app, service=service)
def register_provider(type, provider):
providers[type] = provider
allowed_providers.append(type)
superdesk.privilege(name='ingest_providers', label='Ingest Channels', description='User can maintain Ingest Channels.')
@celery.task()
def fetch_ingest():
UpdateIngest().run()
class Parser:
"""
Parent Class for all types of Parsers like News ML 1.2, News ML G2, NITF,...
"""
__metaclass__ = ABCMeta
@abstractmethod
def parse_message(self, xml_doc):
"""
Parses the ingest XML and extracts the relevant elements/attributes values from the XML.
Sub-classes must override.
"""
| """Superdesk IO"""
from abc import ABCMeta, abstractmethod
import superdesk
import logging
from superdesk.celery_app import celery
providers = {}
allowed_providers = []
logger = logging.getLogger(__name__)
from .commands.remove_expired_content import RemoveExpiredContent
from .commands.update_ingest import UpdateIngest
from .commands.add_provider import AddProvider # NOQA
def init_app(app):
from .ingest_provider_model import IngestProviderResource
from superdesk.services import BaseService
import superdesk
endpoint_name = 'ingest_providers'
service = BaseService(endpoint_name, backend=superdesk.get_backend())
IngestProviderResource(endpoint_name, app=app, service=service)
def register_provider(type, provider):
providers[type] = provider
allowed_providers.append(type)
superdesk.privilege(name='ingest_providers', label='Ingest Channels', description='User can maintain Ingest Channels.')
@celery.task()
def fetch_ingest():
RemoveExpiredContent().run()
UpdateIngest().run()
class Parser:
"""
Parent Class for all types of Parsers like News ML 1.2, News ML G2, NITF,...
"""
__metaclass__ = ABCMeta
@abstractmethod
def parse_message(self, xml_doc):
"""
Parses the ingest XML and extracts the relevant elements/attributes values from the XML.
Sub-classes must override.
"""
| Revert "fix(ingest) - disable expired content removal" | Revert "fix(ingest) - disable expired content removal"
This reverts commit 281e051344c9fe8e835941117e2d2068ecdabd87.
| Python | agpl-3.0 | mdhaman/superdesk,akintolga/superdesk-aap,ioanpocol/superdesk-ntb,marwoodandrew/superdesk,marwoodandrew/superdesk-aap,marwoodandrew/superdesk-aap,plamut/superdesk,darconny/superdesk,darconny/superdesk,superdesk/superdesk,amagdas/superdesk,hlmnrmr/superdesk,mdhaman/superdesk-aap,sivakuna-aap/superdesk,liveblog/superdesk,plamut/superdesk,marwoodandrew/superdesk,akintolga/superdesk-aap,marwoodandrew/superdesk,akintolga/superdesk-aap,plamut/superdesk,petrjasek/superdesk-ntb,amagdas/superdesk,Aca-jov/superdesk,superdesk/superdesk-ntb,petrjasek/superdesk,akintolga/superdesk,pavlovicnemanja92/superdesk,petrjasek/superdesk-ntb,Aca-jov/superdesk,gbbr/superdesk,liveblog/superdesk,sivakuna-aap/superdesk,ioanpocol/superdesk-ntb,superdesk/superdesk-ntb,liveblog/superdesk,verifiedpixel/superdesk,pavlovicnemanja/superdesk,petrjasek/superdesk,vied12/superdesk,mdhaman/superdesk-aap,marwoodandrew/superdesk,thnkloud9/superdesk,gbbr/superdesk,pavlovicnemanja92/superdesk,mugurrus/superdesk,ancafarcas/superdesk,superdesk/superdesk-aap,sivakuna-aap/superdesk,verifiedpixel/superdesk,superdesk/superdesk-aap,pavlovicnemanja/superdesk,fritzSF/superdesk,thnkloud9/superdesk,hlmnrmr/superdesk,sjunaid/superdesk,ioanpocol/superdesk-ntb,Aca-jov/superdesk,ioanpocol/superdesk,hlmnrmr/superdesk,liveblog/superdesk,gbbr/superdesk,verifiedpixel/superdesk,thnkloud9/superdesk,superdesk/superdesk,akintolga/superdesk-aap,petrjasek/superdesk-ntb,pavlovicnemanja/superdesk,akintolga/superdesk,ancafarcas/superdesk,marwoodandrew/superdesk-aap,sjunaid/superdesk,superdesk/superdesk,marwoodandrew/superdesk,akintolga/superdesk,ancafarcas/superdesk,vied12/superdesk,fritzSF/superdesk,fritzSF/superdesk,vied12/superdesk,petrjasek/superdesk-server,pavlovicnemanja/superdesk,mdhaman/superdesk,vied12/superdesk,amagdas/superdesk,mdhaman/superdesk,akintolga/superdesk,mugurrus/superdesk,pavlovicnemanja92/superdesk,verifiedpixel/superdesk,superdesk/superdesk-ntb,superdesk/superdesk,pavlovicnemanja92/superde
sk,pavlovicnemanja92/superdesk,mdhaman/superdesk-aap,akintolga/superdesk,liveblog/superdesk,sivakuna-aap/superdesk,superdesk/superdesk-ntb,amagdas/superdesk,superdesk/superdesk-aap,petrjasek/superdesk-server,plamut/superdesk,marwoodandrew/superdesk-aap,fritzSF/superdesk,plamut/superdesk,sjunaid/superdesk,sivakuna-aap/superdesk,ioanpocol/superdesk,superdesk/superdesk-aap,ioanpocol/superdesk,darconny/superdesk,petrjasek/superdesk,petrjasek/superdesk-ntb,vied12/superdesk,petrjasek/superdesk,verifiedpixel/superdesk,mdhaman/superdesk-aap,mugurrus/superdesk,amagdas/superdesk,fritzSF/superdesk | """Superdesk IO"""
from abc import ABCMeta, abstractmethod
import superdesk
import logging
from superdesk.celery_app import celery
providers = {}
allowed_providers = []
logger = logging.getLogger(__name__)
+ from .commands.remove_expired_content import RemoveExpiredContent
from .commands.update_ingest import UpdateIngest
from .commands.add_provider import AddProvider # NOQA
def init_app(app):
from .ingest_provider_model import IngestProviderResource
from superdesk.services import BaseService
import superdesk
endpoint_name = 'ingest_providers'
service = BaseService(endpoint_name, backend=superdesk.get_backend())
IngestProviderResource(endpoint_name, app=app, service=service)
def register_provider(type, provider):
providers[type] = provider
allowed_providers.append(type)
superdesk.privilege(name='ingest_providers', label='Ingest Channels', description='User can maintain Ingest Channels.')
@celery.task()
def fetch_ingest():
+ RemoveExpiredContent().run()
UpdateIngest().run()
class Parser:
"""
Parent Class for all types of Parsers like News ML 1.2, News ML G2, NITF,...
"""
__metaclass__ = ABCMeta
@abstractmethod
def parse_message(self, xml_doc):
"""
Parses the ingest XML and extracts the relevant elements/attributes values from the XML.
Sub-classes must override.
"""
| Revert "fix(ingest) - disable expired content removal" | ## Code Before:
"""Superdesk IO"""
from abc import ABCMeta, abstractmethod
import superdesk
import logging
from superdesk.celery_app import celery
providers = {}
allowed_providers = []
logger = logging.getLogger(__name__)
from .commands.update_ingest import UpdateIngest
from .commands.add_provider import AddProvider # NOQA
def init_app(app):
from .ingest_provider_model import IngestProviderResource
from superdesk.services import BaseService
import superdesk
endpoint_name = 'ingest_providers'
service = BaseService(endpoint_name, backend=superdesk.get_backend())
IngestProviderResource(endpoint_name, app=app, service=service)
def register_provider(type, provider):
providers[type] = provider
allowed_providers.append(type)
superdesk.privilege(name='ingest_providers', label='Ingest Channels', description='User can maintain Ingest Channels.')
@celery.task()
def fetch_ingest():
UpdateIngest().run()
class Parser:
"""
Parent Class for all types of Parsers like News ML 1.2, News ML G2, NITF,...
"""
__metaclass__ = ABCMeta
@abstractmethod
def parse_message(self, xml_doc):
"""
Parses the ingest XML and extracts the relevant elements/attributes values from the XML.
Sub-classes must override.
"""
## Instruction:
Revert "fix(ingest) - disable expired content removal"
## Code After:
"""Superdesk IO"""
from abc import ABCMeta, abstractmethod
import superdesk
import logging
from superdesk.celery_app import celery
providers = {}
allowed_providers = []
logger = logging.getLogger(__name__)
from .commands.remove_expired_content import RemoveExpiredContent
from .commands.update_ingest import UpdateIngest
from .commands.add_provider import AddProvider # NOQA
def init_app(app):
from .ingest_provider_model import IngestProviderResource
from superdesk.services import BaseService
import superdesk
endpoint_name = 'ingest_providers'
service = BaseService(endpoint_name, backend=superdesk.get_backend())
IngestProviderResource(endpoint_name, app=app, service=service)
def register_provider(type, provider):
providers[type] = provider
allowed_providers.append(type)
superdesk.privilege(name='ingest_providers', label='Ingest Channels', description='User can maintain Ingest Channels.')
@celery.task()
def fetch_ingest():
RemoveExpiredContent().run()
UpdateIngest().run()
class Parser:
"""
Parent Class for all types of Parsers like News ML 1.2, News ML G2, NITF,...
"""
__metaclass__ = ABCMeta
@abstractmethod
def parse_message(self, xml_doc):
"""
Parses the ingest XML and extracts the relevant elements/attributes values from the XML.
Sub-classes must override.
"""
| """Superdesk IO"""
from abc import ABCMeta, abstractmethod
import superdesk
import logging
from superdesk.celery_app import celery
providers = {}
allowed_providers = []
logger = logging.getLogger(__name__)
+ from .commands.remove_expired_content import RemoveExpiredContent
from .commands.update_ingest import UpdateIngest
from .commands.add_provider import AddProvider # NOQA
def init_app(app):
from .ingest_provider_model import IngestProviderResource
from superdesk.services import BaseService
import superdesk
endpoint_name = 'ingest_providers'
service = BaseService(endpoint_name, backend=superdesk.get_backend())
IngestProviderResource(endpoint_name, app=app, service=service)
def register_provider(type, provider):
providers[type] = provider
allowed_providers.append(type)
superdesk.privilege(name='ingest_providers', label='Ingest Channels', description='User can maintain Ingest Channels.')
@celery.task()
def fetch_ingest():
+ RemoveExpiredContent().run()
UpdateIngest().run()
class Parser:
"""
Parent Class for all types of Parsers like News ML 1.2, News ML G2, NITF,...
"""
__metaclass__ = ABCMeta
@abstractmethod
def parse_message(self, xml_doc):
"""
Parses the ingest XML and extracts the relevant elements/attributes values from the XML.
Sub-classes must override.
""" |
d8a861c47df6b41c27f2ec43474766284ba728af | bot/logger/message_sender/reusable/limiter/group.py | bot/logger/message_sender/reusable/limiter/group.py | from bot.logger.message_sender.message_builder import MessageBuilder
from bot.logger.message_sender.reusable.limiter import ReusableMessageLimiter
class ReusableMessageLimiterGroup(ReusableMessageLimiter):
def __init__(self, *limiters: ReusableMessageLimiter):
self.limiters = limiters
def should_issue_new_message_pre_add(self, new_text):
return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_pre_add(new_text))
def should_issue_new_message_post_add(self, builder: MessageBuilder):
return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_post_add(builder))
def __any_limiter(self, func: callable):
return any((func(limiter) for limiter in self.limiters))
def notify_new_message_issued(self):
for limiter in self.limiters:
limiter.notify_new_message_issued()
| from bot.logger.message_sender.message_builder import MessageBuilder
from bot.logger.message_sender.reusable.limiter import ReusableMessageLimiter
class ReusableMessageLimiterGroup(ReusableMessageLimiter):
def __init__(self, *limiters: ReusableMessageLimiter):
self.limiters = limiters
def should_issue_new_message_pre_add(self, new_text):
return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_pre_add(new_text))
def should_issue_new_message_post_add(self, builder: MessageBuilder):
return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_post_add(builder))
def __any_limiter(self, func: callable):
return any((func(limiter) for limiter in self.limiters))
def notify_new_message_issued(self):
for limiter in self.limiters:
limiter.notify_new_message_issued()
def notify_about_to_send_message(self):
for limiter in self.limiters:
limiter.notify_about_to_send_message()
| Make ReusableMessageGroup broadcast the notify_about_to_send_message to all limiters | Make ReusableMessageGroup broadcast the notify_about_to_send_message to all limiters
| Python | agpl-3.0 | alvarogzp/telegram-bot,alvarogzp/telegram-bot | from bot.logger.message_sender.message_builder import MessageBuilder
from bot.logger.message_sender.reusable.limiter import ReusableMessageLimiter
class ReusableMessageLimiterGroup(ReusableMessageLimiter):
def __init__(self, *limiters: ReusableMessageLimiter):
self.limiters = limiters
def should_issue_new_message_pre_add(self, new_text):
return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_pre_add(new_text))
def should_issue_new_message_post_add(self, builder: MessageBuilder):
return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_post_add(builder))
def __any_limiter(self, func: callable):
return any((func(limiter) for limiter in self.limiters))
def notify_new_message_issued(self):
for limiter in self.limiters:
limiter.notify_new_message_issued()
+ def notify_about_to_send_message(self):
+ for limiter in self.limiters:
+ limiter.notify_about_to_send_message()
+ | Make ReusableMessageGroup broadcast the notify_about_to_send_message to all limiters | ## Code Before:
from bot.logger.message_sender.message_builder import MessageBuilder
from bot.logger.message_sender.reusable.limiter import ReusableMessageLimiter
class ReusableMessageLimiterGroup(ReusableMessageLimiter):
def __init__(self, *limiters: ReusableMessageLimiter):
self.limiters = limiters
def should_issue_new_message_pre_add(self, new_text):
return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_pre_add(new_text))
def should_issue_new_message_post_add(self, builder: MessageBuilder):
return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_post_add(builder))
def __any_limiter(self, func: callable):
return any((func(limiter) for limiter in self.limiters))
def notify_new_message_issued(self):
for limiter in self.limiters:
limiter.notify_new_message_issued()
## Instruction:
Make ReusableMessageGroup broadcast the notify_about_to_send_message to all limiters
## Code After:
from bot.logger.message_sender.message_builder import MessageBuilder
from bot.logger.message_sender.reusable.limiter import ReusableMessageLimiter
class ReusableMessageLimiterGroup(ReusableMessageLimiter):
def __init__(self, *limiters: ReusableMessageLimiter):
self.limiters = limiters
def should_issue_new_message_pre_add(self, new_text):
return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_pre_add(new_text))
def should_issue_new_message_post_add(self, builder: MessageBuilder):
return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_post_add(builder))
def __any_limiter(self, func: callable):
return any((func(limiter) for limiter in self.limiters))
def notify_new_message_issued(self):
for limiter in self.limiters:
limiter.notify_new_message_issued()
def notify_about_to_send_message(self):
for limiter in self.limiters:
limiter.notify_about_to_send_message()
| from bot.logger.message_sender.message_builder import MessageBuilder
from bot.logger.message_sender.reusable.limiter import ReusableMessageLimiter
class ReusableMessageLimiterGroup(ReusableMessageLimiter):
def __init__(self, *limiters: ReusableMessageLimiter):
self.limiters = limiters
def should_issue_new_message_pre_add(self, new_text):
return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_pre_add(new_text))
def should_issue_new_message_post_add(self, builder: MessageBuilder):
return self.__any_limiter(lambda limiter: limiter.should_issue_new_message_post_add(builder))
def __any_limiter(self, func: callable):
return any((func(limiter) for limiter in self.limiters))
def notify_new_message_issued(self):
for limiter in self.limiters:
limiter.notify_new_message_issued()
+
+ def notify_about_to_send_message(self):
+ for limiter in self.limiters:
+ limiter.notify_about_to_send_message() |
5f49fb8c7c0f9e7a05d4f9b730d7f3e872229d60 | test/completion/definition.py | test/completion/definition.py |
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
|
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
# Note: len('isinstance(') == 11
#? 11 isinstance
isinstance()
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None,)
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None, )
# Note: len('isinstance(None, ') == 17
##? 17 isinstance
isinstance(None, )
# Note: len('isinstance( ') == 12
##? 12 isinstance
isinstance( )
| Add blackbox tests using column number | Add blackbox tests using column number
| Python | mit | flurischt/jedi,WoLpH/jedi,mfussenegger/jedi,dwillmer/jedi,tjwei/jedi,jonashaag/jedi,jonashaag/jedi,tjwei/jedi,flurischt/jedi,dwillmer/jedi,mfussenegger/jedi,WoLpH/jedi |
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
+ # Note: len('isinstance(') == 11
+ #? 11 isinstance
+ isinstance()
+
+ # Note: len('isinstance(None,') == 16
+ ##? 16 isinstance
+ isinstance(None,)
+
+ # Note: len('isinstance(None,') == 16
+ ##? 16 isinstance
+ isinstance(None, )
+
+ # Note: len('isinstance(None, ') == 17
+ ##? 17 isinstance
+ isinstance(None, )
+
+ # Note: len('isinstance( ') == 12
+ ##? 12 isinstance
+ isinstance( )
+ | Add blackbox tests using column number | ## Code Before:
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
## Instruction:
Add blackbox tests using column number
## Code After:
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
# Note: len('isinstance(') == 11
#? 11 isinstance
isinstance()
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None,)
# Note: len('isinstance(None,') == 16
##? 16 isinstance
isinstance(None, )
# Note: len('isinstance(None, ') == 17
##? 17 isinstance
isinstance(None, )
# Note: len('isinstance( ') == 12
##? 12 isinstance
isinstance( )
|
#? isinstance
isinstance(
)
#? isinstance
isinstance(None,
)
#? isinstance
isinstance(None,
)
+
+ # Note: len('isinstance(') == 11
+ #? 11 isinstance
+ isinstance()
+
+ # Note: len('isinstance(None,') == 16
+ ##? 16 isinstance
+ isinstance(None,)
+
+ # Note: len('isinstance(None,') == 16
+ ##? 16 isinstance
+ isinstance(None, )
+
+ # Note: len('isinstance(None, ') == 17
+ ##? 17 isinstance
+ isinstance(None, )
+
+ # Note: len('isinstance( ') == 12
+ ##? 12 isinstance
+ isinstance( ) |
1d74ba63dda5193a5287a45c9570a7c2ece6fb42 | moksha/apps/metrics/moksha/apps/metrics/consumers/metrics_consumer.py | moksha/apps/metrics/moksha/apps/metrics/consumers/metrics_consumer.py |
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['topic'], message['data'])
|
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['body']['topic'], message['body']['data'])
| Fix the data format of our metrics consumer | Fix the data format of our metrics consumer
| Python | apache-2.0 | mokshaproject/moksha,mokshaproject/moksha,pombredanne/moksha,ralphbean/moksha,ralphbean/moksha,pombredanne/moksha,pombredanne/moksha,lmacken/moksha,lmacken/moksha,ralphbean/moksha,lmacken/moksha,mokshaproject/moksha,mokshaproject/moksha,pombredanne/moksha |
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
- self.send_message(message['topic'], message['data'])
+ self.send_message(message['body']['topic'], message['body']['data'])
| Fix the data format of our metrics consumer | ## Code Before:
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['topic'], message['data'])
## Instruction:
Fix the data format of our metrics consumer
## Code After:
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
self.send_message(message['body']['topic'], message['body']['data'])
|
from moksha.api.hub import Consumer
class MokshaMessageMetricsConsumer(Consumer):
"""
This consumer listens to all messages on the `moksha_message_metrics`
topic, and relays the message to the message.body['topic'] topic.
"""
topic = 'moksha_message_metrics'
def consume(self, message):
- self.send_message(message['topic'], message['data'])
+ self.send_message(message['body']['topic'], message['body']['data'])
? ++++++++ ++++++++
|
cb7b51414a034d50e44fb30c6528b878aa9c64ee | web_ui/opensesame.py | web_ui/opensesame.py | password = "" | email_address = ""
email_password = ""
# Enter the login information for the EPNM API Account
API_username = ""
API_password = ""
| Add email and API template | Add email and API template | Python | apache-2.0 | cisco-gve/epnm_alarm_report,cisco-gve/epnm_alarm_report,cisco-gve/epnm_alarm_report,cisco-gve/epnm_alarm_report | + email_address = ""
+ email_password = ""
+ # Enter the login information for the EPNM API Account
+ API_username = ""
- password = ""
+ API_password = ""
+ | Add email and API template | ## Code Before:
password = ""
## Instruction:
Add email and API template
## Code After:
email_address = ""
email_password = ""
# Enter the login information for the EPNM API Account
API_username = ""
API_password = ""
| + email_address = ""
+ email_password = ""
+ # Enter the login information for the EPNM API Account
+ API_username = ""
- password = ""
+ API_password = ""
? ++++
|
35a15e06feca24872acb42c5395b58b2a1bed60e | byceps/services/snippet/transfer/models.py | byceps/services/snippet/transfer/models.py |
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import NewType
from uuid import UUID
from ...site.transfer.models import SiteID
from ....typing import BrandID
@dataclass(frozen=True)
class Scope:
type_: str
name: str
@classmethod
def for_global(cls) -> Scope:
return cls('global', 'global')
@classmethod
def for_brand(cls, brand_id: BrandID) -> Scope:
return cls('brand', str(brand_id))
@classmethod
def for_site(cls, site_id: SiteID) -> Scope:
return cls('site', str(site_id))
SnippetID = NewType('SnippetID', UUID)
SnippetType = Enum('SnippetType', ['document', 'fragment'])
SnippetVersionID = NewType('SnippetVersionID', UUID)
MountpointID = NewType('MountpointID', UUID)
@dataclass(frozen=True)
class Mountpoint:
id: MountpointID
site_id: SiteID
endpoint_suffix: str
url_path: str
snippet_id: SnippetID
|
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import NewType
from uuid import UUID
from ...site.transfer.models import SiteID
from ....typing import BrandID
@dataclass(frozen=True)
class Scope:
type_: str
name: str
@classmethod
def for_brand(cls, brand_id: BrandID) -> Scope:
return cls('brand', str(brand_id))
@classmethod
def for_site(cls, site_id: SiteID) -> Scope:
return cls('site', str(site_id))
SnippetID = NewType('SnippetID', UUID)
SnippetType = Enum('SnippetType', ['document', 'fragment'])
SnippetVersionID = NewType('SnippetVersionID', UUID)
MountpointID = NewType('MountpointID', UUID)
@dataclass(frozen=True)
class Mountpoint:
id: MountpointID
site_id: SiteID
endpoint_suffix: str
url_path: str
snippet_id: SnippetID
| Remove unused class method `Scope.for_global` | Remove unused class method `Scope.for_global`
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps |
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import NewType
from uuid import UUID
from ...site.transfer.models import SiteID
from ....typing import BrandID
@dataclass(frozen=True)
class Scope:
type_: str
name: str
-
- @classmethod
- def for_global(cls) -> Scope:
- return cls('global', 'global')
@classmethod
def for_brand(cls, brand_id: BrandID) -> Scope:
return cls('brand', str(brand_id))
@classmethod
def for_site(cls, site_id: SiteID) -> Scope:
return cls('site', str(site_id))
SnippetID = NewType('SnippetID', UUID)
SnippetType = Enum('SnippetType', ['document', 'fragment'])
SnippetVersionID = NewType('SnippetVersionID', UUID)
MountpointID = NewType('MountpointID', UUID)
@dataclass(frozen=True)
class Mountpoint:
id: MountpointID
site_id: SiteID
endpoint_suffix: str
url_path: str
snippet_id: SnippetID
| Remove unused class method `Scope.for_global` | ## Code Before:
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import NewType
from uuid import UUID
from ...site.transfer.models import SiteID
from ....typing import BrandID
@dataclass(frozen=True)
class Scope:
type_: str
name: str
@classmethod
def for_global(cls) -> Scope:
return cls('global', 'global')
@classmethod
def for_brand(cls, brand_id: BrandID) -> Scope:
return cls('brand', str(brand_id))
@classmethod
def for_site(cls, site_id: SiteID) -> Scope:
return cls('site', str(site_id))
SnippetID = NewType('SnippetID', UUID)
SnippetType = Enum('SnippetType', ['document', 'fragment'])
SnippetVersionID = NewType('SnippetVersionID', UUID)
MountpointID = NewType('MountpointID', UUID)
@dataclass(frozen=True)
class Mountpoint:
id: MountpointID
site_id: SiteID
endpoint_suffix: str
url_path: str
snippet_id: SnippetID
## Instruction:
Remove unused class method `Scope.for_global`
## Code After:
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import NewType
from uuid import UUID
from ...site.transfer.models import SiteID
from ....typing import BrandID
@dataclass(frozen=True)
class Scope:
type_: str
name: str
@classmethod
def for_brand(cls, brand_id: BrandID) -> Scope:
return cls('brand', str(brand_id))
@classmethod
def for_site(cls, site_id: SiteID) -> Scope:
return cls('site', str(site_id))
SnippetID = NewType('SnippetID', UUID)
SnippetType = Enum('SnippetType', ['document', 'fragment'])
SnippetVersionID = NewType('SnippetVersionID', UUID)
MountpointID = NewType('MountpointID', UUID)
@dataclass(frozen=True)
class Mountpoint:
id: MountpointID
site_id: SiteID
endpoint_suffix: str
url_path: str
snippet_id: SnippetID
|
from __future__ import annotations
from dataclasses import dataclass
from enum import Enum
from typing import NewType
from uuid import UUID
from ...site.transfer.models import SiteID
from ....typing import BrandID
@dataclass(frozen=True)
class Scope:
type_: str
name: str
-
- @classmethod
- def for_global(cls) -> Scope:
- return cls('global', 'global')
@classmethod
def for_brand(cls, brand_id: BrandID) -> Scope:
return cls('brand', str(brand_id))
@classmethod
def for_site(cls, site_id: SiteID) -> Scope:
return cls('site', str(site_id))
SnippetID = NewType('SnippetID', UUID)
SnippetType = Enum('SnippetType', ['document', 'fragment'])
SnippetVersionID = NewType('SnippetVersionID', UUID)
MountpointID = NewType('MountpointID', UUID)
@dataclass(frozen=True)
class Mountpoint:
id: MountpointID
site_id: SiteID
endpoint_suffix: str
url_path: str
snippet_id: SnippetID |
779c01d3932c02f2b9c45e300c7efb54f81749e9 | tests/rietveld/test_event_handler.py | tests/rietveld/test_event_handler.py | from __future__ import absolute_import, print_function
import unittest
from qtpy.QtWidgets import QApplication
from addie.rietveld import event_handler
class RietveldEventHandlerTests(unittest.TestCase):
def setUp(self):
self.main_window = QApplication([])
'''
def tearDown(self):
self.main_window.quit()
'''
def test_evt_change_gss_mode(self):
"""Test we can extract a bank id from bank workspace name"""
f = event_handler.evt_change_gss_mode
self.assertRaises(NotImplementedError, f, None)
if __name__ == '__main__':
unittest.main() # pragma: no cover
| from __future__ import absolute_import, print_function
import unittest
from qtpy.QtWidgets import QApplication
from addie.rietveld import event_handler
class RietveldEventHandlerTests(unittest.TestCase):
def setUp(self):
self.main_window = QApplication([])
'''
def tearDown(self):
self.main_window.quit()
'''
def test_evt_change_gss_mode_exception(self):
"""Test we can extract a bank id from bank workspace name"""
f = event_handler.evt_change_gss_mode
self.assertRaises(NotImplementedError, f, None)
if __name__ == '__main__':
unittest.main() # pragma: no cover
| Add exception to test title | Add exception to test title
| Python | mit | neutrons/FastGR,neutrons/FastGR,neutrons/FastGR | from __future__ import absolute_import, print_function
import unittest
from qtpy.QtWidgets import QApplication
from addie.rietveld import event_handler
class RietveldEventHandlerTests(unittest.TestCase):
def setUp(self):
self.main_window = QApplication([])
'''
def tearDown(self):
self.main_window.quit()
'''
- def test_evt_change_gss_mode(self):
+ def test_evt_change_gss_mode_exception(self):
"""Test we can extract a bank id from bank workspace name"""
f = event_handler.evt_change_gss_mode
self.assertRaises(NotImplementedError, f, None)
if __name__ == '__main__':
unittest.main() # pragma: no cover
| Add exception to test title | ## Code Before:
from __future__ import absolute_import, print_function
import unittest
from qtpy.QtWidgets import QApplication
from addie.rietveld import event_handler
class RietveldEventHandlerTests(unittest.TestCase):
def setUp(self):
self.main_window = QApplication([])
'''
def tearDown(self):
self.main_window.quit()
'''
def test_evt_change_gss_mode(self):
"""Test we can extract a bank id from bank workspace name"""
f = event_handler.evt_change_gss_mode
self.assertRaises(NotImplementedError, f, None)
if __name__ == '__main__':
unittest.main() # pragma: no cover
## Instruction:
Add exception to test title
## Code After:
from __future__ import absolute_import, print_function
import unittest
from qtpy.QtWidgets import QApplication
from addie.rietveld import event_handler
class RietveldEventHandlerTests(unittest.TestCase):
def setUp(self):
self.main_window = QApplication([])
'''
def tearDown(self):
self.main_window.quit()
'''
def test_evt_change_gss_mode_exception(self):
"""Test we can extract a bank id from bank workspace name"""
f = event_handler.evt_change_gss_mode
self.assertRaises(NotImplementedError, f, None)
if __name__ == '__main__':
unittest.main() # pragma: no cover
| from __future__ import absolute_import, print_function
import unittest
from qtpy.QtWidgets import QApplication
from addie.rietveld import event_handler
class RietveldEventHandlerTests(unittest.TestCase):
def setUp(self):
self.main_window = QApplication([])
'''
def tearDown(self):
self.main_window.quit()
'''
- def test_evt_change_gss_mode(self):
+ def test_evt_change_gss_mode_exception(self):
? ++++++++++
"""Test we can extract a bank id from bank workspace name"""
f = event_handler.evt_change_gss_mode
self.assertRaises(NotImplementedError, f, None)
if __name__ == '__main__':
unittest.main() # pragma: no cover |
b8e23194ff0c24bd9460629aff18f69d7a868f6d | likelihood.py | likelihood.py | import math
#Log-likelihood
def ll(ciphertext,perm,mat,k):
s=0.0
for i in range(len(ciphertext)-(k-1)):
kmer = tuple([perm[c] for c in ciphertext[i:i+k]])
s = s + math.log(mat[kmer])
return s
| import math
#Log-likelihood
def ll(ciphertext,perm,mat,k):
if k==1:
return ll_k1(ciphertext,perm,mat)
if k==2:
return ll_k2(ciphertext,perm,mat)
if k==3:
return ll_k3(ciphertext,perm,mat)
s=0.0
for i in range(len(ciphertext)-(k-1)):
kmer = tuple([perm[c] for c in ciphertext[i:i+k]])
s = s + math.log(mat[kmer])
return s
##Log-likelihood - hard-coded version for k=1
def ll_k1(ciphertext,perm,mat):
s=0.0
for i in range(len(ciphertext)):
uple = (perm[ciphertext[i]],)
s = s + math.log(mat[uple])
return s
##Log-likelihood - hard-coded version for k=2
def ll_k2(ciphertext,perm,mat):
s=0.0
for i in range(len(ciphertext)-1):
pair = (perm[ciphertext[i]],perm[ciphertext[i+1]])
s = s + math.log(mat[pair])
return s
##Log-likelihood - hard-coded version for k=3
def ll_k3(ciphertext,perm,mat):
s=0.0
for i in range(len(ciphertext)-2):
triplet = (perm[ciphertext[i]],perm[ciphertext[i+1]],perm[ciphertext[i+2]])
s = s + math.log(mat[triplet])
return s
| Add hard-coded versions of ll function for k=1,2,3 for speed | Add hard-coded versions of ll function for k=1,2,3 for speed
| Python | mit | gputzel/decode | import math
#Log-likelihood
def ll(ciphertext,perm,mat,k):
+ if k==1:
+ return ll_k1(ciphertext,perm,mat)
+ if k==2:
+ return ll_k2(ciphertext,perm,mat)
+ if k==3:
+ return ll_k3(ciphertext,perm,mat)
s=0.0
for i in range(len(ciphertext)-(k-1)):
kmer = tuple([perm[c] for c in ciphertext[i:i+k]])
s = s + math.log(mat[kmer])
return s
+ ##Log-likelihood - hard-coded version for k=1
+ def ll_k1(ciphertext,perm,mat):
+ s=0.0
+ for i in range(len(ciphertext)):
+ uple = (perm[ciphertext[i]],)
+ s = s + math.log(mat[uple])
+ return s
+
+ ##Log-likelihood - hard-coded version for k=2
+ def ll_k2(ciphertext,perm,mat):
+ s=0.0
+ for i in range(len(ciphertext)-1):
+ pair = (perm[ciphertext[i]],perm[ciphertext[i+1]])
+ s = s + math.log(mat[pair])
+ return s
+
+ ##Log-likelihood - hard-coded version for k=3
+ def ll_k3(ciphertext,perm,mat):
+ s=0.0
+ for i in range(len(ciphertext)-2):
+ triplet = (perm[ciphertext[i]],perm[ciphertext[i+1]],perm[ciphertext[i+2]])
+ s = s + math.log(mat[triplet])
+ return s
+ | Add hard-coded versions of ll function for k=1,2,3 for speed | ## Code Before:
import math
#Log-likelihood
def ll(ciphertext,perm,mat,k):
s=0.0
for i in range(len(ciphertext)-(k-1)):
kmer = tuple([perm[c] for c in ciphertext[i:i+k]])
s = s + math.log(mat[kmer])
return s
## Instruction:
Add hard-coded versions of ll function for k=1,2,3 for speed
## Code After:
import math
#Log-likelihood
def ll(ciphertext,perm,mat,k):
if k==1:
return ll_k1(ciphertext,perm,mat)
if k==2:
return ll_k2(ciphertext,perm,mat)
if k==3:
return ll_k3(ciphertext,perm,mat)
s=0.0
for i in range(len(ciphertext)-(k-1)):
kmer = tuple([perm[c] for c in ciphertext[i:i+k]])
s = s + math.log(mat[kmer])
return s
##Log-likelihood - hard-coded version for k=1
def ll_k1(ciphertext,perm,mat):
s=0.0
for i in range(len(ciphertext)):
uple = (perm[ciphertext[i]],)
s = s + math.log(mat[uple])
return s
##Log-likelihood - hard-coded version for k=2
def ll_k2(ciphertext,perm,mat):
s=0.0
for i in range(len(ciphertext)-1):
pair = (perm[ciphertext[i]],perm[ciphertext[i+1]])
s = s + math.log(mat[pair])
return s
##Log-likelihood - hard-coded version for k=3
def ll_k3(ciphertext,perm,mat):
s=0.0
for i in range(len(ciphertext)-2):
triplet = (perm[ciphertext[i]],perm[ciphertext[i+1]],perm[ciphertext[i+2]])
s = s + math.log(mat[triplet])
return s
| import math
#Log-likelihood
def ll(ciphertext,perm,mat,k):
+ if k==1:
+ return ll_k1(ciphertext,perm,mat)
+ if k==2:
+ return ll_k2(ciphertext,perm,mat)
+ if k==3:
+ return ll_k3(ciphertext,perm,mat)
s=0.0
for i in range(len(ciphertext)-(k-1)):
kmer = tuple([perm[c] for c in ciphertext[i:i+k]])
s = s + math.log(mat[kmer])
return s
+
+ ##Log-likelihood - hard-coded version for k=1
+ def ll_k1(ciphertext,perm,mat):
+ s=0.0
+ for i in range(len(ciphertext)):
+ uple = (perm[ciphertext[i]],)
+ s = s + math.log(mat[uple])
+ return s
+
+ ##Log-likelihood - hard-coded version for k=2
+ def ll_k2(ciphertext,perm,mat):
+ s=0.0
+ for i in range(len(ciphertext)-1):
+ pair = (perm[ciphertext[i]],perm[ciphertext[i+1]])
+ s = s + math.log(mat[pair])
+ return s
+
+ ##Log-likelihood - hard-coded version for k=3
+ def ll_k3(ciphertext,perm,mat):
+ s=0.0
+ for i in range(len(ciphertext)-2):
+ triplet = (perm[ciphertext[i]],perm[ciphertext[i+1]],perm[ciphertext[i+2]])
+ s = s + math.log(mat[triplet])
+ return s |
592a2c778bf7c87b7aad6f9ba14c1ba83da033e8 | scoring_engine/web/views/services.py | scoring_engine/web/views/services.py | from flask import Blueprint, render_template, flash
from flask_login import login_required, current_user
mod = Blueprint('services', __name__)
@mod.route('/services')
@login_required
def home():
current_team = current_user.team
if not current_user.is_blue_team:
flash('Only blue teams can access services', 'error')
return render_template('overview.html')
return render_template('services.html', team=current_team)
@mod.route('/service/<id>')
@login_required
def service(id):
return render_template('service.html', service=id)
| from flask import Blueprint, render_template, url_for, redirect
from flask_login import login_required, current_user
from scoring_engine.models.service import Service
mod = Blueprint('services', __name__)
@mod.route('/services')
@login_required
def home():
current_team = current_user.team
if not current_user.is_blue_team:
return redirect(url_for('auth.unauthorized'))
return render_template('services.html', team=current_team)
@mod.route('/service/<id>')
@login_required
def service(id):
service = Service.query.get(id)
if service is None or not current_user.team == service.team:
return redirect(url_for('auth.unauthorized'))
return render_template('service.html', service=service)
| Add unauthorize to service template | Add unauthorize to service template
Signed-off-by: Brandon Myers <9cda508be11a1ae7ceef912b85c196946f0ec5f3@mozilla.com>
| Python | mit | pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine | - from flask import Blueprint, render_template, flash
+ from flask import Blueprint, render_template, url_for, redirect
from flask_login import login_required, current_user
+ from scoring_engine.models.service import Service
mod = Blueprint('services', __name__)
@mod.route('/services')
@login_required
def home():
current_team = current_user.team
if not current_user.is_blue_team:
+ return redirect(url_for('auth.unauthorized'))
- flash('Only blue teams can access services', 'error')
- return render_template('overview.html')
return render_template('services.html', team=current_team)
@mod.route('/service/<id>')
@login_required
def service(id):
+ service = Service.query.get(id)
+ if service is None or not current_user.team == service.team:
+ return redirect(url_for('auth.unauthorized'))
- return render_template('service.html', service=id)
+ return render_template('service.html', service=service)
| Add unauthorize to service template | ## Code Before:
from flask import Blueprint, render_template, flash
from flask_login import login_required, current_user
mod = Blueprint('services', __name__)
@mod.route('/services')
@login_required
def home():
current_team = current_user.team
if not current_user.is_blue_team:
flash('Only blue teams can access services', 'error')
return render_template('overview.html')
return render_template('services.html', team=current_team)
@mod.route('/service/<id>')
@login_required
def service(id):
return render_template('service.html', service=id)
## Instruction:
Add unauthorize to service template
## Code After:
from flask import Blueprint, render_template, url_for, redirect
from flask_login import login_required, current_user
from scoring_engine.models.service import Service
mod = Blueprint('services', __name__)
@mod.route('/services')
@login_required
def home():
current_team = current_user.team
if not current_user.is_blue_team:
return redirect(url_for('auth.unauthorized'))
return render_template('services.html', team=current_team)
@mod.route('/service/<id>')
@login_required
def service(id):
service = Service.query.get(id)
if service is None or not current_user.team == service.team:
return redirect(url_for('auth.unauthorized'))
return render_template('service.html', service=service)
| - from flask import Blueprint, render_template, flash
? ^^^^
+ from flask import Blueprint, render_template, url_for, redirect
? ++++ ^^^^^^^^^^^^
from flask_login import login_required, current_user
+ from scoring_engine.models.service import Service
mod = Blueprint('services', __name__)
@mod.route('/services')
@login_required
def home():
current_team = current_user.team
if not current_user.is_blue_team:
+ return redirect(url_for('auth.unauthorized'))
- flash('Only blue teams can access services', 'error')
- return render_template('overview.html')
return render_template('services.html', team=current_team)
@mod.route('/service/<id>')
@login_required
def service(id):
+ service = Service.query.get(id)
+ if service is None or not current_user.team == service.team:
+ return redirect(url_for('auth.unauthorized'))
- return render_template('service.html', service=id)
? ^
+ return render_template('service.html', service=service)
? ++++ ^^
|
7cbc6ae58357ef647a007e1b505884e523d924c2 | numba/tests/test_ctypes_call.py | numba/tests/test_ctypes_call.py | import os
import ctypes
from numba import *
@autojit(backend='ast', nopython=True)
def call_ctypes_func(func, value):
return func(value)
def test_ctypes_calls():
libc = ctypes.CDLL(ctypes.util.find_library('c'))
puts = libc.puts
puts.argtypes = [ctypes.c_char_p]
assert call_ctypes_func(puts, "Hello World!")
libm = ctypes.CDLL(ctypes.util.find_library('m'))
ceil = libm.ceil
ceil.argtypes = [ctypes.c_double]
ceil.restype = ctypes.c_double
assert call_ctypes_func(ceil, 10.1) == 11.0
def test_str_return():
try:
import errno
except ImportError:
return
libc = ctypes.CDLL(ctypes.util.find_library('c'))
strerror = libc.strerror
strerror.argtypes = [ctypes.c_int]
strerror.restype = ctypes.c_char_p
expected = os.strerror(errno.EACCES)
got = call_ctypes_func(strerror, errno.EACCES)
assert expected == got
if __name__ == "__main__":
test_ctypes_calls()
# test_str_return() | import os
import ctypes
from numba import *
@autojit(backend='ast', nopython=True)
def call_ctypes_func(func, value):
return func(value)
def test_ctypes_calls():
# Test puts for no segfault
libc = ctypes.CDLL(ctypes.util.find_library('c'))
puts = libc.puts
puts.argtypes = [ctypes.c_char_p]
call_ctypes_func(puts, "Hello World!")
# Test ceil result
libm = ctypes.CDLL(ctypes.util.find_library('m'))
ceil = libm.ceil
ceil.argtypes = [ctypes.c_double]
ceil.restype = ctypes.c_double
assert call_ctypes_func(ceil, 10.1) == 11.0
def test_str_return():
try:
import errno
except ImportError:
return
libc = ctypes.CDLL(ctypes.util.find_library('c'))
strerror = libc.strerror
strerror.argtypes = [ctypes.c_int]
strerror.restype = ctypes.c_char_p
expected = os.strerror(errno.EACCES)
got = call_ctypes_func(strerror, errno.EACCES)
assert expected == got
if __name__ == "__main__":
test_ctypes_calls()
# test_str_return() | Fix ctypes call test for windows | Fix ctypes call test for windows
| Python | bsd-2-clause | sklam/numba,pitrou/numba,GaZ3ll3/numba,numba/numba,jriehl/numba,sklam/numba,IntelLabs/numba,IntelLabs/numba,jriehl/numba,pitrou/numba,shiquanwang/numba,gdementen/numba,stonebig/numba,pombredanne/numba,gmarkall/numba,cpcloud/numba,stonebig/numba,jriehl/numba,ssarangi/numba,gdementen/numba,stuartarchibald/numba,ssarangi/numba,GaZ3ll3/numba,cpcloud/numba,gmarkall/numba,ssarangi/numba,gmarkall/numba,seibert/numba,gmarkall/numba,stuartarchibald/numba,sklam/numba,gdementen/numba,stuartarchibald/numba,jriehl/numba,stefanseefeld/numba,jriehl/numba,stonebig/numba,cpcloud/numba,IntelLabs/numba,cpcloud/numba,stonebig/numba,seibert/numba,pitrou/numba,numba/numba,ssarangi/numba,stefanseefeld/numba,stonebig/numba,seibert/numba,pitrou/numba,pombredanne/numba,pombredanne/numba,IntelLabs/numba,stefanseefeld/numba,pitrou/numba,numba/numba,GaZ3ll3/numba,GaZ3ll3/numba,shiquanwang/numba,IntelLabs/numba,stuartarchibald/numba,pombredanne/numba,gdementen/numba,seibert/numba,shiquanwang/numba,sklam/numba,stefanseefeld/numba,sklam/numba,numba/numba,numba/numba,gdementen/numba,GaZ3ll3/numba,cpcloud/numba,gmarkall/numba,stuartarchibald/numba,stefanseefeld/numba,seibert/numba,ssarangi/numba,pombredanne/numba | import os
import ctypes
from numba import *
@autojit(backend='ast', nopython=True)
def call_ctypes_func(func, value):
return func(value)
def test_ctypes_calls():
+ # Test puts for no segfault
libc = ctypes.CDLL(ctypes.util.find_library('c'))
puts = libc.puts
puts.argtypes = [ctypes.c_char_p]
- assert call_ctypes_func(puts, "Hello World!")
+ call_ctypes_func(puts, "Hello World!")
+ # Test ceil result
libm = ctypes.CDLL(ctypes.util.find_library('m'))
ceil = libm.ceil
ceil.argtypes = [ctypes.c_double]
ceil.restype = ctypes.c_double
assert call_ctypes_func(ceil, 10.1) == 11.0
def test_str_return():
try:
import errno
except ImportError:
return
libc = ctypes.CDLL(ctypes.util.find_library('c'))
strerror = libc.strerror
strerror.argtypes = [ctypes.c_int]
strerror.restype = ctypes.c_char_p
expected = os.strerror(errno.EACCES)
got = call_ctypes_func(strerror, errno.EACCES)
assert expected == got
if __name__ == "__main__":
test_ctypes_calls()
# test_str_return() | Fix ctypes call test for windows | ## Code Before:
import os
import ctypes
from numba import *
@autojit(backend='ast', nopython=True)
def call_ctypes_func(func, value):
return func(value)
def test_ctypes_calls():
libc = ctypes.CDLL(ctypes.util.find_library('c'))
puts = libc.puts
puts.argtypes = [ctypes.c_char_p]
assert call_ctypes_func(puts, "Hello World!")
libm = ctypes.CDLL(ctypes.util.find_library('m'))
ceil = libm.ceil
ceil.argtypes = [ctypes.c_double]
ceil.restype = ctypes.c_double
assert call_ctypes_func(ceil, 10.1) == 11.0
def test_str_return():
try:
import errno
except ImportError:
return
libc = ctypes.CDLL(ctypes.util.find_library('c'))
strerror = libc.strerror
strerror.argtypes = [ctypes.c_int]
strerror.restype = ctypes.c_char_p
expected = os.strerror(errno.EACCES)
got = call_ctypes_func(strerror, errno.EACCES)
assert expected == got
if __name__ == "__main__":
test_ctypes_calls()
# test_str_return()
## Instruction:
Fix ctypes call test for windows
## Code After:
import os
import ctypes
from numba import *
@autojit(backend='ast', nopython=True)
def call_ctypes_func(func, value):
return func(value)
def test_ctypes_calls():
# Test puts for no segfault
libc = ctypes.CDLL(ctypes.util.find_library('c'))
puts = libc.puts
puts.argtypes = [ctypes.c_char_p]
call_ctypes_func(puts, "Hello World!")
# Test ceil result
libm = ctypes.CDLL(ctypes.util.find_library('m'))
ceil = libm.ceil
ceil.argtypes = [ctypes.c_double]
ceil.restype = ctypes.c_double
assert call_ctypes_func(ceil, 10.1) == 11.0
def test_str_return():
try:
import errno
except ImportError:
return
libc = ctypes.CDLL(ctypes.util.find_library('c'))
strerror = libc.strerror
strerror.argtypes = [ctypes.c_int]
strerror.restype = ctypes.c_char_p
expected = os.strerror(errno.EACCES)
got = call_ctypes_func(strerror, errno.EACCES)
assert expected == got
if __name__ == "__main__":
test_ctypes_calls()
# test_str_return() | import os
import ctypes
from numba import *
@autojit(backend='ast', nopython=True)
def call_ctypes_func(func, value):
return func(value)
def test_ctypes_calls():
+ # Test puts for no segfault
libc = ctypes.CDLL(ctypes.util.find_library('c'))
puts = libc.puts
puts.argtypes = [ctypes.c_char_p]
- assert call_ctypes_func(puts, "Hello World!")
? -------
+ call_ctypes_func(puts, "Hello World!")
+ # Test ceil result
libm = ctypes.CDLL(ctypes.util.find_library('m'))
ceil = libm.ceil
ceil.argtypes = [ctypes.c_double]
ceil.restype = ctypes.c_double
assert call_ctypes_func(ceil, 10.1) == 11.0
def test_str_return():
try:
import errno
except ImportError:
return
libc = ctypes.CDLL(ctypes.util.find_library('c'))
strerror = libc.strerror
strerror.argtypes = [ctypes.c_int]
strerror.restype = ctypes.c_char_p
expected = os.strerror(errno.EACCES)
got = call_ctypes_func(strerror, errno.EACCES)
assert expected == got
if __name__ == "__main__":
test_ctypes_calls()
# test_str_return() |
64c31f5630975a30a8187ad51566b4ff723b4d28 | nengo_spinnaker/simulator.py | nengo_spinnaker/simulator.py | import sys
from pacman103.core import control
from pacman103 import conf
from . import builder
class Simulator(object):
def __init__(self, model, dt=0.001, seed=None, use_serial=False):
# Build the model
self.builder = builder.Builder(use_serial=use_serial)
self.dao = self.builder(model, dt, seed)
self.dao.writeTextSpecs = True
def run(self, time):
"""Run the model, currently ignores the time."""
self.controller = control.Controller(
sys.modules[__name__],
conf.config.get('Machine', 'machineName')
)
self.controller.dao = self.dao
self.dao.set_hostname(conf.config.get('Machine', 'machineName'))
self.controller.map_model()
self.controller.generate_output()
self.controller.load_targets()
self.controller.load_write_mem()
self.controller.run(self.dao.app_id)
| import sys
from pacman103.core import control
from pacman103 import conf
from . import builder
class Simulator(object):
def __init__(self, model, dt=0.001, seed=None, use_serial=False):
# Build the model
self.builder = builder.Builder()
self.dao = self.builder(model, dt, seed, use_serial=use_serial)
self.dao.writeTextSpecs = True
def run(self, time):
"""Run the model, currently ignores the time."""
self.controller = control.Controller(
sys.modules[__name__],
conf.config.get('Machine', 'machineName')
)
self.controller.dao = self.dao
self.dao.set_hostname(conf.config.get('Machine', 'machineName'))
self.controller.map_model()
self.controller.generate_output()
self.controller.load_targets()
self.controller.load_write_mem()
self.controller.run(self.dao.app_id)
| Correct `Builder.__call__` parameters when called by the `Simulator` | Correct `Builder.__call__` parameters when called by the `Simulator`
| Python | mit | ctn-archive/nengo_spinnaker_2014,ctn-archive/nengo_spinnaker_2014,ctn-archive/nengo_spinnaker_2014,ctn-archive/nengo_spinnaker_2014 | import sys
from pacman103.core import control
from pacman103 import conf
from . import builder
class Simulator(object):
def __init__(self, model, dt=0.001, seed=None, use_serial=False):
# Build the model
- self.builder = builder.Builder(use_serial=use_serial)
+ self.builder = builder.Builder()
- self.dao = self.builder(model, dt, seed)
+ self.dao = self.builder(model, dt, seed, use_serial=use_serial)
self.dao.writeTextSpecs = True
def run(self, time):
"""Run the model, currently ignores the time."""
self.controller = control.Controller(
sys.modules[__name__],
conf.config.get('Machine', 'machineName')
)
self.controller.dao = self.dao
self.dao.set_hostname(conf.config.get('Machine', 'machineName'))
self.controller.map_model()
self.controller.generate_output()
self.controller.load_targets()
self.controller.load_write_mem()
self.controller.run(self.dao.app_id)
| Correct `Builder.__call__` parameters when called by the `Simulator` | ## Code Before:
import sys
from pacman103.core import control
from pacman103 import conf
from . import builder
class Simulator(object):
def __init__(self, model, dt=0.001, seed=None, use_serial=False):
# Build the model
self.builder = builder.Builder(use_serial=use_serial)
self.dao = self.builder(model, dt, seed)
self.dao.writeTextSpecs = True
def run(self, time):
"""Run the model, currently ignores the time."""
self.controller = control.Controller(
sys.modules[__name__],
conf.config.get('Machine', 'machineName')
)
self.controller.dao = self.dao
self.dao.set_hostname(conf.config.get('Machine', 'machineName'))
self.controller.map_model()
self.controller.generate_output()
self.controller.load_targets()
self.controller.load_write_mem()
self.controller.run(self.dao.app_id)
## Instruction:
Correct `Builder.__call__` parameters when called by the `Simulator`
## Code After:
import sys
from pacman103.core import control
from pacman103 import conf
from . import builder
class Simulator(object):
def __init__(self, model, dt=0.001, seed=None, use_serial=False):
# Build the model
self.builder = builder.Builder()
self.dao = self.builder(model, dt, seed, use_serial=use_serial)
self.dao.writeTextSpecs = True
def run(self, time):
"""Run the model, currently ignores the time."""
self.controller = control.Controller(
sys.modules[__name__],
conf.config.get('Machine', 'machineName')
)
self.controller.dao = self.dao
self.dao.set_hostname(conf.config.get('Machine', 'machineName'))
self.controller.map_model()
self.controller.generate_output()
self.controller.load_targets()
self.controller.load_write_mem()
self.controller.run(self.dao.app_id)
| import sys
from pacman103.core import control
from pacman103 import conf
from . import builder
class Simulator(object):
def __init__(self, model, dt=0.001, seed=None, use_serial=False):
# Build the model
- self.builder = builder.Builder(use_serial=use_serial)
? ---------------------
+ self.builder = builder.Builder()
- self.dao = self.builder(model, dt, seed)
+ self.dao = self.builder(model, dt, seed, use_serial=use_serial)
? +++++++++++++++++++++++
self.dao.writeTextSpecs = True
def run(self, time):
"""Run the model, currently ignores the time."""
self.controller = control.Controller(
sys.modules[__name__],
conf.config.get('Machine', 'machineName')
)
self.controller.dao = self.dao
self.dao.set_hostname(conf.config.get('Machine', 'machineName'))
self.controller.map_model()
self.controller.generate_output()
self.controller.load_targets()
self.controller.load_write_mem()
self.controller.run(self.dao.app_id) |
b698f6925b4629d7473fbe42806f54068d98428a | tests/component/test_component_identidock.py | tests/component/test_component_identidock.py | import sys
print(sys.path)
| import pytest
import requests
from time import sleep
COMPONENT_INDEX_URL = "http://identidock:5000"
COMPONENT_MONSTER_BASE_URL = COMPONENT_INDEX_URL + '/monster'
def test_get_mainpage():
print('component tester sleeping for 1 sec to let the identidock app to be ready adn also start its server')
sleep(1)
page = requests.get(COMPONENT_INDEX_URL)
assert page.status_code == 200
assert 'Joe Bloggs' in str(page.text)
def test_post_mainpage():
page = requests.post(COMPONENT_INDEX_URL, data=dict(name="Moby Dock"))
assert page.status_code == 200
assert 'Moby Dock' in str(page.text)
def test_mainpage_html_escaping():
page = requests.post(COMPONENT_INDEX_URL, data=dict(name='"><b>TEST</b><!--'))
assert page.status_code == 200
assert '<b>' not in str(page.text)
def test_get_identicon_with_valid_name_and_invalid_post_method_should_return_405():
name_hash = 'ABCDEF123456789'
page = requests.post('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
assert page.status_code == 405
def test_get_identicon_with_valid_name_and_cache_miss():
name_hash = 'ABCDEF123456789'
page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
# print('page.content : {0}'.format(page.content))
assert page.status_code == 200
def test_get_identicon_with_valid_name_and_cache_hit():
name_hash = 'ABCDEF123456789'
page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
# print('page.content : {0}'.format(page.content))
assert page.status_code == 200
def test_get_identicon_with_insecure_and_unescaped_invalid_name_hash():
invalid_name_hash = '<b>;i_am_invalid|name <{"'
page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, invalid_name_hash))
# print('page.content : {0}'.format(page.content))
assert page.status_code == 200
if __name__ == '__main__':
# unittest.main()
pytest.main()
| Add component test functions using pytest | Add component test functions using pytest
| Python | mit | anirbanroydas/ci-testing-python,anirbanroydas/ci-testing-python,anirbanroydas/ci-testing-python | - import sys
+ import pytest
+ import requests
+ from time import sleep
- print(sys.path)
+ COMPONENT_INDEX_URL = "http://identidock:5000"
+ COMPONENT_MONSTER_BASE_URL = COMPONENT_INDEX_URL + '/monster'
+
+
+
+
+ def test_get_mainpage():
+ print('component tester sleeping for 1 sec to let the identidock app to be ready adn also start its server')
+ sleep(1)
+ page = requests.get(COMPONENT_INDEX_URL)
+ assert page.status_code == 200
+ assert 'Joe Bloggs' in str(page.text)
+
+
+
+
+ def test_post_mainpage():
+ page = requests.post(COMPONENT_INDEX_URL, data=dict(name="Moby Dock"))
+ assert page.status_code == 200
+ assert 'Moby Dock' in str(page.text)
+
+
+
+
+ def test_mainpage_html_escaping():
+ page = requests.post(COMPONENT_INDEX_URL, data=dict(name='"><b>TEST</b><!--'))
+ assert page.status_code == 200
+ assert '<b>' not in str(page.text)
+
+
+
+ def test_get_identicon_with_valid_name_and_invalid_post_method_should_return_405():
+ name_hash = 'ABCDEF123456789'
+
+ page = requests.post('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
+
+ assert page.status_code == 405
+
+
+
+
+ def test_get_identicon_with_valid_name_and_cache_miss():
+ name_hash = 'ABCDEF123456789'
+ page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
+
+ # print('page.content : {0}'.format(page.content))
+ assert page.status_code == 200
+
+
+
+
+ def test_get_identicon_with_valid_name_and_cache_hit():
+ name_hash = 'ABCDEF123456789'
+ page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
+
+ # print('page.content : {0}'.format(page.content))
+ assert page.status_code == 200
+
+
+
+
+ def test_get_identicon_with_insecure_and_unescaped_invalid_name_hash():
+ invalid_name_hash = '<b>;i_am_invalid|name <{"'
+
+ page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, invalid_name_hash))
+
+ # print('page.content : {0}'.format(page.content))
+ assert page.status_code == 200
+
+
+
+
+ if __name__ == '__main__':
+ # unittest.main()
+ pytest.main()
+ | Add component test functions using pytest | ## Code Before:
import sys
print(sys.path)
## Instruction:
Add component test functions using pytest
## Code After:
import pytest
import requests
from time import sleep
COMPONENT_INDEX_URL = "http://identidock:5000"
COMPONENT_MONSTER_BASE_URL = COMPONENT_INDEX_URL + '/monster'
def test_get_mainpage():
print('component tester sleeping for 1 sec to let the identidock app to be ready adn also start its server')
sleep(1)
page = requests.get(COMPONENT_INDEX_URL)
assert page.status_code == 200
assert 'Joe Bloggs' in str(page.text)
def test_post_mainpage():
page = requests.post(COMPONENT_INDEX_URL, data=dict(name="Moby Dock"))
assert page.status_code == 200
assert 'Moby Dock' in str(page.text)
def test_mainpage_html_escaping():
page = requests.post(COMPONENT_INDEX_URL, data=dict(name='"><b>TEST</b><!--'))
assert page.status_code == 200
assert '<b>' not in str(page.text)
def test_get_identicon_with_valid_name_and_invalid_post_method_should_return_405():
name_hash = 'ABCDEF123456789'
page = requests.post('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
assert page.status_code == 405
def test_get_identicon_with_valid_name_and_cache_miss():
name_hash = 'ABCDEF123456789'
page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
# print('page.content : {0}'.format(page.content))
assert page.status_code == 200
def test_get_identicon_with_valid_name_and_cache_hit():
name_hash = 'ABCDEF123456789'
page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
# print('page.content : {0}'.format(page.content))
assert page.status_code == 200
def test_get_identicon_with_insecure_and_unescaped_invalid_name_hash():
invalid_name_hash = '<b>;i_am_invalid|name <{"'
page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, invalid_name_hash))
# print('page.content : {0}'.format(page.content))
assert page.status_code == 200
if __name__ == '__main__':
# unittest.main()
pytest.main()
| - import sys
+ import pytest
+ import requests
+ from time import sleep
- print(sys.path)
+ COMPONENT_INDEX_URL = "http://identidock:5000"
+
+ COMPONENT_MONSTER_BASE_URL = COMPONENT_INDEX_URL + '/monster'
+
+
+
+
+ def test_get_mainpage():
+ print('component tester sleeping for 1 sec to let the identidock app to be ready adn also start its server')
+ sleep(1)
+ page = requests.get(COMPONENT_INDEX_URL)
+ assert page.status_code == 200
+ assert 'Joe Bloggs' in str(page.text)
+
+
+
+
+ def test_post_mainpage():
+ page = requests.post(COMPONENT_INDEX_URL, data=dict(name="Moby Dock"))
+ assert page.status_code == 200
+ assert 'Moby Dock' in str(page.text)
+
+
+
+
+ def test_mainpage_html_escaping():
+ page = requests.post(COMPONENT_INDEX_URL, data=dict(name='"><b>TEST</b><!--'))
+ assert page.status_code == 200
+ assert '<b>' not in str(page.text)
+
+
+
+ def test_get_identicon_with_valid_name_and_invalid_post_method_should_return_405():
+ name_hash = 'ABCDEF123456789'
+
+ page = requests.post('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
+
+ assert page.status_code == 405
+
+
+
+
+ def test_get_identicon_with_valid_name_and_cache_miss():
+ name_hash = 'ABCDEF123456789'
+ page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
+
+ # print('page.content : {0}'.format(page.content))
+ assert page.status_code == 200
+
+
+
+
+ def test_get_identicon_with_valid_name_and_cache_hit():
+ name_hash = 'ABCDEF123456789'
+ page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, name_hash))
+
+ # print('page.content : {0}'.format(page.content))
+ assert page.status_code == 200
+
+
+
+
+ def test_get_identicon_with_insecure_and_unescaped_invalid_name_hash():
+ invalid_name_hash = '<b>;i_am_invalid|name <{"'
+
+ page = requests.get('{0}/{1}'.format(COMPONENT_MONSTER_BASE_URL, invalid_name_hash))
+
+ # print('page.content : {0}'.format(page.content))
+ assert page.status_code == 200
+
+
+
+
+ if __name__ == '__main__':
+ # unittest.main()
+ pytest.main() |
968c73805e5beff502955ad3dbb8aa86ee8bc0b7 | freelancefinder/jobs/forms.py | freelancefinder/jobs/forms.py | """Forms for dealing with jobs/posts."""
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from taggit.models import Tag
class PostFilterForm(forms.Form):
"""Form for filtering the PostListView."""
title = forms.CharField(required=False)
is_job_posting = forms.BooleanField(required=False)
is_freelance = forms.BooleanField(required=False)
def __init__(self, *args, **kwargs):
super(PostFilterForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-inline'
self.helper.form_method = 'get'
self.helper.add_input(Submit('submit', 'Filter'))
class JobSearchForm(forms.Form):
"""Form for filtering the JobListView."""
search = forms.CharField(required=False)
tag = forms.ModelChoiceField(queryset=Tag.objects.all())
def __init__(self, *args, **kwargs):
super(JobSearchForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-inline'
self.helper.form_method = 'get'
self.helper.add_input(Submit('submit', 'Search'))
| """Forms for dealing with jobs/posts."""
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from taggit.models import Tag
class PostFilterForm(forms.Form):
"""Form for filtering the PostListView."""
title = forms.CharField(required=False)
is_job_posting = forms.BooleanField(required=False)
is_freelance = forms.BooleanField(required=False)
def __init__(self, *args, **kwargs):
super(PostFilterForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-inline'
self.helper.form_method = 'get'
self.helper.add_input(Submit('submit', 'Filter'))
class JobSearchForm(forms.Form):
"""Form for filtering the JobListView."""
search = forms.CharField(required=False)
tag = forms.ModelChoiceField(queryset=Tag.objects.all(), required=False)
def __init__(self, *args, **kwargs):
super(JobSearchForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-inline'
self.helper.form_method = 'get'
self.helper.add_input(Submit('submit', 'Search'))
| Tag is not required, of course | Tag is not required, of course
| Python | bsd-3-clause | ScorpionResponse/freelancefinder,ScorpionResponse/freelancefinder,ScorpionResponse/freelancefinder | """Forms for dealing with jobs/posts."""
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from taggit.models import Tag
class PostFilterForm(forms.Form):
"""Form for filtering the PostListView."""
title = forms.CharField(required=False)
is_job_posting = forms.BooleanField(required=False)
is_freelance = forms.BooleanField(required=False)
def __init__(self, *args, **kwargs):
super(PostFilterForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-inline'
self.helper.form_method = 'get'
self.helper.add_input(Submit('submit', 'Filter'))
class JobSearchForm(forms.Form):
"""Form for filtering the JobListView."""
search = forms.CharField(required=False)
- tag = forms.ModelChoiceField(queryset=Tag.objects.all())
+ tag = forms.ModelChoiceField(queryset=Tag.objects.all(), required=False)
def __init__(self, *args, **kwargs):
super(JobSearchForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-inline'
self.helper.form_method = 'get'
self.helper.add_input(Submit('submit', 'Search'))
| Tag is not required, of course | ## Code Before:
"""Forms for dealing with jobs/posts."""
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from taggit.models import Tag
class PostFilterForm(forms.Form):
"""Form for filtering the PostListView."""
title = forms.CharField(required=False)
is_job_posting = forms.BooleanField(required=False)
is_freelance = forms.BooleanField(required=False)
def __init__(self, *args, **kwargs):
super(PostFilterForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-inline'
self.helper.form_method = 'get'
self.helper.add_input(Submit('submit', 'Filter'))
class JobSearchForm(forms.Form):
"""Form for filtering the JobListView."""
search = forms.CharField(required=False)
tag = forms.ModelChoiceField(queryset=Tag.objects.all())
def __init__(self, *args, **kwargs):
super(JobSearchForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-inline'
self.helper.form_method = 'get'
self.helper.add_input(Submit('submit', 'Search'))
## Instruction:
Tag is not required, of course
## Code After:
"""Forms for dealing with jobs/posts."""
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from taggit.models import Tag
class PostFilterForm(forms.Form):
"""Form for filtering the PostListView."""
title = forms.CharField(required=False)
is_job_posting = forms.BooleanField(required=False)
is_freelance = forms.BooleanField(required=False)
def __init__(self, *args, **kwargs):
super(PostFilterForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-inline'
self.helper.form_method = 'get'
self.helper.add_input(Submit('submit', 'Filter'))
class JobSearchForm(forms.Form):
"""Form for filtering the JobListView."""
search = forms.CharField(required=False)
tag = forms.ModelChoiceField(queryset=Tag.objects.all(), required=False)
def __init__(self, *args, **kwargs):
super(JobSearchForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-inline'
self.helper.form_method = 'get'
self.helper.add_input(Submit('submit', 'Search'))
| """Forms for dealing with jobs/posts."""
from django import forms
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit
from taggit.models import Tag
class PostFilterForm(forms.Form):
"""Form for filtering the PostListView."""
title = forms.CharField(required=False)
is_job_posting = forms.BooleanField(required=False)
is_freelance = forms.BooleanField(required=False)
def __init__(self, *args, **kwargs):
super(PostFilterForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-inline'
self.helper.form_method = 'get'
self.helper.add_input(Submit('submit', 'Filter'))
class JobSearchForm(forms.Form):
"""Form for filtering the JobListView."""
search = forms.CharField(required=False)
- tag = forms.ModelChoiceField(queryset=Tag.objects.all())
+ tag = forms.ModelChoiceField(queryset=Tag.objects.all(), required=False)
? ++++++++++++++++
def __init__(self, *args, **kwargs):
super(JobSearchForm, self).__init__(*args, **kwargs)
self.helper = FormHelper()
self.helper.form_class = 'form-inline'
self.helper.form_method = 'get'
self.helper.add_input(Submit('submit', 'Search')) |
7b9ee45c0791d8368a0bb8af52652d3fcd482c79 | qubesadmin/__init__.py | qubesadmin/__init__.py |
'''Qubes OS management client.'''
import os
import qubesadmin.config
import qubesadmin.base
import qubesadmin.app
DEFAULT = qubesadmin.base.DEFAULT
if os.path.exists(qubesadmin.config.QUBESD_SOCKET):
Qubes = qubesadmin.app.QubesLocal
else:
Qubes = qubesadmin.app.QubesRemote
|
'''Qubes OS management client.'''
import os
import qubesadmin.config
import qubesadmin.base
import qubesadmin.app
DEFAULT = qubesadmin.base.DEFAULT
if os.path.exists('/etc/qubes-release'):
Qubes = qubesadmin.app.QubesLocal
else:
Qubes = qubesadmin.app.QubesRemote
| Choose QubesLocal or QubesRemote based on /etc/qubes-release presence | Choose QubesLocal or QubesRemote based on /etc/qubes-release presence
Do not check for qubesd socket (at module import time), because if not
running at this precise time, it will lead to wrong choice. And a weird
error message in consequence (looking for qrexec-client-vm in dom0).
Fixes QubesOS/qubes-issues#2917
| Python | lgpl-2.1 | marmarek/qubes-core-mgmt-client,marmarek/qubes-core-mgmt-client,marmarek/qubes-core-mgmt-client |
'''Qubes OS management client.'''
import os
import qubesadmin.config
import qubesadmin.base
import qubesadmin.app
DEFAULT = qubesadmin.base.DEFAULT
- if os.path.exists(qubesadmin.config.QUBESD_SOCKET):
+ if os.path.exists('/etc/qubes-release'):
Qubes = qubesadmin.app.QubesLocal
else:
Qubes = qubesadmin.app.QubesRemote
| Choose QubesLocal or QubesRemote based on /etc/qubes-release presence | ## Code Before:
'''Qubes OS management client.'''
import os
import qubesadmin.config
import qubesadmin.base
import qubesadmin.app
DEFAULT = qubesadmin.base.DEFAULT
if os.path.exists(qubesadmin.config.QUBESD_SOCKET):
Qubes = qubesadmin.app.QubesLocal
else:
Qubes = qubesadmin.app.QubesRemote
## Instruction:
Choose QubesLocal or QubesRemote based on /etc/qubes-release presence
## Code After:
'''Qubes OS management client.'''
import os
import qubesadmin.config
import qubesadmin.base
import qubesadmin.app
DEFAULT = qubesadmin.base.DEFAULT
if os.path.exists('/etc/qubes-release'):
Qubes = qubesadmin.app.QubesLocal
else:
Qubes = qubesadmin.app.QubesRemote
|
'''Qubes OS management client.'''
import os
import qubesadmin.config
import qubesadmin.base
import qubesadmin.app
DEFAULT = qubesadmin.base.DEFAULT
- if os.path.exists(qubesadmin.config.QUBESD_SOCKET):
+ if os.path.exists('/etc/qubes-release'):
Qubes = qubesadmin.app.QubesLocal
else:
Qubes = qubesadmin.app.QubesRemote |
14cfc8927b36a89947c1bd4cefc5be88ebbea1b5 | cheroot/test/conftest.py | cheroot/test/conftest.py | import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
config = {
'bind_addr': ('127.0.0.1', 54583),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
for srv in cheroot_server(cheroot.wsgi.Server):
yield srv
@pytest.fixture(scope='module')
def native_server():
for srv in cheroot_server(cheroot.server.HTTPServer):
yield srv
| import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
EPHEMERAL_PORT = 0
config = {
'bind_addr': ('127.0.0.1', EPHEMERAL_PORT),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
for srv in cheroot_server(cheroot.wsgi.Server):
yield srv
@pytest.fixture(scope='module')
def native_server():
for srv in cheroot_server(cheroot.server.HTTPServer):
yield srv
| Make HTTP server fixture bind to an ephemeral port | Make HTTP server fixture bind to an ephemeral port
| Python | bsd-3-clause | cherrypy/cheroot | import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
+ EPHEMERAL_PORT = 0
config = {
- 'bind_addr': ('127.0.0.1', 54583),
+ 'bind_addr': ('127.0.0.1', EPHEMERAL_PORT),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
for srv in cheroot_server(cheroot.wsgi.Server):
yield srv
@pytest.fixture(scope='module')
def native_server():
for srv in cheroot_server(cheroot.server.HTTPServer):
yield srv
| Make HTTP server fixture bind to an ephemeral port | ## Code Before:
import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
config = {
'bind_addr': ('127.0.0.1', 54583),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
for srv in cheroot_server(cheroot.wsgi.Server):
yield srv
@pytest.fixture(scope='module')
def native_server():
for srv in cheroot_server(cheroot.server.HTTPServer):
yield srv
## Instruction:
Make HTTP server fixture bind to an ephemeral port
## Code After:
import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
EPHEMERAL_PORT = 0
config = {
'bind_addr': ('127.0.0.1', EPHEMERAL_PORT),
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
for srv in cheroot_server(cheroot.wsgi.Server):
yield srv
@pytest.fixture(scope='module')
def native_server():
for srv in cheroot_server(cheroot.server.HTTPServer):
yield srv
| import threading
import time
import pytest
import cheroot.server
import cheroot.wsgi
+ EPHEMERAL_PORT = 0
config = {
- 'bind_addr': ('127.0.0.1', 54583),
? ^^^^^
+ 'bind_addr': ('127.0.0.1', EPHEMERAL_PORT),
? ^^^^^^^^^^^^^^
'wsgi_app': None,
}
def cheroot_server(server_factory):
conf = config.copy()
httpserver = server_factory(**conf) # create it
threading.Thread(target=httpserver.safe_start).start() # spawn it
while not httpserver.ready: # wait until fully initialized and bound
time.sleep(0.1)
yield httpserver
httpserver.stop() # destroy it
@pytest.fixture(scope='module')
def wsgi_server():
for srv in cheroot_server(cheroot.wsgi.Server):
yield srv
@pytest.fixture(scope='module')
def native_server():
for srv in cheroot_server(cheroot.server.HTTPServer):
yield srv |
424a5da1e867b5b77fe5f241ef0a825988157811 | moksha/config/app_cfg.py | moksha/config/app_cfg.py | from tg.configuration import AppConfig, Bunch
import moksha
from moksha import model
from moksha.lib import app_globals, helpers
base_config = AppConfig()
base_config.package = moksha
# Set the default renderer
base_config.default_renderer = 'mako'
base_config.renderers = []
base_config.renderers.append('mako')
# @@ This is necessary at the moment.
base_config.use_legacy_renderer = True
# Configure the base SQLALchemy Setup
base_config.use_sqlalchemy = True
base_config.model = moksha.model
base_config.DBSession = moksha.model.DBSession
# Configure the authentication backend
base_config.auth_backend = 'sqlalchemy'
base_config.sa_auth.dbsession = model.DBSession
base_config.sa_auth.user_class = model.User
base_config.sa_auth.group_class = model.Group
base_config.sa_auth.permission_class = model.Permission
# override this if you would like to provide a different who plugin for
# managing login and logout of your application
base_config.sa_auth.form_plugin = None
| from tg.configuration import AppConfig, Bunch
import moksha
from moksha import model
from moksha.lib import app_globals, helpers
base_config = AppConfig()
base_config.package = moksha
# Set the default renderer
base_config.default_renderer = 'mako'
base_config.renderers = []
base_config.renderers.append('genshi')
base_config.renderers.append('mako')
# @@ This is necessary at the moment.
base_config.use_legacy_renderer = True
# Configure the base SQLALchemy Setup
base_config.use_sqlalchemy = True
base_config.model = moksha.model
base_config.DBSession = moksha.model.DBSession
# Configure the authentication backend
base_config.auth_backend = 'sqlalchemy'
base_config.sa_auth.dbsession = model.DBSession
base_config.sa_auth.user_class = model.User
base_config.sa_auth.group_class = model.Group
base_config.sa_auth.permission_class = model.Permission
# override this if you would like to provide a different who plugin for
# managing login and logout of your application
base_config.sa_auth.form_plugin = None
| Load up the genshi renderer | Load up the genshi renderer
| Python | apache-2.0 | lmacken/moksha,ralphbean/moksha,pombredanne/moksha,pombredanne/moksha,mokshaproject/moksha,lmacken/moksha,ralphbean/moksha,lmacken/moksha,pombredanne/moksha,mokshaproject/moksha,mokshaproject/moksha,ralphbean/moksha,mokshaproject/moksha,pombredanne/moksha | from tg.configuration import AppConfig, Bunch
import moksha
from moksha import model
from moksha.lib import app_globals, helpers
base_config = AppConfig()
base_config.package = moksha
# Set the default renderer
base_config.default_renderer = 'mako'
base_config.renderers = []
+ base_config.renderers.append('genshi')
base_config.renderers.append('mako')
# @@ This is necessary at the moment.
base_config.use_legacy_renderer = True
# Configure the base SQLALchemy Setup
base_config.use_sqlalchemy = True
base_config.model = moksha.model
base_config.DBSession = moksha.model.DBSession
# Configure the authentication backend
base_config.auth_backend = 'sqlalchemy'
base_config.sa_auth.dbsession = model.DBSession
base_config.sa_auth.user_class = model.User
base_config.sa_auth.group_class = model.Group
base_config.sa_auth.permission_class = model.Permission
# override this if you would like to provide a different who plugin for
# managing login and logout of your application
base_config.sa_auth.form_plugin = None
| Load up the genshi renderer | ## Code Before:
from tg.configuration import AppConfig, Bunch
import moksha
from moksha import model
from moksha.lib import app_globals, helpers
base_config = AppConfig()
base_config.package = moksha
# Set the default renderer
base_config.default_renderer = 'mako'
base_config.renderers = []
base_config.renderers.append('mako')
# @@ This is necessary at the moment.
base_config.use_legacy_renderer = True
# Configure the base SQLALchemy Setup
base_config.use_sqlalchemy = True
base_config.model = moksha.model
base_config.DBSession = moksha.model.DBSession
# Configure the authentication backend
base_config.auth_backend = 'sqlalchemy'
base_config.sa_auth.dbsession = model.DBSession
base_config.sa_auth.user_class = model.User
base_config.sa_auth.group_class = model.Group
base_config.sa_auth.permission_class = model.Permission
# override this if you would like to provide a different who plugin for
# managing login and logout of your application
base_config.sa_auth.form_plugin = None
## Instruction:
Load up the genshi renderer
## Code After:
from tg.configuration import AppConfig, Bunch
import moksha
from moksha import model
from moksha.lib import app_globals, helpers
base_config = AppConfig()
base_config.package = moksha
# Set the default renderer
base_config.default_renderer = 'mako'
base_config.renderers = []
base_config.renderers.append('genshi')
base_config.renderers.append('mako')
# @@ This is necessary at the moment.
base_config.use_legacy_renderer = True
# Configure the base SQLALchemy Setup
base_config.use_sqlalchemy = True
base_config.model = moksha.model
base_config.DBSession = moksha.model.DBSession
# Configure the authentication backend
base_config.auth_backend = 'sqlalchemy'
base_config.sa_auth.dbsession = model.DBSession
base_config.sa_auth.user_class = model.User
base_config.sa_auth.group_class = model.Group
base_config.sa_auth.permission_class = model.Permission
# override this if you would like to provide a different who plugin for
# managing login and logout of your application
base_config.sa_auth.form_plugin = None
| from tg.configuration import AppConfig, Bunch
import moksha
from moksha import model
from moksha.lib import app_globals, helpers
base_config = AppConfig()
base_config.package = moksha
# Set the default renderer
base_config.default_renderer = 'mako'
base_config.renderers = []
+ base_config.renderers.append('genshi')
base_config.renderers.append('mako')
# @@ This is necessary at the moment.
base_config.use_legacy_renderer = True
# Configure the base SQLALchemy Setup
base_config.use_sqlalchemy = True
base_config.model = moksha.model
base_config.DBSession = moksha.model.DBSession
# Configure the authentication backend
base_config.auth_backend = 'sqlalchemy'
base_config.sa_auth.dbsession = model.DBSession
base_config.sa_auth.user_class = model.User
base_config.sa_auth.group_class = model.Group
base_config.sa_auth.permission_class = model.Permission
# override this if you would like to provide a different who plugin for
# managing login and logout of your application
base_config.sa_auth.form_plugin = None |
b718c1d817e767c336654001f3aaea5d7327625a | wsgi_intercept/requests_intercept.py | wsgi_intercept/requests_intercept.py |
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
|
import sys
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
def __init__(self, *args, **kwargs):
if 'strict' in kwargs and sys.version_info > (3, 0):
kwargs.pop('strict')
WSGI_HTTPConnection.__init__(self, *args, **kwargs)
HTTPConnection.__init__(self, *args, **kwargs)
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
def __init__(self, *args, **kwargs):
if 'strict' in kwargs and sys.version_info > (3, 0):
kwargs.pop('strict')
WSGI_HTTPSConnection.__init__(self, *args, **kwargs)
HTTPSConnection.__init__(self, *args, **kwargs)
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
| Deal with request's urllib3 being annoying about 'strict' | Deal with request's urllib3 being annoying about 'strict'
These changes are required to get tests to pass in python3.4 (and
presumably others).
This is entirely code from @sashahart, who had done the work earlier
to deal with with some Debian related issues uncovered by @thomasgoirand.
These changes will probably mean the debian packages will need to be
updated when the next version is released.
| Python | mit | sileht/python3-wsgi-intercept,cdent/wsgi-intercept | +
+ import sys
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
- pass
+ def __init__(self, *args, **kwargs):
+ if 'strict' in kwargs and sys.version_info > (3, 0):
+ kwargs.pop('strict')
+ WSGI_HTTPConnection.__init__(self, *args, **kwargs)
+ HTTPConnection.__init__(self, *args, **kwargs)
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
- pass
+ def __init__(self, *args, **kwargs):
+ if 'strict' in kwargs and sys.version_info > (3, 0):
+ kwargs.pop('strict')
+ WSGI_HTTPSConnection.__init__(self, *args, **kwargs)
+ HTTPSConnection.__init__(self, *args, **kwargs)
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
| Deal with request's urllib3 being annoying about 'strict' | ## Code Before:
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
pass
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
pass
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
## Instruction:
Deal with request's urllib3 being annoying about 'strict'
## Code After:
import sys
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
def __init__(self, *args, **kwargs):
if 'strict' in kwargs and sys.version_info > (3, 0):
kwargs.pop('strict')
WSGI_HTTPConnection.__init__(self, *args, **kwargs)
HTTPConnection.__init__(self, *args, **kwargs)
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
def __init__(self, *args, **kwargs):
if 'strict' in kwargs and sys.version_info > (3, 0):
kwargs.pop('strict')
WSGI_HTTPSConnection.__init__(self, *args, **kwargs)
HTTPSConnection.__init__(self, *args, **kwargs)
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection
| +
+ import sys
from . import WSGI_HTTPConnection, WSGI_HTTPSConnection, wsgi_fake_socket
from requests.packages.urllib3.connectionpool import (HTTPConnectionPool,
HTTPSConnectionPool)
from requests.packages.urllib3.connection import (HTTPConnection,
HTTPSConnection)
wsgi_fake_socket.settimeout = lambda self, timeout: None
class HTTP_WSGIInterceptor(WSGI_HTTPConnection, HTTPConnection):
- pass
+ def __init__(self, *args, **kwargs):
+ if 'strict' in kwargs and sys.version_info > (3, 0):
+ kwargs.pop('strict')
+ WSGI_HTTPConnection.__init__(self, *args, **kwargs)
+ HTTPConnection.__init__(self, *args, **kwargs)
class HTTPS_WSGIInterceptor(WSGI_HTTPSConnection, HTTPSConnection):
- pass
+ def __init__(self, *args, **kwargs):
+ if 'strict' in kwargs and sys.version_info > (3, 0):
+ kwargs.pop('strict')
+ WSGI_HTTPSConnection.__init__(self, *args, **kwargs)
+ HTTPSConnection.__init__(self, *args, **kwargs)
def install():
HTTPConnectionPool.ConnectionCls = HTTP_WSGIInterceptor
HTTPSConnectionPool.ConnectionCls = HTTPS_WSGIInterceptor
def uninstall():
HTTPConnectionPool.ConnectionCls = HTTPConnection
HTTPSConnectionPool.ConnectionCls = HTTPSConnection |
e6a991b91587f0ef081114b0d15390f682563071 | antfarm/base.py | antfarm/base.py |
import logging
log = logging.getLogger(__name__)
from .request import Request
class App(object):
'''
Base Application class.
Create an instance of this, passing configuration options, and use the resulting instance as your WSGI application callable.
application = App(root_view=myview)
You can also sub-class this to provide the root_view.
'''
def __init__(self, **opts):
self.root_view = opts['root_view']
self.opts = opts
def __call__(self, environ, start_response):
request = Request(self, environ)
response = self.root_view(request)
start_response(response.status, response.build_headers())
return response
|
import logging
log = logging.getLogger(__name__)
from .request import Request
class App(object):
'''
Base Application class.
Create an instance of this, passing configuration options, and use the resulting instance as your WSGI application callable.
application = App(root_view=myview)
You can also sub-class this to provide the root_view.
'''
def __init__(self, **opts):
for key, val in opts.items():
setattr(self, key, val)
def __call__(self, environ, start_response):
request = Request(self, environ)
response = self.root_view(request)
start_response(response.status, response.build_headers())
return response
| Update the app with all supplied config arguments | Update the app with all supplied config arguments
| Python | mit | funkybob/antfarm |
import logging
log = logging.getLogger(__name__)
from .request import Request
class App(object):
'''
Base Application class.
Create an instance of this, passing configuration options, and use the resulting instance as your WSGI application callable.
application = App(root_view=myview)
You can also sub-class this to provide the root_view.
'''
def __init__(self, **opts):
- self.root_view = opts['root_view']
- self.opts = opts
+ for key, val in opts.items():
+ setattr(self, key, val)
def __call__(self, environ, start_response):
request = Request(self, environ)
response = self.root_view(request)
start_response(response.status, response.build_headers())
return response
| Update the app with all supplied config arguments | ## Code Before:
import logging
log = logging.getLogger(__name__)
from .request import Request
class App(object):
'''
Base Application class.
Create an instance of this, passing configuration options, and use the resulting instance as your WSGI application callable.
application = App(root_view=myview)
You can also sub-class this to provide the root_view.
'''
def __init__(self, **opts):
self.root_view = opts['root_view']
self.opts = opts
def __call__(self, environ, start_response):
request = Request(self, environ)
response = self.root_view(request)
start_response(response.status, response.build_headers())
return response
## Instruction:
Update the app with all supplied config arguments
## Code After:
import logging
log = logging.getLogger(__name__)
from .request import Request
class App(object):
'''
Base Application class.
Create an instance of this, passing configuration options, and use the resulting instance as your WSGI application callable.
application = App(root_view=myview)
You can also sub-class this to provide the root_view.
'''
def __init__(self, **opts):
for key, val in opts.items():
setattr(self, key, val)
def __call__(self, environ, start_response):
request = Request(self, environ)
response = self.root_view(request)
start_response(response.status, response.build_headers())
return response
|
import logging
log = logging.getLogger(__name__)
from .request import Request
class App(object):
'''
Base Application class.
Create an instance of this, passing configuration options, and use the resulting instance as your WSGI application callable.
application = App(root_view=myview)
You can also sub-class this to provide the root_view.
'''
def __init__(self, **opts):
- self.root_view = opts['root_view']
- self.opts = opts
+ for key, val in opts.items():
+ setattr(self, key, val)
def __call__(self, environ, start_response):
request = Request(self, environ)
response = self.root_view(request)
start_response(response.status, response.build_headers())
return response |
b82dbd63aedf8a6a6af494b6d6be697a9f4230d5 | tests/test_utils.py | tests/test_utils.py | import pickle
from six.moves import range
from fuel.utils import do_not_pickle_attributes
@do_not_pickle_attributes("non_pickable", "bulky_attr")
class TestClass(object):
def __init__(self):
self.load()
def load(self):
self.bulky_attr = list(range(100))
self.non_pickable = lambda x: x
def test_do_not_pickle_attributes():
cl = TestClass()
dump = pickle.dumps(cl)
loaded = pickle.loads(dump)
assert loaded.bulky_attr == list(range(100))
assert loaded.non_pickable is not None
| import pickle
from six.moves import range
from fuel.utils import do_not_pickle_attributes, expand_axis_label
@do_not_pickle_attributes("non_pickable", "bulky_attr")
class TestClass(object):
def __init__(self):
self.load()
def load(self):
self.bulky_attr = list(range(100))
self.non_pickable = lambda x: x
def test_do_not_pickle_attributes():
cl = TestClass()
dump = pickle.dumps(cl)
loaded = pickle.loads(dump)
assert loaded.bulky_attr == list(range(100))
assert loaded.non_pickable is not None
def test_expand_axis_label():
assert expand_axis_label('b') == 'batch'
assert expand_axis_label('c') == 'channel'
assert expand_axis_label('t') == 'time'
assert expand_axis_label('0') == 'axis_0'
assert expand_axis_label('1') == 'axis_1'
assert expand_axis_label('0b') == '0b'
assert expand_axis_label('') == ''
| Add unit test for expand_axis_label | Add unit test for expand_axis_label
| Python | mit | dwf/fuel,ejls/fuel,udibr/fuel,rizar/fuel,capybaralet/fuel,rizar/fuel,EderSantana/fuel,EderSantana/fuel,orhanf/fuel,aalmah/fuel,mila-udem/fuel,mjwillson/fuel,glewis17/fuel,orhanf/fuel,dhruvparamhans/fuel,hantek/fuel,lamblin/fuel,jbornschein/fuel,dribnet/fuel,markusnagel/fuel,udibr/fuel,harmdevries89/fuel,dribnet/fuel,glewis17/fuel,janchorowski/fuel,harmdevries89/fuel,chrishokamp/fuel,jbornschein/fuel,vdumoulin/fuel,codeaudit/fuel,aalmah/fuel,markusnagel/fuel,dmitriy-serdyuk/fuel,rodrigob/fuel,dwf/fuel,dmitriy-serdyuk/fuel,bouthilx/fuel,bouthilx/fuel,capybaralet/fuel,janchorowski/fuel,laurent-dinh/fuel,dhruvparamhans/fuel,mjwillson/fuel,chrishokamp/fuel,mila-udem/fuel,lamblin/fuel,ejls/fuel,vdumoulin/fuel,laurent-dinh/fuel,codeaudit/fuel,hantek/fuel,rodrigob/fuel | import pickle
from six.moves import range
- from fuel.utils import do_not_pickle_attributes
+ from fuel.utils import do_not_pickle_attributes, expand_axis_label
@do_not_pickle_attributes("non_pickable", "bulky_attr")
class TestClass(object):
def __init__(self):
self.load()
def load(self):
self.bulky_attr = list(range(100))
self.non_pickable = lambda x: x
def test_do_not_pickle_attributes():
cl = TestClass()
dump = pickle.dumps(cl)
loaded = pickle.loads(dump)
assert loaded.bulky_attr == list(range(100))
assert loaded.non_pickable is not None
+
+ def test_expand_axis_label():
+ assert expand_axis_label('b') == 'batch'
+ assert expand_axis_label('c') == 'channel'
+ assert expand_axis_label('t') == 'time'
+ assert expand_axis_label('0') == 'axis_0'
+ assert expand_axis_label('1') == 'axis_1'
+ assert expand_axis_label('0b') == '0b'
+ assert expand_axis_label('') == ''
+ | Add unit test for expand_axis_label | ## Code Before:
import pickle
from six.moves import range
from fuel.utils import do_not_pickle_attributes
@do_not_pickle_attributes("non_pickable", "bulky_attr")
class TestClass(object):
def __init__(self):
self.load()
def load(self):
self.bulky_attr = list(range(100))
self.non_pickable = lambda x: x
def test_do_not_pickle_attributes():
cl = TestClass()
dump = pickle.dumps(cl)
loaded = pickle.loads(dump)
assert loaded.bulky_attr == list(range(100))
assert loaded.non_pickable is not None
## Instruction:
Add unit test for expand_axis_label
## Code After:
import pickle
from six.moves import range
from fuel.utils import do_not_pickle_attributes, expand_axis_label
@do_not_pickle_attributes("non_pickable", "bulky_attr")
class TestClass(object):
def __init__(self):
self.load()
def load(self):
self.bulky_attr = list(range(100))
self.non_pickable = lambda x: x
def test_do_not_pickle_attributes():
cl = TestClass()
dump = pickle.dumps(cl)
loaded = pickle.loads(dump)
assert loaded.bulky_attr == list(range(100))
assert loaded.non_pickable is not None
def test_expand_axis_label():
assert expand_axis_label('b') == 'batch'
assert expand_axis_label('c') == 'channel'
assert expand_axis_label('t') == 'time'
assert expand_axis_label('0') == 'axis_0'
assert expand_axis_label('1') == 'axis_1'
assert expand_axis_label('0b') == '0b'
assert expand_axis_label('') == ''
| import pickle
from six.moves import range
- from fuel.utils import do_not_pickle_attributes
+ from fuel.utils import do_not_pickle_attributes, expand_axis_label
? +++++++++++++++++++
@do_not_pickle_attributes("non_pickable", "bulky_attr")
class TestClass(object):
def __init__(self):
self.load()
def load(self):
self.bulky_attr = list(range(100))
self.non_pickable = lambda x: x
def test_do_not_pickle_attributes():
cl = TestClass()
dump = pickle.dumps(cl)
loaded = pickle.loads(dump)
assert loaded.bulky_attr == list(range(100))
assert loaded.non_pickable is not None
+
+
+ def test_expand_axis_label():
+ assert expand_axis_label('b') == 'batch'
+ assert expand_axis_label('c') == 'channel'
+ assert expand_axis_label('t') == 'time'
+ assert expand_axis_label('0') == 'axis_0'
+ assert expand_axis_label('1') == 'axis_1'
+ assert expand_axis_label('0b') == '0b'
+ assert expand_axis_label('') == '' |
2627c2c5ea6fdbff9d9979765deee8740a11c7c9 | backend/globaleaks/handlers/__init__.py | backend/globaleaks/handlers/__init__.py |
__all__ = ['admin',
'base',
'css',
'files',
'node',
'receiver',
'rtip',
'submission',
'wbtip']
|
__all__ = ['admin',
'base',
'files',
'node',
'receiver',
'rtip',
'submission',
'wbtip']
| Fix module packaging thanks to landscape.io hint | Fix module packaging thanks to landscape.io hint
| Python | agpl-3.0 | vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks |
__all__ = ['admin',
'base',
- 'css',
'files',
'node',
'receiver',
'rtip',
'submission',
'wbtip']
| Fix module packaging thanks to landscape.io hint | ## Code Before:
__all__ = ['admin',
'base',
'css',
'files',
'node',
'receiver',
'rtip',
'submission',
'wbtip']
## Instruction:
Fix module packaging thanks to landscape.io hint
## Code After:
__all__ = ['admin',
'base',
'files',
'node',
'receiver',
'rtip',
'submission',
'wbtip']
|
__all__ = ['admin',
'base',
- 'css',
'files',
'node',
'receiver',
'rtip',
'submission',
'wbtip'] |
06356979dd377137c77139c45a0b40deea3f5b27 | tests/test_api.py | tests/test_api.py | import scipy.interpolate
import numpy as np
import naturalneighbor
def test_output_size_matches_scipy():
points = np.random.rand(10, 3)
values = np.random.rand(10)
grid_ranges = [
[0, 4, 0.6], # step isn't a multiple
[-3, 3, 1.0], # step is a multiple
[0, 1, 3], # step is larger than stop - start
]
mesh_grids = tuple(np.mgrid[
grid_ranges[0][0]:grid_ranges[0][1]:grid_ranges[0][2],
grid_ranges[1][0]:grid_ranges[1][1]:grid_ranges[1][2],
grid_ranges[2][0]:grid_ranges[2][1]:grid_ranges[2][2],
])
scipy_result = scipy.interpolate.griddata(points, values, mesh_grids)
nn_result = naturalneighbor.griddata(points, values, grid_ranges)
assert scipy_result.shape == nn_result.shape
| import scipy.interpolate
import numpy as np
import pytest
import naturalneighbor
@pytest.mark.parametrize("grid_ranges", [
[[0, 4, 0.6], [-3, 3, 1.0], [0, 1, 3]],
[[0, 2, 1], [0, 2, 1j], [0, 2, 2j]],
])
def test_output_size_matches_scipy(grid_ranges):
points = np.random.rand(10, 3)
values = np.random.rand(10)
mesh_grids = tuple(np.mgrid[
grid_ranges[0][0]:grid_ranges[0][1]:grid_ranges[0][2],
grid_ranges[1][0]:grid_ranges[1][1]:grid_ranges[1][2],
grid_ranges[2][0]:grid_ranges[2][1]:grid_ranges[2][2],
])
scipy_result = scipy.interpolate.griddata(points, values, mesh_grids)
nn_result = naturalneighbor.griddata(points, values, grid_ranges)
assert scipy_result.shape == nn_result.shape
| Add test for complex indexing | Add test for complex indexing
| Python | mit | innolitics/natural-neighbor-interpolation,innolitics/natural-neighbor-interpolation,innolitics/natural-neighbor-interpolation | import scipy.interpolate
import numpy as np
+ import pytest
import naturalneighbor
+ @pytest.mark.parametrize("grid_ranges", [
+ [[0, 4, 0.6], [-3, 3, 1.0], [0, 1, 3]],
+ [[0, 2, 1], [0, 2, 1j], [0, 2, 2j]],
+ ])
- def test_output_size_matches_scipy():
+ def test_output_size_matches_scipy(grid_ranges):
points = np.random.rand(10, 3)
values = np.random.rand(10)
-
- grid_ranges = [
- [0, 4, 0.6], # step isn't a multiple
- [-3, 3, 1.0], # step is a multiple
- [0, 1, 3], # step is larger than stop - start
- ]
mesh_grids = tuple(np.mgrid[
grid_ranges[0][0]:grid_ranges[0][1]:grid_ranges[0][2],
grid_ranges[1][0]:grid_ranges[1][1]:grid_ranges[1][2],
grid_ranges[2][0]:grid_ranges[2][1]:grid_ranges[2][2],
])
scipy_result = scipy.interpolate.griddata(points, values, mesh_grids)
nn_result = naturalneighbor.griddata(points, values, grid_ranges)
assert scipy_result.shape == nn_result.shape
| Add test for complex indexing | ## Code Before:
import scipy.interpolate
import numpy as np
import naturalneighbor
def test_output_size_matches_scipy():
points = np.random.rand(10, 3)
values = np.random.rand(10)
grid_ranges = [
[0, 4, 0.6], # step isn't a multiple
[-3, 3, 1.0], # step is a multiple
[0, 1, 3], # step is larger than stop - start
]
mesh_grids = tuple(np.mgrid[
grid_ranges[0][0]:grid_ranges[0][1]:grid_ranges[0][2],
grid_ranges[1][0]:grid_ranges[1][1]:grid_ranges[1][2],
grid_ranges[2][0]:grid_ranges[2][1]:grid_ranges[2][2],
])
scipy_result = scipy.interpolate.griddata(points, values, mesh_grids)
nn_result = naturalneighbor.griddata(points, values, grid_ranges)
assert scipy_result.shape == nn_result.shape
## Instruction:
Add test for complex indexing
## Code After:
import scipy.interpolate
import numpy as np
import pytest
import naturalneighbor
@pytest.mark.parametrize("grid_ranges", [
[[0, 4, 0.6], [-3, 3, 1.0], [0, 1, 3]],
[[0, 2, 1], [0, 2, 1j], [0, 2, 2j]],
])
def test_output_size_matches_scipy(grid_ranges):
points = np.random.rand(10, 3)
values = np.random.rand(10)
mesh_grids = tuple(np.mgrid[
grid_ranges[0][0]:grid_ranges[0][1]:grid_ranges[0][2],
grid_ranges[1][0]:grid_ranges[1][1]:grid_ranges[1][2],
grid_ranges[2][0]:grid_ranges[2][1]:grid_ranges[2][2],
])
scipy_result = scipy.interpolate.griddata(points, values, mesh_grids)
nn_result = naturalneighbor.griddata(points, values, grid_ranges)
assert scipy_result.shape == nn_result.shape
| import scipy.interpolate
import numpy as np
+ import pytest
import naturalneighbor
+ @pytest.mark.parametrize("grid_ranges", [
+ [[0, 4, 0.6], [-3, 3, 1.0], [0, 1, 3]],
+ [[0, 2, 1], [0, 2, 1j], [0, 2, 2j]],
+ ])
- def test_output_size_matches_scipy():
+ def test_output_size_matches_scipy(grid_ranges):
? +++++++++++
points = np.random.rand(10, 3)
values = np.random.rand(10)
-
- grid_ranges = [
- [0, 4, 0.6], # step isn't a multiple
- [-3, 3, 1.0], # step is a multiple
- [0, 1, 3], # step is larger than stop - start
- ]
mesh_grids = tuple(np.mgrid[
grid_ranges[0][0]:grid_ranges[0][1]:grid_ranges[0][2],
grid_ranges[1][0]:grid_ranges[1][1]:grid_ranges[1][2],
grid_ranges[2][0]:grid_ranges[2][1]:grid_ranges[2][2],
])
scipy_result = scipy.interpolate.griddata(points, values, mesh_grids)
nn_result = naturalneighbor.griddata(points, values, grid_ranges)
assert scipy_result.shape == nn_result.shape |
00b31f3025493942c0ce7eb03c7cc09abf0eb8d0 | txlege84/core/views.py | txlege84/core/views.py | from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
print context
return context
| from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
return context
| Print statement snuck in there | Print statement snuck in there
| Python | mit | texastribune/txlege84,texastribune/txlege84,texastribune/txlege84,texastribune/txlege84 | from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
- print context
return context
| Print statement snuck in there | ## Code Before:
from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
print context
return context
## Instruction:
Print statement snuck in there
## Code After:
from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
return context
| from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
- print context
return context |
317926c18ac2e139d2018acd767d10b4f53428f3 | installer/installer_config/views.py | installer/installer_config/views.py | from django.shortcuts import render
from django.shortcuts import render_to_response
from django.views.generic import CreateView, UpdateView, DeleteView
from installer_config.models import EnvironmentProfile, UserChoice, Step
from installer_config.forms import EnvironmentForm
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
class CreateEnvironmentProfile(CreateView):
model = EnvironmentProfile
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
def form_valid(self, form):
form.instance.user = self.request.user
return super(CreateEnvironmentProfile, self).form_valid(form)
def post(self, request, *args, **kwargs):
form_class = self.get_form_class()
form = form_class(request.POST)
if form.is_valid():
config_profile = form.save(commit=False)
config_profile.user = request.user
config_profile.save()
return HttpResponseRedirect(reverse('profile:profile'))
return self.render_to_response({'form': form})
class UpdateEnvironmentProfile(UpdateView):
model = EnvironmentProfile
context_object_name = 'profile'
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
class DeleteEnvironmentProfile(DeleteView):
model = EnvironmentProfile
success_url = '/profile'
def download_profile_view(request, **kwargs):
choices = UserChoice.objects.filter(profiles=kwargs['pk']).all()
# import pdb; pdb.set_trace()
response = render_to_response('installer_template.py', {'choices': choices},
content_type='application')
response['Content-Disposition'] = 'attachment; filename=something.py'
return response
| from django.shortcuts import render
from django.shortcuts import render_to_response
from django.views.generic import CreateView, UpdateView, DeleteView
from installer_config.models import EnvironmentProfile, UserChoice, Step
from installer_config.forms import EnvironmentForm
from django.core.urlresolvers import reverse
class CreateEnvironmentProfile(CreateView):
model = EnvironmentProfile
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
def form_valid(self, form):
form.instance.user = self.request.user
return super(CreateEnvironmentProfile, self).form_valid(form)
class UpdateEnvironmentProfile(UpdateView):
model = EnvironmentProfile
context_object_name = 'profile'
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
class DeleteEnvironmentProfile(DeleteView):
model = EnvironmentProfile
success_url = '/profile'
def download_profile_view(request, **kwargs):
choices = UserChoice.objects.filter(profiles=kwargs['pk']).all()
response = render_to_response('installer_template.py', {'choices': choices},
content_type='application')
response['Content-Disposition'] = 'attachment; filename=something.py'
return response
| Remove unneeded post method from CreateEnvProfile view | Remove unneeded post method from CreateEnvProfile view
| Python | mit | ezPy-co/ezpy,alibulota/Package_Installer,ezPy-co/ezpy,alibulota/Package_Installer | from django.shortcuts import render
from django.shortcuts import render_to_response
from django.views.generic import CreateView, UpdateView, DeleteView
from installer_config.models import EnvironmentProfile, UserChoice, Step
from installer_config.forms import EnvironmentForm
from django.core.urlresolvers import reverse
- from django.http import HttpResponseRedirect
+
class CreateEnvironmentProfile(CreateView):
model = EnvironmentProfile
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
def form_valid(self, form):
form.instance.user = self.request.user
return super(CreateEnvironmentProfile, self).form_valid(form)
-
- def post(self, request, *args, **kwargs):
- form_class = self.get_form_class()
- form = form_class(request.POST)
- if form.is_valid():
- config_profile = form.save(commit=False)
- config_profile.user = request.user
- config_profile.save()
- return HttpResponseRedirect(reverse('profile:profile'))
- return self.render_to_response({'form': form})
class UpdateEnvironmentProfile(UpdateView):
model = EnvironmentProfile
context_object_name = 'profile'
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
class DeleteEnvironmentProfile(DeleteView):
model = EnvironmentProfile
success_url = '/profile'
def download_profile_view(request, **kwargs):
choices = UserChoice.objects.filter(profiles=kwargs['pk']).all()
- # import pdb; pdb.set_trace()
response = render_to_response('installer_template.py', {'choices': choices},
- content_type='application')
+ content_type='application')
response['Content-Disposition'] = 'attachment; filename=something.py'
return response
| Remove unneeded post method from CreateEnvProfile view | ## Code Before:
from django.shortcuts import render
from django.shortcuts import render_to_response
from django.views.generic import CreateView, UpdateView, DeleteView
from installer_config.models import EnvironmentProfile, UserChoice, Step
from installer_config.forms import EnvironmentForm
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
class CreateEnvironmentProfile(CreateView):
model = EnvironmentProfile
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
def form_valid(self, form):
form.instance.user = self.request.user
return super(CreateEnvironmentProfile, self).form_valid(form)
def post(self, request, *args, **kwargs):
form_class = self.get_form_class()
form = form_class(request.POST)
if form.is_valid():
config_profile = form.save(commit=False)
config_profile.user = request.user
config_profile.save()
return HttpResponseRedirect(reverse('profile:profile'))
return self.render_to_response({'form': form})
class UpdateEnvironmentProfile(UpdateView):
model = EnvironmentProfile
context_object_name = 'profile'
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
class DeleteEnvironmentProfile(DeleteView):
model = EnvironmentProfile
success_url = '/profile'
def download_profile_view(request, **kwargs):
choices = UserChoice.objects.filter(profiles=kwargs['pk']).all()
# import pdb; pdb.set_trace()
response = render_to_response('installer_template.py', {'choices': choices},
content_type='application')
response['Content-Disposition'] = 'attachment; filename=something.py'
return response
## Instruction:
Remove unneeded post method from CreateEnvProfile view
## Code After:
from django.shortcuts import render
from django.shortcuts import render_to_response
from django.views.generic import CreateView, UpdateView, DeleteView
from installer_config.models import EnvironmentProfile, UserChoice, Step
from installer_config.forms import EnvironmentForm
from django.core.urlresolvers import reverse
class CreateEnvironmentProfile(CreateView):
model = EnvironmentProfile
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
def form_valid(self, form):
form.instance.user = self.request.user
return super(CreateEnvironmentProfile, self).form_valid(form)
class UpdateEnvironmentProfile(UpdateView):
model = EnvironmentProfile
context_object_name = 'profile'
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
class DeleteEnvironmentProfile(DeleteView):
model = EnvironmentProfile
success_url = '/profile'
def download_profile_view(request, **kwargs):
choices = UserChoice.objects.filter(profiles=kwargs['pk']).all()
response = render_to_response('installer_template.py', {'choices': choices},
content_type='application')
response['Content-Disposition'] = 'attachment; filename=something.py'
return response
| from django.shortcuts import render
from django.shortcuts import render_to_response
from django.views.generic import CreateView, UpdateView, DeleteView
from installer_config.models import EnvironmentProfile, UserChoice, Step
from installer_config.forms import EnvironmentForm
from django.core.urlresolvers import reverse
- from django.http import HttpResponseRedirect
+
class CreateEnvironmentProfile(CreateView):
model = EnvironmentProfile
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
def form_valid(self, form):
form.instance.user = self.request.user
return super(CreateEnvironmentProfile, self).form_valid(form)
-
- def post(self, request, *args, **kwargs):
- form_class = self.get_form_class()
- form = form_class(request.POST)
- if form.is_valid():
- config_profile = form.save(commit=False)
- config_profile.user = request.user
- config_profile.save()
- return HttpResponseRedirect(reverse('profile:profile'))
- return self.render_to_response({'form': form})
class UpdateEnvironmentProfile(UpdateView):
model = EnvironmentProfile
context_object_name = 'profile'
template_name = 'env_profile_form.html'
form_class = EnvironmentForm
success_url = '/profile'
class DeleteEnvironmentProfile(DeleteView):
model = EnvironmentProfile
success_url = '/profile'
def download_profile_view(request, **kwargs):
choices = UserChoice.objects.filter(profiles=kwargs['pk']).all()
- # import pdb; pdb.set_trace()
response = render_to_response('installer_template.py', {'choices': choices},
- content_type='application')
+ content_type='application')
response['Content-Disposition'] = 'attachment; filename=something.py'
return response |
b6dff8fcd7dec56703006f2a7bcf1c8c72d0c21b | price_security/models/invoice.py | price_security/models/invoice.py | from openerp import fields, models, api
class account_invoice_line(models.Model):
_inherit = 'account.invoice.line'
# we add this fields instead of making original readonly because we need
# on change to change values, we make readonly in view because sometimes
# we want them to be writeable
invoice_line_tax_id_readonly = fields.Many2many(
related='invoice_line_tax_id',
)
price_unit_readonly = fields.Float(
related='price_unit',
)
product_can_modify_prices = fields.Boolean(
related='product_id.can_modify_prices',
string='Product Can modify prices')
@api.one
@api.constrains(
'discount', 'product_can_modify_prices')
def check_discount(self):
if (
self.user_has_groups(
'price_security.group_restrict_prices') and
not self.product_can_modify_prices and self.invoice_id
):
self.env.user.check_discount(
self.discount,
self.invoice_id.partner_id.property_product_pricelist.id)
| from openerp import fields, models, api
class account_invoice_line(models.Model):
_inherit = 'account.invoice.line'
# we add this fields instead of making original readonly because we need
# on change to change values, we make readonly in view because sometimes
# we want them to be writeable
invoice_line_tax_id_readonly = fields.Many2many(
related='invoice_line_tax_id',
)
price_unit_readonly = fields.Float(
related='price_unit',
)
product_can_modify_prices = fields.Boolean(
related='product_id.can_modify_prices',
readonly=True,
string='Product Can modify prices')
@api.one
@api.constrains(
'discount', 'product_can_modify_prices')
def check_discount(self):
if (
self.user_has_groups(
'price_security.group_restrict_prices') and
not self.product_can_modify_prices and self.invoice_id
):
self.env.user.check_discount(
self.discount,
self.invoice_id.partner_id.property_product_pricelist.id)
| FIX price sec. related field as readonly | FIX price sec. related field as readonly
| Python | agpl-3.0 | ingadhoc/product,ingadhoc/product | from openerp import fields, models, api
class account_invoice_line(models.Model):
_inherit = 'account.invoice.line'
# we add this fields instead of making original readonly because we need
# on change to change values, we make readonly in view because sometimes
# we want them to be writeable
invoice_line_tax_id_readonly = fields.Many2many(
related='invoice_line_tax_id',
)
price_unit_readonly = fields.Float(
related='price_unit',
)
product_can_modify_prices = fields.Boolean(
related='product_id.can_modify_prices',
+ readonly=True,
string='Product Can modify prices')
@api.one
@api.constrains(
'discount', 'product_can_modify_prices')
def check_discount(self):
if (
self.user_has_groups(
'price_security.group_restrict_prices') and
not self.product_can_modify_prices and self.invoice_id
):
self.env.user.check_discount(
self.discount,
self.invoice_id.partner_id.property_product_pricelist.id)
| FIX price sec. related field as readonly | ## Code Before:
from openerp import fields, models, api
class account_invoice_line(models.Model):
_inherit = 'account.invoice.line'
# we add this fields instead of making original readonly because we need
# on change to change values, we make readonly in view because sometimes
# we want them to be writeable
invoice_line_tax_id_readonly = fields.Many2many(
related='invoice_line_tax_id',
)
price_unit_readonly = fields.Float(
related='price_unit',
)
product_can_modify_prices = fields.Boolean(
related='product_id.can_modify_prices',
string='Product Can modify prices')
@api.one
@api.constrains(
'discount', 'product_can_modify_prices')
def check_discount(self):
if (
self.user_has_groups(
'price_security.group_restrict_prices') and
not self.product_can_modify_prices and self.invoice_id
):
self.env.user.check_discount(
self.discount,
self.invoice_id.partner_id.property_product_pricelist.id)
## Instruction:
FIX price sec. related field as readonly
## Code After:
from openerp import fields, models, api
class account_invoice_line(models.Model):
_inherit = 'account.invoice.line'
# we add this fields instead of making original readonly because we need
# on change to change values, we make readonly in view because sometimes
# we want them to be writeable
invoice_line_tax_id_readonly = fields.Many2many(
related='invoice_line_tax_id',
)
price_unit_readonly = fields.Float(
related='price_unit',
)
product_can_modify_prices = fields.Boolean(
related='product_id.can_modify_prices',
readonly=True,
string='Product Can modify prices')
@api.one
@api.constrains(
'discount', 'product_can_modify_prices')
def check_discount(self):
if (
self.user_has_groups(
'price_security.group_restrict_prices') and
not self.product_can_modify_prices and self.invoice_id
):
self.env.user.check_discount(
self.discount,
self.invoice_id.partner_id.property_product_pricelist.id)
| from openerp import fields, models, api
class account_invoice_line(models.Model):
_inherit = 'account.invoice.line'
# we add this fields instead of making original readonly because we need
# on change to change values, we make readonly in view because sometimes
# we want them to be writeable
invoice_line_tax_id_readonly = fields.Many2many(
related='invoice_line_tax_id',
)
price_unit_readonly = fields.Float(
related='price_unit',
)
product_can_modify_prices = fields.Boolean(
related='product_id.can_modify_prices',
+ readonly=True,
string='Product Can modify prices')
@api.one
@api.constrains(
'discount', 'product_can_modify_prices')
def check_discount(self):
if (
self.user_has_groups(
'price_security.group_restrict_prices') and
not self.product_can_modify_prices and self.invoice_id
):
self.env.user.check_discount(
self.discount,
self.invoice_id.partner_id.property_product_pricelist.id) |
f7c9bbd5ac49254d564a56ba3713b55abcfa4079 | byceps/blueprints/news/views.py | byceps/blueprints/news/views.py |
from flask import abort, g
from ...services.news import service as news_service
from ...util.framework import create_blueprint
from ...util.templating import templated
blueprint = create_blueprint('news', __name__)
ITEMS_PER_PAGE = 4
@blueprint.route('/', defaults={'page': 1})
@blueprint.route('/pages/<int:page>')
@templated
def index(page):
"""Show a page of news items."""
items = news_service.get_items_paginated(g.party.brand.id, page,
ITEMS_PER_PAGE)
return {
'items': items,
'page': page,
}
@blueprint.route('/<slug>')
@templated
def view(slug):
"""Show a single news item."""
item = news_service.find_item_by_slug(g.party.brand.id, slug)
if item is None:
abort(404)
return {
'item': item,
}
|
from flask import abort, current_app, g
from ...services.news import service as news_service
from ...util.framework import create_blueprint
from ...util.templating import templated
blueprint = create_blueprint('news', __name__)
@blueprint.route('/', defaults={'page': 1})
@blueprint.route('/pages/<int:page>')
@templated
def index(page):
"""Show a page of news items."""
items_per_page = _get_items_per_page_value()
items = news_service.get_items_paginated(g.party.brand.id, page,
items_per_page)
return {
'items': items,
'page': page,
}
@blueprint.route('/<slug>')
@templated
def view(slug):
"""Show a single news item."""
item = news_service.find_item_by_slug(g.party.brand.id, slug)
if item is None:
abort(404)
return {
'item': item,
}
def _get_items_per_page_value(default=4):
return int(current_app.config.get('NEWS_ITEMS_PER_PAGE', default))
| Allow configuration of the number of news items per page | Allow configuration of the number of news items per page
| Python | bsd-3-clause | homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps |
- from flask import abort, g
+ from flask import abort, current_app, g
from ...services.news import service as news_service
from ...util.framework import create_blueprint
from ...util.templating import templated
blueprint = create_blueprint('news', __name__)
- ITEMS_PER_PAGE = 4
-
-
@blueprint.route('/', defaults={'page': 1})
@blueprint.route('/pages/<int:page>')
@templated
def index(page):
"""Show a page of news items."""
+ items_per_page = _get_items_per_page_value()
+
items = news_service.get_items_paginated(g.party.brand.id, page,
- ITEMS_PER_PAGE)
+ items_per_page)
return {
'items': items,
'page': page,
}
@blueprint.route('/<slug>')
@templated
def view(slug):
"""Show a single news item."""
item = news_service.find_item_by_slug(g.party.brand.id, slug)
if item is None:
abort(404)
return {
'item': item,
}
+
+ def _get_items_per_page_value(default=4):
+ return int(current_app.config.get('NEWS_ITEMS_PER_PAGE', default))
+ | Allow configuration of the number of news items per page | ## Code Before:
from flask import abort, g
from ...services.news import service as news_service
from ...util.framework import create_blueprint
from ...util.templating import templated
blueprint = create_blueprint('news', __name__)
ITEMS_PER_PAGE = 4
@blueprint.route('/', defaults={'page': 1})
@blueprint.route('/pages/<int:page>')
@templated
def index(page):
"""Show a page of news items."""
items = news_service.get_items_paginated(g.party.brand.id, page,
ITEMS_PER_PAGE)
return {
'items': items,
'page': page,
}
@blueprint.route('/<slug>')
@templated
def view(slug):
"""Show a single news item."""
item = news_service.find_item_by_slug(g.party.brand.id, slug)
if item is None:
abort(404)
return {
'item': item,
}
## Instruction:
Allow configuration of the number of news items per page
## Code After:
from flask import abort, current_app, g
from ...services.news import service as news_service
from ...util.framework import create_blueprint
from ...util.templating import templated
blueprint = create_blueprint('news', __name__)
@blueprint.route('/', defaults={'page': 1})
@blueprint.route('/pages/<int:page>')
@templated
def index(page):
"""Show a page of news items."""
items_per_page = _get_items_per_page_value()
items = news_service.get_items_paginated(g.party.brand.id, page,
items_per_page)
return {
'items': items,
'page': page,
}
@blueprint.route('/<slug>')
@templated
def view(slug):
"""Show a single news item."""
item = news_service.find_item_by_slug(g.party.brand.id, slug)
if item is None:
abort(404)
return {
'item': item,
}
def _get_items_per_page_value(default=4):
return int(current_app.config.get('NEWS_ITEMS_PER_PAGE', default))
|
- from flask import abort, g
+ from flask import abort, current_app, g
? +++++++++++++
from ...services.news import service as news_service
from ...util.framework import create_blueprint
from ...util.templating import templated
blueprint = create_blueprint('news', __name__)
- ITEMS_PER_PAGE = 4
-
-
@blueprint.route('/', defaults={'page': 1})
@blueprint.route('/pages/<int:page>')
@templated
def index(page):
"""Show a page of news items."""
+ items_per_page = _get_items_per_page_value()
+
items = news_service.get_items_paginated(g.party.brand.id, page,
- ITEMS_PER_PAGE)
? ^^^^^ ^^^ ^^^^
+ items_per_page)
? ^^^^^ ^^^ ^^^^
return {
'items': items,
'page': page,
}
@blueprint.route('/<slug>')
@templated
def view(slug):
"""Show a single news item."""
item = news_service.find_item_by_slug(g.party.brand.id, slug)
if item is None:
abort(404)
return {
'item': item,
}
+
+
+ def _get_items_per_page_value(default=4):
+ return int(current_app.config.get('NEWS_ITEMS_PER_PAGE', default)) |
7873996d49ad32984465086623a3f6537eae11af | nbgrader/preprocessors/headerfooter.py | nbgrader/preprocessors/headerfooter.py | from IPython.nbconvert.preprocessors import Preprocessor
from IPython.nbformat.current import read as read_nb
from IPython.utils.traitlets import Unicode
class IncludeHeaderFooter(Preprocessor):
"""A preprocessor for adding header and/or footer cells to a notebook."""
header = Unicode("", config=True, help="Path to header notebook")
footer = Unicode("", config=True, help="Path to footer notebook")
def preprocess(self, nb, resources):
"""Concatenates the cells from the header and footer notebooks to the
given cells.
"""
new_cells = []
# header
if self.header != "":
with open(self.header, 'r') as fh:
header_nb = read_nb(fh, 'ipynb')
new_cells.extend(header_nb.worksheets[0].cells)
# body
new_cells.extend(nb.worksheets[0].cells)
# footer
if self.footer != "":
with open(self.footer, 'r') as fh:
footer_nb = read_nb(fh, 'ipynb')
new_cells.extend(footer_nb.worksheets[0].cells)
nb.worksheets[0].cells = new_cells
super(IncludeHeaderFooter, self).preprocess(nb, resources)
return nb, resources
def preprocess_cell(self, cell, resources, cell_index):
return cell, resources
| from IPython.nbconvert.preprocessors import Preprocessor
from IPython.nbformat.current import read as read_nb
from IPython.utils.traitlets import Unicode
class IncludeHeaderFooter(Preprocessor):
"""A preprocessor for adding header and/or footer cells to a notebook."""
header = Unicode("", config=True, help="Path to header notebook")
footer = Unicode("", config=True, help="Path to footer notebook")
def preprocess(self, nb, resources):
"""Concatenates the cells from the header and footer notebooks to the
given cells.
"""
new_cells = []
# header
if self.header:
with open(self.header, 'r') as fh:
header_nb = read_nb(fh, 'ipynb')
new_cells.extend(header_nb.worksheets[0].cells)
# body
new_cells.extend(nb.worksheets[0].cells)
# footer
if self.footer:
with open(self.footer, 'r') as fh:
footer_nb = read_nb(fh, 'ipynb')
new_cells.extend(footer_nb.worksheets[0].cells)
nb.worksheets[0].cells = new_cells
super(IncludeHeaderFooter, self).preprocess(nb, resources)
return nb, resources
def preprocess_cell(self, cell, resources, cell_index):
return cell, resources
| Fix if statements checking if header/footer exist | Fix if statements checking if header/footer exist
| Python | bsd-3-clause | jhamrick/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,EdwardJKim/nbgrader,jupyter/nbgrader,jupyter/nbgrader,EdwardJKim/nbgrader,ellisonbg/nbgrader,MatKallada/nbgrader,ellisonbg/nbgrader,modulexcite/nbgrader,jhamrick/nbgrader,EdwardJKim/nbgrader,jdfreder/nbgrader,jdfreder/nbgrader,jupyter/nbgrader,alope107/nbgrader,MatKallada/nbgrader,modulexcite/nbgrader,dementrock/nbgrader,jhamrick/nbgrader,ellisonbg/nbgrader,alope107/nbgrader,dementrock/nbgrader | from IPython.nbconvert.preprocessors import Preprocessor
from IPython.nbformat.current import read as read_nb
from IPython.utils.traitlets import Unicode
class IncludeHeaderFooter(Preprocessor):
"""A preprocessor for adding header and/or footer cells to a notebook."""
header = Unicode("", config=True, help="Path to header notebook")
footer = Unicode("", config=True, help="Path to footer notebook")
def preprocess(self, nb, resources):
"""Concatenates the cells from the header and footer notebooks to the
given cells.
"""
new_cells = []
# header
- if self.header != "":
+ if self.header:
with open(self.header, 'r') as fh:
header_nb = read_nb(fh, 'ipynb')
new_cells.extend(header_nb.worksheets[0].cells)
# body
new_cells.extend(nb.worksheets[0].cells)
# footer
- if self.footer != "":
+ if self.footer:
with open(self.footer, 'r') as fh:
footer_nb = read_nb(fh, 'ipynb')
new_cells.extend(footer_nb.worksheets[0].cells)
nb.worksheets[0].cells = new_cells
super(IncludeHeaderFooter, self).preprocess(nb, resources)
return nb, resources
def preprocess_cell(self, cell, resources, cell_index):
return cell, resources
| Fix if statements checking if header/footer exist | ## Code Before:
from IPython.nbconvert.preprocessors import Preprocessor
from IPython.nbformat.current import read as read_nb
from IPython.utils.traitlets import Unicode
class IncludeHeaderFooter(Preprocessor):
"""A preprocessor for adding header and/or footer cells to a notebook."""
header = Unicode("", config=True, help="Path to header notebook")
footer = Unicode("", config=True, help="Path to footer notebook")
def preprocess(self, nb, resources):
"""Concatenates the cells from the header and footer notebooks to the
given cells.
"""
new_cells = []
# header
if self.header != "":
with open(self.header, 'r') as fh:
header_nb = read_nb(fh, 'ipynb')
new_cells.extend(header_nb.worksheets[0].cells)
# body
new_cells.extend(nb.worksheets[0].cells)
# footer
if self.footer != "":
with open(self.footer, 'r') as fh:
footer_nb = read_nb(fh, 'ipynb')
new_cells.extend(footer_nb.worksheets[0].cells)
nb.worksheets[0].cells = new_cells
super(IncludeHeaderFooter, self).preprocess(nb, resources)
return nb, resources
def preprocess_cell(self, cell, resources, cell_index):
return cell, resources
## Instruction:
Fix if statements checking if header/footer exist
## Code After:
from IPython.nbconvert.preprocessors import Preprocessor
from IPython.nbformat.current import read as read_nb
from IPython.utils.traitlets import Unicode
class IncludeHeaderFooter(Preprocessor):
"""A preprocessor for adding header and/or footer cells to a notebook."""
header = Unicode("", config=True, help="Path to header notebook")
footer = Unicode("", config=True, help="Path to footer notebook")
def preprocess(self, nb, resources):
"""Concatenates the cells from the header and footer notebooks to the
given cells.
"""
new_cells = []
# header
if self.header:
with open(self.header, 'r') as fh:
header_nb = read_nb(fh, 'ipynb')
new_cells.extend(header_nb.worksheets[0].cells)
# body
new_cells.extend(nb.worksheets[0].cells)
# footer
if self.footer:
with open(self.footer, 'r') as fh:
footer_nb = read_nb(fh, 'ipynb')
new_cells.extend(footer_nb.worksheets[0].cells)
nb.worksheets[0].cells = new_cells
super(IncludeHeaderFooter, self).preprocess(nb, resources)
return nb, resources
def preprocess_cell(self, cell, resources, cell_index):
return cell, resources
| from IPython.nbconvert.preprocessors import Preprocessor
from IPython.nbformat.current import read as read_nb
from IPython.utils.traitlets import Unicode
class IncludeHeaderFooter(Preprocessor):
"""A preprocessor for adding header and/or footer cells to a notebook."""
header = Unicode("", config=True, help="Path to header notebook")
footer = Unicode("", config=True, help="Path to footer notebook")
def preprocess(self, nb, resources):
"""Concatenates the cells from the header and footer notebooks to the
given cells.
"""
new_cells = []
# header
- if self.header != "":
? ------
+ if self.header:
with open(self.header, 'r') as fh:
header_nb = read_nb(fh, 'ipynb')
new_cells.extend(header_nb.worksheets[0].cells)
# body
new_cells.extend(nb.worksheets[0].cells)
# footer
- if self.footer != "":
? ------
+ if self.footer:
with open(self.footer, 'r') as fh:
footer_nb = read_nb(fh, 'ipynb')
new_cells.extend(footer_nb.worksheets[0].cells)
nb.worksheets[0].cells = new_cells
super(IncludeHeaderFooter, self).preprocess(nb, resources)
return nb, resources
def preprocess_cell(self, cell, resources, cell_index):
return cell, resources |
40653d829efcc0461d0da9472111aa89b41e08f1 | hasjob/views/login.py | hasjob/views/login.py |
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
|
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@app.route('/login/notify')
@lastuser.notification_handler
def lastusernotify(user):
# Save the user object
db.session.commit()
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
| Support for Lastuser push notifications. | Support for Lastuser push notifications.
| Python | agpl-3.0 | qitianchan/hasjob,qitianchan/hasjob,hasgeek/hasjob,hasgeek/hasjob,nhannv/hasjob,hasgeek/hasjob,sindhus/hasjob,sindhus/hasjob,nhannv/hasjob,sindhus/hasjob,qitianchan/hasjob,sindhus/hasjob,qitianchan/hasjob,ashwin01/hasjob,ashwin01/hasjob,hasgeek/hasjob,ashwin01/hasjob,ashwin01/hasjob,nhannv/hasjob,nhannv/hasjob,qitianchan/hasjob,ashwin01/hasjob,nhannv/hasjob,sindhus/hasjob |
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
+ @app.route('/login/notify')
+ @lastuser.notification_handler
+ def lastusernotify(user):
+ # Save the user object
+ db.session.commit()
+
+
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
| Support for Lastuser push notifications. | ## Code Before:
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
## Instruction:
Support for Lastuser push notifications.
## Code After:
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
@app.route('/login/notify')
@lastuser.notification_handler
def lastusernotify(user):
# Save the user object
db.session.commit()
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain")
|
from flask import Response, redirect, flash
from flask.ext.lastuser.sqlalchemy import UserManager
from coaster.views import get_next_url
from hasjob import app, lastuser
from hasjob.models import db, User
lastuser.init_usermanager(UserManager(db, User))
@app.route('/login')
@lastuser.login_handler
def login():
return {'scope': 'id email organizations'}
@app.route('/logout')
@lastuser.logout_handler
def logout():
flash(u"You are now logged out", category='info')
return get_next_url()
@app.route('/login/redirect')
@lastuser.auth_handler
def lastuserauth():
# Save the user object
db.session.commit()
return redirect(get_next_url())
+ @app.route('/login/notify')
+ @lastuser.notification_handler
+ def lastusernotify(user):
+ # Save the user object
+ db.session.commit()
+
+
@lastuser.auth_error_handler
def lastuser_error(error, error_description=None, error_uri=None):
if error == 'access_denied':
flash("You denied the request to login", category='error')
return redirect(get_next_url())
return Response(u"Error: %s\n"
u"Description: %s\n"
u"URI: %s" % (error, error_description, error_uri),
mimetype="text/plain") |
acba9a027eafb1877f0b6208613206674ba5d55d | tmc/models/employee.py | tmc/models/employee.py |
from odoo import fields, models
class Employee(models.Model):
_name = 'tmc.hr.employee'
_order = 'name'
name = fields.Char()
internal_number = fields.Char(
size=3,
required=True
)
docket_number = fields.Integer(
required=True
)
bank_account_number = fields.Char()
bank_branch = fields.Integer(
size=2
)
admission_date = fields.Date()
email = fields.Char()
active = fields.Boolean(
default=True
)
employee_title_ids = fields.Many2many(
comodel_name='tmc.hr.employee_title'
)
employee_job_id = fields.Many2one(
comodel_name='tmc.hr.employee_job'
)
office_id = fields.Many2one(
comodel_name='tmc.hr.office'
)
_sql_constraints = [
('number_uniq',
'unique(docket_number, bank_account_number)',
'Number must be unique!'),
]
|
from odoo import fields, models
class Employee(models.Model):
_name = 'tmc.hr.employee'
_order = 'name'
name = fields.Char()
internal_number = fields.Char(
size=3
)
docket_number = fields.Integer()
bank_account_number = fields.Char()
bank_branch = fields.Integer(
size=2
)
admission_date = fields.Date()
email = fields.Char()
active = fields.Boolean(
default=True
)
employee_title_ids = fields.Many2many(
comodel_name='tmc.hr.employee_title'
)
employee_job_id = fields.Many2one(
comodel_name='tmc.hr.employee_job'
)
office_id = fields.Many2one(
comodel_name='tmc.hr.office'
)
_sql_constraints = [
('number_uniq',
'unique(docket_number, bank_account_number)',
'Number must be unique!'),
]
| Remove required property on some fields | [DEL] Remove required property on some fields
| Python | agpl-3.0 | tmcrosario/odoo-tmc |
from odoo import fields, models
class Employee(models.Model):
_name = 'tmc.hr.employee'
_order = 'name'
name = fields.Char()
internal_number = fields.Char(
- size=3,
+ size=3
- required=True
)
- docket_number = fields.Integer(
+ docket_number = fields.Integer()
- required=True
- )
bank_account_number = fields.Char()
bank_branch = fields.Integer(
size=2
)
admission_date = fields.Date()
email = fields.Char()
active = fields.Boolean(
default=True
)
employee_title_ids = fields.Many2many(
comodel_name='tmc.hr.employee_title'
)
employee_job_id = fields.Many2one(
comodel_name='tmc.hr.employee_job'
)
office_id = fields.Many2one(
comodel_name='tmc.hr.office'
)
_sql_constraints = [
('number_uniq',
'unique(docket_number, bank_account_number)',
'Number must be unique!'),
]
| Remove required property on some fields | ## Code Before:
from odoo import fields, models
class Employee(models.Model):
_name = 'tmc.hr.employee'
_order = 'name'
name = fields.Char()
internal_number = fields.Char(
size=3,
required=True
)
docket_number = fields.Integer(
required=True
)
bank_account_number = fields.Char()
bank_branch = fields.Integer(
size=2
)
admission_date = fields.Date()
email = fields.Char()
active = fields.Boolean(
default=True
)
employee_title_ids = fields.Many2many(
comodel_name='tmc.hr.employee_title'
)
employee_job_id = fields.Many2one(
comodel_name='tmc.hr.employee_job'
)
office_id = fields.Many2one(
comodel_name='tmc.hr.office'
)
_sql_constraints = [
('number_uniq',
'unique(docket_number, bank_account_number)',
'Number must be unique!'),
]
## Instruction:
Remove required property on some fields
## Code After:
from odoo import fields, models
class Employee(models.Model):
_name = 'tmc.hr.employee'
_order = 'name'
name = fields.Char()
internal_number = fields.Char(
size=3
)
docket_number = fields.Integer()
bank_account_number = fields.Char()
bank_branch = fields.Integer(
size=2
)
admission_date = fields.Date()
email = fields.Char()
active = fields.Boolean(
default=True
)
employee_title_ids = fields.Many2many(
comodel_name='tmc.hr.employee_title'
)
employee_job_id = fields.Many2one(
comodel_name='tmc.hr.employee_job'
)
office_id = fields.Many2one(
comodel_name='tmc.hr.office'
)
_sql_constraints = [
('number_uniq',
'unique(docket_number, bank_account_number)',
'Number must be unique!'),
]
|
from odoo import fields, models
class Employee(models.Model):
_name = 'tmc.hr.employee'
_order = 'name'
name = fields.Char()
internal_number = fields.Char(
- size=3,
? -
+ size=3
- required=True
)
- docket_number = fields.Integer(
+ docket_number = fields.Integer()
? +
- required=True
- )
bank_account_number = fields.Char()
bank_branch = fields.Integer(
size=2
)
admission_date = fields.Date()
email = fields.Char()
active = fields.Boolean(
default=True
)
employee_title_ids = fields.Many2many(
comodel_name='tmc.hr.employee_title'
)
employee_job_id = fields.Many2one(
comodel_name='tmc.hr.employee_job'
)
office_id = fields.Many2one(
comodel_name='tmc.hr.office'
)
_sql_constraints = [
('number_uniq',
'unique(docket_number, bank_account_number)',
'Number must be unique!'),
] |
828215d3de3ddd2febdd190de067b0f6e5c2e9e1 | query/migrations/0017_auto_20160224_1306.py | query/migrations/0017_auto_20160224_1306.py | from __future__ import unicode_literals
from django.db import models, migrations
from query.operations import engine_specific
class Migration(migrations.Migration):
dependencies = [
('query', '0016_auto_20160203_1324'),
]
operations = [
engine_specific(('mysql',),
migrations.RunSQL(
'alter table query_term modify word varchar(200) character set utf8 collate utf8_bin;',
'alter table query_term modify word varchar(200) character set utf8 collate utf8_general_ci;'
)
),
]
| from __future__ import unicode_literals
from django.db import models, migrations
from query.operations import engine_specific
class Migration(migrations.Migration):
dependencies = [
('query', '0016_auto_20160203_1324'),
]
operations = [
engine_specific(('mysql',),
migrations.RunSQL(
sql='alter table query_term modify word varchar(200) character set utf8 collate utf8_bin;',
reverse_sql='alter table query_term modify word varchar(200) character set utf8 collate utf8_general_ci;'
)
),
]
| Use named arguments for RunSQL | Use named arguments for RunSQL
| Python | apache-2.0 | UUDigitalHumanitieslab/texcavator,UUDigitalHumanitieslab/texcavator,UUDigitalHumanitieslab/texcavator | from __future__ import unicode_literals
from django.db import models, migrations
from query.operations import engine_specific
class Migration(migrations.Migration):
dependencies = [
('query', '0016_auto_20160203_1324'),
]
operations = [
engine_specific(('mysql',),
migrations.RunSQL(
- 'alter table query_term modify word varchar(200) character set utf8 collate utf8_bin;',
+ sql='alter table query_term modify word varchar(200) character set utf8 collate utf8_bin;',
- 'alter table query_term modify word varchar(200) character set utf8 collate utf8_general_ci;'
+ reverse_sql='alter table query_term modify word varchar(200) character set utf8 collate utf8_general_ci;'
)
),
]
| Use named arguments for RunSQL | ## Code Before:
from __future__ import unicode_literals
from django.db import models, migrations
from query.operations import engine_specific
class Migration(migrations.Migration):
dependencies = [
('query', '0016_auto_20160203_1324'),
]
operations = [
engine_specific(('mysql',),
migrations.RunSQL(
'alter table query_term modify word varchar(200) character set utf8 collate utf8_bin;',
'alter table query_term modify word varchar(200) character set utf8 collate utf8_general_ci;'
)
),
]
## Instruction:
Use named arguments for RunSQL
## Code After:
from __future__ import unicode_literals
from django.db import models, migrations
from query.operations import engine_specific
class Migration(migrations.Migration):
dependencies = [
('query', '0016_auto_20160203_1324'),
]
operations = [
engine_specific(('mysql',),
migrations.RunSQL(
sql='alter table query_term modify word varchar(200) character set utf8 collate utf8_bin;',
reverse_sql='alter table query_term modify word varchar(200) character set utf8 collate utf8_general_ci;'
)
),
]
| from __future__ import unicode_literals
from django.db import models, migrations
from query.operations import engine_specific
class Migration(migrations.Migration):
dependencies = [
('query', '0016_auto_20160203_1324'),
]
operations = [
engine_specific(('mysql',),
migrations.RunSQL(
- 'alter table query_term modify word varchar(200) character set utf8 collate utf8_bin;',
+ sql='alter table query_term modify word varchar(200) character set utf8 collate utf8_bin;',
? ++++
- 'alter table query_term modify word varchar(200) character set utf8 collate utf8_general_ci;'
+ reverse_sql='alter table query_term modify word varchar(200) character set utf8 collate utf8_general_ci;'
? ++++++++++++
)
),
] |
b56c1cb1185c8d20276688f29509947cb46a26d4 | test/test_compiled.py | test/test_compiled.py | import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass | import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures_extension():
# with a cython extension
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass
def test_call_signatures_stdlib():
code = "import math; math.cos("
s = jedi.Script(code)
defs = s.call_signatures()
for call_def in defs:
for p in call_def.params:
assert str(p) == 'x'
| Add test with standard lib | Add test with standard lib
math.cos( should return <Param: x @0,0>
| Python | mit | WoLpH/jedi,WoLpH/jedi,dwillmer/jedi,jonashaag/jedi,mfussenegger/jedi,flurischt/jedi,mfussenegger/jedi,dwillmer/jedi,jonashaag/jedi,tjwei/jedi,tjwei/jedi,flurischt/jedi | import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
- def test_call_signatures():
+ def test_call_signatures_extension():
+ # with a cython extension
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass
+
+
+ def test_call_signatures_stdlib():
+ code = "import math; math.cos("
+ s = jedi.Script(code)
+ defs = s.call_signatures()
+ for call_def in defs:
+ for p in call_def.params:
+ assert str(p) == 'x'
+ | Add test with standard lib | ## Code Before:
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass
## Instruction:
Add test with standard lib
## Code After:
import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
def test_call_signatures_extension():
# with a cython extension
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass
def test_call_signatures_stdlib():
code = "import math; math.cos("
s = jedi.Script(code)
defs = s.call_signatures()
for call_def in defs:
for p in call_def.params:
assert str(p) == 'x'
| import os
import platform
import sys
import jedi
from .helpers import cwd_at
@cwd_at('test/extensions')
def test_completions():
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled." % package_name)
assert len(s.completions()) >= 2
@cwd_at('test/extensions')
- def test_call_signatures():
+ def test_call_signatures_extension():
? ++++++++++
+ # with a cython extension
if platform.architecture()[0] == '64bit':
package_name = "compiled%s%s" % sys.version_info[:2]
sys.path.insert(0, os.getcwd())
if os.path.exists(package_name):
s = jedi.Script("from %s import compiled; compiled.Foo(" %
package_name)
defs = s.call_signatures()
for call_def in defs:
for param in call_def.params:
pass
+
+
+ def test_call_signatures_stdlib():
+ code = "import math; math.cos("
+ s = jedi.Script(code)
+ defs = s.call_signatures()
+ for call_def in defs:
+ for p in call_def.params:
+ assert str(p) == 'x' |
56bc9c79522fd534f2a756bd5a18193635e2adae | tests/test_default_security_groups.py | tests/test_default_security_groups.py | """Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {'myapp': [{'start_port': '22', 'end_port': '22', 'protocol': 'tcp' }]}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
test_sg = SpinnakerSecurityGroup()
ingress = test_sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
| """Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_details')
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties, mock_details):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {
'myapp': [
{'start_port': '22', 'end_port': '22', 'protocol': 'tcp'},
]
}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
sg = SpinnakerSecurityGroup()
ingress = sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
| Fix missing mock and rename variable | tests: Fix missing mock and rename variable
| Python | apache-2.0 | gogoair/foremast,gogoair/foremast | """Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
+ @mock.patch('foremast.securitygroup.create_securitygroup.get_details')
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
- def test_default_security_groups(mock_properties):
+ def test_default_security_groups(mock_properties, mock_details):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
+ test_sg = {
+ 'myapp': [
- test_sg = {'myapp': [{'start_port': '22', 'end_port': '22', 'protocol': 'tcp' }]}
+ {'start_port': '22', 'end_port': '22', 'protocol': 'tcp'},
+ ]
+ }
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
- test_sg = SpinnakerSecurityGroup()
+ sg = SpinnakerSecurityGroup()
- ingress = test_sg.update_default_securitygroup_rules()
+ ingress = sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
| Fix missing mock and rename variable | ## Code Before:
"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {'myapp': [{'start_port': '22', 'end_port': '22', 'protocol': 'tcp' }]}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
test_sg = SpinnakerSecurityGroup()
ingress = test_sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
## Instruction:
Fix missing mock and rename variable
## Code After:
"""Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
@mock.patch('foremast.securitygroup.create_securitygroup.get_details')
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
def test_default_security_groups(mock_properties, mock_details):
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
test_sg = {
'myapp': [
{'start_port': '22', 'end_port': '22', 'protocol': 'tcp'},
]
}
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
sg = SpinnakerSecurityGroup()
ingress = sg.update_default_securitygroup_rules()
assert 'myapp' in ingress
| """Test default Security Groups."""
from unittest import mock
from foremast.securitygroup.create_securitygroup import SpinnakerSecurityGroup
+ @mock.patch('foremast.securitygroup.create_securitygroup.get_details')
@mock.patch('foremast.securitygroup.create_securitygroup.get_properties')
- def test_default_security_groups(mock_properties):
+ def test_default_security_groups(mock_properties, mock_details):
? ++++++++++++++
"""Make sure default Security Groups are added to the ingress rules."""
ingress = {
'test_app': [
{
'start_port': 30,
'end_port': 30,
},
],
}
mock_properties.return_value = {
'security_group': {
'ingress': ingress,
'description': '',
},
}
+ test_sg = {
+ 'myapp': [
- test_sg = {'myapp': [{'start_port': '22', 'end_port': '22', 'protocol': 'tcp' }]}
? ------- - --------- ^ - ^^
+ {'start_port': '22', 'end_port': '22', 'protocol': 'tcp'},
? ^^^^^ ^
+ ]
+ }
with mock.patch.dict('foremast.securitygroup.create_securitygroup.DEFAULT_SECURITYGROUP_RULES', test_sg):
- test_sg = SpinnakerSecurityGroup()
? -----
+ sg = SpinnakerSecurityGroup()
- ingress = test_sg.update_default_securitygroup_rules()
? -----
+ ingress = sg.update_default_securitygroup_rules()
assert 'myapp' in ingress |
9b19d366c7e1cf41ffc6af4eaed789995ddc5cc2 | byceps/blueprints/core_admin/views.py | byceps/blueprints/core_admin/views.py |
from ...services.brand import service as brand_service
from ...util.framework.blueprint import create_blueprint
from ..authorization.registry import permission_registry
from .authorization import AdminPermission
blueprint = create_blueprint('core_admin', __name__)
permission_registry.register_enum(AdminPermission)
@blueprint.app_context_processor
def inject_brands():
brands = brand_service.get_brands()
return {
'all_brands': brands,
}
|
from ...services.brand import service as brand_service
from ...util.framework.blueprint import create_blueprint
from ..authorization.registry import permission_registry
from .authorization import AdminPermission
blueprint = create_blueprint('core_admin', __name__)
permission_registry.register_enum(AdminPermission)
@blueprint.app_context_processor
def inject_template_variables():
brands = brand_service.get_brands()
return {
'all_brands': brands,
}
| Generalize name of function to inject admin template variables | Generalize name of function to inject admin template variables
| Python | bsd-3-clause | homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps |
from ...services.brand import service as brand_service
from ...util.framework.blueprint import create_blueprint
from ..authorization.registry import permission_registry
from .authorization import AdminPermission
blueprint = create_blueprint('core_admin', __name__)
permission_registry.register_enum(AdminPermission)
@blueprint.app_context_processor
- def inject_brands():
+ def inject_template_variables():
brands = brand_service.get_brands()
return {
'all_brands': brands,
}
| Generalize name of function to inject admin template variables | ## Code Before:
from ...services.brand import service as brand_service
from ...util.framework.blueprint import create_blueprint
from ..authorization.registry import permission_registry
from .authorization import AdminPermission
blueprint = create_blueprint('core_admin', __name__)
permission_registry.register_enum(AdminPermission)
@blueprint.app_context_processor
def inject_brands():
brands = brand_service.get_brands()
return {
'all_brands': brands,
}
## Instruction:
Generalize name of function to inject admin template variables
## Code After:
from ...services.brand import service as brand_service
from ...util.framework.blueprint import create_blueprint
from ..authorization.registry import permission_registry
from .authorization import AdminPermission
blueprint = create_blueprint('core_admin', __name__)
permission_registry.register_enum(AdminPermission)
@blueprint.app_context_processor
def inject_template_variables():
brands = brand_service.get_brands()
return {
'all_brands': brands,
}
|
from ...services.brand import service as brand_service
from ...util.framework.blueprint import create_blueprint
from ..authorization.registry import permission_registry
from .authorization import AdminPermission
blueprint = create_blueprint('core_admin', __name__)
permission_registry.register_enum(AdminPermission)
@blueprint.app_context_processor
- def inject_brands():
+ def inject_template_variables():
brands = brand_service.get_brands()
return {
'all_brands': brands,
} |
5982e5a4f0bb7f47e604aea2c851ba50bcbe07e1 | hexify.py | hexify.py | import uflash
import argparse
import sys
import os
_HELP_TEXT = """
A simple utility script intended for creating hexified versions of MicroPython
scripts on the local filesystem _NOT_ the microbit. Does not autodetect a
microbit. Accepts multiple input scripts and optionally one output directory.
"""
def main(argv=None):
if not argv:
argv = sys.argv[1:]
parser = argparse.ArgumentParser(description=_HELP_TEXT)
parser.add_argument('source', nargs='*', default=None)
parser.add_argument('-o', '--outdir', default=None,
help="Output directory")
args = parser.parse_args(argv)
for file in args.source:
if not args.outdir:
(script_path, script_name) = os.path.split(file)
args.outdir = script_path
uflash.flash(path_to_python=file,
paths_to_microbits=[args.outdir], keepname=True)
if __name__ == '__main__':
main(sys.argv[1:])
| import uflash
import argparse
import sys
import os
_HELP_TEXT = """
A simple utility script intended for creating hexified versions of MicroPython
scripts on the local filesystem _NOT_ the microbit. Does not autodetect a
microbit. Accepts multiple input scripts and optionally one output directory.
"""
def main(argv=None):
if not argv:
argv = sys.argv[1:]
parser = argparse.ArgumentParser(description=_HELP_TEXT)
parser.add_argument('source', nargs='*', default=None)
parser.add_argument('-r', '--runtime', default=None,
help="Use the referenced MicroPython runtime.")
parser.add_argument('-o', '--outdir', default=None,
help="Output directory")
parser.add_argument('-m', '--minify',
action='store_true',
help='Minify the source')
args = parser.parse_args(argv)
for file in args.source:
if not args.outdir:
(script_path, script_name) = os.path.split(file)
args.outdir = script_path
uflash.flash(path_to_python=file,
path_to_runtime=args.runtime,
paths_to_microbits=[args.outdir],
minify=args.minify,
keepname=True) # keepname is always True in hexify
if __name__ == '__main__':
main(sys.argv[1:])
| Add runtime and minify support | Add runtime and minify support
Added command line arguments for runtime and minify.
| Python | mit | ntoll/uflash | import uflash
import argparse
import sys
import os
_HELP_TEXT = """
A simple utility script intended for creating hexified versions of MicroPython
scripts on the local filesystem _NOT_ the microbit. Does not autodetect a
microbit. Accepts multiple input scripts and optionally one output directory.
"""
def main(argv=None):
if not argv:
argv = sys.argv[1:]
parser = argparse.ArgumentParser(description=_HELP_TEXT)
parser.add_argument('source', nargs='*', default=None)
+ parser.add_argument('-r', '--runtime', default=None,
+ help="Use the referenced MicroPython runtime.")
parser.add_argument('-o', '--outdir', default=None,
help="Output directory")
+ parser.add_argument('-m', '--minify',
+ action='store_true',
+ help='Minify the source')
args = parser.parse_args(argv)
for file in args.source:
if not args.outdir:
(script_path, script_name) = os.path.split(file)
args.outdir = script_path
uflash.flash(path_to_python=file,
+ path_to_runtime=args.runtime,
- paths_to_microbits=[args.outdir], keepname=True)
+ paths_to_microbits=[args.outdir],
+ minify=args.minify,
+ keepname=True) # keepname is always True in hexify
if __name__ == '__main__':
main(sys.argv[1:])
| Add runtime and minify support | ## Code Before:
import uflash
import argparse
import sys
import os
_HELP_TEXT = """
A simple utility script intended for creating hexified versions of MicroPython
scripts on the local filesystem _NOT_ the microbit. Does not autodetect a
microbit. Accepts multiple input scripts and optionally one output directory.
"""
def main(argv=None):
if not argv:
argv = sys.argv[1:]
parser = argparse.ArgumentParser(description=_HELP_TEXT)
parser.add_argument('source', nargs='*', default=None)
parser.add_argument('-o', '--outdir', default=None,
help="Output directory")
args = parser.parse_args(argv)
for file in args.source:
if not args.outdir:
(script_path, script_name) = os.path.split(file)
args.outdir = script_path
uflash.flash(path_to_python=file,
paths_to_microbits=[args.outdir], keepname=True)
if __name__ == '__main__':
main(sys.argv[1:])
## Instruction:
Add runtime and minify support
## Code After:
import uflash
import argparse
import sys
import os
_HELP_TEXT = """
A simple utility script intended for creating hexified versions of MicroPython
scripts on the local filesystem _NOT_ the microbit. Does not autodetect a
microbit. Accepts multiple input scripts and optionally one output directory.
"""
def main(argv=None):
if not argv:
argv = sys.argv[1:]
parser = argparse.ArgumentParser(description=_HELP_TEXT)
parser.add_argument('source', nargs='*', default=None)
parser.add_argument('-r', '--runtime', default=None,
help="Use the referenced MicroPython runtime.")
parser.add_argument('-o', '--outdir', default=None,
help="Output directory")
parser.add_argument('-m', '--minify',
action='store_true',
help='Minify the source')
args = parser.parse_args(argv)
for file in args.source:
if not args.outdir:
(script_path, script_name) = os.path.split(file)
args.outdir = script_path
uflash.flash(path_to_python=file,
path_to_runtime=args.runtime,
paths_to_microbits=[args.outdir],
minify=args.minify,
keepname=True) # keepname is always True in hexify
if __name__ == '__main__':
main(sys.argv[1:])
| import uflash
import argparse
import sys
import os
_HELP_TEXT = """
A simple utility script intended for creating hexified versions of MicroPython
scripts on the local filesystem _NOT_ the microbit. Does not autodetect a
microbit. Accepts multiple input scripts and optionally one output directory.
"""
def main(argv=None):
if not argv:
argv = sys.argv[1:]
parser = argparse.ArgumentParser(description=_HELP_TEXT)
parser.add_argument('source', nargs='*', default=None)
+ parser.add_argument('-r', '--runtime', default=None,
+ help="Use the referenced MicroPython runtime.")
parser.add_argument('-o', '--outdir', default=None,
help="Output directory")
+ parser.add_argument('-m', '--minify',
+ action='store_true',
+ help='Minify the source')
args = parser.parse_args(argv)
for file in args.source:
if not args.outdir:
(script_path, script_name) = os.path.split(file)
args.outdir = script_path
uflash.flash(path_to_python=file,
+ path_to_runtime=args.runtime,
- paths_to_microbits=[args.outdir], keepname=True)
? ---------------
+ paths_to_microbits=[args.outdir],
+ minify=args.minify,
+ keepname=True) # keepname is always True in hexify
if __name__ == '__main__':
main(sys.argv[1:]) |
8771bbdba5b10a3b9fab2822eccdec64d221edb4 | catalog/admin.py | catalog/admin.py | from django.contrib import admin
from .models import Author, Book, BookInstance, Genre, Language
# admin.site.register(Book)
# admin.site.register(Author)
admin.site.register(Genre)
# admin.site.register(BookInstance)
admin.site.register(Language)
# Define the admin class
class AuthorAdmin(admin.ModelAdmin):
list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
# Register the admin class with the associated model
admin.site.register(Author, AuthorAdmin)
# Register the Admin classes for Book using the decorator
class BooksInstanceInline(admin.TabularInline):
model = BookInstance
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'display_genre')
inlines = [BooksInstanceInline]
# Register the Admin classes for BookInstance using the decorator
@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
list_filter = ('status', 'due_back')
fieldsets = (
(None, {
'fields': ('book', 'imprint', 'id')
}),
('Availability', {
'fields': ('status', 'due_back')
}),
)
| from django.contrib import admin
from .models import Author, Book, BookInstance, Genre, Language
# admin.site.register(Book)
# admin.site.register(Author)
admin.site.register(Genre)
# admin.site.register(BookInstance)
admin.site.register(Language)
class AuthorsInstanceInline(admin.TabularInline):
model = Book
# Define the admin class
class AuthorAdmin(admin.ModelAdmin):
list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
inlines = [AuthorsInstanceInline]
# Register the admin class with the associated model
admin.site.register(Author, AuthorAdmin)
# Register the Admin classes for Book using the decorator
class BooksInstanceInline(admin.TabularInline):
model = BookInstance
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'display_genre')
inlines = [BooksInstanceInline]
# Register the Admin classes for BookInstance using the decorator
@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
list_filter = ('status', 'due_back')
fieldsets = (
(None, {
'fields': ('book', 'imprint', 'id')
}),
('Availability', {
'fields': ('status', 'due_back')
}),
)
list_display = ('book', 'status', 'due_back', 'id')
| Configure BookInstance list view and add an inline listing | Configure BookInstance list view and add an inline listing
| Python | bsd-3-clause | pavlenk0/my-catalog,pavlenk0/my-catalog | from django.contrib import admin
from .models import Author, Book, BookInstance, Genre, Language
# admin.site.register(Book)
# admin.site.register(Author)
admin.site.register(Genre)
# admin.site.register(BookInstance)
admin.site.register(Language)
+ class AuthorsInstanceInline(admin.TabularInline):
+ model = Book
+
+
# Define the admin class
class AuthorAdmin(admin.ModelAdmin):
list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
+ inlines = [AuthorsInstanceInline]
# Register the admin class with the associated model
admin.site.register(Author, AuthorAdmin)
# Register the Admin classes for Book using the decorator
class BooksInstanceInline(admin.TabularInline):
model = BookInstance
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'display_genre')
inlines = [BooksInstanceInline]
# Register the Admin classes for BookInstance using the decorator
@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
list_filter = ('status', 'due_back')
fieldsets = (
(None, {
'fields': ('book', 'imprint', 'id')
}),
('Availability', {
'fields': ('status', 'due_back')
}),
)
+ list_display = ('book', 'status', 'due_back', 'id')
| Configure BookInstance list view and add an inline listing | ## Code Before:
from django.contrib import admin
from .models import Author, Book, BookInstance, Genre, Language
# admin.site.register(Book)
# admin.site.register(Author)
admin.site.register(Genre)
# admin.site.register(BookInstance)
admin.site.register(Language)
# Define the admin class
class AuthorAdmin(admin.ModelAdmin):
list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
# Register the admin class with the associated model
admin.site.register(Author, AuthorAdmin)
# Register the Admin classes for Book using the decorator
class BooksInstanceInline(admin.TabularInline):
model = BookInstance
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'display_genre')
inlines = [BooksInstanceInline]
# Register the Admin classes for BookInstance using the decorator
@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
list_filter = ('status', 'due_back')
fieldsets = (
(None, {
'fields': ('book', 'imprint', 'id')
}),
('Availability', {
'fields': ('status', 'due_back')
}),
)
## Instruction:
Configure BookInstance list view and add an inline listing
## Code After:
from django.contrib import admin
from .models import Author, Book, BookInstance, Genre, Language
# admin.site.register(Book)
# admin.site.register(Author)
admin.site.register(Genre)
# admin.site.register(BookInstance)
admin.site.register(Language)
class AuthorsInstanceInline(admin.TabularInline):
model = Book
# Define the admin class
class AuthorAdmin(admin.ModelAdmin):
list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
inlines = [AuthorsInstanceInline]
# Register the admin class with the associated model
admin.site.register(Author, AuthorAdmin)
# Register the Admin classes for Book using the decorator
class BooksInstanceInline(admin.TabularInline):
model = BookInstance
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'display_genre')
inlines = [BooksInstanceInline]
# Register the Admin classes for BookInstance using the decorator
@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
list_filter = ('status', 'due_back')
fieldsets = (
(None, {
'fields': ('book', 'imprint', 'id')
}),
('Availability', {
'fields': ('status', 'due_back')
}),
)
list_display = ('book', 'status', 'due_back', 'id')
| from django.contrib import admin
from .models import Author, Book, BookInstance, Genre, Language
# admin.site.register(Book)
# admin.site.register(Author)
admin.site.register(Genre)
# admin.site.register(BookInstance)
admin.site.register(Language)
+ class AuthorsInstanceInline(admin.TabularInline):
+ model = Book
+
+
# Define the admin class
class AuthorAdmin(admin.ModelAdmin):
list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
+ inlines = [AuthorsInstanceInline]
# Register the admin class with the associated model
admin.site.register(Author, AuthorAdmin)
# Register the Admin classes for Book using the decorator
class BooksInstanceInline(admin.TabularInline):
model = BookInstance
@admin.register(Book)
class BookAdmin(admin.ModelAdmin):
list_display = ('title', 'author', 'display_genre')
inlines = [BooksInstanceInline]
# Register the Admin classes for BookInstance using the decorator
@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
list_filter = ('status', 'due_back')
fieldsets = (
(None, {
'fields': ('book', 'imprint', 'id')
}),
('Availability', {
'fields': ('status', 'due_back')
}),
)
+ list_display = ('book', 'status', 'due_back', 'id') |
f7e218b72a09615259b4d77e9169f5237a4cae32 | mopidy/core/mixer.py | mopidy/core/mixer.py | from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer:
return self._mixer.get_volume().get()
else:
# For testing
return self._volume
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer:
self._mixer.set_volume(volume)
else:
# For testing
self._volume = volume
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` otherwise.
"""
if self._mixer:
return self._mixer.get_mute().get()
else:
# For testing
return self._mute
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
mute = bool(mute)
if self._mixer:
self._mixer.set_mute(mute)
else:
# For testing
self._mute = mute
| from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer is not None:
return self._mixer.get_volume().get()
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer is not None:
self._mixer.set_volume(volume)
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` unmuted, :class:`None` if
unknown.
"""
if self._mixer is not None:
return self._mixer.get_mute().get()
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
if self._mixer is not None:
self._mixer.set_mute(bool(mute))
| Remove test-only code paths in MixerController | core: Remove test-only code paths in MixerController
| Python | apache-2.0 | jmarsik/mopidy,vrs01/mopidy,SuperStarPL/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,pacificIT/mopidy,vrs01/mopidy,diandiankan/mopidy,jcass77/mopidy,tkem/mopidy,glogiotatidis/mopidy,dbrgn/mopidy,bencevans/mopidy,bencevans/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,swak/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,vrs01/mopidy,mokieyue/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,hkariti/mopidy,ZenithDK/mopidy,rawdlite/mopidy,mopidy/mopidy,jmarsik/mopidy,mokieyue/mopidy,swak/mopidy,mopidy/mopidy,bencevans/mopidy,bacontext/mopidy,dbrgn/mopidy,hkariti/mopidy,bacontext/mopidy,quartz55/mopidy,dbrgn/mopidy,adamcik/mopidy,kingosticks/mopidy,hkariti/mopidy,jmarsik/mopidy,ali/mopidy,kingosticks/mopidy,jcass77/mopidy,mokieyue/mopidy,ali/mopidy,bacontext/mopidy,mopidy/mopidy,ZenithDK/mopidy,rawdlite/mopidy,jodal/mopidy,adamcik/mopidy,rawdlite/mopidy,bacontext/mopidy,ali/mopidy,ali/mopidy,adamcik/mopidy,dbrgn/mopidy,quartz55/mopidy,tkem/mopidy,rawdlite/mopidy,jcass77/mopidy,bencevans/mopidy,ZenithDK/mopidy,vrs01/mopidy,jodal/mopidy,swak/mopidy,tkem/mopidy,glogiotatidis/mopidy,swak/mopidy,jodal/mopidy,ZenithDK/mopidy,tkem/mopidy,quartz55/mopidy,diandiankan/mopidy,hkariti/mopidy,jmarsik/mopidy,diandiankan/mopidy,pacificIT/mopidy,quartz55/mopidy | from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
- if self._mixer:
+ if self._mixer is not None:
return self._mixer.get_volume().get()
- else:
- # For testing
- return self._volume
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
- if self._mixer:
+ if self._mixer is not None:
self._mixer.set_volume(volume)
- else:
- # For testing
- self._volume = volume
def get_mute(self):
"""Get mute state.
- :class:`True` if muted, :class:`False` otherwise.
+ :class:`True` if muted, :class:`False` unmuted, :class:`None` if
+ unknown.
"""
- if self._mixer:
+ if self._mixer is not None:
return self._mixer.get_mute().get()
- else:
- # For testing
- return self._mute
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
- mute = bool(mute)
- if self._mixer:
+ if self._mixer is not None:
- self._mixer.set_mute(mute)
+ self._mixer.set_mute(bool(mute))
- else:
- # For testing
- self._mute = mute
| Remove test-only code paths in MixerController | ## Code Before:
from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer:
return self._mixer.get_volume().get()
else:
# For testing
return self._volume
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer:
self._mixer.set_volume(volume)
else:
# For testing
self._volume = volume
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` otherwise.
"""
if self._mixer:
return self._mixer.get_mute().get()
else:
# For testing
return self._mute
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
mute = bool(mute)
if self._mixer:
self._mixer.set_mute(mute)
else:
# For testing
self._mute = mute
## Instruction:
Remove test-only code paths in MixerController
## Code After:
from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
if self._mixer is not None:
return self._mixer.get_volume().get()
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
if self._mixer is not None:
self._mixer.set_volume(volume)
def get_mute(self):
"""Get mute state.
:class:`True` if muted, :class:`False` unmuted, :class:`None` if
unknown.
"""
if self._mixer is not None:
return self._mixer.get_mute().get()
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
if self._mixer is not None:
self._mixer.set_mute(bool(mute))
| from __future__ import absolute_import, unicode_literals
import logging
logger = logging.getLogger(__name__)
class MixerController(object):
pykka_traversable = True
def __init__(self, mixer):
self._mixer = mixer
self._volume = None
self._mute = False
def get_volume(self):
"""Get the volume.
Integer in range [0..100] or :class:`None` if unknown.
The volume scale is linear.
"""
- if self._mixer:
+ if self._mixer is not None:
? ++++++++++++
return self._mixer.get_volume().get()
- else:
- # For testing
- return self._volume
def set_volume(self, volume):
"""Set the volume.
The volume is defined as an integer in range [0..100].
The volume scale is linear.
"""
- if self._mixer:
+ if self._mixer is not None:
? ++++++++++++
self._mixer.set_volume(volume)
- else:
- # For testing
- self._volume = volume
def get_mute(self):
"""Get mute state.
- :class:`True` if muted, :class:`False` otherwise.
? ^^ ^^ ^^^
+ :class:`True` if muted, :class:`False` unmuted, :class:`None` if
? ++++++++++++++++++ ^ ^^ ^
+ unknown.
"""
- if self._mixer:
+ if self._mixer is not None:
? ++++++++++++
return self._mixer.get_mute().get()
- else:
- # For testing
- return self._mute
def set_mute(self, mute):
"""Set mute state.
:class:`True` to mute, :class:`False` to unmute.
"""
- mute = bool(mute)
- if self._mixer:
+ if self._mixer is not None:
? ++++++++++++
- self._mixer.set_mute(mute)
+ self._mixer.set_mute(bool(mute))
? +++++ +
- else:
- # For testing
- self._mute = mute |
7d08a71874dd7b1ab7ba4bb1cd345161d9118266 | src/extract.py | src/extract.py | import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff('/home/rajika/projects/react-scaffolder'):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff() | import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self, repo_path):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff(repo_path):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff('/home/rajika/projects/babel-bot') | Move repo name to arg | Move repo name to arg
| Python | mit | rajikaimal/emma,rajikaimal/emma | import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
- def get_parsed_diff(self):
+ def get_parsed_diff(self, repo_path):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
- for diff_info in parser.parse_diff('/home/rajika/projects/react-scaffolder'):
+ for diff_info in parser.parse_diff(repo_path):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
- ex.get_parsed_diff()
+ ex.get_parsed_diff('/home/rajika/projects/babel-bot') | Move repo name to arg | ## Code Before:
import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff('/home/rajika/projects/react-scaffolder'):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff()
## Instruction:
Move repo name to arg
## Code After:
import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self, repo_path):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff(repo_path):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff('/home/rajika/projects/babel-bot') | import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
- def get_parsed_diff(self):
+ def get_parsed_diff(self, repo_path):
? +++++++++++
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
- for diff_info in parser.parse_diff('/home/rajika/projects/react-scaffolder'):
+ for diff_info in parser.parse_diff(repo_path):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
- ex.get_parsed_diff()
+ ex.get_parsed_diff('/home/rajika/projects/babel-bot') |
9ffc56e947dea40cd49c76beada2ec469a01f8f8 | __init__.py | __init__.py | import base64
import json
from os import path
import sys
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
api_file = 'my_api.json'
_api_file = '{}\{}'.format(path.dirname(path.abspath(__file__)), api_file)
with open(_api_file) as fin:
cw_api_settings = json.load(fin)
API_URL = cw_api_settings['API_URL']
_cid = cw_api_settings['COMPANYID']
_pubk = cw_api_settings['PUBLICKEY']
_privk = cw_api_settings['PRIVATEKEY']
basic_auth = base64.b64encode("{}+{}:{}".format(_cid, _pubk, _privk).encode('utf-8'))
basic_auth = {'Authorization': 'Basic {}'.format(str(basic_auth, 'utf-8'))}
| import base64
import json
from os import path
import sys
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
api_file = 'my_api.json'
_api_file = path.join(path.dirname(path.abspath(__file__)), api_file)
with open(_api_file) as fin:
cw_api_settings = json.load(fin)
API_URL = cw_api_settings['API_URL']
_cid = cw_api_settings['COMPANYID']
_pubk = cw_api_settings['PUBLICKEY']
_privk = cw_api_settings['PRIVATEKEY']
basic_auth = base64.b64encode("{}+{}:{}".format(_cid, _pubk, _privk).encode('utf-8'))
basic_auth = {'Authorization': 'Basic {}'.format(str(basic_auth, 'utf-8'))}
| Make api file path OS safe | Make api file path OS safe
| Python | mit | joshuamsmith/ConnectPyse | import base64
import json
from os import path
import sys
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
api_file = 'my_api.json'
- _api_file = '{}\{}'.format(path.dirname(path.abspath(__file__)), api_file)
+ _api_file = path.join(path.dirname(path.abspath(__file__)), api_file)
with open(_api_file) as fin:
cw_api_settings = json.load(fin)
API_URL = cw_api_settings['API_URL']
_cid = cw_api_settings['COMPANYID']
_pubk = cw_api_settings['PUBLICKEY']
_privk = cw_api_settings['PRIVATEKEY']
basic_auth = base64.b64encode("{}+{}:{}".format(_cid, _pubk, _privk).encode('utf-8'))
basic_auth = {'Authorization': 'Basic {}'.format(str(basic_auth, 'utf-8'))}
| Make api file path OS safe | ## Code Before:
import base64
import json
from os import path
import sys
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
api_file = 'my_api.json'
_api_file = '{}\{}'.format(path.dirname(path.abspath(__file__)), api_file)
with open(_api_file) as fin:
cw_api_settings = json.load(fin)
API_URL = cw_api_settings['API_URL']
_cid = cw_api_settings['COMPANYID']
_pubk = cw_api_settings['PUBLICKEY']
_privk = cw_api_settings['PRIVATEKEY']
basic_auth = base64.b64encode("{}+{}:{}".format(_cid, _pubk, _privk).encode('utf-8'))
basic_auth = {'Authorization': 'Basic {}'.format(str(basic_auth, 'utf-8'))}
## Instruction:
Make api file path OS safe
## Code After:
import base64
import json
from os import path
import sys
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
api_file = 'my_api.json'
_api_file = path.join(path.dirname(path.abspath(__file__)), api_file)
with open(_api_file) as fin:
cw_api_settings = json.load(fin)
API_URL = cw_api_settings['API_URL']
_cid = cw_api_settings['COMPANYID']
_pubk = cw_api_settings['PUBLICKEY']
_privk = cw_api_settings['PRIVATEKEY']
basic_auth = base64.b64encode("{}+{}:{}".format(_cid, _pubk, _privk).encode('utf-8'))
basic_auth = {'Authorization': 'Basic {}'.format(str(basic_auth, 'utf-8'))}
| import base64
import json
from os import path
import sys
sys.path.insert(0, path.dirname(path.dirname(path.abspath(__file__))))
api_file = 'my_api.json'
- _api_file = '{}\{}'.format(path.dirname(path.abspath(__file__)), api_file)
? ^^^^^^^^^^^^
+ _api_file = path.join(path.dirname(path.abspath(__file__)), api_file)
? ^ ++++++
with open(_api_file) as fin:
cw_api_settings = json.load(fin)
API_URL = cw_api_settings['API_URL']
_cid = cw_api_settings['COMPANYID']
_pubk = cw_api_settings['PUBLICKEY']
_privk = cw_api_settings['PRIVATEKEY']
basic_auth = base64.b64encode("{}+{}:{}".format(_cid, _pubk, _privk).encode('utf-8'))
basic_auth = {'Authorization': 'Basic {}'.format(str(basic_auth, 'utf-8'))} |
9a239c993502e3f317edd478a5d8b5f225c24b18 | globus_cli/run.py | globus_cli/run.py | from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.helpers import common_options
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
@common_options
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
| from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
| Fix doubled-help on main command | Fix doubled-help on main command
Main command was being doubly decorated with the common options. As a
result, it had funky looking helptext.
| Python | apache-2.0 | globus/globus-cli,globus/globus-cli | from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
- from globus_cli.helpers import common_options
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
- @common_options
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
| Fix doubled-help on main command | ## Code Before:
from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.helpers import common_options
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
@common_options
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
## Instruction:
Fix doubled-help on main command
## Code After:
from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command)
| from globus_cli.parsing import globus_main_func
from globus_cli.login import login_command
from globus_cli.list_commands import list_commands
from globus_cli.config_command import config_command
- from globus_cli.helpers import common_options
from globus_cli.services.auth import auth_command
from globus_cli.services.transfer import transfer_command
@globus_main_func
- @common_options
def main():
pass
main.add_command(auth_command)
main.add_command(transfer_command)
main.add_command(login_command)
main.add_command(list_commands)
main.add_command(config_command) |
7654a81760d228227c3e3ef9ff9cac9927b4674a | scheduler/tests.py | scheduler/tests.py | from django.test import TestCase
# Create your tests here.
| from django.test import TestCase
from .models import Event, Volunteer
class VolunteerTestCase(TestCase):
def test_gets_public_name(self):
event = Event.objects.create(name='event', slug='event',
description='event', slots_per_day=1,
number_of_days=1)
volunteer = Volunteer.objects.create(event=event,
real_name='Real Name',
email_address='a@b.c',
phone_number='123456789')
volunteer.ensure_has_public_name()
self.assertIsNot(volunteer.public_name, None)
self.assertIsNot(volunteer.slug, None)
| Add a test for public names | Add a test for public names
| Python | mit | thomasleese/rooster,thomasleese/rooster,thomasleese/rooster | from django.test import TestCase
- # Create your tests here.
+ from .models import Event, Volunteer
+
+ class VolunteerTestCase(TestCase):
+
+ def test_gets_public_name(self):
+ event = Event.objects.create(name='event', slug='event',
+ description='event', slots_per_day=1,
+ number_of_days=1)
+ volunteer = Volunteer.objects.create(event=event,
+ real_name='Real Name',
+ email_address='a@b.c',
+ phone_number='123456789')
+ volunteer.ensure_has_public_name()
+ self.assertIsNot(volunteer.public_name, None)
+ self.assertIsNot(volunteer.slug, None)
+ | Add a test for public names | ## Code Before:
from django.test import TestCase
# Create your tests here.
## Instruction:
Add a test for public names
## Code After:
from django.test import TestCase
from .models import Event, Volunteer
class VolunteerTestCase(TestCase):
def test_gets_public_name(self):
event = Event.objects.create(name='event', slug='event',
description='event', slots_per_day=1,
number_of_days=1)
volunteer = Volunteer.objects.create(event=event,
real_name='Real Name',
email_address='a@b.c',
phone_number='123456789')
volunteer.ensure_has_public_name()
self.assertIsNot(volunteer.public_name, None)
self.assertIsNot(volunteer.slug, None)
| from django.test import TestCase
- # Create your tests here.
+ from .models import Event, Volunteer
+
+
+ class VolunteerTestCase(TestCase):
+
+ def test_gets_public_name(self):
+ event = Event.objects.create(name='event', slug='event',
+ description='event', slots_per_day=1,
+ number_of_days=1)
+ volunteer = Volunteer.objects.create(event=event,
+ real_name='Real Name',
+ email_address='a@b.c',
+ phone_number='123456789')
+ volunteer.ensure_has_public_name()
+ self.assertIsNot(volunteer.public_name, None)
+ self.assertIsNot(volunteer.slug, None) |
f3eb94bbe10160a4337c5eb9241166f60b9724a8 | pyvideo/settings.py | pyvideo/settings.py | from richard.settings import *
ALLOWED_HOSTS = ['pyvideo.ru']
TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'
SECRET_KEY = 'this_is_not_production_so_who_cares'
ROOT_URLCONF = 'pyvideo.urls'
WSGI_APPLICATION = 'pyvideo.wsgi.application'
TEMPLATE_DIRS = (
os.path.join(ROOT, 'templates'),
)
INSTALLED_APPS = INSTALLED_APPS + (
'raven.contrib.django.raven_compat',
)
| from richard.settings import *
ALLOWED_HOSTS = ['pyvideo.ru', 'pyvideoru.herokuapp.com']
TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'
SECRET_KEY = 'this_is_not_production_so_who_cares'
ROOT_URLCONF = 'pyvideo.urls'
WSGI_APPLICATION = 'pyvideo.wsgi.application'
TEMPLATE_DIRS = (
os.path.join(ROOT, 'templates'),
)
INSTALLED_APPS = INSTALLED_APPS + (
'raven.contrib.django.raven_compat',
)
| Add heroku host to ALLOWED_HOSTS | Add heroku host to ALLOWED_HOSTS
| Python | bsd-3-clause | WarmongeR1/pyvideo.ru,WarmongeR1/pyvideo.ru,WarmongeR1/pyvideo.ru,coagulant/pyvideo.ru,coagulant/pyvideo.ru,coagulant/pyvideo.ru | from richard.settings import *
- ALLOWED_HOSTS = ['pyvideo.ru']
+ ALLOWED_HOSTS = ['pyvideo.ru', 'pyvideoru.herokuapp.com']
TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'
SECRET_KEY = 'this_is_not_production_so_who_cares'
ROOT_URLCONF = 'pyvideo.urls'
WSGI_APPLICATION = 'pyvideo.wsgi.application'
TEMPLATE_DIRS = (
os.path.join(ROOT, 'templates'),
)
INSTALLED_APPS = INSTALLED_APPS + (
'raven.contrib.django.raven_compat',
)
| Add heroku host to ALLOWED_HOSTS | ## Code Before:
from richard.settings import *
ALLOWED_HOSTS = ['pyvideo.ru']
TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'
SECRET_KEY = 'this_is_not_production_so_who_cares'
ROOT_URLCONF = 'pyvideo.urls'
WSGI_APPLICATION = 'pyvideo.wsgi.application'
TEMPLATE_DIRS = (
os.path.join(ROOT, 'templates'),
)
INSTALLED_APPS = INSTALLED_APPS + (
'raven.contrib.django.raven_compat',
)
## Instruction:
Add heroku host to ALLOWED_HOSTS
## Code After:
from richard.settings import *
ALLOWED_HOSTS = ['pyvideo.ru', 'pyvideoru.herokuapp.com']
TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'
SECRET_KEY = 'this_is_not_production_so_who_cares'
ROOT_URLCONF = 'pyvideo.urls'
WSGI_APPLICATION = 'pyvideo.wsgi.application'
TEMPLATE_DIRS = (
os.path.join(ROOT, 'templates'),
)
INSTALLED_APPS = INSTALLED_APPS + (
'raven.contrib.django.raven_compat',
)
| from richard.settings import *
- ALLOWED_HOSTS = ['pyvideo.ru']
+ ALLOWED_HOSTS = ['pyvideo.ru', 'pyvideoru.herokuapp.com']
TIME_ZONE = 'Europe/Moscow'
LANGUAGE_CODE = 'ru'
SECRET_KEY = 'this_is_not_production_so_who_cares'
ROOT_URLCONF = 'pyvideo.urls'
WSGI_APPLICATION = 'pyvideo.wsgi.application'
TEMPLATE_DIRS = (
os.path.join(ROOT, 'templates'),
)
INSTALLED_APPS = INSTALLED_APPS + (
'raven.contrib.django.raven_compat',
) |
073b55113ac91b2f6fcfbebe9550f0740f8149d4 | jxaas/utils.py | jxaas/utils.py | import logging
from cliff.command import Command
import jujuxaas.client
def get_jxaas_client(command):
tenant = 'abcdef'
username = '123'
password= '123'
client = jujuxaas.client.Client(url="http://127.0.0.1:8080/xaas", tenant=tenant, username=username, password=password)
return client
| import logging
import os
from cliff.command import Command
import jujuxaas.client
def get_jxaas_client(command):
tenant = 'abcdef'
username = '123'
password= '123'
url = os.getenv('JXAAS_URL', "http://127.0.0.1:8080/xaas")
client = jujuxaas.client.Client(url=url, tenant=tenant, username=username, password=password)
return client
| Allow JXAAS_URL to be configured as an env var | Allow JXAAS_URL to be configured as an env var
| Python | apache-2.0 | jxaas/cli | import logging
+ import os
from cliff.command import Command
import jujuxaas.client
def get_jxaas_client(command):
tenant = 'abcdef'
username = '123'
password= '123'
+ url = os.getenv('JXAAS_URL', "http://127.0.0.1:8080/xaas")
- client = jujuxaas.client.Client(url="http://127.0.0.1:8080/xaas", tenant=tenant, username=username, password=password)
+ client = jujuxaas.client.Client(url=url, tenant=tenant, username=username, password=password)
return client
| Allow JXAAS_URL to be configured as an env var | ## Code Before:
import logging
from cliff.command import Command
import jujuxaas.client
def get_jxaas_client(command):
tenant = 'abcdef'
username = '123'
password= '123'
client = jujuxaas.client.Client(url="http://127.0.0.1:8080/xaas", tenant=tenant, username=username, password=password)
return client
## Instruction:
Allow JXAAS_URL to be configured as an env var
## Code After:
import logging
import os
from cliff.command import Command
import jujuxaas.client
def get_jxaas_client(command):
tenant = 'abcdef'
username = '123'
password= '123'
url = os.getenv('JXAAS_URL', "http://127.0.0.1:8080/xaas")
client = jujuxaas.client.Client(url=url, tenant=tenant, username=username, password=password)
return client
| import logging
+ import os
from cliff.command import Command
import jujuxaas.client
def get_jxaas_client(command):
tenant = 'abcdef'
username = '123'
password= '123'
+ url = os.getenv('JXAAS_URL', "http://127.0.0.1:8080/xaas")
- client = jujuxaas.client.Client(url="http://127.0.0.1:8080/xaas", tenant=tenant, username=username, password=password)
? ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ client = jujuxaas.client.Client(url=url, tenant=tenant, username=username, password=password)
? ^^^
return client |
869db9b392356912beebfc4ed1db97baa82e87e3 | resin/models/config.py | resin/models/config.py | import sys
from ..base_request import BaseRequest
from ..settings import Settings
class Config(object):
"""
This class implements configuration model for Resin Python SDK.
Attributes:
_config (dict): caching configuration.
"""
def __init__(self):
self.base_request = BaseRequest()
self.settings = Settings()
self._config = None
def _get_config(self, key):
if self._config:
return self._config[key]
# Load all config again
self.get_all()
return self._config[key]
def get_all(self):
"""
Get all configuration.
Returns:
dict: configuration information.
Examples:
>>> resin.models.config.get_all()
{ all configuration details }
"""
if self._config is None:
self._config = self.base_request.request(
'config', 'GET', endpoint=self.settings.get('api_endpoint'))
return self._config
def get_device_types(self):
"""
Get device types configuration.
Returns:
list: device types information.
Examples:
>>> resin.models.config.get_device_types()
[ all configuration details ]
"""
return self._get_config('deviceTypes')
| import sys
from ..base_request import BaseRequest
from ..settings import Settings
def _normalize_device_type(dev_type):
if dev_type['state'] == 'PREVIEW':
dev_type['state'] = 'ALPHA'
dev_type['name'] = dev_type['name'].replace('(PREVIEW)', '(ALPHA)')
if dev_type['state'] == 'EXPERIMENTAL':
dev_type['state'] = 'BETA'
dev_type['name'] = dev_type['name'].replace('(EXPERIMENTAL)', ('BETA'))
if dev_type['slug'] == 'raspberry-pi':
dev_type['name'] = 'Raspberry Pi (v1 or Zero)'
return dev_type
class Config(object):
"""
This class implements configuration model for Resin Python SDK.
Attributes:
_config (dict): caching configuration.
"""
def __init__(self):
self.base_request = BaseRequest()
self.settings = Settings()
self._config = None
def _get_config(self, key):
if self._config:
return self._config[key]
# Load all config again
self.get_all()
return self._config[key]
def get_all(self):
"""
Get all configuration.
Returns:
dict: configuration information.
Examples:
>>> resin.models.config.get_all()
{ all configuration details }
"""
if self._config is None:
self._config = self.base_request.request(
'config', 'GET', endpoint=self.settings.get('api_endpoint'))
self._config['deviceTypes'] = map(_normalize_device_type, self._config['deviceTypes'])
return self._config
def get_device_types(self):
"""
Get device types configuration.
Returns:
list: device types information.
Examples:
>>> resin.models.config.get_device_types()
[ all configuration details ]
"""
return self._get_config('deviceTypes')
| Patch device types to be marked as ALPHA and BETA | Patch device types to be marked as ALPHA and BETA
| Python | apache-2.0 | resin-io/resin-sdk-python,resin-io/resin-sdk-python,nghiant2710/resin-sdk-python,nghiant2710/resin-sdk-python | import sys
from ..base_request import BaseRequest
from ..settings import Settings
+
+
+ def _normalize_device_type(dev_type):
+ if dev_type['state'] == 'PREVIEW':
+ dev_type['state'] = 'ALPHA'
+ dev_type['name'] = dev_type['name'].replace('(PREVIEW)', '(ALPHA)')
+ if dev_type['state'] == 'EXPERIMENTAL':
+ dev_type['state'] = 'BETA'
+ dev_type['name'] = dev_type['name'].replace('(EXPERIMENTAL)', ('BETA'))
+ if dev_type['slug'] == 'raspberry-pi':
+ dev_type['name'] = 'Raspberry Pi (v1 or Zero)'
+ return dev_type
class Config(object):
"""
This class implements configuration model for Resin Python SDK.
Attributes:
_config (dict): caching configuration.
"""
def __init__(self):
self.base_request = BaseRequest()
self.settings = Settings()
self._config = None
def _get_config(self, key):
if self._config:
return self._config[key]
# Load all config again
self.get_all()
return self._config[key]
def get_all(self):
"""
Get all configuration.
Returns:
dict: configuration information.
Examples:
>>> resin.models.config.get_all()
{ all configuration details }
"""
if self._config is None:
self._config = self.base_request.request(
'config', 'GET', endpoint=self.settings.get('api_endpoint'))
+ self._config['deviceTypes'] = map(_normalize_device_type, self._config['deviceTypes'])
return self._config
def get_device_types(self):
"""
Get device types configuration.
Returns:
list: device types information.
Examples:
>>> resin.models.config.get_device_types()
[ all configuration details ]
"""
return self._get_config('deviceTypes')
| Patch device types to be marked as ALPHA and BETA | ## Code Before:
import sys
from ..base_request import BaseRequest
from ..settings import Settings
class Config(object):
"""
This class implements configuration model for Resin Python SDK.
Attributes:
_config (dict): caching configuration.
"""
def __init__(self):
self.base_request = BaseRequest()
self.settings = Settings()
self._config = None
def _get_config(self, key):
if self._config:
return self._config[key]
# Load all config again
self.get_all()
return self._config[key]
def get_all(self):
"""
Get all configuration.
Returns:
dict: configuration information.
Examples:
>>> resin.models.config.get_all()
{ all configuration details }
"""
if self._config is None:
self._config = self.base_request.request(
'config', 'GET', endpoint=self.settings.get('api_endpoint'))
return self._config
def get_device_types(self):
"""
Get device types configuration.
Returns:
list: device types information.
Examples:
>>> resin.models.config.get_device_types()
[ all configuration details ]
"""
return self._get_config('deviceTypes')
## Instruction:
Patch device types to be marked as ALPHA and BETA
## Code After:
import sys
from ..base_request import BaseRequest
from ..settings import Settings
def _normalize_device_type(dev_type):
if dev_type['state'] == 'PREVIEW':
dev_type['state'] = 'ALPHA'
dev_type['name'] = dev_type['name'].replace('(PREVIEW)', '(ALPHA)')
if dev_type['state'] == 'EXPERIMENTAL':
dev_type['state'] = 'BETA'
dev_type['name'] = dev_type['name'].replace('(EXPERIMENTAL)', ('BETA'))
if dev_type['slug'] == 'raspberry-pi':
dev_type['name'] = 'Raspberry Pi (v1 or Zero)'
return dev_type
class Config(object):
"""
This class implements configuration model for Resin Python SDK.
Attributes:
_config (dict): caching configuration.
"""
def __init__(self):
self.base_request = BaseRequest()
self.settings = Settings()
self._config = None
def _get_config(self, key):
if self._config:
return self._config[key]
# Load all config again
self.get_all()
return self._config[key]
def get_all(self):
"""
Get all configuration.
Returns:
dict: configuration information.
Examples:
>>> resin.models.config.get_all()
{ all configuration details }
"""
if self._config is None:
self._config = self.base_request.request(
'config', 'GET', endpoint=self.settings.get('api_endpoint'))
self._config['deviceTypes'] = map(_normalize_device_type, self._config['deviceTypes'])
return self._config
def get_device_types(self):
"""
Get device types configuration.
Returns:
list: device types information.
Examples:
>>> resin.models.config.get_device_types()
[ all configuration details ]
"""
return self._get_config('deviceTypes')
| import sys
from ..base_request import BaseRequest
from ..settings import Settings
+
+
+ def _normalize_device_type(dev_type):
+ if dev_type['state'] == 'PREVIEW':
+ dev_type['state'] = 'ALPHA'
+ dev_type['name'] = dev_type['name'].replace('(PREVIEW)', '(ALPHA)')
+ if dev_type['state'] == 'EXPERIMENTAL':
+ dev_type['state'] = 'BETA'
+ dev_type['name'] = dev_type['name'].replace('(EXPERIMENTAL)', ('BETA'))
+ if dev_type['slug'] == 'raspberry-pi':
+ dev_type['name'] = 'Raspberry Pi (v1 or Zero)'
+ return dev_type
class Config(object):
"""
This class implements configuration model for Resin Python SDK.
Attributes:
_config (dict): caching configuration.
"""
def __init__(self):
self.base_request = BaseRequest()
self.settings = Settings()
self._config = None
def _get_config(self, key):
if self._config:
return self._config[key]
# Load all config again
self.get_all()
return self._config[key]
def get_all(self):
"""
Get all configuration.
Returns:
dict: configuration information.
Examples:
>>> resin.models.config.get_all()
{ all configuration details }
"""
if self._config is None:
self._config = self.base_request.request(
'config', 'GET', endpoint=self.settings.get('api_endpoint'))
+ self._config['deviceTypes'] = map(_normalize_device_type, self._config['deviceTypes'])
return self._config
def get_device_types(self):
"""
Get device types configuration.
Returns:
list: device types information.
Examples:
>>> resin.models.config.get_device_types()
[ all configuration details ]
"""
return self._get_config('deviceTypes') |
4bfa74aa2ea9ef936d5ec5efbf32f2d6a8a10634 | adr/recipes/config_durations.py | adr/recipes/config_durations.py | from __future__ import absolute_import, print_function
from ..query import run_query
def run(config, args):
# process config data
data = run_query('config_durations', config, args)["data"]
result = []
for record in data:
if not record or not record[args.sort_key]:
continue
if isinstance(record[1], list):
record[1] = record[1][-1]
if record[2] is None:
continue
if record[3] is None:
continue
record[3] = round(record[3] / 60, 2)
record.append(int(round(record[2] * record[3], 0)))
result.append(record)
result = sorted(result, key=lambda k: k[args.sort_key], reverse=True)[:args.limit]
result.insert(0, ['Platform', 'Type', 'Num Jobs', 'Average Hours', 'Total Hours'])
return result
| from __future__ import absolute_import, print_function
from ..query import run_query
BROKEN = True
def run(config, args):
# process config data
data = run_query('config_durations', config, args)["data"]
result = []
for record in data:
if not record or not record[args.sort_key]:
continue
if isinstance(record[1], list):
record[1] = record[1][-1]
if record[2] is None:
continue
if record[3] is None:
continue
record[3] = round(record[3] / 60, 2)
record.append(int(round(record[2] * record[3], 0)))
result.append(record)
result = sorted(result, key=lambda k: k[args.sort_key], reverse=True)[:args.limit]
result.insert(0, ['Platform', 'Type', 'Num Jobs', 'Average Hours', 'Total Hours'])
return result
| Disable failing recipe in CRON tests | Disable failing recipe in CRON tests
| Python | mpl-2.0 | ahal/active-data-recipes,ahal/active-data-recipes | from __future__ import absolute_import, print_function
from ..query import run_query
+
+ BROKEN = True
def run(config, args):
# process config data
data = run_query('config_durations', config, args)["data"]
result = []
for record in data:
if not record or not record[args.sort_key]:
continue
if isinstance(record[1], list):
record[1] = record[1][-1]
if record[2] is None:
continue
if record[3] is None:
continue
record[3] = round(record[3] / 60, 2)
record.append(int(round(record[2] * record[3], 0)))
result.append(record)
result = sorted(result, key=lambda k: k[args.sort_key], reverse=True)[:args.limit]
result.insert(0, ['Platform', 'Type', 'Num Jobs', 'Average Hours', 'Total Hours'])
return result
| Disable failing recipe in CRON tests | ## Code Before:
from __future__ import absolute_import, print_function
from ..query import run_query
def run(config, args):
# process config data
data = run_query('config_durations', config, args)["data"]
result = []
for record in data:
if not record or not record[args.sort_key]:
continue
if isinstance(record[1], list):
record[1] = record[1][-1]
if record[2] is None:
continue
if record[3] is None:
continue
record[3] = round(record[3] / 60, 2)
record.append(int(round(record[2] * record[3], 0)))
result.append(record)
result = sorted(result, key=lambda k: k[args.sort_key], reverse=True)[:args.limit]
result.insert(0, ['Platform', 'Type', 'Num Jobs', 'Average Hours', 'Total Hours'])
return result
## Instruction:
Disable failing recipe in CRON tests
## Code After:
from __future__ import absolute_import, print_function
from ..query import run_query
BROKEN = True
def run(config, args):
# process config data
data = run_query('config_durations', config, args)["data"]
result = []
for record in data:
if not record or not record[args.sort_key]:
continue
if isinstance(record[1], list):
record[1] = record[1][-1]
if record[2] is None:
continue
if record[3] is None:
continue
record[3] = round(record[3] / 60, 2)
record.append(int(round(record[2] * record[3], 0)))
result.append(record)
result = sorted(result, key=lambda k: k[args.sort_key], reverse=True)[:args.limit]
result.insert(0, ['Platform', 'Type', 'Num Jobs', 'Average Hours', 'Total Hours'])
return result
| from __future__ import absolute_import, print_function
from ..query import run_query
+
+ BROKEN = True
def run(config, args):
# process config data
data = run_query('config_durations', config, args)["data"]
result = []
for record in data:
if not record or not record[args.sort_key]:
continue
if isinstance(record[1], list):
record[1] = record[1][-1]
if record[2] is None:
continue
if record[3] is None:
continue
record[3] = round(record[3] / 60, 2)
record.append(int(round(record[2] * record[3], 0)))
result.append(record)
result = sorted(result, key=lambda k: k[args.sort_key], reverse=True)[:args.limit]
result.insert(0, ['Platform', 'Type', 'Num Jobs', 'Average Hours', 'Total Hours'])
return result |
41b5a95a5c396c131d1426dd926e0a1a4beccc86 | mrp_workorder_sequence/models/mrp_production.py | mrp_workorder_sequence/models/mrp_production.py |
from odoo import models
class MrpProduction(models.Model):
_inherit = "mrp.production"
def _reset_work_order_sequence(self):
for rec in self:
current_sequence = 1
for work in rec.workorder_ids:
work.sequence = current_sequence
current_sequence += 1
def _generate_workorders(self, exploded_boms):
res = super()._generate_workorders(exploded_boms)
self._reset_work_order_sequence()
return res
|
from odoo import models
class MrpProduction(models.Model):
_inherit = "mrp.production"
def _reset_work_order_sequence(self):
for rec in self:
current_sequence = 1
for work in rec.workorder_ids:
work.sequence = current_sequence
current_sequence += 1
def _create_workorder(self):
res = super()._create_workorder()
self._reset_work_order_sequence()
return res
| Call method changed on v14 | [FIX] mrp_workorder_sequence: Call method changed on v14
| Python | agpl-3.0 | OCA/manufacture,OCA/manufacture |
from odoo import models
class MrpProduction(models.Model):
_inherit = "mrp.production"
def _reset_work_order_sequence(self):
for rec in self:
current_sequence = 1
for work in rec.workorder_ids:
work.sequence = current_sequence
current_sequence += 1
- def _generate_workorders(self, exploded_boms):
+ def _create_workorder(self):
- res = super()._generate_workorders(exploded_boms)
+ res = super()._create_workorder()
self._reset_work_order_sequence()
return res
| Call method changed on v14 | ## Code Before:
from odoo import models
class MrpProduction(models.Model):
_inherit = "mrp.production"
def _reset_work_order_sequence(self):
for rec in self:
current_sequence = 1
for work in rec.workorder_ids:
work.sequence = current_sequence
current_sequence += 1
def _generate_workorders(self, exploded_boms):
res = super()._generate_workorders(exploded_boms)
self._reset_work_order_sequence()
return res
## Instruction:
Call method changed on v14
## Code After:
from odoo import models
class MrpProduction(models.Model):
_inherit = "mrp.production"
def _reset_work_order_sequence(self):
for rec in self:
current_sequence = 1
for work in rec.workorder_ids:
work.sequence = current_sequence
current_sequence += 1
def _create_workorder(self):
res = super()._create_workorder()
self._reset_work_order_sequence()
return res
|
from odoo import models
class MrpProduction(models.Model):
_inherit = "mrp.production"
def _reset_work_order_sequence(self):
for rec in self:
current_sequence = 1
for work in rec.workorder_ids:
work.sequence = current_sequence
current_sequence += 1
- def _generate_workorders(self, exploded_boms):
+ def _create_workorder(self):
- res = super()._generate_workorders(exploded_boms)
? ^ --- - -------------
+ res = super()._create_workorder()
? ^^
self._reset_work_order_sequence()
return res |
8f3760697dffc8f8be789a1a8594dae97b245536 | app/redidropper/startup/settings.py | app/redidropper/startup/settings.py | import os
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
| import os
# Limit the max upload size for the app to 20 MB
# @see https://pythonhosted.org/Flask-Uploads/
DEFAULT_MAX_CONTENT_LENGTH = 20 * 1024 * 1024
MAX_CONTENT_LENGTH = os.getenv('REDI_DROPPER_MAX_CONTENT_LENGTH', DEFAULT_MAX_CONTENT_LENGTH)
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
| Allow max 20MB file chunks | Allow max 20MB file chunks
| Python | bsd-3-clause | indera/redi-dropper-client,indera/redi-dropper-client,indera/redi-dropper-client,indera/redi-dropper-client,indera/redi-dropper-client | import os
+
+ # Limit the max upload size for the app to 20 MB
+ # @see https://pythonhosted.org/Flask-Uploads/
+ DEFAULT_MAX_CONTENT_LENGTH = 20 * 1024 * 1024
+ MAX_CONTENT_LENGTH = os.getenv('REDI_DROPPER_MAX_CONTENT_LENGTH', DEFAULT_MAX_CONTENT_LENGTH)
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
| Allow max 20MB file chunks | ## Code Before:
import os
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
## Instruction:
Allow max 20MB file chunks
## Code After:
import os
# Limit the max upload size for the app to 20 MB
# @see https://pythonhosted.org/Flask-Uploads/
DEFAULT_MAX_CONTENT_LENGTH = 20 * 1024 * 1024
MAX_CONTENT_LENGTH = os.getenv('REDI_DROPPER_MAX_CONTENT_LENGTH', DEFAULT_MAX_CONTENT_LENGTH)
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved'))
| import os
+
+ # Limit the max upload size for the app to 20 MB
+ # @see https://pythonhosted.org/Flask-Uploads/
+ DEFAULT_MAX_CONTENT_LENGTH = 20 * 1024 * 1024
+ MAX_CONTENT_LENGTH = os.getenv('REDI_DROPPER_MAX_CONTENT_LENGTH', DEFAULT_MAX_CONTENT_LENGTH)
DB_USER = os.getenv('REDI_DROPPER_DB_USER', 'redidropper')
DB_PASS = os.getenv('REDI_DROPPER_DB_PASS', 'securepass')
# http://effbot.org/librarybook/os-path.htm
INCOMING_TEMP_DIR = os.getenv('REDI_DROPPER_INCOMING_TEMP_DIR', \
os.path.expanduser('~/.redidropper/incoming/temp'))
INCOMING_SAVED_DIR = os.getenv('REDI_DROPPER_NCOMING_SAVED_DIR',\
os.path.expanduser('~/.redidropper/incoming/saved')) |
81dfb5cb952fbca90882bd39e76887f0fa6479eb | msmexplorer/tests/test_msm_plot.py | msmexplorer/tests/test_msm_plot.py | import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
from ..plots import plot_pop_resids, plot_msm_network, plot_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
| import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
from ..plots import plot_pop_resids, plot_msm_network, plot_timescales, plot_implied_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
def test_plot_implied_timescales():
lag_times = [1, 10, 50, 100, 200, 250, 500]
msm_objs = []
for lag in lag_times:
# Construct MSM
msm = MarkovStateModel(lag_time=lag, n_timescales=5)
msm.fit(clustered_trajs)
msm_objs.append(msm)
ax = plot_implied_timescales(msm_objs)
assert isinstance(ax, SubplotBase)
| Add test for implied timescales plot | Add test for implied timescales plot
| Python | mit | msmexplorer/msmexplorer,msmexplorer/msmexplorer | import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
- from ..plots import plot_pop_resids, plot_msm_network, plot_timescales
+ from ..plots import plot_pop_resids, plot_msm_network, plot_timescales, plot_implied_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
+
+ def test_plot_implied_timescales():
+ lag_times = [1, 10, 50, 100, 200, 250, 500]
+ msm_objs = []
+ for lag in lag_times:
+ # Construct MSM
+ msm = MarkovStateModel(lag_time=lag, n_timescales=5)
+ msm.fit(clustered_trajs)
+ msm_objs.append(msm)
+ ax = plot_implied_timescales(msm_objs)
+ assert isinstance(ax, SubplotBase)
+ | Add test for implied timescales plot | ## Code Before:
import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
from ..plots import plot_pop_resids, plot_msm_network, plot_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
## Instruction:
Add test for implied timescales plot
## Code After:
import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
from ..plots import plot_pop_resids, plot_msm_network, plot_timescales, plot_implied_timescales
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
def test_plot_implied_timescales():
lag_times = [1, 10, 50, 100, 200, 250, 500]
msm_objs = []
for lag in lag_times:
# Construct MSM
msm = MarkovStateModel(lag_time=lag, n_timescales=5)
msm.fit(clustered_trajs)
msm_objs.append(msm)
ax = plot_implied_timescales(msm_objs)
assert isinstance(ax, SubplotBase)
| import numpy as np
from msmbuilder.msm import MarkovStateModel, BayesianMarkovStateModel
from matplotlib.axes import SubplotBase
from seaborn.apionly import JointGrid
- from ..plots import plot_pop_resids, plot_msm_network, plot_timescales
+ from ..plots import plot_pop_resids, plot_msm_network, plot_timescales, plot_implied_timescales
? +++++++++++++++++++++++++
rs = np.random.RandomState(42)
data = rs.randint(low=0, high=10, size=100000)
msm = MarkovStateModel()
msm.fit(data)
bmsm = BayesianMarkovStateModel()
bmsm.fit(data)
def test_plot_pop_resids():
ax = plot_pop_resids(msm)
assert isinstance(ax, JointGrid)
def test_plot_msm_network():
ax = plot_msm_network(msm)
assert isinstance(ax, SubplotBase)
def test_plot_timescales_msm():
ax = plot_timescales(msm, n_timescales=3, xlabel='x', ylabel='y')
assert isinstance(ax, SubplotBase)
def test_plot_timescales_bmsm():
ax = plot_timescales(bmsm)
assert isinstance(ax, SubplotBase)
+
+
+ def test_plot_implied_timescales():
+ lag_times = [1, 10, 50, 100, 200, 250, 500]
+ msm_objs = []
+ for lag in lag_times:
+ # Construct MSM
+ msm = MarkovStateModel(lag_time=lag, n_timescales=5)
+ msm.fit(clustered_trajs)
+ msm_objs.append(msm)
+ ax = plot_implied_timescales(msm_objs)
+ assert isinstance(ax, SubplotBase) |
84f6cc46e7ba7e2e3c046e957545687ce6802278 | cegui/src/ScriptingModules/PythonScriptModule/bindings/distutils/PyCEGUI/__init__.py | cegui/src/ScriptingModules/PythonScriptModule/bindings/distutils/PyCEGUI/__init__.py | import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(str(fake).split()[3][1:])
libpath = os.path.abspath(get_my_path())
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
| import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(os.path.abspath(fake.__file__))
libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
| Use a less pathetic method to retrieve the PyCEGUI dirname | MOD: Use a less pathetic method to retrieve the PyCEGUI dirname
| Python | mit | cbeck88/cegui-mirror-two,cbeck88/cegui-mirror-two,cbeck88/cegui-mirror-two,cbeck88/cegui-mirror-two,cbeck88/cegui-mirror-two | import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
- return os.path.dirname(str(fake).split()[3][1:])
+ return os.path.dirname(os.path.abspath(fake.__file__))
- libpath = os.path.abspath(get_my_path())
+ libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
| Use a less pathetic method to retrieve the PyCEGUI dirname | ## Code Before:
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(str(fake).split()[3][1:])
libpath = os.path.abspath(get_my_path())
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
## Instruction:
Use a less pathetic method to retrieve the PyCEGUI dirname
## Code After:
import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
return os.path.dirname(os.path.abspath(fake.__file__))
libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import *
| import os
import os.path
# atrocious and unholy!
def get_my_path():
import fake
- return os.path.dirname(str(fake).split()[3][1:])
+ return os.path.dirname(os.path.abspath(fake.__file__))
- libpath = os.path.abspath(get_my_path())
+ libpath = get_my_path()
#print "libpath =", libpath
os.environ['PATH'] = libpath + ";" + os.environ['PATH']
from PyCEGUI import * |
9f8a8321fbed1008f0eec608ba7bce9b08897e40 | manage.py | manage.py | import os
import unittest
from flask.ext.script import Manager
from server import app
from server.models import db
manager = Manager(app)
@manager.command
def init_db():
""" Initialize database: drop and create all columns """
db.drop_all()
db.create_all()
@manager.command
def test():
tests_path = os.path.join(os.path.dirname(__file__), 'server', 'tests')
tests = unittest.defaultTestLoader.discover(tests_path)
runner = unittest.TextTestRunner()
runner.run(tests)
if __name__ == '__main__':
manager.run()
| import os
import unittest
from flask.ext.script import Manager
from server import app
from server.models import db
from server.models import Lecturer, Course, Lecture, Comment
manager = Manager(app)
@manager.command
def init_db():
""" Initialize database: drop and create all columns """
db.drop_all()
db.create_all()
@manager.command
def mock_db():
""" Insert mock data into database """
init_db()
simon = Lecturer('Simon', 'McCallum')
db.session.add(simon)
imt3601 = Course('IMT3601 - Game Programming', simon)
db.session.add(imt3601)
imt3601_l1 = Lecture('Lecture 1', imt3601)
db.session.add(imt3601_l1)
imt3601_l1_c1 = Comment('This is boring', imt3601_l1)
db.session.add(imt3601_l1_c1)
imt3601_l1_c2 = Comment('This is fun!', imt3601_l1)
db.session.add(imt3601_l1_c2)
db.session.commit()
@manager.command
def test():
tests_path = os.path.join(os.path.dirname(__file__), 'server', 'tests')
tests = unittest.defaultTestLoader.discover(tests_path)
runner = unittest.TextTestRunner()
runner.run(tests)
if __name__ == '__main__':
manager.run()
| Add command to mock some db data | Add command to mock some db data
| Python | mit | MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS,MACSIFS/IFS | import os
import unittest
from flask.ext.script import Manager
from server import app
from server.models import db
+
+ from server.models import Lecturer, Course, Lecture, Comment
manager = Manager(app)
@manager.command
def init_db():
""" Initialize database: drop and create all columns """
db.drop_all()
db.create_all()
@manager.command
+ def mock_db():
+ """ Insert mock data into database """
+ init_db()
+
+ simon = Lecturer('Simon', 'McCallum')
+ db.session.add(simon)
+
+ imt3601 = Course('IMT3601 - Game Programming', simon)
+ db.session.add(imt3601)
+
+ imt3601_l1 = Lecture('Lecture 1', imt3601)
+ db.session.add(imt3601_l1)
+
+ imt3601_l1_c1 = Comment('This is boring', imt3601_l1)
+ db.session.add(imt3601_l1_c1)
+ imt3601_l1_c2 = Comment('This is fun!', imt3601_l1)
+ db.session.add(imt3601_l1_c2)
+
+ db.session.commit()
+
+
+ @manager.command
def test():
tests_path = os.path.join(os.path.dirname(__file__), 'server', 'tests')
tests = unittest.defaultTestLoader.discover(tests_path)
runner = unittest.TextTestRunner()
runner.run(tests)
if __name__ == '__main__':
manager.run()
| Add command to mock some db data | ## Code Before:
import os
import unittest
from flask.ext.script import Manager
from server import app
from server.models import db
manager = Manager(app)
@manager.command
def init_db():
""" Initialize database: drop and create all columns """
db.drop_all()
db.create_all()
@manager.command
def test():
tests_path = os.path.join(os.path.dirname(__file__), 'server', 'tests')
tests = unittest.defaultTestLoader.discover(tests_path)
runner = unittest.TextTestRunner()
runner.run(tests)
if __name__ == '__main__':
manager.run()
## Instruction:
Add command to mock some db data
## Code After:
import os
import unittest
from flask.ext.script import Manager
from server import app
from server.models import db
from server.models import Lecturer, Course, Lecture, Comment
manager = Manager(app)
@manager.command
def init_db():
""" Initialize database: drop and create all columns """
db.drop_all()
db.create_all()
@manager.command
def mock_db():
""" Insert mock data into database """
init_db()
simon = Lecturer('Simon', 'McCallum')
db.session.add(simon)
imt3601 = Course('IMT3601 - Game Programming', simon)
db.session.add(imt3601)
imt3601_l1 = Lecture('Lecture 1', imt3601)
db.session.add(imt3601_l1)
imt3601_l1_c1 = Comment('This is boring', imt3601_l1)
db.session.add(imt3601_l1_c1)
imt3601_l1_c2 = Comment('This is fun!', imt3601_l1)
db.session.add(imt3601_l1_c2)
db.session.commit()
@manager.command
def test():
tests_path = os.path.join(os.path.dirname(__file__), 'server', 'tests')
tests = unittest.defaultTestLoader.discover(tests_path)
runner = unittest.TextTestRunner()
runner.run(tests)
if __name__ == '__main__':
manager.run()
| import os
import unittest
from flask.ext.script import Manager
from server import app
from server.models import db
+
+ from server.models import Lecturer, Course, Lecture, Comment
manager = Manager(app)
@manager.command
def init_db():
""" Initialize database: drop and create all columns """
db.drop_all()
db.create_all()
@manager.command
+ def mock_db():
+ """ Insert mock data into database """
+ init_db()
+
+ simon = Lecturer('Simon', 'McCallum')
+ db.session.add(simon)
+
+ imt3601 = Course('IMT3601 - Game Programming', simon)
+ db.session.add(imt3601)
+
+ imt3601_l1 = Lecture('Lecture 1', imt3601)
+ db.session.add(imt3601_l1)
+
+ imt3601_l1_c1 = Comment('This is boring', imt3601_l1)
+ db.session.add(imt3601_l1_c1)
+ imt3601_l1_c2 = Comment('This is fun!', imt3601_l1)
+ db.session.add(imt3601_l1_c2)
+
+ db.session.commit()
+
+
+ @manager.command
def test():
tests_path = os.path.join(os.path.dirname(__file__), 'server', 'tests')
tests = unittest.defaultTestLoader.discover(tests_path)
runner = unittest.TextTestRunner()
runner.run(tests)
if __name__ == '__main__':
manager.run() |
879ea9c4234a5d8435f213c6f9b082a86a794ecc | employees/serializers.py | employees/serializers.py | from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
| from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
| Add 1 level depth and also categories fields to employee serializer | Add 1 level depth and also categories fields to employee serializer
| Python | apache-2.0 | belatrix/BackendAllStars | from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
+ depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
+ 'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
| Add 1 level depth and also categories fields to employee serializer | ## Code Before:
from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
## Instruction:
Add 1 level depth and also categories fields to employee serializer
## Code After:
from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
| from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
+ depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
+ 'categories',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar') |
c0673083709d08f80672cfa58623a667e0edeffa | reddit_adzerk/adzerkads.py | reddit_adzerk/adzerkads.py | from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_test_srs = g.live_config.get("adzerk_test_srs")
if adzerk_test_srs and c.site.name in adzerk_test_srs:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
origin=c.request_origin,
)
self.frame_id = "ad_main"
| from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_test_srs = g.live_config.get("adzerk_test_srs")
if adzerk_test_srs and c.site.name in adzerk_test_srs:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
subreddit=quote(c.site.name),
origin=c.request_origin,
)
self.frame_id = "ad_main"
| Add subreddit query param to adzerk frame. | Add subreddit query param to adzerk frame.
For tracking / targeting.
| Python | bsd-3-clause | madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk | + from urllib import quote
+
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_test_srs = g.live_config.get("adzerk_test_srs")
if adzerk_test_srs and c.site.name in adzerk_test_srs:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
+ subreddit=quote(c.site.name),
origin=c.request_origin,
)
self.frame_id = "ad_main"
| Add subreddit query param to adzerk frame. | ## Code Before:
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_test_srs = g.live_config.get("adzerk_test_srs")
if adzerk_test_srs and c.site.name in adzerk_test_srs:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
origin=c.request_origin,
)
self.frame_id = "ad_main"
## Instruction:
Add subreddit query param to adzerk frame.
## Code After:
from urllib import quote
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_test_srs = g.live_config.get("adzerk_test_srs")
if adzerk_test_srs and c.site.name in adzerk_test_srs:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
subreddit=quote(c.site.name),
origin=c.request_origin,
)
self.frame_id = "ad_main"
| + from urllib import quote
+
from pylons import c, g
from r2.lib.pages import Ads as BaseAds
class Ads(BaseAds):
def __init__(self):
BaseAds.__init__(self)
adzerk_test_srs = g.live_config.get("adzerk_test_srs")
if adzerk_test_srs and c.site.name in adzerk_test_srs:
url_key = "adzerk_https_url" if c.secure else "adzerk_url"
self.ad_url = g.config[url_key].format(
+ subreddit=quote(c.site.name),
origin=c.request_origin,
)
self.frame_id = "ad_main" |
a0b4476d08c59da74eb64cbcc92621cad160fbce | scipy_distutils/setup.py | scipy_distutils/setup.py | import sys
sys.path.insert(0,'..')
import os
d = os.path.basename(os.path.dirname(os.path.abspath(__file__)))
if d == 'scipy_distutils':
execfile('setup_scipy_distutils.py')
else:
os.system('cd .. && ln -s %s scipy_distutils' % (d))
execfile('setup_scipy_distutils.py')
os.system('cd .. && rm -f scipy_distutils')
| import sys
sys.path.insert(0,'..')
import os
d = os.path.basename(os.path.dirname(os.path.abspath(__file__)))
if d == 'scipy_distutils':
import scipy_distutils
del sys.path[0]
execfile('setup_scipy_distutils.py')
else:
os.system('cd .. && ln -s %s scipy_distutils' % (d))
import scipy_distutils
del sys.path[0]
execfile('setup_scipy_distutils.py')
os.system('cd .. && rm -f scipy_distutils')
| Clean up sys.path after scipy_distutils has been imported. | Clean up sys.path after scipy_distutils has been imported.
| Python | bsd-3-clause | mattip/numpy,andsor/numpy,naritta/numpy,GaZ3ll3/numpy,jschueller/numpy,felipebetancur/numpy,bertrand-l/numpy,argriffing/numpy,pbrod/numpy,dato-code/numpy,AustereCuriosity/numpy,cjermain/numpy,argriffing/numpy,astrofrog/numpy,seberg/numpy,jankoslavic/numpy,rmcgibbo/numpy,Yusa95/numpy,cjermain/numpy,nguyentu1602/numpy,githubmlai/numpy,behzadnouri/numpy,maniteja123/numpy,cjermain/numpy,WarrenWeckesser/numpy,joferkington/numpy,SunghanKim/numpy,sinhrks/numpy,numpy/numpy,skymanaditya1/numpy,rajathkumarmp/numpy,bringingheavendown/numpy,MSeifert04/numpy,madphysicist/numpy,Eric89GXL/numpy,MaPePeR/numpy,pyparallel/numpy,jankoslavic/numpy,maniteja123/numpy,yiakwy/numpy,KaelChen/numpy,rmcgibbo/numpy,pbrod/numpy,ahaldane/numpy,numpy/numpy,rhythmsosad/numpy,nguyentu1602/numpy,MSeifert04/numpy,larsmans/numpy,musically-ut/numpy,ContinuumIO/numpy,NextThought/pypy-numpy,dato-code/numpy,NextThought/pypy-numpy,kirillzhuravlev/numpy,MichaelAquilina/numpy,madphysicist/numpy,embray/numpy,gfyoung/numpy,MaPePeR/numpy,stuarteberg/numpy,WarrenWeckesser/numpy,ahaldane/numpy,jankoslavic/numpy,Srisai85/numpy,madphysicist/numpy,embray/numpy,anntzer/numpy,Dapid/numpy,ogrisel/numpy,stefanv/numpy,madphysicist/numpy,dwf/numpy,KaelChen/numpy,ChanderG/numpy,utke1/numpy,ChanderG/numpy,rajathkumarmp/numpy,kiwifb/numpy,dwillmer/numpy,SunghanKim/numpy,stefanv/numpy,simongibbons/numpy,stuarteberg/numpy,skwbc/numpy,rherault-insa/numpy,tdsmith/numpy,pizzathief/numpy,endolith/numpy,chatcannon/numpy,sonnyhu/numpy,sinhrks/numpy,leifdenby/numpy,cowlicks/numpy,shoyer/numpy,ajdawson/numpy,BabeNovelty/numpy,chatcannon/numpy,behzadnouri/numpy,GrimDerp/numpy,joferkington/numpy,grlee77/numpy,seberg/numpy,dwf/numpy,dch312/numpy,Anwesh43/numpy,AustereCuriosity/numpy,ChristopherHogan/numpy,grlee77/numpy,ogrisel/numpy,Eric89GXL/numpy,Yusa95/numpy,yiakwy/numpy,numpy/numpy-refactor,anntzer/numpy,jakirkham/numpy,b-carter/numpy,skwbc/numpy,BMJHayward/numpy,jorisvandenbossche/numpy,Anwesh43/numpy,githu
bmlai/numpy,has2k1/numpy,cjermain/numpy,mortada/numpy,groutr/numpy,dwillmer/numpy,tynn/numpy,SiccarPoint/numpy,rherault-insa/numpy,anntzer/numpy,dch312/numpy,ahaldane/numpy,andsor/numpy,gfyoung/numpy,chiffa/numpy,grlee77/numpy,MaPePeR/numpy,BabeNovelty/numpy,matthew-brett/numpy,utke1/numpy,ViralLeadership/numpy,rudimeier/numpy,SiccarPoint/numpy,ViralLeadership/numpy,abalkin/numpy,bringingheavendown/numpy,ewmoore/numpy,mingwpy/numpy,mathdd/numpy,pelson/numpy,astrofrog/numpy,brandon-rhodes/numpy,KaelChen/numpy,nguyentu1602/numpy,sinhrks/numpy,ESSS/numpy,embray/numpy,bertrand-l/numpy,groutr/numpy,stefanv/numpy,mwiebe/numpy,pyparallel/numpy,mindw/numpy,yiakwy/numpy,jonathanunderwood/numpy,pelson/numpy,AustereCuriosity/numpy,musically-ut/numpy,dwf/numpy,bmorris3/numpy,Yusa95/numpy,gmcastil/numpy,immerrr/numpy,tacaswell/numpy,drasmuss/numpy,nbeaver/numpy,ContinuumIO/numpy,hainm/numpy,Anwesh43/numpy,madphysicist/numpy,mathdd/numpy,numpy/numpy-refactor,naritta/numpy,pyparallel/numpy,simongibbons/numpy,stefanv/numpy,skwbc/numpy,mathdd/numpy,BabeNovelty/numpy,mortada/numpy,stuarteberg/numpy,rhythmsosad/numpy,anntzer/numpy,simongibbons/numpy,skymanaditya1/numpy,utke1/numpy,gfyoung/numpy,dimasad/numpy,ssanderson/numpy,bringingheavendown/numpy,ogrisel/numpy,empeeu/numpy,jakirkham/numpy,andsor/numpy,ogrisel/numpy,chatcannon/numpy,pizzathief/numpy,numpy/numpy-refactor,MSeifert04/numpy,hainm/numpy,BMJHayward/numpy,immerrr/numpy,CMartelLML/numpy,musically-ut/numpy,leifdenby/numpy,skymanaditya1/numpy,charris/numpy,Yusa95/numpy,rudimeier/numpy,joferkington/numpy,larsmans/numpy,charris/numpy,ContinuumIO/numpy,jorisvandenbossche/numpy,bmorris3/numpy,endolith/numpy,charris/numpy,mindw/numpy,SunghanKim/numpy,WillieMaddox/numpy,chiffa/numpy,brandon-rhodes/numpy,trankmichael/numpy,WarrenWeckesser/numpy,stefanv/numpy,dato-code/numpy,CMartelLML/numpy,ChristopherHogan/numpy,Dapid/numpy,pelson/numpy,ekalosak/numpy,cowlicks/numpy,ogrisel/numpy,ddasilva/numpy,trankmichael/numpy,rhythmsosad/numpy,
ssanderson/numpy,kirillzhuravlev/numpy,dch312/numpy,numpy/numpy-refactor,nbeaver/numpy,gmcastil/numpy,bmorris3/numpy,simongibbons/numpy,rhythmsosad/numpy,abalkin/numpy,felipebetancur/numpy,tacaswell/numpy,immerrr/numpy,MSeifert04/numpy,drasmuss/numpy,mhvk/numpy,Eric89GXL/numpy,numpy/numpy-refactor,Linkid/numpy,Srisai85/numpy,sonnyhu/numpy,leifdenby/numpy,maniteja123/numpy,rgommers/numpy,empeeu/numpy,rgommers/numpy,mingwpy/numpy,mortada/numpy,mwiebe/numpy,ewmoore/numpy,rgommers/numpy,BMJHayward/numpy,SunghanKim/numpy,pdebuyl/numpy,larsmans/numpy,b-carter/numpy,naritta/numpy,ajdawson/numpy,Linkid/numpy,drasmuss/numpy,MichaelAquilina/numpy,jorisvandenbossche/numpy,KaelChen/numpy,dwf/numpy,larsmans/numpy,groutr/numpy,CMartelLML/numpy,ekalosak/numpy,simongibbons/numpy,hainm/numpy,stuarteberg/numpy,GaZ3ll3/numpy,dato-code/numpy,sonnyhu/numpy,ssanderson/numpy,behzadnouri/numpy,endolith/numpy,mwiebe/numpy,WillieMaddox/numpy,pizzathief/numpy,tacaswell/numpy,SiccarPoint/numpy,dch312/numpy,Srisai85/numpy,brandon-rhodes/numpy,numpy/numpy,Dapid/numpy,jankoslavic/numpy,mindw/numpy,dimasad/numpy,jakirkham/numpy,mortada/numpy,WarrenWeckesser/numpy,pbrod/numpy,mindw/numpy,matthew-brett/numpy,seberg/numpy,ddasilva/numpy,solarjoe/numpy,pdebuyl/numpy,Anwesh43/numpy,ESSS/numpy,sonnyhu/numpy,joferkington/numpy,grlee77/numpy,rherault-insa/numpy,rmcgibbo/numpy,dwillmer/numpy,andsor/numpy,ChanderG/numpy,pizzathief/numpy,grlee77/numpy,tynn/numpy,ddasilva/numpy,BMJHayward/numpy,moreati/numpy,rmcgibbo/numpy,njase/numpy,immerrr/numpy,mattip/numpy,NextThought/pypy-numpy,gmcastil/numpy,Linkid/numpy,moreati/numpy,pizzathief/numpy,musically-ut/numpy,GrimDerp/numpy,cowlicks/numpy,pelson/numpy,jschueller/numpy,mathdd/numpy,dwf/numpy,Linkid/numpy,dimasad/numpy,Srisai85/numpy,has2k1/numpy,nbeaver/numpy,matthew-brett/numpy,sigma-random/numpy,kiwifb/numpy,trankmichael/numpy,jorisvandenbossche/numpy,shoyer/numpy,kirillzhuravlev/numpy,ahaldane/numpy,mhvk/numpy,brandon-rhodes/numpy,rajathkumarmp/numpy,jschu
eller/numpy,has2k1/numpy,felipebetancur/numpy,cowlicks/numpy,pdebuyl/numpy,mhvk/numpy,rudimeier/numpy,solarjoe/numpy,sigma-random/numpy,jorisvandenbossche/numpy,GaZ3ll3/numpy,ESSS/numpy,shoyer/numpy,felipebetancur/numpy,ewmoore/numpy,has2k1/numpy,pelson/numpy,empeeu/numpy,shoyer/numpy,moreati/numpy,hainm/numpy,SiccarPoint/numpy,rajathkumarmp/numpy,dimasad/numpy,shoyer/numpy,pbrod/numpy,sigma-random/numpy,chiffa/numpy,ajdawson/numpy,MaPePeR/numpy,tdsmith/numpy,empeeu/numpy,skymanaditya1/numpy,nguyentu1602/numpy,GrimDerp/numpy,rgommers/numpy,trankmichael/numpy,jakirkham/numpy,Eric89GXL/numpy,jonathanunderwood/numpy,ChristopherHogan/numpy,GaZ3ll3/numpy,argriffing/numpy,tdsmith/numpy,MichaelAquilina/numpy,kiwifb/numpy,jschueller/numpy,MichaelAquilina/numpy,seberg/numpy,githubmlai/numpy,matthew-brett/numpy,bertrand-l/numpy,sinhrks/numpy,b-carter/numpy,tdsmith/numpy,ewmoore/numpy,mhvk/numpy,bmorris3/numpy,numpy/numpy,ViralLeadership/numpy,GrimDerp/numpy,ekalosak/numpy,njase/numpy,tynn/numpy,MSeifert04/numpy,mattip/numpy,pbrod/numpy,yiakwy/numpy,naritta/numpy,ekalosak/numpy,jonathanunderwood/numpy,githubmlai/numpy,WarrenWeckesser/numpy,njase/numpy,jakirkham/numpy,embray/numpy,ChanderG/numpy,astrofrog/numpy,kirillzhuravlev/numpy,endolith/numpy,mingwpy/numpy,BabeNovelty/numpy,abalkin/numpy,CMartelLML/numpy,astrofrog/numpy,pdebuyl/numpy,mingwpy/numpy,ewmoore/numpy,astrofrog/numpy,ajdawson/numpy,mhvk/numpy,WillieMaddox/numpy,rudimeier/numpy,NextThought/pypy-numpy,dwillmer/numpy,ChristopherHogan/numpy,charris/numpy,solarjoe/numpy,ahaldane/numpy,mattip/numpy,sigma-random/numpy,matthew-brett/numpy,embray/numpy | import sys
sys.path.insert(0,'..')
import os
d = os.path.basename(os.path.dirname(os.path.abspath(__file__)))
if d == 'scipy_distutils':
+ import scipy_distutils
+ del sys.path[0]
execfile('setup_scipy_distutils.py')
else:
os.system('cd .. && ln -s %s scipy_distutils' % (d))
+ import scipy_distutils
+ del sys.path[0]
execfile('setup_scipy_distutils.py')
os.system('cd .. && rm -f scipy_distutils')
| Clean up sys.path after scipy_distutils has been imported. | ## Code Before:
import sys
sys.path.insert(0,'..')
import os
d = os.path.basename(os.path.dirname(os.path.abspath(__file__)))
if d == 'scipy_distutils':
execfile('setup_scipy_distutils.py')
else:
os.system('cd .. && ln -s %s scipy_distutils' % (d))
execfile('setup_scipy_distutils.py')
os.system('cd .. && rm -f scipy_distutils')
## Instruction:
Clean up sys.path after scipy_distutils has been imported.
## Code After:
import sys
sys.path.insert(0,'..')
import os
d = os.path.basename(os.path.dirname(os.path.abspath(__file__)))
if d == 'scipy_distutils':
import scipy_distutils
del sys.path[0]
execfile('setup_scipy_distutils.py')
else:
os.system('cd .. && ln -s %s scipy_distutils' % (d))
import scipy_distutils
del sys.path[0]
execfile('setup_scipy_distutils.py')
os.system('cd .. && rm -f scipy_distutils')
| import sys
sys.path.insert(0,'..')
import os
d = os.path.basename(os.path.dirname(os.path.abspath(__file__)))
if d == 'scipy_distutils':
+ import scipy_distutils
+ del sys.path[0]
execfile('setup_scipy_distutils.py')
else:
os.system('cd .. && ln -s %s scipy_distutils' % (d))
+ import scipy_distutils
+ del sys.path[0]
execfile('setup_scipy_distutils.py')
os.system('cd .. && rm -f scipy_distutils') |
af184be16c4bd0ea45f7e8c6dc59388c4d8893c5 | main.py | main.py | import tweepy
import app_config
# Twitter API configuration
consumer_key = app_config.twitter["consumer_key"]
consumer_secret = app_config.twitter["consumer_secret"]
access_token = app_config.twitter["access_token"]
access_token_secret = app_config.twitter["access_token_secret"]
# Start
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
followers = api.followers(count=200)
for follower in followers:
print follower.screen_name | import tweepy
import json
import unicodedata
import sqlite3
import app_config
import definitions
API_launch()
followers_list(followers_name[1])
create_db()
create_table()
tweet_info(followers_name[1],tweets_number=100)
| Update Main.py to work with definitions.py | Update Main.py to work with definitions.py
functions are loaded from definitions.py and the database can be created without modifying functions | Python | mit | franckbrignoli/twitter-bot-detection | import tweepy
+ import json
+ import unicodedata
+ import sqlite3
+
import app_config
+ import definitions
+ API_launch()
- # Twitter API configuration
- consumer_key = app_config.twitter["consumer_key"]
- consumer_secret = app_config.twitter["consumer_secret"]
+ followers_list(followers_name[1])
- access_token = app_config.twitter["access_token"]
- access_token_secret = app_config.twitter["access_token_secret"]
+ create_db()
- # Start
- auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
- auth.set_access_token(access_token, access_token_secret)
- api = tweepy.API(auth)
+ create_table()
+ tweet_info(followers_name[1],tweets_number=100)
+
- followers = api.followers(count=200)
- for follower in followers:
- print follower.screen_name | Update Main.py to work with definitions.py | ## Code Before:
import tweepy
import app_config
# Twitter API configuration
consumer_key = app_config.twitter["consumer_key"]
consumer_secret = app_config.twitter["consumer_secret"]
access_token = app_config.twitter["access_token"]
access_token_secret = app_config.twitter["access_token_secret"]
# Start
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)
followers = api.followers(count=200)
for follower in followers:
print follower.screen_name
## Instruction:
Update Main.py to work with definitions.py
## Code After:
import tweepy
import json
import unicodedata
import sqlite3
import app_config
import definitions
API_launch()
followers_list(followers_name[1])
create_db()
create_table()
tweet_info(followers_name[1],tweets_number=100)
| import tweepy
+ import json
+ import unicodedata
+ import sqlite3
+
import app_config
+ import definitions
+ API_launch()
- # Twitter API configuration
- consumer_key = app_config.twitter["consumer_key"]
- consumer_secret = app_config.twitter["consumer_secret"]
+ followers_list(followers_name[1])
- access_token = app_config.twitter["access_token"]
- access_token_secret = app_config.twitter["access_token_secret"]
+ create_db()
- # Start
- auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
- auth.set_access_token(access_token, access_token_secret)
- api = tweepy.API(auth)
+ create_table()
+ tweet_info(followers_name[1],tweets_number=100)
- followers = api.followers(count=200)
- for follower in followers:
- print follower.screen_name |
88de184c1d9daa79e47873b0bd8912ea67b32ec1 | app/__init__.py | app/__init__.py | from flask import Flask
import base64
import json
from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
feature_flags=feature_flags
)
if application.config['VCAP_SERVICES']:
cf_services = json.loads(application.config['VCAP_SERVICES'])
application.config['ELASTICSEARCH_HOST'] = cf_services['elasticsearch'][0]['credentials']['uris']
with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
es_certfile.write(base64.b64decode(cf_services['elasticsearch'][0]['credentials']['ca_certificate_base64']))
elasticsearch_client.init_app(
application,
verify_certs=True,
ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
return application
| from flask import Flask
import base64
import json
from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
feature_flags=feature_flags
)
if application.config['VCAP_SERVICES']:
cf_services = json.loads(application.config['VCAP_SERVICES'])
application.config['ELASTICSEARCH_HOST'] = \
cf_services['elasticsearch-compose'][0]['credentials']['uris']
with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
es_certfile.write(
base64.b64decode(cf_services['elasticsearch-compose'][0]['credentials']['ca_certificate_base64'])
)
elasticsearch_client.init_app(
application,
verify_certs=True,
ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
return application
| Change the VCAP_SERVICE key for elasticsearch | Change the VCAP_SERVICE key for elasticsearch
GOV.UK PaaS have recently changed the name of their elasticsearch service in preparation for migration.
This quick fix will work until elasticsearch-compose is withdrawn; a future solution should use a more robust way of determining the elasticsearch URI.
| Python | mit | alphagov/digitalmarketplace-search-api,alphagov/digitalmarketplace-search-api | from flask import Flask
import base64
import json
from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
feature_flags=feature_flags
)
if application.config['VCAP_SERVICES']:
cf_services = json.loads(application.config['VCAP_SERVICES'])
- application.config['ELASTICSEARCH_HOST'] = cf_services['elasticsearch'][0]['credentials']['uris']
+ application.config['ELASTICSEARCH_HOST'] = \
+ cf_services['elasticsearch-compose'][0]['credentials']['uris']
with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
+ es_certfile.write(
- es_certfile.write(base64.b64decode(cf_services['elasticsearch'][0]['credentials']['ca_certificate_base64']))
+ base64.b64decode(cf_services['elasticsearch-compose'][0]['credentials']['ca_certificate_base64'])
+ )
elasticsearch_client.init_app(
application,
verify_certs=True,
ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
return application
| Change the VCAP_SERVICE key for elasticsearch | ## Code Before:
from flask import Flask
import base64
import json
from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
feature_flags=feature_flags
)
if application.config['VCAP_SERVICES']:
cf_services = json.loads(application.config['VCAP_SERVICES'])
application.config['ELASTICSEARCH_HOST'] = cf_services['elasticsearch'][0]['credentials']['uris']
with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
es_certfile.write(base64.b64decode(cf_services['elasticsearch'][0]['credentials']['ca_certificate_base64']))
elasticsearch_client.init_app(
application,
verify_certs=True,
ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
return application
## Instruction:
Change the VCAP_SERVICE key for elasticsearch
## Code After:
from flask import Flask
import base64
import json
from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
feature_flags=feature_flags
)
if application.config['VCAP_SERVICES']:
cf_services = json.loads(application.config['VCAP_SERVICES'])
application.config['ELASTICSEARCH_HOST'] = \
cf_services['elasticsearch-compose'][0]['credentials']['uris']
with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
es_certfile.write(
base64.b64decode(cf_services['elasticsearch-compose'][0]['credentials']['ca_certificate_base64'])
)
elasticsearch_client.init_app(
application,
verify_certs=True,
ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
return application
| from flask import Flask
import base64
import json
from config import config as configs
from flask.ext.elasticsearch import FlaskElasticsearch
from dmutils import init_app, flask_featureflags
feature_flags = flask_featureflags.FeatureFlag()
elasticsearch_client = FlaskElasticsearch()
def create_app(config_name):
application = Flask(__name__)
init_app(
application,
configs[config_name],
feature_flags=feature_flags
)
if application.config['VCAP_SERVICES']:
cf_services = json.loads(application.config['VCAP_SERVICES'])
- application.config['ELASTICSEARCH_HOST'] = cf_services['elasticsearch'][0]['credentials']['uris']
+ application.config['ELASTICSEARCH_HOST'] = \
+ cf_services['elasticsearch-compose'][0]['credentials']['uris']
with open(application.config['DM_ELASTICSEARCH_CERT_PATH'], 'wb') as es_certfile:
+ es_certfile.write(
- es_certfile.write(base64.b64decode(cf_services['elasticsearch'][0]['credentials']['ca_certificate_base64']))
? ^^^^^^^^^^^^^^^^^^ -
+ base64.b64decode(cf_services['elasticsearch-compose'][0]['credentials']['ca_certificate_base64'])
? ^^^^ ++++++++
+ )
elasticsearch_client.init_app(
application,
verify_certs=True,
ca_certs=application.config['DM_ELASTICSEARCH_CERT_PATH']
)
from .main import main as main_blueprint
from .status import status as status_blueprint
application.register_blueprint(status_blueprint)
application.register_blueprint(main_blueprint)
return application |
98ca748996fe462cedf284ad91a74bdd30eb81f3 | mopidy/__init__.py | mopidy/__init__.py | from __future__ import absolute_import, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
| from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
| Use print function instead of print statement | py3: Use print function instead of print statement
| Python | apache-2.0 | jcass77/mopidy,ZenithDK/mopidy,SuperStarPL/mopidy,vrs01/mopidy,jcass77/mopidy,diandiankan/mopidy,dbrgn/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,rawdlite/mopidy,jcass77/mopidy,jmarsik/mopidy,mopidy/mopidy,bencevans/mopidy,mopidy/mopidy,diandiankan/mopidy,jmarsik/mopidy,vrs01/mopidy,mokieyue/mopidy,kingosticks/mopidy,SuperStarPL/mopidy,ali/mopidy,ali/mopidy,tkem/mopidy,hkariti/mopidy,glogiotatidis/mopidy,quartz55/mopidy,kingosticks/mopidy,rawdlite/mopidy,bencevans/mopidy,quartz55/mopidy,bencevans/mopidy,swak/mopidy,rawdlite/mopidy,dbrgn/mopidy,bacontext/mopidy,jodal/mopidy,ZenithDK/mopidy,diandiankan/mopidy,priestd09/mopidy,hkariti/mopidy,kingosticks/mopidy,adamcik/mopidy,jodal/mopidy,pacificIT/mopidy,quartz55/mopidy,mopidy/mopidy,swak/mopidy,priestd09/mopidy,ali/mopidy,pacificIT/mopidy,adamcik/mopidy,glogiotatidis/mopidy,mokieyue/mopidy,bacontext/mopidy,pacificIT/mopidy,pacificIT/mopidy,bacontext/mopidy,tkem/mopidy,hkariti/mopidy,swak/mopidy,mokieyue/mopidy,ZenithDK/mopidy,bacontext/mopidy,rawdlite/mopidy,ZenithDK/mopidy,glogiotatidis/mopidy,dbrgn/mopidy,jmarsik/mopidy,swak/mopidy,diandiankan/mopidy,priestd09/mopidy,SuperStarPL/mopidy,vrs01/mopidy,quartz55/mopidy,adamcik/mopidy,glogiotatidis/mopidy,jodal/mopidy,tkem/mopidy,jmarsik/mopidy,dbrgn/mopidy,hkariti/mopidy,vrs01/mopidy,bencevans/mopidy,tkem/mopidy,ali/mopidy | - from __future__ import absolute_import, unicode_literals
+ from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
| Use print function instead of print statement | ## Code Before:
from __future__ import absolute_import, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
## Instruction:
Use print function instead of print statement
## Code After:
from __future__ import absolute_import, print_function, unicode_literals
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4'
| - from __future__ import absolute_import, unicode_literals
+ from __future__ import absolute_import, print_function, unicode_literals
? ++++++++++++++++
import platform
import sys
import textwrap
import warnings
if not (2, 7) <= sys.version_info < (3,):
sys.exit(
'ERROR: Mopidy requires Python 2.7, but found %s.' %
platform.python_version())
try:
import gobject # noqa
except ImportError:
print(textwrap.dedent("""
ERROR: The gobject Python package was not found.
Mopidy requires GStreamer (and GObject) to work. These are C libraries
with a number of dependencies themselves, and cannot be installed with
the regular Python tools like pip.
Please see http://docs.mopidy.com/en/latest/installation/ for
instructions on how to install the required dependencies.
"""))
raise
warnings.filterwarnings('ignore', 'could not open display')
__version__ = '0.19.4' |
1b7509d8bd624bbf33352f622d8c03be6f3e35f2 | src/sentry/api/serializers/models/organization_member.py | src/sentry/api/serializers/models/organization_member.py | from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.email or obj.user.email,
'access': obj.get_type_display(),
'pending': obj.is_pending,
'dateCreated': obj.date_added,
}
return d
| from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
from sentry.utils.avatar import get_gravatar_url
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.email or obj.user.email,
'access': obj.get_type_display(),
'pending': obj.is_pending,
'dateCreated': obj.date_added,
'avatarUrl': get_gravatar_url(obj.email, size=32),
}
return d
| Add avatarUrl to team member serializers | Add avatarUrl to team member serializers
Conflicts:
src/sentry/api/serializers/models/organization_member.py
src/sentry/api/serializers/models/release.py
cherry-pick 8ee1bee748ae7f51987ea8ec5ee10795b656cfd9
| Python | bsd-3-clause | jean/sentry,gencer/sentry,looker/sentry,ngonzalvez/sentry,gg7/sentry,mvaled/sentry,nicholasserra/sentry,wong2/sentry,beeftornado/sentry,JamesMura/sentry,alexm92/sentry,JamesMura/sentry,korealerts1/sentry,wujuguang/sentry,BayanGroup/sentry,imankulov/sentry,fotinakis/sentry,JTCunning/sentry,kevinlondon/sentry,jean/sentry,gencer/sentry,hongliang5623/sentry,TedaLIEz/sentry,looker/sentry,pauloschilling/sentry,llonchj/sentry,llonchj/sentry,hongliang5623/sentry,Natim/sentry,wong2/sentry,BuildingLink/sentry,fuziontech/sentry,daevaorn/sentry,jokey2k/sentry,argonemyth/sentry,zenefits/sentry,nicholasserra/sentry,daevaorn/sentry,mvaled/sentry,nicholasserra/sentry,ifduyue/sentry,ngonzalvez/sentry,vperron/sentry,ifduyue/sentry,JTCunning/sentry,gencer/sentry,beeftornado/sentry,gg7/sentry,ewdurbin/sentry,mvaled/sentry,fotinakis/sentry,JTCunning/sentry,BayanGroup/sentry,vperron/sentry,drcapulet/sentry,felixbuenemann/sentry,zenefits/sentry,camilonova/sentry,korealerts1/sentry,JackDanger/sentry,kevinastone/sentry,fuziontech/sentry,kevinlondon/sentry,BuildingLink/sentry,alexm92/sentry,kevinlondon/sentry,korealerts1/sentry,drcapulet/sentry,wong2/sentry,pauloschilling/sentry,JamesMura/sentry,kevinastone/sentry,JackDanger/sentry,looker/sentry,daevaorn/sentry,Kryz/sentry,jean/sentry,felixbuenemann/sentry,looker/sentry,Natim/sentry,1tush/sentry,TedaLIEz/sentry,beeftornado/sentry,JamesMura/sentry,ifduyue/sentry,JamesMura/sentry,felixbuenemann/sentry,mvaled/sentry,boneyao/sentry,zenefits/sentry,looker/sentry,Kryz/sentry,zenefits/sentry,argonemyth/sentry,jokey2k/sentry,camilonova/sentry,Natim/sentry,daevaorn/sentry,jean/sentry,mvaled/sentry,wujuguang/sentry,BuildingLink/sentry,wujuguang/sentry,imankulov/sentry,ewdurbin/sentry,fotinakis/sentry,argonemyth/sentry,hongliang5623/sentry,ifduyue/sentry,llonchj/sentry,drcapulet/sentry,1tush/sentry,ewdurbin/sentry,songyi199111/sentry,gg7/sentry,fotinakis/sentry,songyi199111/sentry,Kryz/sentry,boneyao/sentry,jean/sentry,1tush/s
entry,jokey2k/sentry,vperron/sentry,fuziontech/sentry,zenefits/sentry,TedaLIEz/sentry,gencer/sentry,kevinastone/sentry,songyi199111/sentry,boneyao/sentry,JackDanger/sentry,mitsuhiko/sentry,BuildingLink/sentry,alexm92/sentry,ngonzalvez/sentry,mvaled/sentry,pauloschilling/sentry,mitsuhiko/sentry,BayanGroup/sentry,imankulov/sentry,ifduyue/sentry,camilonova/sentry,BuildingLink/sentry,gencer/sentry | from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
+ from sentry.utils.avatar import get_gravatar_url
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.email or obj.user.email,
'access': obj.get_type_display(),
'pending': obj.is_pending,
'dateCreated': obj.date_added,
+ 'avatarUrl': get_gravatar_url(obj.email, size=32),
}
return d
| Add avatarUrl to team member serializers | ## Code Before:
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.email or obj.user.email,
'access': obj.get_type_display(),
'pending': obj.is_pending,
'dateCreated': obj.date_added,
}
return d
## Instruction:
Add avatarUrl to team member serializers
## Code After:
from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
from sentry.utils.avatar import get_gravatar_url
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.email or obj.user.email,
'access': obj.get_type_display(),
'pending': obj.is_pending,
'dateCreated': obj.date_added,
'avatarUrl': get_gravatar_url(obj.email, size=32),
}
return d
| from __future__ import absolute_import
from sentry.api.serializers import Serializer, register
from sentry.models import OrganizationMember
+ from sentry.utils.avatar import get_gravatar_url
@register(OrganizationMember)
class OrganizationMemberSerializer(Serializer):
def serialize(self, obj, attrs, user):
d = {
'id': str(obj.id),
'email': obj.email or obj.user.email,
'access': obj.get_type_display(),
'pending': obj.is_pending,
'dateCreated': obj.date_added,
+ 'avatarUrl': get_gravatar_url(obj.email, size=32),
}
return d |
501ede985c034f4883ac93a38f8486af6fddf766 | src/nodeconductor_saltstack/saltstack/perms.py | src/nodeconductor_saltstack/saltstack/perms.py | from nodeconductor.core.permissions import FilteredCollaboratorsPermissionLogic
from nodeconductor.structure.models import CustomerRole, ProjectGroupRole, ProjectRole
PERMISSION_LOGICS = (
('saltstack.SaltStackService', FilteredCollaboratorsPermissionLogic(
collaborators_query='customer__roles__permission_group__user',
collaborators_filter={
'customer__roles__role_type': CustomerRole.OWNER,
},
any_permission=True,
)),
('saltstack.SaltStackServiceProjectLink', FilteredCollaboratorsPermissionLogic(
collaborators_query=[
'service__customer__roles__permission_group__user',
'project__project_groups__roles__permission_group__user',
],
collaborators_filter=[
{'service__customer__roles__role_type': CustomerRole.OWNER},
{'project__project_groups__roles__role_type': ProjectGroupRole.MANAGER},
],
any_permission=True,
)),
)
property_permission_logic = FilteredCollaboratorsPermissionLogic(
collaborators_query=[
'tenant__service_project_link__project__roles__permission_group__user',
'tenant__service_project_link__project__customer__roles__permission_group__user',
],
collaborators_filter=[
{'tenant__service_project_link__project__roles__role_type': ProjectRole.ADMINISTRATOR},
{'tenant__service_project_link__project__customer__roles__role_type': CustomerRole.OWNER},
],
any_permission=True,
)
| from nodeconductor.structure import perms as structure_perms
PERMISSION_LOGICS = (
('saltstack.SaltStackService', structure_perms.service_permission_logic),
('saltstack.SaltStackServiceProjectLink', structure_perms.service_project_link_permission_logic),
)
property_permission_logic = structure_perms.property_permission_logic('tenant')
| Make permissions declaration DRY (NC-1282) | Make permissions declaration DRY (NC-1282)
| Python | mit | opennode/nodeconductor-saltstack | + from nodeconductor.structure import perms as structure_perms
- from nodeconductor.core.permissions import FilteredCollaboratorsPermissionLogic
- from nodeconductor.structure.models import CustomerRole, ProjectGroupRole, ProjectRole
PERMISSION_LOGICS = (
+ ('saltstack.SaltStackService', structure_perms.service_permission_logic),
+ ('saltstack.SaltStackServiceProjectLink', structure_perms.service_project_link_permission_logic),
- ('saltstack.SaltStackService', FilteredCollaboratorsPermissionLogic(
- collaborators_query='customer__roles__permission_group__user',
- collaborators_filter={
- 'customer__roles__role_type': CustomerRole.OWNER,
- },
- any_permission=True,
- )),
- ('saltstack.SaltStackServiceProjectLink', FilteredCollaboratorsPermissionLogic(
- collaborators_query=[
- 'service__customer__roles__permission_group__user',
- 'project__project_groups__roles__permission_group__user',
- ],
- collaborators_filter=[
- {'service__customer__roles__role_type': CustomerRole.OWNER},
- {'project__project_groups__roles__role_type': ProjectGroupRole.MANAGER},
- ],
- any_permission=True,
- )),
)
+ property_permission_logic = structure_perms.property_permission_logic('tenant')
- property_permission_logic = FilteredCollaboratorsPermissionLogic(
- collaborators_query=[
- 'tenant__service_project_link__project__roles__permission_group__user',
- 'tenant__service_project_link__project__customer__roles__permission_group__user',
- ],
- collaborators_filter=[
- {'tenant__service_project_link__project__roles__role_type': ProjectRole.ADMINISTRATOR},
- {'tenant__service_project_link__project__customer__roles__role_type': CustomerRole.OWNER},
- ],
- any_permission=True,
- )
| Make permissions declaration DRY (NC-1282) | ## Code Before:
from nodeconductor.core.permissions import FilteredCollaboratorsPermissionLogic
from nodeconductor.structure.models import CustomerRole, ProjectGroupRole, ProjectRole
PERMISSION_LOGICS = (
('saltstack.SaltStackService', FilteredCollaboratorsPermissionLogic(
collaborators_query='customer__roles__permission_group__user',
collaborators_filter={
'customer__roles__role_type': CustomerRole.OWNER,
},
any_permission=True,
)),
('saltstack.SaltStackServiceProjectLink', FilteredCollaboratorsPermissionLogic(
collaborators_query=[
'service__customer__roles__permission_group__user',
'project__project_groups__roles__permission_group__user',
],
collaborators_filter=[
{'service__customer__roles__role_type': CustomerRole.OWNER},
{'project__project_groups__roles__role_type': ProjectGroupRole.MANAGER},
],
any_permission=True,
)),
)
property_permission_logic = FilteredCollaboratorsPermissionLogic(
collaborators_query=[
'tenant__service_project_link__project__roles__permission_group__user',
'tenant__service_project_link__project__customer__roles__permission_group__user',
],
collaborators_filter=[
{'tenant__service_project_link__project__roles__role_type': ProjectRole.ADMINISTRATOR},
{'tenant__service_project_link__project__customer__roles__role_type': CustomerRole.OWNER},
],
any_permission=True,
)
## Instruction:
Make permissions declaration DRY (NC-1282)
## Code After:
from nodeconductor.structure import perms as structure_perms
PERMISSION_LOGICS = (
('saltstack.SaltStackService', structure_perms.service_permission_logic),
('saltstack.SaltStackServiceProjectLink', structure_perms.service_project_link_permission_logic),
)
property_permission_logic = structure_perms.property_permission_logic('tenant')
| + from nodeconductor.structure import perms as structure_perms
- from nodeconductor.core.permissions import FilteredCollaboratorsPermissionLogic
- from nodeconductor.structure.models import CustomerRole, ProjectGroupRole, ProjectRole
PERMISSION_LOGICS = (
+ ('saltstack.SaltStackService', structure_perms.service_permission_logic),
+ ('saltstack.SaltStackServiceProjectLink', structure_perms.service_project_link_permission_logic),
- ('saltstack.SaltStackService', FilteredCollaboratorsPermissionLogic(
- collaborators_query='customer__roles__permission_group__user',
- collaborators_filter={
- 'customer__roles__role_type': CustomerRole.OWNER,
- },
- any_permission=True,
- )),
- ('saltstack.SaltStackServiceProjectLink', FilteredCollaboratorsPermissionLogic(
- collaborators_query=[
- 'service__customer__roles__permission_group__user',
- 'project__project_groups__roles__permission_group__user',
- ],
- collaborators_filter=[
- {'service__customer__roles__role_type': CustomerRole.OWNER},
- {'project__project_groups__roles__role_type': ProjectGroupRole.MANAGER},
- ],
- any_permission=True,
- )),
)
+ property_permission_logic = structure_perms.property_permission_logic('tenant')
- property_permission_logic = FilteredCollaboratorsPermissionLogic(
- collaborators_query=[
- 'tenant__service_project_link__project__roles__permission_group__user',
- 'tenant__service_project_link__project__customer__roles__permission_group__user',
- ],
- collaborators_filter=[
- {'tenant__service_project_link__project__roles__role_type': ProjectRole.ADMINISTRATOR},
- {'tenant__service_project_link__project__customer__roles__role_type': CustomerRole.OWNER},
- ],
- any_permission=True,
- ) |
7fc4a8d2a12100bae9b2ddb5c0b08fbfd94091f2 | dataproperty/_container.py | dataproperty/_container.py |
'''
@author: Tsuyoshi Hombashi
'''
class MinMaxContainer(object):
@property
def min_value(self):
return self.__min_value
@property
def max_value(self):
return self.__max_value
def __init__(self, value_list=[]):
self.__min_value = None
self.__max_value = None
for value in value_list:
self.update(value)
def diff(self):
try:
return self.max_value - self.min_value
except TypeError:
return float("nan")
def mean(self):
try:
return (self.max_value + self.min_value) * 0.5
except TypeError:
return float("nan")
def update(self, value):
if value is None:
return
if self.__min_value is None:
self.__min_value = value
else:
self.__min_value = min(self.__min_value, value)
if self.__max_value is None:
self.__max_value = value
else:
self.__max_value = max(self.__max_value, value)
|
'''
@author: Tsuyoshi Hombashi
'''
class MinMaxContainer(object):
@property
def min_value(self):
return self.__min_value
@property
def max_value(self):
return self.__max_value
def __init__(self, value_list=[]):
self.__min_value = None
self.__max_value = None
for value in value_list:
self.update(value)
def __eq__(self, other):
return all([
self.min_value == other.min_value,
self.max_value == other.max_value,
])
def __ne__(self, other):
return any([
self.min_value != other.min_value,
self.max_value != other.max_value,
])
def __contains__(self, x):
return self.min_value <= x <= self.max_value
def diff(self):
try:
return self.max_value - self.min_value
except TypeError:
return float("nan")
def mean(self):
try:
return (self.max_value + self.min_value) * 0.5
except TypeError:
return float("nan")
def update(self, value):
if value is None:
return
if self.__min_value is None:
self.__min_value = value
else:
self.__min_value = min(self.__min_value, value)
if self.__max_value is None:
self.__max_value = value
else:
self.__max_value = max(self.__max_value, value)
| Add __eq__, __ne__, __contains__ methods | Add __eq__, __ne__, __contains__ methods
| Python | mit | thombashi/DataProperty |
'''
@author: Tsuyoshi Hombashi
'''
class MinMaxContainer(object):
@property
def min_value(self):
return self.__min_value
@property
def max_value(self):
return self.__max_value
def __init__(self, value_list=[]):
self.__min_value = None
self.__max_value = None
for value in value_list:
self.update(value)
+
+ def __eq__(self, other):
+ return all([
+ self.min_value == other.min_value,
+ self.max_value == other.max_value,
+ ])
+
+ def __ne__(self, other):
+ return any([
+ self.min_value != other.min_value,
+ self.max_value != other.max_value,
+ ])
+
+ def __contains__(self, x):
+ return self.min_value <= x <= self.max_value
def diff(self):
try:
return self.max_value - self.min_value
except TypeError:
return float("nan")
def mean(self):
try:
return (self.max_value + self.min_value) * 0.5
except TypeError:
return float("nan")
def update(self, value):
if value is None:
return
if self.__min_value is None:
self.__min_value = value
else:
self.__min_value = min(self.__min_value, value)
if self.__max_value is None:
self.__max_value = value
else:
self.__max_value = max(self.__max_value, value)
| Add __eq__, __ne__, __contains__ methods | ## Code Before:
'''
@author: Tsuyoshi Hombashi
'''
class MinMaxContainer(object):
@property
def min_value(self):
return self.__min_value
@property
def max_value(self):
return self.__max_value
def __init__(self, value_list=[]):
self.__min_value = None
self.__max_value = None
for value in value_list:
self.update(value)
def diff(self):
try:
return self.max_value - self.min_value
except TypeError:
return float("nan")
def mean(self):
try:
return (self.max_value + self.min_value) * 0.5
except TypeError:
return float("nan")
def update(self, value):
if value is None:
return
if self.__min_value is None:
self.__min_value = value
else:
self.__min_value = min(self.__min_value, value)
if self.__max_value is None:
self.__max_value = value
else:
self.__max_value = max(self.__max_value, value)
## Instruction:
Add __eq__, __ne__, __contains__ methods
## Code After:
'''
@author: Tsuyoshi Hombashi
'''
class MinMaxContainer(object):
@property
def min_value(self):
return self.__min_value
@property
def max_value(self):
return self.__max_value
def __init__(self, value_list=[]):
self.__min_value = None
self.__max_value = None
for value in value_list:
self.update(value)
def __eq__(self, other):
return all([
self.min_value == other.min_value,
self.max_value == other.max_value,
])
def __ne__(self, other):
return any([
self.min_value != other.min_value,
self.max_value != other.max_value,
])
def __contains__(self, x):
return self.min_value <= x <= self.max_value
def diff(self):
try:
return self.max_value - self.min_value
except TypeError:
return float("nan")
def mean(self):
try:
return (self.max_value + self.min_value) * 0.5
except TypeError:
return float("nan")
def update(self, value):
if value is None:
return
if self.__min_value is None:
self.__min_value = value
else:
self.__min_value = min(self.__min_value, value)
if self.__max_value is None:
self.__max_value = value
else:
self.__max_value = max(self.__max_value, value)
|
'''
@author: Tsuyoshi Hombashi
'''
class MinMaxContainer(object):
@property
def min_value(self):
return self.__min_value
@property
def max_value(self):
return self.__max_value
def __init__(self, value_list=[]):
self.__min_value = None
self.__max_value = None
for value in value_list:
self.update(value)
+
+ def __eq__(self, other):
+ return all([
+ self.min_value == other.min_value,
+ self.max_value == other.max_value,
+ ])
+
+ def __ne__(self, other):
+ return any([
+ self.min_value != other.min_value,
+ self.max_value != other.max_value,
+ ])
+
+ def __contains__(self, x):
+ return self.min_value <= x <= self.max_value
def diff(self):
try:
return self.max_value - self.min_value
except TypeError:
return float("nan")
def mean(self):
try:
return (self.max_value + self.min_value) * 0.5
except TypeError:
return float("nan")
def update(self, value):
if value is None:
return
if self.__min_value is None:
self.__min_value = value
else:
self.__min_value = min(self.__min_value, value)
if self.__max_value is None:
self.__max_value = value
else:
self.__max_value = max(self.__max_value, value) |
190463fb4538654a62b440fc92041383f8b15957 | helusers/migrations/0001_add_ad_groups.py | helusers/migrations/0001_add_ad_groups.py | from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ADGroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=200)),
('display_name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='ADGroupMapping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ad_group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='groups', to='helusers.ADGroup')),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ad_groups', to='auth.Group')),
],
options={
'verbose_name': 'AD Group Mapping',
},
),
migrations.AlterUniqueTogether(
name='adgroupmapping',
unique_together=set([('group', 'ad_group')]),
),
]
| from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ADGroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=200)),
('display_name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='ADGroupMapping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ad_group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='groups', to='helusers.ADGroup')),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ad_groups', to='auth.Group')),
],
options={
'verbose_name': 'AD group mapping', 'verbose_name_plural': 'AD group mappings'
},
),
migrations.AlterUniqueTogether(
name='adgroupmapping',
unique_together=set([('group', 'ad_group')]),
),
]
| Fix migration for model verbose name changes | Fix migration for model verbose name changes
| Python | bsd-2-clause | City-of-Helsinki/django-helusers,City-of-Helsinki/django-helusers | from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ADGroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=200)),
('display_name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='ADGroupMapping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ad_group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='groups', to='helusers.ADGroup')),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ad_groups', to='auth.Group')),
],
options={
- 'verbose_name': 'AD Group Mapping',
+ 'verbose_name': 'AD group mapping', 'verbose_name_plural': 'AD group mappings'
},
),
migrations.AlterUniqueTogether(
name='adgroupmapping',
unique_together=set([('group', 'ad_group')]),
),
]
| Fix migration for model verbose name changes | ## Code Before:
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ADGroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=200)),
('display_name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='ADGroupMapping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ad_group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='groups', to='helusers.ADGroup')),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ad_groups', to='auth.Group')),
],
options={
'verbose_name': 'AD Group Mapping',
},
),
migrations.AlterUniqueTogether(
name='adgroupmapping',
unique_together=set([('group', 'ad_group')]),
),
]
## Instruction:
Fix migration for model verbose name changes
## Code After:
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ADGroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=200)),
('display_name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='ADGroupMapping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ad_group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='groups', to='helusers.ADGroup')),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ad_groups', to='auth.Group')),
],
options={
'verbose_name': 'AD group mapping', 'verbose_name_plural': 'AD group mappings'
},
),
migrations.AlterUniqueTogether(
name='adgroupmapping',
unique_together=set([('group', 'ad_group')]),
),
]
| from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('auth', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ADGroup',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(db_index=True, max_length=200)),
('display_name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='ADGroupMapping',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('ad_group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='groups', to='helusers.ADGroup')),
('group', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='ad_groups', to='auth.Group')),
],
options={
- 'verbose_name': 'AD Group Mapping',
+ 'verbose_name': 'AD group mapping', 'verbose_name_plural': 'AD group mappings'
},
),
migrations.AlterUniqueTogether(
name='adgroupmapping',
unique_together=set([('group', 'ad_group')]),
),
] |
9b83a9dbfe1cc3dc4e8da3df71b6c414e304f53f | testing/runtests.py | testing/runtests.py |
import os, sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import call_command
if __name__ == "__main__":
args = sys.argv[1:]
call_command("test", *args, verbosity=2)
|
import os, sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
if __name__ == "__main__":
from django.core.management import execute_from_command_line
args = sys.argv
args.insert(1, "test")
args.insert(2, "pg_uuid_fields")
execute_from_command_line(args)
| Fix tests to run with django 1.7 | Fix tests to run with django 1.7
| Python | bsd-3-clause | niwinz/djorm-ext-pguuid |
import os, sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
+ if __name__ == "__main__":
- from django.core.management import call_command
+ from django.core.management import execute_from_command_line
- if __name__ == "__main__":
- args = sys.argv[1:]
+ args = sys.argv
- call_command("test", *args, verbosity=2)
+ args.insert(1, "test")
+ args.insert(2, "pg_uuid_fields")
+ execute_from_command_line(args)
+ | Fix tests to run with django 1.7 | ## Code Before:
import os, sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.management import call_command
if __name__ == "__main__":
args = sys.argv[1:]
call_command("test", *args, verbosity=2)
## Instruction:
Fix tests to run with django 1.7
## Code After:
import os, sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
if __name__ == "__main__":
from django.core.management import execute_from_command_line
args = sys.argv
args.insert(1, "test")
args.insert(2, "pg_uuid_fields")
execute_from_command_line(args)
|
import os, sys
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
+ if __name__ == "__main__":
- from django.core.management import call_command
? ^^^
+ from django.core.management import execute_from_command_line
? ++++ +++ ^^^^^^^^ +++++
- if __name__ == "__main__":
- args = sys.argv[1:]
? ----
+ args = sys.argv
- call_command("test", *args, verbosity=2)
+ args.insert(1, "test")
+ args.insert(2, "pg_uuid_fields")
+
+ execute_from_command_line(args) |
286cba2b3e7cf323835acd07f1e3bb510d74bcb2 | biopsy/tests.py | biopsy/tests.py | from django.test import TestCase
from django.db import models
from biopsy.models import Biopsy
class BiopsyTest(TestCase):
def biopy_test(self):
biopsy = Biopsy(
clinical_information= "clinica",
macroscopic= "macroscopia",
microscopic= "microscopia",
conclusion= "conclusao",
notes= "nota",
footer= "legenda"
)
biopsy.save()
self.assertEquals("clinica",biopsy.clinical_information)
self.assertEquals("macroscopia",biopsy.macroscopic)
self.assertEquals("microscopia",biopsy.microscopic)
self.assertEquals("conclusao",biopsy.conclusion)
self.assertEquals("nota",biopsy.notes)
self.assertEquals("legenda",biopsy.footer)
| from django.test import TestCase
from django.db import models
from biopsy.models import Biopsy
class BiopsyTest(TestCase):
def biopy_test(self):
biopsy = Biopsy(
clinical_information= "clinica",
macroscopic= "macroscopia",
microscopic= "microscopia",
conclusion= "conclusao",
notes= "nota",
footer= "legenda",
status = "status",
exam = "exame"
)
biopsy.save()
self.assertEquals("clinica",biopsy.clinical_information)
self.assertEquals("macroscopia",biopsy.macroscopic)
self.assertEquals("microscopia",biopsy.microscopic)
self.assertEquals("conclusao",biopsy.conclusion)
self.assertEquals("nota",biopsy.notes)
self.assertEquals("legenda",biopsy.footer)
self.assertEquals("status",biopsy.status)
self.assertEquals("exame",biopsy.exam)
| Add status and exam in test Biopsy | Add status and exam in test Biopsy
| Python | mit | msfernandes/anato-hub,msfernandes/anato-hub,msfernandes/anato-hub,msfernandes/anato-hub | from django.test import TestCase
from django.db import models
from biopsy.models import Biopsy
class BiopsyTest(TestCase):
def biopy_test(self):
biopsy = Biopsy(
clinical_information= "clinica",
macroscopic= "macroscopia",
microscopic= "microscopia",
conclusion= "conclusao",
notes= "nota",
- footer= "legenda"
+ footer= "legenda",
+ status = "status",
+ exam = "exame"
)
biopsy.save()
self.assertEquals("clinica",biopsy.clinical_information)
self.assertEquals("macroscopia",biopsy.macroscopic)
self.assertEquals("microscopia",biopsy.microscopic)
self.assertEquals("conclusao",biopsy.conclusion)
self.assertEquals("nota",biopsy.notes)
self.assertEquals("legenda",biopsy.footer)
+ self.assertEquals("status",biopsy.status)
+ self.assertEquals("exame",biopsy.exam)
| Add status and exam in test Biopsy | ## Code Before:
from django.test import TestCase
from django.db import models
from biopsy.models import Biopsy
class BiopsyTest(TestCase):
def biopy_test(self):
biopsy = Biopsy(
clinical_information= "clinica",
macroscopic= "macroscopia",
microscopic= "microscopia",
conclusion= "conclusao",
notes= "nota",
footer= "legenda"
)
biopsy.save()
self.assertEquals("clinica",biopsy.clinical_information)
self.assertEquals("macroscopia",biopsy.macroscopic)
self.assertEquals("microscopia",biopsy.microscopic)
self.assertEquals("conclusao",biopsy.conclusion)
self.assertEquals("nota",biopsy.notes)
self.assertEquals("legenda",biopsy.footer)
## Instruction:
Add status and exam in test Biopsy
## Code After:
from django.test import TestCase
from django.db import models
from biopsy.models import Biopsy
class BiopsyTest(TestCase):
def biopy_test(self):
biopsy = Biopsy(
clinical_information= "clinica",
macroscopic= "macroscopia",
microscopic= "microscopia",
conclusion= "conclusao",
notes= "nota",
footer= "legenda",
status = "status",
exam = "exame"
)
biopsy.save()
self.assertEquals("clinica",biopsy.clinical_information)
self.assertEquals("macroscopia",biopsy.macroscopic)
self.assertEquals("microscopia",biopsy.microscopic)
self.assertEquals("conclusao",biopsy.conclusion)
self.assertEquals("nota",biopsy.notes)
self.assertEquals("legenda",biopsy.footer)
self.assertEquals("status",biopsy.status)
self.assertEquals("exame",biopsy.exam)
| from django.test import TestCase
from django.db import models
from biopsy.models import Biopsy
class BiopsyTest(TestCase):
def biopy_test(self):
biopsy = Biopsy(
clinical_information= "clinica",
macroscopic= "macroscopia",
microscopic= "microscopia",
conclusion= "conclusao",
notes= "nota",
- footer= "legenda"
+ footer= "legenda",
? +
+ status = "status",
+ exam = "exame"
)
biopsy.save()
self.assertEquals("clinica",biopsy.clinical_information)
self.assertEquals("macroscopia",biopsy.macroscopic)
self.assertEquals("microscopia",biopsy.microscopic)
self.assertEquals("conclusao",biopsy.conclusion)
self.assertEquals("nota",biopsy.notes)
self.assertEquals("legenda",biopsy.footer)
+ self.assertEquals("status",biopsy.status)
+ self.assertEquals("exame",biopsy.exam) |
ffd8bb1e85fe7ed80d85062e4d5932f28065b84c | auditlog/apps.py | auditlog/apps.py | from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
| from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
default_auto_field = 'django.db.models.AutoField'
| Apply default_auto_field to app config. | Apply default_auto_field to app config.
| Python | mit | jjkester/django-auditlog | from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
+ default_auto_field = 'django.db.models.AutoField'
| Apply default_auto_field to app config. | ## Code Before:
from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
## Instruction:
Apply default_auto_field to app config.
## Code After:
from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
default_auto_field = 'django.db.models.AutoField'
| from django.apps import AppConfig
class AuditlogConfig(AppConfig):
name = "auditlog"
verbose_name = "Audit log"
+ default_auto_field = 'django.db.models.AutoField' |
632a655f8f1f5867069f1c4d381417fa567b79a6 | controlled_vocabularies/urls.py | controlled_vocabularies/urls.py | from django.urls import re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
re_path(r'^$', vocabulary_list, name="vocabulary_list"),
re_path(r'^all-verbose/?$', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
re_path(r'^about/', about, name="about"),
re_path(r'^all/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^(?P<vocabulary_name>[\w-]+)/$', term_list, name="term_list"),
re_path(r'^(?P<list_name>[\w-]+)/(?P<file_format>\w+)/$', vocabulary_file,
name="vocabulary_file"),
]
| from django.urls import path, re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
path('', vocabulary_list, name="vocabulary_list"),
path('all-verbose/', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
path('about/', about, name="about"),
path('all/', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
path('<slug:list_name>/<slug:file_format>/', vocabulary_file,
name="vocabulary_file"),
path('<slug:vocabulary_name>/', term_list, name="term_list"),
]
| Replace re_path with path wherever possible | Replace re_path with path wherever possible
| Python | bsd-3-clause | unt-libraries/django-controlled-vocabularies,unt-libraries/django-controlled-vocabularies | - from django.urls import re_path
+ from django.urls import path, re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
- re_path(r'^$', vocabulary_list, name="vocabulary_list"),
+ path('', vocabulary_list, name="vocabulary_list"),
- re_path(r'^all-verbose/?$', verbose_vocabularies, name="verbose_vocabularies"),
+ path('all-verbose/', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
- re_path(r'^about/', about, name="about"),
+ path('about/', about, name="about"),
- re_path(r'^all/?$', all_vocabularies, name="all_vocabularies"),
+ path('all/', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
- re_path(r'^(?P<vocabulary_name>[\w-]+)/$', term_list, name="term_list"),
- re_path(r'^(?P<list_name>[\w-]+)/(?P<file_format>\w+)/$', vocabulary_file,
+ path('<slug:list_name>/<slug:file_format>/', vocabulary_file,
- name="vocabulary_file"),
+ name="vocabulary_file"),
+ path('<slug:vocabulary_name>/', term_list, name="term_list"),
]
| Replace re_path with path wherever possible | ## Code Before:
from django.urls import re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
re_path(r'^$', vocabulary_list, name="vocabulary_list"),
re_path(r'^all-verbose/?$', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
re_path(r'^about/', about, name="about"),
re_path(r'^all/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^(?P<vocabulary_name>[\w-]+)/$', term_list, name="term_list"),
re_path(r'^(?P<list_name>[\w-]+)/(?P<file_format>\w+)/$', vocabulary_file,
name="vocabulary_file"),
]
## Instruction:
Replace re_path with path wherever possible
## Code After:
from django.urls import path, re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
path('', vocabulary_list, name="vocabulary_list"),
path('all-verbose/', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
path('about/', about, name="about"),
path('all/', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
path('<slug:list_name>/<slug:file_format>/', vocabulary_file,
name="vocabulary_file"),
path('<slug:vocabulary_name>/', term_list, name="term_list"),
]
| - from django.urls import re_path
+ from django.urls import path, re_path
? ++++++
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
- re_path(r'^$', vocabulary_list, name="vocabulary_list"),
? --- - --
+ path('', vocabulary_list, name="vocabulary_list"),
- re_path(r'^all-verbose/?$', verbose_vocabularies, name="verbose_vocabularies"),
? --- - - --
+ path('all-verbose/', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
- re_path(r'^about/', about, name="about"),
? --- - -
+ path('about/', about, name="about"),
- re_path(r'^all/?$', all_vocabularies, name="all_vocabularies"),
? --- - - --
+ path('all/', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
- re_path(r'^(?P<vocabulary_name>[\w-]+)/$', term_list, name="term_list"),
- re_path(r'^(?P<list_name>[\w-]+)/(?P<file_format>\w+)/$', vocabulary_file,
? --- - ---- ------- --- ---- -
+ path('<slug:list_name>/<slug:file_format>/', vocabulary_file,
? +++++ +++++
- name="vocabulary_file"),
? ---
+ name="vocabulary_file"),
+ path('<slug:vocabulary_name>/', term_list, name="term_list"),
] |
d7bec88009b73a57124dbfacc91446927328abf9 | src/command_modules/azure-cli-network/azure/cli/command_modules/network/custom.py | src/command_modules/azure-cli-network/azure/cli/command_modules/network/custom.py | from azure.mgmt.network.models import Subnet, SecurityRule
from ._factory import _network_client_factory
def create_update_subnet(resource_group_name, subnet_name, virtual_network_name, address_prefix):
'''Create or update a virtual network (VNet) subnet'''
subnet_settings = Subnet(name=subnet_name, address_prefix=address_prefix)
ncf = _network_client_factory()
return ncf.subnets.create_or_update(
resource_group_name, virtual_network_name, subnet_name, subnet_settings)
def create_update_nsg_rule(resource_group_name, network_security_group_name, security_rule_name,
protocol, source_address_prefix, destination_address_prefix,
access, direction, source_port_range, destination_port_range,
description=None, priority=None, name=None):
settings = SecurityRule(protocol=protocol, source_address_prefix=source_address_prefix,
destination_address_prefix=destination_address_prefix, access=access,
direction=direction,
description=description, source_port_range=source_port_range,
destination_port_range=destination_port_range, priority=priority,
name=name)
ncf = _network_client_factory()
return ncf.security_rules.create_or_update(
resource_group_name, network_security_group_name, security_rule_name, settings)
create_update_nsg_rule.__doc__ = SecurityRule.__doc__
| from azure.mgmt.network.models import Subnet, SecurityRule
from ._factory import _network_client_factory
def create_update_subnet(resource_group_name, subnet_name, virtual_network_name, address_prefix):
'''Create or update a virtual network (VNet) subnet'''
subnet_settings = Subnet(name=subnet_name, address_prefix=address_prefix)
ncf = _network_client_factory()
return ncf.subnets.create_or_update(
resource_group_name, virtual_network_name, subnet_name, subnet_settings)
def create_update_nsg_rule(resource_group_name, network_security_group_name, security_rule_name,
protocol, source_address_prefix, destination_address_prefix,
access, direction, source_port_range, destination_port_range,
description=None, priority=None):
settings = SecurityRule(protocol=protocol, source_address_prefix=source_address_prefix,
destination_address_prefix=destination_address_prefix, access=access,
direction=direction,
description=description, source_port_range=source_port_range,
destination_port_range=destination_port_range, priority=priority,
name=security_rule_name)
ncf = _network_client_factory()
return ncf.security_rules.create_or_update(
resource_group_name, network_security_group_name, security_rule_name, settings)
create_update_nsg_rule.__doc__ = SecurityRule.__doc__
| Fix broken NSG create command (duplicate --name parameter) | Fix broken NSG create command (duplicate --name parameter)
| Python | mit | QingChenmsft/azure-cli,samedder/azure-cli,BurtBiel/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli,yugangw-msft/azure-cli,BurtBiel/azure-cli | from azure.mgmt.network.models import Subnet, SecurityRule
from ._factory import _network_client_factory
def create_update_subnet(resource_group_name, subnet_name, virtual_network_name, address_prefix):
'''Create or update a virtual network (VNet) subnet'''
subnet_settings = Subnet(name=subnet_name, address_prefix=address_prefix)
ncf = _network_client_factory()
return ncf.subnets.create_or_update(
resource_group_name, virtual_network_name, subnet_name, subnet_settings)
def create_update_nsg_rule(resource_group_name, network_security_group_name, security_rule_name,
protocol, source_address_prefix, destination_address_prefix,
access, direction, source_port_range, destination_port_range,
- description=None, priority=None, name=None):
+ description=None, priority=None):
settings = SecurityRule(protocol=protocol, source_address_prefix=source_address_prefix,
destination_address_prefix=destination_address_prefix, access=access,
direction=direction,
description=description, source_port_range=source_port_range,
destination_port_range=destination_port_range, priority=priority,
- name=name)
+ name=security_rule_name)
ncf = _network_client_factory()
return ncf.security_rules.create_or_update(
resource_group_name, network_security_group_name, security_rule_name, settings)
create_update_nsg_rule.__doc__ = SecurityRule.__doc__
| Fix broken NSG create command (duplicate --name parameter) | ## Code Before:
from azure.mgmt.network.models import Subnet, SecurityRule
from ._factory import _network_client_factory
def create_update_subnet(resource_group_name, subnet_name, virtual_network_name, address_prefix):
'''Create or update a virtual network (VNet) subnet'''
subnet_settings = Subnet(name=subnet_name, address_prefix=address_prefix)
ncf = _network_client_factory()
return ncf.subnets.create_or_update(
resource_group_name, virtual_network_name, subnet_name, subnet_settings)
def create_update_nsg_rule(resource_group_name, network_security_group_name, security_rule_name,
protocol, source_address_prefix, destination_address_prefix,
access, direction, source_port_range, destination_port_range,
description=None, priority=None, name=None):
settings = SecurityRule(protocol=protocol, source_address_prefix=source_address_prefix,
destination_address_prefix=destination_address_prefix, access=access,
direction=direction,
description=description, source_port_range=source_port_range,
destination_port_range=destination_port_range, priority=priority,
name=name)
ncf = _network_client_factory()
return ncf.security_rules.create_or_update(
resource_group_name, network_security_group_name, security_rule_name, settings)
create_update_nsg_rule.__doc__ = SecurityRule.__doc__
## Instruction:
Fix broken NSG create command (duplicate --name parameter)
## Code After:
from azure.mgmt.network.models import Subnet, SecurityRule
from ._factory import _network_client_factory
def create_update_subnet(resource_group_name, subnet_name, virtual_network_name, address_prefix):
'''Create or update a virtual network (VNet) subnet'''
subnet_settings = Subnet(name=subnet_name, address_prefix=address_prefix)
ncf = _network_client_factory()
return ncf.subnets.create_or_update(
resource_group_name, virtual_network_name, subnet_name, subnet_settings)
def create_update_nsg_rule(resource_group_name, network_security_group_name, security_rule_name,
protocol, source_address_prefix, destination_address_prefix,
access, direction, source_port_range, destination_port_range,
description=None, priority=None):
settings = SecurityRule(protocol=protocol, source_address_prefix=source_address_prefix,
destination_address_prefix=destination_address_prefix, access=access,
direction=direction,
description=description, source_port_range=source_port_range,
destination_port_range=destination_port_range, priority=priority,
name=security_rule_name)
ncf = _network_client_factory()
return ncf.security_rules.create_or_update(
resource_group_name, network_security_group_name, security_rule_name, settings)
create_update_nsg_rule.__doc__ = SecurityRule.__doc__
| from azure.mgmt.network.models import Subnet, SecurityRule
from ._factory import _network_client_factory
def create_update_subnet(resource_group_name, subnet_name, virtual_network_name, address_prefix):
'''Create or update a virtual network (VNet) subnet'''
subnet_settings = Subnet(name=subnet_name, address_prefix=address_prefix)
ncf = _network_client_factory()
return ncf.subnets.create_or_update(
resource_group_name, virtual_network_name, subnet_name, subnet_settings)
def create_update_nsg_rule(resource_group_name, network_security_group_name, security_rule_name,
protocol, source_address_prefix, destination_address_prefix,
access, direction, source_port_range, destination_port_range,
- description=None, priority=None, name=None):
? -----------
+ description=None, priority=None):
settings = SecurityRule(protocol=protocol, source_address_prefix=source_address_prefix,
destination_address_prefix=destination_address_prefix, access=access,
direction=direction,
description=description, source_port_range=source_port_range,
destination_port_range=destination_port_range, priority=priority,
- name=name)
+ name=security_rule_name)
? ++++++++++++++
ncf = _network_client_factory()
return ncf.security_rules.create_or_update(
resource_group_name, network_security_group_name, security_rule_name, settings)
create_update_nsg_rule.__doc__ = SecurityRule.__doc__ |
72b660a9d71e1b29aa10a704e918c0c49dde8d86 | pygotham/admin/events.py | pygotham/admin/events.py | """Admin for event-related models."""
import wtforms
from pygotham.admin.utils import model_view
from pygotham.events import models
__all__ = ('EventModelView',)
CATEGORY = 'Events'
EventModelView = model_view(
models.Event,
'Events',
CATEGORY,
column_list=('name', 'slug', 'begins', 'ends', 'active'),
form_excluded_columns=(
'about_pages',
'announcements',
'calls_to_action',
'days',
'sponsor_levels',
'talks',
),
form_overrides={
'activity_begins': wtforms.DateTimeField,
'activity_ends': wtforms.DateTimeField,
'proposals_begin': wtforms.DateTimeField,
'proposals_end': wtforms.DateTimeField,
'registration_begins': wtforms.DateTimeField,
'registration_ends': wtforms.DateTimeField,
'talk_list_begins': wtforms.DateTimeField,
},
)
VolunteerModelView = model_view(
models.Volunteer,
'Volunteers',
CATEGORY,
column_filters=('event.slug', 'event.name'),
column_list=('event', 'user'),
)
| """Admin for event-related models."""
import wtforms
from pygotham.admin.utils import model_view
from pygotham.events import models
__all__ = ('EventModelView',)
CATEGORY = 'Events'
EventModelView = model_view(
models.Event,
'Events',
CATEGORY,
column_list=('name', 'slug', 'begins', 'ends', 'active'),
form_excluded_columns=(
'about_pages',
'announcements',
'calls_to_action',
'days',
'sponsor_levels',
'talks',
'volunteers',
),
form_overrides={
'activity_begins': wtforms.DateTimeField,
'activity_ends': wtforms.DateTimeField,
'proposals_begin': wtforms.DateTimeField,
'proposals_end': wtforms.DateTimeField,
'registration_begins': wtforms.DateTimeField,
'registration_ends': wtforms.DateTimeField,
'talk_list_begins': wtforms.DateTimeField,
},
)
VolunteerModelView = model_view(
models.Volunteer,
'Volunteers',
CATEGORY,
column_filters=('event.slug', 'event.name'),
column_list=('event', 'user'),
)
| Exclude volunteers from the event admin | Exclude volunteers from the event admin
Showing many-to-many relationships in the admin can cause a page to take
a while to load. Plus the event admin isn't really the place to manage
volunteers.
Closes #198
| Python | bsd-3-clause | pathunstrom/pygotham,pathunstrom/pygotham,pathunstrom/pygotham,djds23/pygotham-1,PyGotham/pygotham,pathunstrom/pygotham,PyGotham/pygotham,djds23/pygotham-1,djds23/pygotham-1,djds23/pygotham-1,djds23/pygotham-1,pathunstrom/pygotham,PyGotham/pygotham,PyGotham/pygotham,PyGotham/pygotham | """Admin for event-related models."""
import wtforms
from pygotham.admin.utils import model_view
from pygotham.events import models
__all__ = ('EventModelView',)
CATEGORY = 'Events'
EventModelView = model_view(
models.Event,
'Events',
CATEGORY,
column_list=('name', 'slug', 'begins', 'ends', 'active'),
form_excluded_columns=(
'about_pages',
'announcements',
'calls_to_action',
'days',
'sponsor_levels',
'talks',
+ 'volunteers',
),
form_overrides={
'activity_begins': wtforms.DateTimeField,
'activity_ends': wtforms.DateTimeField,
'proposals_begin': wtforms.DateTimeField,
'proposals_end': wtforms.DateTimeField,
'registration_begins': wtforms.DateTimeField,
'registration_ends': wtforms.DateTimeField,
'talk_list_begins': wtforms.DateTimeField,
},
)
VolunteerModelView = model_view(
models.Volunteer,
'Volunteers',
CATEGORY,
column_filters=('event.slug', 'event.name'),
column_list=('event', 'user'),
)
| Exclude volunteers from the event admin | ## Code Before:
"""Admin for event-related models."""
import wtforms
from pygotham.admin.utils import model_view
from pygotham.events import models
__all__ = ('EventModelView',)
CATEGORY = 'Events'
EventModelView = model_view(
models.Event,
'Events',
CATEGORY,
column_list=('name', 'slug', 'begins', 'ends', 'active'),
form_excluded_columns=(
'about_pages',
'announcements',
'calls_to_action',
'days',
'sponsor_levels',
'talks',
),
form_overrides={
'activity_begins': wtforms.DateTimeField,
'activity_ends': wtforms.DateTimeField,
'proposals_begin': wtforms.DateTimeField,
'proposals_end': wtforms.DateTimeField,
'registration_begins': wtforms.DateTimeField,
'registration_ends': wtforms.DateTimeField,
'talk_list_begins': wtforms.DateTimeField,
},
)
VolunteerModelView = model_view(
models.Volunteer,
'Volunteers',
CATEGORY,
column_filters=('event.slug', 'event.name'),
column_list=('event', 'user'),
)
## Instruction:
Exclude volunteers from the event admin
## Code After:
"""Admin for event-related models."""
import wtforms
from pygotham.admin.utils import model_view
from pygotham.events import models
__all__ = ('EventModelView',)
CATEGORY = 'Events'
EventModelView = model_view(
models.Event,
'Events',
CATEGORY,
column_list=('name', 'slug', 'begins', 'ends', 'active'),
form_excluded_columns=(
'about_pages',
'announcements',
'calls_to_action',
'days',
'sponsor_levels',
'talks',
'volunteers',
),
form_overrides={
'activity_begins': wtforms.DateTimeField,
'activity_ends': wtforms.DateTimeField,
'proposals_begin': wtforms.DateTimeField,
'proposals_end': wtforms.DateTimeField,
'registration_begins': wtforms.DateTimeField,
'registration_ends': wtforms.DateTimeField,
'talk_list_begins': wtforms.DateTimeField,
},
)
VolunteerModelView = model_view(
models.Volunteer,
'Volunteers',
CATEGORY,
column_filters=('event.slug', 'event.name'),
column_list=('event', 'user'),
)
| """Admin for event-related models."""
import wtforms
from pygotham.admin.utils import model_view
from pygotham.events import models
__all__ = ('EventModelView',)
CATEGORY = 'Events'
EventModelView = model_view(
models.Event,
'Events',
CATEGORY,
column_list=('name', 'slug', 'begins', 'ends', 'active'),
form_excluded_columns=(
'about_pages',
'announcements',
'calls_to_action',
'days',
'sponsor_levels',
'talks',
+ 'volunteers',
),
form_overrides={
'activity_begins': wtforms.DateTimeField,
'activity_ends': wtforms.DateTimeField,
'proposals_begin': wtforms.DateTimeField,
'proposals_end': wtforms.DateTimeField,
'registration_begins': wtforms.DateTimeField,
'registration_ends': wtforms.DateTimeField,
'talk_list_begins': wtforms.DateTimeField,
},
)
VolunteerModelView = model_view(
models.Volunteer,
'Volunteers',
CATEGORY,
column_filters=('event.slug', 'event.name'),
column_list=('event', 'user'),
) |
f25e0fe435f334e19fc84a9c9458a1bea4a051f9 | money/parser/__init__.py | money/parser/__init__.py | import csv
from money.models import Movement
def parse_csv(raw_csv, parser, header_lines=0):
reader = csv.reader(raw_csv, delimiter=',', quotechar='"')
rows = []
for row in reader:
if reader.line_num > header_lines and row:
rows.append(parser.parse_row(row))
return rows
def import_movements(data, bank_account):
rejected = []
accepted = 0
for row in data:
obj, created = Movement.objects.get_or_create(
bank_account=bank_account,
description=row["description"],
amount=row["amount"],
date=row["date"],
)
if created:
accepted += 1
else:
rejected.append(row)
return accepted, rejected | import csv
from money.models import Movement
def parse_csv(raw_csv, parser, header_lines=0, reverse_order=False):
reader = csv.reader(raw_csv, delimiter=',', quotechar='"')
rows = []
for row in reader:
if reader.line_num > header_lines and row:
rows.append(parser.parse_row(row))
if reverse_order:
rows.reverse()
return rows
def import_movements(data, bank_account):
rejected = []
accepted = 0
for row in data:
obj, created = Movement.objects.get_or_create(
bank_account=bank_account,
description=row["description"],
amount=row["amount"],
date=row["date"],
)
if created:
accepted += 1
else:
rejected.append(row)
return accepted, rejected
| Allow to reverse the order of the CSV for a proper reading | Allow to reverse the order of the CSV for a proper reading
| Python | bsd-3-clause | shakaran/casterly,shakaran/casterly | import csv
from money.models import Movement
- def parse_csv(raw_csv, parser, header_lines=0):
+ def parse_csv(raw_csv, parser, header_lines=0, reverse_order=False):
- reader = csv.reader(raw_csv, delimiter=',', quotechar='"')
+ reader = csv.reader(raw_csv, delimiter=',', quotechar='"')
- rows = []
+ rows = []
- for row in reader:
+ for row in reader:
- if reader.line_num > header_lines and row:
+ if reader.line_num > header_lines and row:
- rows.append(parser.parse_row(row))
+ rows.append(parser.parse_row(row))
+ if reverse_order:
+ rows.reverse()
- return rows
+ return rows
def import_movements(data, bank_account):
- rejected = []
+ rejected = []
- accepted = 0
+ accepted = 0
- for row in data:
+ for row in data:
- obj, created = Movement.objects.get_or_create(
+ obj, created = Movement.objects.get_or_create(
- bank_account=bank_account,
+ bank_account=bank_account,
- description=row["description"],
+ description=row["description"],
- amount=row["amount"],
- date=row["date"],
- )
- if created:
- accepted += 1
- else:
- rejected.append(row)
+ amount=row["amount"],
+ date=row["date"],
+ )
+ if created:
+ accepted += 1
+ else:
+ rejected.append(row)
- return accepted, rejected
+ return accepted, rejected
+ | Allow to reverse the order of the CSV for a proper reading | ## Code Before:
import csv
from money.models import Movement
def parse_csv(raw_csv, parser, header_lines=0):
reader = csv.reader(raw_csv, delimiter=',', quotechar='"')
rows = []
for row in reader:
if reader.line_num > header_lines and row:
rows.append(parser.parse_row(row))
return rows
def import_movements(data, bank_account):
rejected = []
accepted = 0
for row in data:
obj, created = Movement.objects.get_or_create(
bank_account=bank_account,
description=row["description"],
amount=row["amount"],
date=row["date"],
)
if created:
accepted += 1
else:
rejected.append(row)
return accepted, rejected
## Instruction:
Allow to reverse the order of the CSV for a proper reading
## Code After:
import csv
from money.models import Movement
def parse_csv(raw_csv, parser, header_lines=0, reverse_order=False):
reader = csv.reader(raw_csv, delimiter=',', quotechar='"')
rows = []
for row in reader:
if reader.line_num > header_lines and row:
rows.append(parser.parse_row(row))
if reverse_order:
rows.reverse()
return rows
def import_movements(data, bank_account):
rejected = []
accepted = 0
for row in data:
obj, created = Movement.objects.get_or_create(
bank_account=bank_account,
description=row["description"],
amount=row["amount"],
date=row["date"],
)
if created:
accepted += 1
else:
rejected.append(row)
return accepted, rejected
| import csv
from money.models import Movement
- def parse_csv(raw_csv, parser, header_lines=0):
+ def parse_csv(raw_csv, parser, header_lines=0, reverse_order=False):
? +++++++++++++++++++++
- reader = csv.reader(raw_csv, delimiter=',', quotechar='"')
? ^
+ reader = csv.reader(raw_csv, delimiter=',', quotechar='"')
? ^^^^
- rows = []
? ^
+ rows = []
? ^^^^
- for row in reader:
? ^
+ for row in reader:
? ^^^^
- if reader.line_num > header_lines and row:
? ^^
+ if reader.line_num > header_lines and row:
? ^^^^^^^^
- rows.append(parser.parse_row(row))
? ^^^
+ rows.append(parser.parse_row(row))
? ^^^^^^^^^^^^
+ if reverse_order:
+ rows.reverse()
- return rows
? ^
+ return rows
? ^^^^
def import_movements(data, bank_account):
- rejected = []
? ^
+ rejected = []
? ^^^^
- accepted = 0
? ^
+ accepted = 0
? ^^^^
- for row in data:
? ^
+ for row in data:
? ^^^^
- obj, created = Movement.objects.get_or_create(
? ^^
+ obj, created = Movement.objects.get_or_create(
? ^^^^^^^^
- bank_account=bank_account,
? ^^^
+ bank_account=bank_account,
? ^^^^^^^^^^^^
- description=row["description"],
? ^^^
+ description=row["description"],
? ^^^^^^^^^^^^
- amount=row["amount"],
- date=row["date"],
- )
- if created:
- accepted += 1
- else:
- rejected.append(row)
+ amount=row["amount"],
+ date=row["date"],
+ )
+ if created:
+ accepted += 1
+ else:
+ rejected.append(row)
- return accepted, rejected
? ^
+ return accepted, rejected
? ^^^^
|
6fabe58bda70c9f6f05a226585259d44b178d6de | tests/from_json_test.py | tests/from_json_test.py | from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert tmp_df.column_names == df.column_names
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
| from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert set(tmp_df.column_names) == set(df.column_names)
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
| Use set to compare elements between two lists (making them immutable) | Use set to compare elements between two lists (making them immutable)
| Python | mit | maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex | from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
- assert tmp_df.column_names == df.column_names
+ assert set(tmp_df.column_names) == set(df.column_names)
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
| Use set to compare elements between two lists (making them immutable) | ## Code Before:
from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert tmp_df.column_names == df.column_names
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
## Instruction:
Use set to compare elements between two lists (making them immutable)
## Code After:
from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
assert set(tmp_df.column_names) == set(df.column_names)
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist()
| from common import *
import tempfile
def test_from_json(ds_local):
df = ds_local
# Create temporary json files
pandas_df = df.to_pandas_df(df)
tmp = tempfile.mktemp('.json')
with open(tmp, 'w') as f:
f.write(pandas_df.to_json())
tmp_df = vaex.from_json(tmp, copy_index=False)
- assert tmp_df.column_names == df.column_names
+ assert set(tmp_df.column_names) == set(df.column_names)
? ++++ + ++++ +
assert len(tmp_df) == len(df)
assert tmp_df.x.tolist() == df.x.tolist()
assert tmp_df.bool.tolist() == df.bool.tolist() |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.