commit stringlengths 40 40 | old_file stringlengths 4 106 | new_file stringlengths 4 106 | old_contents stringlengths 10 2.94k | new_contents stringlengths 21 2.95k | subject stringlengths 16 444 | message stringlengths 17 2.63k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 7 43k | ndiff stringlengths 52 3.31k | instruction stringlengths 16 444 | content stringlengths 133 4.32k | diff stringlengths 49 3.61k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e84a06ea851a81648ba6ee54c88a61c049e913f2 | gorilla/__init__.py | gorilla/__init__.py |
from gorilla.decorators import apply, name, patch
from gorilla.utils import get_original_attribute
__version__ = '0.1.0'
__all__ = [
'decorators',
'extension',
'settings',
'utils'
]
|
from gorilla.decorators import apply, name, patch
__version__ = '0.1.0'
__all__ = [
'decorators',
'extension',
'settings',
'utils'
]
| Remove the `get_original_attribute` shortcut from the root module. | Remove the `get_original_attribute` shortcut from the root module.
| Python | mit | christophercrouzet/gorilla |
from gorilla.decorators import apply, name, patch
- from gorilla.utils import get_original_attribute
__version__ = '0.1.0'
__all__ = [
'decorators',
'extension',
'settings',
'utils'
]
| Remove the `get_original_attribute` shortcut from the root module. | ## Code Before:
from gorilla.decorators import apply, name, patch
from gorilla.utils import get_original_attribute
__version__ = '0.1.0'
__all__ = [
'decorators',
'extension',
'settings',
'utils'
]
## Instruction:
Remove the `get_original_attribute` shortcut from the root module.
## Code After:
from gorilla.decorators import apply, name, patch
__version__ = '0.1.0'
__all__ = [
'decorators',
'extension',
'settings',
'utils'
]
|
from gorilla.decorators import apply, name, patch
- from gorilla.utils import get_original_attribute
__version__ = '0.1.0'
__all__ = [
'decorators',
'extension',
'settings',
'utils'
] |
87983a254ba1d1f036a555aab73fcc07c7f5882b | doc/pyplots/plot_density.py | doc/pyplots/plot_density.py |
import numpy as np
import matplotlib.pyplot as plt
from netCDF4 import Dataset
from mpl_toolkits.basemap import Basemap
import typhon
nc = Dataset('_data/test_data.nc')
lon, lat = np.meshgrid(nc.variables['lon'][:], nc.variables['lat'][:])
vmr = nc.variables['qv'][:]
fig, ax = plt.subplots(figsize=(10, 8))
m = Basemap(projection='cyl', resolution='i',
llcrnrlat=47, llcrnrlon=3,
urcrnrlat=56, urcrnrlon=16)
m.drawcoastlines()
m.drawcountries()
m.drawmeridians(np.arange(0, 20, 2), labels=[0, 0, 0, 1])
m.drawparallels(np.arange(45, 60, 2), labels=[1, 0, 0, 0])
m.pcolormesh(lon, lat, vmr, latlon=True, cmap='density', rasterized=True)
cb = m.colorbar(label='Water vapor [VMR]')
fig.tight_layout()
plt.show()
| """Plot to demonstrate the density colormap. """
import matplotlib.pyplot as plt
import netCDF4
import numpy as np
import cartopy.crs as ccrs
from cartopy.mpl.gridliner import (LONGITUDE_FORMATTER, LATITUDE_FORMATTER)
from typhon.plots.maps import get_cfeatures_at_scale
# Read air temperature data.
with netCDF4.Dataset('_data/test_data.nc') as nc:
lon, lat = np.meshgrid(nc.variables['lon'][:], nc.variables['lat'][:])
h2o = nc.variables['qv'][:]
# Create plot with PlateCarree projection.
fig, ax = plt.subplots(figsize=(10, 8))
ax = plt.axes(projection=ccrs.PlateCarree())
ax.set_extent([3, 16, 47, 56])
# Add map "features".
features = get_cfeatures_at_scale(scale='50m')
ax.add_feature(features.BORDERS)
ax.add_feature(features.COASTLINE)
# Plot the actual data.
sm = ax.pcolormesh(lon, lat, h2o,
cmap='density',
rasterized=True,
transform=ccrs.PlateCarree(),
)
fig.colorbar(sm, label='Water vapor [VMR]', fraction=0.0328, pad=0.02)
# Add coordinate system without drawing gridlines.
gl = ax.gridlines(draw_labels=True, color='none')
gl.xformatter, gl.yformatter = LONGITUDE_FORMATTER, LATITUDE_FORMATTER
gl.xlabels_top = gl.ylabels_right = False
fig.tight_layout()
plt.show()
| Migrate density example to cartopy. | Migrate density example to cartopy.
| Python | mit | atmtools/typhon,atmtools/typhon | + """Plot to demonstrate the density colormap. """
+ import matplotlib.pyplot as plt
+ import netCDF4
import numpy as np
+ import cartopy.crs as ccrs
+ from cartopy.mpl.gridliner import (LONGITUDE_FORMATTER, LATITUDE_FORMATTER)
- import matplotlib.pyplot as plt
- from netCDF4 import Dataset
- from mpl_toolkits.basemap import Basemap
- import typhon
+ from typhon.plots.maps import get_cfeatures_at_scale
- nc = Dataset('_data/test_data.nc')
+ # Read air temperature data.
+ with netCDF4.Dataset('_data/test_data.nc') as nc:
- lon, lat = np.meshgrid(nc.variables['lon'][:], nc.variables['lat'][:])
+ lon, lat = np.meshgrid(nc.variables['lon'][:], nc.variables['lat'][:])
- vmr = nc.variables['qv'][:]
+ h2o = nc.variables['qv'][:]
+ # Create plot with PlateCarree projection.
fig, ax = plt.subplots(figsize=(10, 8))
- m = Basemap(projection='cyl', resolution='i',
- llcrnrlat=47, llcrnrlon=3,
- urcrnrlat=56, urcrnrlon=16)
- m.drawcoastlines()
- m.drawcountries()
- m.drawmeridians(np.arange(0, 20, 2), labels=[0, 0, 0, 1])
- m.drawparallels(np.arange(45, 60, 2), labels=[1, 0, 0, 0])
- m.pcolormesh(lon, lat, vmr, latlon=True, cmap='density', rasterized=True)
- cb = m.colorbar(label='Water vapor [VMR]')
+ ax = plt.axes(projection=ccrs.PlateCarree())
+ ax.set_extent([3, 16, 47, 56])
+
+ # Add map "features".
+ features = get_cfeatures_at_scale(scale='50m')
+ ax.add_feature(features.BORDERS)
+ ax.add_feature(features.COASTLINE)
+
+ # Plot the actual data.
+ sm = ax.pcolormesh(lon, lat, h2o,
+ cmap='density',
+ rasterized=True,
+ transform=ccrs.PlateCarree(),
+ )
+ fig.colorbar(sm, label='Water vapor [VMR]', fraction=0.0328, pad=0.02)
+
+ # Add coordinate system without drawing gridlines.
+ gl = ax.gridlines(draw_labels=True, color='none')
+ gl.xformatter, gl.yformatter = LONGITUDE_FORMATTER, LATITUDE_FORMATTER
+ gl.xlabels_top = gl.ylabels_right = False
fig.tight_layout()
plt.show()
| Migrate density example to cartopy. | ## Code Before:
import numpy as np
import matplotlib.pyplot as plt
from netCDF4 import Dataset
from mpl_toolkits.basemap import Basemap
import typhon
nc = Dataset('_data/test_data.nc')
lon, lat = np.meshgrid(nc.variables['lon'][:], nc.variables['lat'][:])
vmr = nc.variables['qv'][:]
fig, ax = plt.subplots(figsize=(10, 8))
m = Basemap(projection='cyl', resolution='i',
llcrnrlat=47, llcrnrlon=3,
urcrnrlat=56, urcrnrlon=16)
m.drawcoastlines()
m.drawcountries()
m.drawmeridians(np.arange(0, 20, 2), labels=[0, 0, 0, 1])
m.drawparallels(np.arange(45, 60, 2), labels=[1, 0, 0, 0])
m.pcolormesh(lon, lat, vmr, latlon=True, cmap='density', rasterized=True)
cb = m.colorbar(label='Water vapor [VMR]')
fig.tight_layout()
plt.show()
## Instruction:
Migrate density example to cartopy.
## Code After:
"""Plot to demonstrate the density colormap. """
import matplotlib.pyplot as plt
import netCDF4
import numpy as np
import cartopy.crs as ccrs
from cartopy.mpl.gridliner import (LONGITUDE_FORMATTER, LATITUDE_FORMATTER)
from typhon.plots.maps import get_cfeatures_at_scale
# Read air temperature data.
with netCDF4.Dataset('_data/test_data.nc') as nc:
lon, lat = np.meshgrid(nc.variables['lon'][:], nc.variables['lat'][:])
h2o = nc.variables['qv'][:]
# Create plot with PlateCarree projection.
fig, ax = plt.subplots(figsize=(10, 8))
ax = plt.axes(projection=ccrs.PlateCarree())
ax.set_extent([3, 16, 47, 56])
# Add map "features".
features = get_cfeatures_at_scale(scale='50m')
ax.add_feature(features.BORDERS)
ax.add_feature(features.COASTLINE)
# Plot the actual data.
sm = ax.pcolormesh(lon, lat, h2o,
cmap='density',
rasterized=True,
transform=ccrs.PlateCarree(),
)
fig.colorbar(sm, label='Water vapor [VMR]', fraction=0.0328, pad=0.02)
# Add coordinate system without drawing gridlines.
gl = ax.gridlines(draw_labels=True, color='none')
gl.xformatter, gl.yformatter = LONGITUDE_FORMATTER, LATITUDE_FORMATTER
gl.xlabels_top = gl.ylabels_right = False
fig.tight_layout()
plt.show()
| + """Plot to demonstrate the density colormap. """
+ import matplotlib.pyplot as plt
+ import netCDF4
import numpy as np
+ import cartopy.crs as ccrs
+ from cartopy.mpl.gridliner import (LONGITUDE_FORMATTER, LATITUDE_FORMATTER)
- import matplotlib.pyplot as plt
- from netCDF4 import Dataset
- from mpl_toolkits.basemap import Basemap
- import typhon
+ from typhon.plots.maps import get_cfeatures_at_scale
- nc = Dataset('_data/test_data.nc')
+ # Read air temperature data.
+ with netCDF4.Dataset('_data/test_data.nc') as nc:
- lon, lat = np.meshgrid(nc.variables['lon'][:], nc.variables['lat'][:])
+ lon, lat = np.meshgrid(nc.variables['lon'][:], nc.variables['lat'][:])
? ++++
- vmr = nc.variables['qv'][:]
? ^^^
+ h2o = nc.variables['qv'][:]
? ^^^^^^^
+ # Create plot with PlateCarree projection.
fig, ax = plt.subplots(figsize=(10, 8))
- m = Basemap(projection='cyl', resolution='i',
- llcrnrlat=47, llcrnrlon=3,
- urcrnrlat=56, urcrnrlon=16)
- m.drawcoastlines()
- m.drawcountries()
- m.drawmeridians(np.arange(0, 20, 2), labels=[0, 0, 0, 1])
- m.drawparallels(np.arange(45, 60, 2), labels=[1, 0, 0, 0])
- m.pcolormesh(lon, lat, vmr, latlon=True, cmap='density', rasterized=True)
- cb = m.colorbar(label='Water vapor [VMR]')
+ ax = plt.axes(projection=ccrs.PlateCarree())
+ ax.set_extent([3, 16, 47, 56])
+
+ # Add map "features".
+ features = get_cfeatures_at_scale(scale='50m')
+ ax.add_feature(features.BORDERS)
+ ax.add_feature(features.COASTLINE)
+
+ # Plot the actual data.
+ sm = ax.pcolormesh(lon, lat, h2o,
+ cmap='density',
+ rasterized=True,
+ transform=ccrs.PlateCarree(),
+ )
+ fig.colorbar(sm, label='Water vapor [VMR]', fraction=0.0328, pad=0.02)
+
+ # Add coordinate system without drawing gridlines.
+ gl = ax.gridlines(draw_labels=True, color='none')
+ gl.xformatter, gl.yformatter = LONGITUDE_FORMATTER, LATITUDE_FORMATTER
+ gl.xlabels_top = gl.ylabels_right = False
fig.tight_layout()
plt.show() |
f5d864c2a5c9b4d2ea1ff95e59d60adf2ebd176e | recipes/sos-bash/run_test.py | recipes/sos-bash/run_test.py | import unittest
import sys
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
@unittest.skipIf(sys.platform == 'win32', 'bash does not exist on win32')
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stdout.strip(), '', f'Stdout is not empty, "{stdout}" received')
self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
execute(kc=kc, code='%use Bash\n%get a\necho $a')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
self.assertEqual(stdout.strip(), '1', f'Stdout should be 1, "{stdout}" received')
if __name__ == '__main__':
unittest.main()
| import unittest
import sys
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stdout.strip(), '', f'Stdout is not empty, "{stdout}" received')
self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
execute(kc=kc, code='%use Bash\n%get a\necho $a')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
self.assertEqual(stdout.strip(), '1', f'Stdout should be 1, "{stdout}" received')
if __name__ == '__main__':
unittest.main()
| Test does not have to fail under windows | Test does not have to fail under windows
| Python | bsd-3-clause | birdsarah/staged-recipes,synapticarbors/staged-recipes,SylvainCorlay/staged-recipes,kwilcox/staged-recipes,johanneskoester/staged-recipes,synapticarbors/staged-recipes,asmeurer/staged-recipes,SylvainCorlay/staged-recipes,scopatz/staged-recipes,patricksnape/staged-recipes,goanpeca/staged-recipes,hadim/staged-recipes,hadim/staged-recipes,ReimarBauer/staged-recipes,chrisburr/staged-recipes,conda-forge/staged-recipes,chrisburr/staged-recipes,dschreij/staged-recipes,stuertz/staged-recipes,ReimarBauer/staged-recipes,scopatz/staged-recipes,stuertz/staged-recipes,kwilcox/staged-recipes,jakirkham/staged-recipes,goanpeca/staged-recipes,jochym/staged-recipes,Juanlu001/staged-recipes,jochym/staged-recipes,ocefpaf/staged-recipes,ocefpaf/staged-recipes,Juanlu001/staged-recipes,asmeurer/staged-recipes,johanneskoester/staged-recipes,mariusvniekerk/staged-recipes,mcs07/staged-recipes,isuruf/staged-recipes,petrushy/staged-recipes,conda-forge/staged-recipes,mariusvniekerk/staged-recipes,dschreij/staged-recipes,petrushy/staged-recipes,birdsarah/staged-recipes,isuruf/staged-recipes,igortg/staged-recipes,jakirkham/staged-recipes,patricksnape/staged-recipes,igortg/staged-recipes,mcs07/staged-recipes | import unittest
import sys
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
- @unittest.skipIf(sys.platform == 'win32', 'bash does not exist on win32')
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stdout.strip(), '', f'Stdout is not empty, "{stdout}" received')
self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
execute(kc=kc, code='%use Bash\n%get a\necho $a')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
self.assertEqual(stdout.strip(), '1', f'Stdout should be 1, "{stdout}" received')
if __name__ == '__main__':
unittest.main()
| Test does not have to fail under windows | ## Code Before:
import unittest
import sys
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
@unittest.skipIf(sys.platform == 'win32', 'bash does not exist on win32')
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stdout.strip(), '', f'Stdout is not empty, "{stdout}" received')
self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
execute(kc=kc, code='%use Bash\n%get a\necho $a')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
self.assertEqual(stdout.strip(), '1', f'Stdout should be 1, "{stdout}" received')
if __name__ == '__main__':
unittest.main()
## Instruction:
Test does not have to fail under windows
## Code After:
import unittest
import sys
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stdout.strip(), '', f'Stdout is not empty, "{stdout}" received')
self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
execute(kc=kc, code='%use Bash\n%get a\necho $a')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
self.assertEqual(stdout.strip(), '1', f'Stdout should be 1, "{stdout}" received')
if __name__ == '__main__':
unittest.main()
| import unittest
import sys
from sos_notebook.test_utils import sos_kernel
from ipykernel.tests.utils import execute, wait_for_idle, assemble_output
- @unittest.skipIf(sys.platform == 'win32', 'bash does not exist on win32')
class TestSoSKernel(unittest.TestCase):
def testKernel(self):
with sos_kernel() as kc:
execute(kc=kc, code='a = 1')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stdout.strip(), '', f'Stdout is not empty, "{stdout}" received')
self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
execute(kc=kc, code='%use Bash\n%get a\necho $a')
stdout, stderr = assemble_output(kc.iopub_channel)
self.assertEqual(stderr.strip(), '', f'Stderr is not empty, "{stderr}" received')
self.assertEqual(stdout.strip(), '1', f'Stdout should be 1, "{stdout}" received')
if __name__ == '__main__':
unittest.main() |
d87cb6b401e38a06c5d594e40ad813a9db0738e6 | taca/analysis/cli.py | taca/analysis/cli.py | import click
from taca.analysis import analysis as an
@click.group()
def analysis():
""" Analysis methods entry point """
pass
# analysis subcommands
@analysis.command()
@click.option('-r', '--run', type=click.Path(exists=True), default=None,
help='Demultiplex only a particular run')
def demultiplex(run):
""" Demultiplex all runs present in the data directories
"""
an.run_preprocessing(run)
@analysis.command()
@click.argument('rundir')
def transfer(rundir):
"""Transfers the run without qc"""
an.transfer_run(rundir)
| import click
from taca.analysis import analysis as an
@click.group()
def analysis():
""" Analysis methods entry point """
pass
# analysis subcommands
@analysis.command()
@click.option('-r', '--run', type=click.Path(exists=True), default=None,
help='Demultiplex only a particular run')
def demultiplex(run):
""" Demultiplex all runs present in the data directories
"""
an.run_preprocessing(run)
@analysis.command()
@click.option('-a','--analysis', is_flag=True, help='Trigger the analysis for the transferred flowcell')
@click.argument('rundir')
def transfer(rundir, analysis):
"""Transfers the run without qc"""
an.transfer_run(rundir, analysis=analysis)
| Add option for triggering or not the analysis | Add option for triggering or not the analysis
| Python | mit | senthil10/TACA,kate-v-stepanova/TACA,SciLifeLab/TACA,SciLifeLab/TACA,vezzi/TACA,guillermo-carrasco/TACA,senthil10/TACA,b97pla/TACA,kate-v-stepanova/TACA,SciLifeLab/TACA,b97pla/TACA,guillermo-carrasco/TACA,vezzi/TACA | import click
from taca.analysis import analysis as an
@click.group()
def analysis():
""" Analysis methods entry point """
pass
# analysis subcommands
@analysis.command()
@click.option('-r', '--run', type=click.Path(exists=True), default=None,
help='Demultiplex only a particular run')
def demultiplex(run):
""" Demultiplex all runs present in the data directories
"""
an.run_preprocessing(run)
@analysis.command()
+ @click.option('-a','--analysis', is_flag=True, help='Trigger the analysis for the transferred flowcell')
@click.argument('rundir')
- def transfer(rundir):
+ def transfer(rundir, analysis):
"""Transfers the run without qc"""
- an.transfer_run(rundir)
+ an.transfer_run(rundir, analysis=analysis)
| Add option for triggering or not the analysis | ## Code Before:
import click
from taca.analysis import analysis as an
@click.group()
def analysis():
""" Analysis methods entry point """
pass
# analysis subcommands
@analysis.command()
@click.option('-r', '--run', type=click.Path(exists=True), default=None,
help='Demultiplex only a particular run')
def demultiplex(run):
""" Demultiplex all runs present in the data directories
"""
an.run_preprocessing(run)
@analysis.command()
@click.argument('rundir')
def transfer(rundir):
"""Transfers the run without qc"""
an.transfer_run(rundir)
## Instruction:
Add option for triggering or not the analysis
## Code After:
import click
from taca.analysis import analysis as an
@click.group()
def analysis():
""" Analysis methods entry point """
pass
# analysis subcommands
@analysis.command()
@click.option('-r', '--run', type=click.Path(exists=True), default=None,
help='Demultiplex only a particular run')
def demultiplex(run):
""" Demultiplex all runs present in the data directories
"""
an.run_preprocessing(run)
@analysis.command()
@click.option('-a','--analysis', is_flag=True, help='Trigger the analysis for the transferred flowcell')
@click.argument('rundir')
def transfer(rundir, analysis):
"""Transfers the run without qc"""
an.transfer_run(rundir, analysis=analysis)
| import click
from taca.analysis import analysis as an
@click.group()
def analysis():
""" Analysis methods entry point """
pass
# analysis subcommands
@analysis.command()
@click.option('-r', '--run', type=click.Path(exists=True), default=None,
help='Demultiplex only a particular run')
def demultiplex(run):
""" Demultiplex all runs present in the data directories
"""
an.run_preprocessing(run)
@analysis.command()
+ @click.option('-a','--analysis', is_flag=True, help='Trigger the analysis for the transferred flowcell')
@click.argument('rundir')
- def transfer(rundir):
+ def transfer(rundir, analysis):
? ++++++++++
"""Transfers the run without qc"""
- an.transfer_run(rundir)
+ an.transfer_run(rundir, analysis=analysis) |
112ce320f351399a28e4d85ed88e1b71df4e7aef | magpie/config/__init__.py | magpie/config/__init__.py | from os import path
class ConfigPath(object):
def __getattr__(self, key):
return_path = path.join(path.dirname(__file__), key + '.cfg')
if not path.exists(return_path): return None
return return_path
config_path = ConfigPath()
| from os import path
class ConfigPath(object):
def __init__(self):
self.config_paths = [path.join(path.expanduser('~'), '.magpie'), path.dirname(__file__)]
def __getattr__(self, key):
for path in self.config_paths:
return_path = path.join(path, key + '.cfg')
if path.exists(return_path): return return_path
return None
config_path = ConfigPath()
| Enable configuration from home directory | Enable configuration from home directory
This adds the possibility of defining multiple locations for the config-files. The given example first searches in ~/.magpie and if it doesn't find any config-files there, it searches in the default path. This enables configuration for each individual user and fixes the problem that magpie must be run as root if it was installed as root. | Python | mit | damoeb/magpie,akarca/magpie,charlesthomas/magpie,jcda/magpie,akarca/magpie,damoeb/magpie,damoeb/magpie,beni55/magpie,jcda/magpie,beni55/magpie,jcda/magpie,beni55/magpie,charlesthomas/magpie,akarca/magpie,charlesthomas/magpie | from os import path
class ConfigPath(object):
+ def __init__(self):
+ self.config_paths = [path.join(path.expanduser('~'), '.magpie'), path.dirname(__file__)]
def __getattr__(self, key):
+ for path in self.config_paths:
- return_path = path.join(path.dirname(__file__), key + '.cfg')
+ return_path = path.join(path, key + '.cfg')
- if not path.exists(return_path): return None
+ if path.exists(return_path): return return_path
- return return_path
+ return None
config_path = ConfigPath()
| Enable configuration from home directory | ## Code Before:
from os import path
class ConfigPath(object):
def __getattr__(self, key):
return_path = path.join(path.dirname(__file__), key + '.cfg')
if not path.exists(return_path): return None
return return_path
config_path = ConfigPath()
## Instruction:
Enable configuration from home directory
## Code After:
from os import path
class ConfigPath(object):
def __init__(self):
self.config_paths = [path.join(path.expanduser('~'), '.magpie'), path.dirname(__file__)]
def __getattr__(self, key):
for path in self.config_paths:
return_path = path.join(path, key + '.cfg')
if path.exists(return_path): return return_path
return None
config_path = ConfigPath()
| from os import path
class ConfigPath(object):
+ def __init__(self):
+ self.config_paths = [path.join(path.expanduser('~'), '.magpie'), path.dirname(__file__)]
def __getattr__(self, key):
+ for path in self.config_paths:
- return_path = path.join(path.dirname(__file__), key + '.cfg')
? ------------------
+ return_path = path.join(path, key + '.cfg')
? ++++
- if not path.exists(return_path): return None
? ---- ^^ ^
+ if path.exists(return_path): return return_path
? ++++ ^^^^^ ^^^^^
- return return_path
+ return None
config_path = ConfigPath() |
3a2d2934f61c496654281da7144f74713a9dea6f | devicehive/api.py | devicehive/api.py | from devicehive.transport import Request
from devicehive.transport import Response
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
response = self._transport.request(req.action, req.request,
**req.params)
return Response(response)
def refresh_token(self, refresh_token):
url = 'token/refresh'
action = url
request = {'refreshToken': refresh_token}
params = {'method': 'POST',
'merge_data': True}
return self._request(url, action, request, **params)
| class Request(object):
"""Request class."""
def __init__(self, url, action, request, **params):
self.action = action
self.request = request
self.params = params
self.params['url'] = url
class Response(object):
"""Response class."""
def __init__(self, response):
self.action = response.pop('action')
self.is_success = response.pop('status') == 'success'
self.code = response.pop('code', None)
self.error = response.pop('error', None)
self.data = response
class ApiObject(object):
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
resp = self._transport.request(req.action, req.request, **req.params)
return Response(resp)
class Token(ApiObject):
def __init__(self, transport, refresh_toke, access_token=None):
ApiObject.__init__(self, transport)
self._refresh_token = refresh_toke
self._access_token = access_token
def refresh(self):
url = 'token/refresh'
action = url
request = {'refreshToken': self._refresh_token}
params = {'method': 'POST',
'merge_data': True}
response = self._request(url, action, request, **params)
self._access_token = response.data['accessToken']
def access_token(self):
return self._access_token
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def token(self, refresh_token, access_token):
return Token(self._transport, refresh_token, access_token)
| Add Request, Response and ApiObject and Token classes | Add Request, Response and ApiObject and Token classes
| Python | apache-2.0 | devicehive/devicehive-python | - from devicehive.transport import Request
- from devicehive.transport import Response
+ class Request(object):
+ """Request class."""
+
+ def __init__(self, url, action, request, **params):
+ self.action = action
+ self.request = request
+ self.params = params
+ self.params['url'] = url
+ class Response(object):
+ """Response class."""
+
+ def __init__(self, response):
+ self.action = response.pop('action')
+ self.is_success = response.pop('status') == 'success'
+ self.code = response.pop('code', None)
+ self.error = response.pop('error', None)
+ self.data = response
+
+
- class Api(object):
+ class ApiObject(object):
- """Api class."""
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
- response = self._transport.request(req.action, req.request,
+ resp = self._transport.request(req.action, req.request, **req.params)
- **req.params)
- return Response(response)
+ return Response(resp)
+
+ class Token(ApiObject):
+
+ def __init__(self, transport, refresh_toke, access_token=None):
+ ApiObject.__init__(self, transport)
- def refresh_token(self, refresh_token):
+ self._refresh_token = refresh_toke
+ self._access_token = access_token
+
+ def refresh(self):
url = 'token/refresh'
action = url
- request = {'refreshToken': refresh_token}
+ request = {'refreshToken': self._refresh_token}
params = {'method': 'POST',
'merge_data': True}
- return self._request(url, action, request, **params)
+ response = self._request(url, action, request, **params)
+ self._access_token = response.data['accessToken']
+ def access_token(self):
+ return self._access_token
+
+
+ class Api(object):
+ """Api class."""
+
+ def __init__(self, transport):
+ self._transport = transport
+
+ def token(self, refresh_token, access_token):
+ return Token(self._transport, refresh_token, access_token)
+ | Add Request, Response and ApiObject and Token classes | ## Code Before:
from devicehive.transport import Request
from devicehive.transport import Response
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
response = self._transport.request(req.action, req.request,
**req.params)
return Response(response)
def refresh_token(self, refresh_token):
url = 'token/refresh'
action = url
request = {'refreshToken': refresh_token}
params = {'method': 'POST',
'merge_data': True}
return self._request(url, action, request, **params)
## Instruction:
Add Request, Response and ApiObject and Token classes
## Code After:
class Request(object):
"""Request class."""
def __init__(self, url, action, request, **params):
self.action = action
self.request = request
self.params = params
self.params['url'] = url
class Response(object):
"""Response class."""
def __init__(self, response):
self.action = response.pop('action')
self.is_success = response.pop('status') == 'success'
self.code = response.pop('code', None)
self.error = response.pop('error', None)
self.data = response
class ApiObject(object):
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
resp = self._transport.request(req.action, req.request, **req.params)
return Response(resp)
class Token(ApiObject):
def __init__(self, transport, refresh_toke, access_token=None):
ApiObject.__init__(self, transport)
self._refresh_token = refresh_toke
self._access_token = access_token
def refresh(self):
url = 'token/refresh'
action = url
request = {'refreshToken': self._refresh_token}
params = {'method': 'POST',
'merge_data': True}
response = self._request(url, action, request, **params)
self._access_token = response.data['accessToken']
def access_token(self):
return self._access_token
class Api(object):
"""Api class."""
def __init__(self, transport):
self._transport = transport
def token(self, refresh_token, access_token):
return Token(self._transport, refresh_token, access_token)
| - from devicehive.transport import Request
- from devicehive.transport import Response
+ class Request(object):
+ """Request class."""
+
+ def __init__(self, url, action, request, **params):
+ self.action = action
+ self.request = request
+ self.params = params
+ self.params['url'] = url
+ class Response(object):
+ """Response class."""
+
+ def __init__(self, response):
+ self.action = response.pop('action')
+ self.is_success = response.pop('status') == 'success'
+ self.code = response.pop('code', None)
+ self.error = response.pop('error', None)
+ self.data = response
+
+
- class Api(object):
+ class ApiObject(object):
? ++++++
- """Api class."""
def __init__(self, transport):
self._transport = transport
def is_http_transport(self):
return self._transport.name == 'http'
def is_websocket_transport(self):
return self._transport.name == 'websocket'
def _request(self, url, action, request, **params):
req = Request(url, action, request, **params)
- response = self._transport.request(req.action, req.request,
? ----
+ resp = self._transport.request(req.action, req.request, **req.params)
? ++++++++++++++
- **req.params)
- return Response(response)
? ----
+ return Response(resp)
- def refresh_token(self, refresh_token):
+
+ class Token(ApiObject):
+
+ def __init__(self, transport, refresh_toke, access_token=None):
+ ApiObject.__init__(self, transport)
+ self._refresh_token = refresh_toke
+ self._access_token = access_token
+
+ def refresh(self):
url = 'token/refresh'
action = url
- request = {'refreshToken': refresh_token}
+ request = {'refreshToken': self._refresh_token}
? ++++++
params = {'method': 'POST',
'merge_data': True}
- return self._request(url, action, request, **params)
? ^^^
+ response = self._request(url, action, request, **params)
? ^^^ ++++
+ self._access_token = response.data['accessToken']
+
+ def access_token(self):
+ return self._access_token
+
+
+ class Api(object):
+ """Api class."""
+
+ def __init__(self, transport):
+ self._transport = transport
+
+ def token(self, refresh_token, access_token):
+ return Token(self._transport, refresh_token, access_token) |
8b7aef341aadefb859790684f41453f561813083 | tmi/views/__init__.py | tmi/views/__init__.py | from flask import g
from flask.ext.login import current_user
from tmi.core import app
from tmi.assets import assets # noqa
from tmi.views.ui import ui # noqa
from tmi.views.auth import login, logout # noqa
from tmi.views.admin import admin # noqa
from tmi.views.cards_api import blueprint as cards_api
@app.before_request
def before_request():
g.user = current_user
app.register_blueprint(cards_api)
| from flask import g, request
from flask.ext.login import current_user
from werkzeug.exceptions import HTTPException
from tmi.core import app
from tmi.forms import Invalid
from tmi.util import jsonify
from tmi.assets import assets # noqa
from tmi.views.ui import ui # noqa
from tmi.views.auth import login, logout # noqa
from tmi.views.admin import admin # noqa
from tmi.views.cards_api import blueprint as cards_api
@app.before_request
def before_request():
g.user = current_user
app.register_blueprint(cards_api)
@app.errorhandler(401)
@app.errorhandler(403)
@app.errorhandler(404)
@app.errorhandler(410)
@app.errorhandler(500)
def handle_exceptions(exc):
if isinstance(exc, HTTPException):
message = exc.get_description(request.environ)
message = message.replace('<p>', '').replace('</p>', '')
body = {
'status': exc.code,
'name': exc.name,
'message': message
}
headers = exc.get_headers(request.environ)
else:
body = {
'status': 500,
'name': exc.__class__.__name__,
'message': unicode(exc)
}
headers = {}
return jsonify(body, status=body.get('status'),
headers=headers)
@app.errorhandler(Invalid)
def handle_invalid(exc):
body = {
'status': 400,
'name': 'Invalid Data',
'message': unicode(exc),
'errors': exc.asdict()
}
return jsonify(body, status=400)
| Handle errors with JSON messages. | Handle errors with JSON messages. | Python | mit | pudo/storyweb,pudo/storyweb | - from flask import g
+ from flask import g, request
from flask.ext.login import current_user
+ from werkzeug.exceptions import HTTPException
from tmi.core import app
+ from tmi.forms import Invalid
+ from tmi.util import jsonify
from tmi.assets import assets # noqa
from tmi.views.ui import ui # noqa
from tmi.views.auth import login, logout # noqa
from tmi.views.admin import admin # noqa
from tmi.views.cards_api import blueprint as cards_api
@app.before_request
def before_request():
g.user = current_user
app.register_blueprint(cards_api)
+
+ @app.errorhandler(401)
+ @app.errorhandler(403)
+ @app.errorhandler(404)
+ @app.errorhandler(410)
+ @app.errorhandler(500)
+ def handle_exceptions(exc):
+ if isinstance(exc, HTTPException):
+ message = exc.get_description(request.environ)
+ message = message.replace('<p>', '').replace('</p>', '')
+ body = {
+ 'status': exc.code,
+ 'name': exc.name,
+ 'message': message
+ }
+ headers = exc.get_headers(request.environ)
+ else:
+ body = {
+ 'status': 500,
+ 'name': exc.__class__.__name__,
+ 'message': unicode(exc)
+ }
+ headers = {}
+ return jsonify(body, status=body.get('status'),
+ headers=headers)
+
+
+ @app.errorhandler(Invalid)
+ def handle_invalid(exc):
+ body = {
+ 'status': 400,
+ 'name': 'Invalid Data',
+ 'message': unicode(exc),
+ 'errors': exc.asdict()
+ }
+ return jsonify(body, status=400)
+ | Handle errors with JSON messages. | ## Code Before:
from flask import g
from flask.ext.login import current_user
from tmi.core import app
from tmi.assets import assets # noqa
from tmi.views.ui import ui # noqa
from tmi.views.auth import login, logout # noqa
from tmi.views.admin import admin # noqa
from tmi.views.cards_api import blueprint as cards_api
@app.before_request
def before_request():
g.user = current_user
app.register_blueprint(cards_api)
## Instruction:
Handle errors with JSON messages.
## Code After:
from flask import g, request
from flask.ext.login import current_user
from werkzeug.exceptions import HTTPException
from tmi.core import app
from tmi.forms import Invalid
from tmi.util import jsonify
from tmi.assets import assets # noqa
from tmi.views.ui import ui # noqa
from tmi.views.auth import login, logout # noqa
from tmi.views.admin import admin # noqa
from tmi.views.cards_api import blueprint as cards_api
@app.before_request
def before_request():
g.user = current_user
app.register_blueprint(cards_api)
@app.errorhandler(401)
@app.errorhandler(403)
@app.errorhandler(404)
@app.errorhandler(410)
@app.errorhandler(500)
def handle_exceptions(exc):
if isinstance(exc, HTTPException):
message = exc.get_description(request.environ)
message = message.replace('<p>', '').replace('</p>', '')
body = {
'status': exc.code,
'name': exc.name,
'message': message
}
headers = exc.get_headers(request.environ)
else:
body = {
'status': 500,
'name': exc.__class__.__name__,
'message': unicode(exc)
}
headers = {}
return jsonify(body, status=body.get('status'),
headers=headers)
@app.errorhandler(Invalid)
def handle_invalid(exc):
body = {
'status': 400,
'name': 'Invalid Data',
'message': unicode(exc),
'errors': exc.asdict()
}
return jsonify(body, status=400)
| - from flask import g
+ from flask import g, request
? +++++++++
from flask.ext.login import current_user
+ from werkzeug.exceptions import HTTPException
from tmi.core import app
+ from tmi.forms import Invalid
+ from tmi.util import jsonify
from tmi.assets import assets # noqa
from tmi.views.ui import ui # noqa
from tmi.views.auth import login, logout # noqa
from tmi.views.admin import admin # noqa
from tmi.views.cards_api import blueprint as cards_api
@app.before_request
def before_request():
g.user = current_user
app.register_blueprint(cards_api)
+
+
+ @app.errorhandler(401)
+ @app.errorhandler(403)
+ @app.errorhandler(404)
+ @app.errorhandler(410)
+ @app.errorhandler(500)
+ def handle_exceptions(exc):
+ if isinstance(exc, HTTPException):
+ message = exc.get_description(request.environ)
+ message = message.replace('<p>', '').replace('</p>', '')
+ body = {
+ 'status': exc.code,
+ 'name': exc.name,
+ 'message': message
+ }
+ headers = exc.get_headers(request.environ)
+ else:
+ body = {
+ 'status': 500,
+ 'name': exc.__class__.__name__,
+ 'message': unicode(exc)
+ }
+ headers = {}
+ return jsonify(body, status=body.get('status'),
+ headers=headers)
+
+
+ @app.errorhandler(Invalid)
+ def handle_invalid(exc):
+ body = {
+ 'status': 400,
+ 'name': 'Invalid Data',
+ 'message': unicode(exc),
+ 'errors': exc.asdict()
+ }
+ return jsonify(body, status=400) |
ba2f2d7e53f0ffc58c882d78f1b8bc9a468eb164 | predicates.py | predicates.py | class OneOf:
def __init__(self, members):
self.members = members
def __call__(self, candidate):
if candidate in self.members:
return True
return "%s not in %s" % (candidate, self.members)
def __repr__(self):
return "one of %s" % ', '.join(self.members)
def oneof(*members):
return OneOf(members)
class InRange:
def __init__(self, start, end):
self.start = start
self.end = end
def __call__(self, candidate):
if self.start <= candidate <= self.end:
return True
return "%s not between %s and %s" % (candidate, self.start, self.end)
def __repr__(self):
return "between %s and %s" % (self.start, self.end)
def inrange(start, end):
return InRange(start, end)
| class OneOf:
def __init__(self, members):
self.members = members
def __call__(self, candidate):
if candidate in self.members:
return True
return "%s not in %s" % (candidate, self.members)
def __repr__(self):
return "one of %s" % ', '.join(map(repr, self.members))
def oneof(*members):
return OneOf(members)
class InRange:
def __init__(self, start, end):
self.start = start
self.end = end
def __call__(self, candidate):
if self.start <= candidate <= self.end:
return True
return "%s not between %s and %s" % (candidate, self.start, self.end)
def __repr__(self):
return "between %s and %s" % (self.start, self.end)
def inrange(start, end):
return InRange(start, end)
| Fix problem rendering oneof() predicate when the members aren't strings | Fix problem rendering oneof() predicate when the members aren't strings
| Python | mit | mrozekma/pytypecheck | class OneOf:
def __init__(self, members):
self.members = members
def __call__(self, candidate):
if candidate in self.members:
return True
return "%s not in %s" % (candidate, self.members)
def __repr__(self):
- return "one of %s" % ', '.join(self.members)
+ return "one of %s" % ', '.join(map(repr, self.members))
def oneof(*members):
return OneOf(members)
class InRange:
def __init__(self, start, end):
self.start = start
self.end = end
def __call__(self, candidate):
if self.start <= candidate <= self.end:
return True
return "%s not between %s and %s" % (candidate, self.start, self.end)
def __repr__(self):
return "between %s and %s" % (self.start, self.end)
def inrange(start, end):
return InRange(start, end)
| Fix problem rendering oneof() predicate when the members aren't strings | ## Code Before:
class OneOf:
def __init__(self, members):
self.members = members
def __call__(self, candidate):
if candidate in self.members:
return True
return "%s not in %s" % (candidate, self.members)
def __repr__(self):
return "one of %s" % ', '.join(self.members)
def oneof(*members):
return OneOf(members)
class InRange:
def __init__(self, start, end):
self.start = start
self.end = end
def __call__(self, candidate):
if self.start <= candidate <= self.end:
return True
return "%s not between %s and %s" % (candidate, self.start, self.end)
def __repr__(self):
return "between %s and %s" % (self.start, self.end)
def inrange(start, end):
return InRange(start, end)
## Instruction:
Fix problem rendering oneof() predicate when the members aren't strings
## Code After:
class OneOf:
def __init__(self, members):
self.members = members
def __call__(self, candidate):
if candidate in self.members:
return True
return "%s not in %s" % (candidate, self.members)
def __repr__(self):
return "one of %s" % ', '.join(map(repr, self.members))
def oneof(*members):
return OneOf(members)
class InRange:
def __init__(self, start, end):
self.start = start
self.end = end
def __call__(self, candidate):
if self.start <= candidate <= self.end:
return True
return "%s not between %s and %s" % (candidate, self.start, self.end)
def __repr__(self):
return "between %s and %s" % (self.start, self.end)
def inrange(start, end):
return InRange(start, end)
| class OneOf:
def __init__(self, members):
self.members = members
def __call__(self, candidate):
if candidate in self.members:
return True
return "%s not in %s" % (candidate, self.members)
def __repr__(self):
- return "one of %s" % ', '.join(self.members)
+ return "one of %s" % ', '.join(map(repr, self.members))
? ++++++++++ +
def oneof(*members):
return OneOf(members)
class InRange:
def __init__(self, start, end):
self.start = start
self.end = end
def __call__(self, candidate):
if self.start <= candidate <= self.end:
return True
return "%s not between %s and %s" % (candidate, self.start, self.end)
def __repr__(self):
return "between %s and %s" % (self.start, self.end)
def inrange(start, end):
return InRange(start, end) |
a2005d98cd81424a2a43277cae0595806751e9dc | swh/web/ui/controller/service.py | swh/web/ui/controller/service.py |
from swh.web.ui.back import http, api_query
from swh.core.json import SWHJSONDecoder
import json
def search(base_url, hashes):
"""Search a content with given hashes.
Args:
hashes, dictionary of hash indexed by key, sha1, sha256, etc...
Returns:
None if no content is found.
An enriched content if the content is found.
Raises:
OSError (no route to host), etc... Network issues in general
"""
def unserialize_result(res):
if res.ok:
output = res.content.decode('utf-8')
if output:
h_res = json.loads(output, cls=SWHJSONDecoder)
if h_res:
return h_res['found']
return None
return False
return False
q = api_query.api_storage_content_present({'content': hashes})
return http.execute(base_url, q, result_fn=unserialize_result)
|
from swh.web.ui.back import http, api_query
from swh.core.json import SWHJSONDecoder
import json
def search(base_url, hashes):
"""Search a content with given hashes.
Args:
hashes, dictionary of hash indexed by key, sha1, sha256, etc...
Returns:
None if no content is found.
An enriched content if the content is found.
Raises:
OSError (no route to host), etc... Network issues in general
"""
def unserialize_result(res):
if res.ok:
output = res.content.decode('utf-8')
return json.loads(output, cls=SWHJSONDecoder) if output else False
return False
q = api_query.api_storage_content_present({'content': hashes})
return http.execute(base_url, q, result_fn=unserialize_result)
| Refactor - Simplify the call to api | Refactor - Simplify the call to api
| Python | agpl-3.0 | SoftwareHeritage/swh-web-ui,SoftwareHeritage/swh-web-ui,SoftwareHeritage/swh-web-ui |
from swh.web.ui.back import http, api_query
from swh.core.json import SWHJSONDecoder
import json
def search(base_url, hashes):
"""Search a content with given hashes.
Args:
hashes, dictionary of hash indexed by key, sha1, sha256, etc...
Returns:
None if no content is found.
An enriched content if the content is found.
Raises:
OSError (no route to host), etc... Network issues in general
"""
def unserialize_result(res):
if res.ok:
output = res.content.decode('utf-8')
- if output:
- h_res = json.loads(output, cls=SWHJSONDecoder)
+ return json.loads(output, cls=SWHJSONDecoder) if output else False
- if h_res:
- return h_res['found']
- return None
- return False
return False
q = api_query.api_storage_content_present({'content': hashes})
return http.execute(base_url, q, result_fn=unserialize_result)
| Refactor - Simplify the call to api | ## Code Before:
from swh.web.ui.back import http, api_query
from swh.core.json import SWHJSONDecoder
import json
def search(base_url, hashes):
"""Search a content with given hashes.
Args:
hashes, dictionary of hash indexed by key, sha1, sha256, etc...
Returns:
None if no content is found.
An enriched content if the content is found.
Raises:
OSError (no route to host), etc... Network issues in general
"""
def unserialize_result(res):
if res.ok:
output = res.content.decode('utf-8')
if output:
h_res = json.loads(output, cls=SWHJSONDecoder)
if h_res:
return h_res['found']
return None
return False
return False
q = api_query.api_storage_content_present({'content': hashes})
return http.execute(base_url, q, result_fn=unserialize_result)
## Instruction:
Refactor - Simplify the call to api
## Code After:
from swh.web.ui.back import http, api_query
from swh.core.json import SWHJSONDecoder
import json
def search(base_url, hashes):
"""Search a content with given hashes.
Args:
hashes, dictionary of hash indexed by key, sha1, sha256, etc...
Returns:
None if no content is found.
An enriched content if the content is found.
Raises:
OSError (no route to host), etc... Network issues in general
"""
def unserialize_result(res):
if res.ok:
output = res.content.decode('utf-8')
return json.loads(output, cls=SWHJSONDecoder) if output else False
return False
q = api_query.api_storage_content_present({'content': hashes})
return http.execute(base_url, q, result_fn=unserialize_result)
|
from swh.web.ui.back import http, api_query
from swh.core.json import SWHJSONDecoder
import json
def search(base_url, hashes):
"""Search a content with given hashes.
Args:
hashes, dictionary of hash indexed by key, sha1, sha256, etc...
Returns:
None if no content is found.
An enriched content if the content is found.
Raises:
OSError (no route to host), etc... Network issues in general
"""
def unserialize_result(res):
if res.ok:
output = res.content.decode('utf-8')
- if output:
- h_res = json.loads(output, cls=SWHJSONDecoder)
? ------ ^^^
+ return json.loads(output, cls=SWHJSONDecoder) if output else False
? ^^^^ +++++++++++++++++++++
- if h_res:
- return h_res['found']
- return None
- return False
return False
q = api_query.api_storage_content_present({'content': hashes})
return http.execute(base_url, q, result_fn=unserialize_result) |
d0ec3ee9b974fb6956c32e8dfdd6d20ea4da7cff | pwndbg/inthook.py | pwndbg/inthook.py | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
import gdb
import pwndbg.typeinfo
if sys.version_info < (3,0):
import __builtin__ as builtins
_int = builtins.int
# We need this class to get isinstance(7, xint) to return True
class IsAnInt(type):
def __instancecheck__(self, other):
return isinstance(other, _int)
class xint(builtins.int):
__metaclass__ = IsAnInt
def __new__(cls, value, *a, **kw):
if isinstance(value, gdb.Value):
if pwndbg.typeinfo.is_pointer(value):
value = value.cast(pwndbg.typeinfo.ulong)
else:
value = value.cast(pwndbg.typeinfo.long)
return _int(_int(value, *a, **kw))
builtins.int = xint
globals()['int'] = xint
# Additionally, we need to compensate for Python2
else:
import builtins
builtins.long = int
globals()['long'] = int
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
import gdb
import pwndbg.typeinfo
if sys.version_info < (3,0):
import __builtin__ as builtins
else:
import builtins
_int = builtins.int
# We need this class to get isinstance(7, xint) to return True
class IsAnInt(type):
def __instancecheck__(self, other):
return isinstance(other, _int)
class xint(builtins.int):
__metaclass__ = IsAnInt
def __new__(cls, value, *a, **kw):
if isinstance(value, gdb.Value):
if pwndbg.typeinfo.is_pointer(value):
value = value.cast(pwndbg.typeinfo.ulong)
else:
value = value.cast(pwndbg.typeinfo.long)
return _int(_int(value, *a, **kw))
builtins.int = xint
globals()['int'] = xint
if sys.version_info >= (3,0):
builtins.long = xint
globals()['long'] = xint
| Add int hook to Python3 | Add int hook to Python3
Fixes #120
| Python | mit | pwndbg/pwndbg,cebrusfs/217gdb,cebrusfs/217gdb,pwndbg/pwndbg,cebrusfs/217gdb,chubbymaggie/pwndbg,disconnect3d/pwndbg,disconnect3d/pwndbg,pwndbg/pwndbg,0xddaa/pwndbg,zachriggle/pwndbg,0xddaa/pwndbg,disconnect3d/pwndbg,anthraxx/pwndbg,chubbymaggie/pwndbg,0xddaa/pwndbg,cebrusfs/217gdb,anthraxx/pwndbg,zachriggle/pwndbg,anthraxx/pwndbg,pwndbg/pwndbg,anthraxx/pwndbg | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
import gdb
import pwndbg.typeinfo
if sys.version_info < (3,0):
import __builtin__ as builtins
- _int = builtins.int
-
- # We need this class to get isinstance(7, xint) to return True
- class IsAnInt(type):
- def __instancecheck__(self, other):
- return isinstance(other, _int)
-
- class xint(builtins.int):
- __metaclass__ = IsAnInt
- def __new__(cls, value, *a, **kw):
- if isinstance(value, gdb.Value):
- if pwndbg.typeinfo.is_pointer(value):
- value = value.cast(pwndbg.typeinfo.ulong)
- else:
- value = value.cast(pwndbg.typeinfo.long)
- return _int(_int(value, *a, **kw))
-
- builtins.int = xint
- globals()['int'] = xint
-
- # Additionally, we need to compensate for Python2
else:
import builtins
- builtins.long = int
- globals()['long'] = int
+ _int = builtins.int
+
+ # We need this class to get isinstance(7, xint) to return True
+ class IsAnInt(type):
+ def __instancecheck__(self, other):
+ return isinstance(other, _int)
+
+ class xint(builtins.int):
+ __metaclass__ = IsAnInt
+ def __new__(cls, value, *a, **kw):
+ if isinstance(value, gdb.Value):
+ if pwndbg.typeinfo.is_pointer(value):
+ value = value.cast(pwndbg.typeinfo.ulong)
+ else:
+ value = value.cast(pwndbg.typeinfo.long)
+ return _int(_int(value, *a, **kw))
+
+ builtins.int = xint
+ globals()['int'] = xint
+
+ if sys.version_info >= (3,0):
+ builtins.long = xint
+ globals()['long'] = xint
+
+ | Add int hook to Python3 | ## Code Before:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
import gdb
import pwndbg.typeinfo
if sys.version_info < (3,0):
import __builtin__ as builtins
_int = builtins.int
# We need this class to get isinstance(7, xint) to return True
class IsAnInt(type):
def __instancecheck__(self, other):
return isinstance(other, _int)
class xint(builtins.int):
__metaclass__ = IsAnInt
def __new__(cls, value, *a, **kw):
if isinstance(value, gdb.Value):
if pwndbg.typeinfo.is_pointer(value):
value = value.cast(pwndbg.typeinfo.ulong)
else:
value = value.cast(pwndbg.typeinfo.long)
return _int(_int(value, *a, **kw))
builtins.int = xint
globals()['int'] = xint
# Additionally, we need to compensate for Python2
else:
import builtins
builtins.long = int
globals()['long'] = int
## Instruction:
Add int hook to Python3
## Code After:
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
import gdb
import pwndbg.typeinfo
if sys.version_info < (3,0):
import __builtin__ as builtins
else:
import builtins
_int = builtins.int
# We need this class to get isinstance(7, xint) to return True
class IsAnInt(type):
def __instancecheck__(self, other):
return isinstance(other, _int)
class xint(builtins.int):
__metaclass__ = IsAnInt
def __new__(cls, value, *a, **kw):
if isinstance(value, gdb.Value):
if pwndbg.typeinfo.is_pointer(value):
value = value.cast(pwndbg.typeinfo.ulong)
else:
value = value.cast(pwndbg.typeinfo.long)
return _int(_int(value, *a, **kw))
builtins.int = xint
globals()['int'] = xint
if sys.version_info >= (3,0):
builtins.long = xint
globals()['long'] = xint
| from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import sys
import gdb
import pwndbg.typeinfo
if sys.version_info < (3,0):
import __builtin__ as builtins
- _int = builtins.int
-
- # We need this class to get isinstance(7, xint) to return True
- class IsAnInt(type):
- def __instancecheck__(self, other):
- return isinstance(other, _int)
-
- class xint(builtins.int):
- __metaclass__ = IsAnInt
- def __new__(cls, value, *a, **kw):
- if isinstance(value, gdb.Value):
- if pwndbg.typeinfo.is_pointer(value):
- value = value.cast(pwndbg.typeinfo.ulong)
- else:
- value = value.cast(pwndbg.typeinfo.long)
- return _int(_int(value, *a, **kw))
-
- builtins.int = xint
- globals()['int'] = xint
-
- # Additionally, we need to compensate for Python2
else:
import builtins
+
+ _int = builtins.int
+
+ # We need this class to get isinstance(7, xint) to return True
+ class IsAnInt(type):
+ def __instancecheck__(self, other):
+ return isinstance(other, _int)
+
+ class xint(builtins.int):
+ __metaclass__ = IsAnInt
+ def __new__(cls, value, *a, **kw):
+ if isinstance(value, gdb.Value):
+ if pwndbg.typeinfo.is_pointer(value):
+ value = value.cast(pwndbg.typeinfo.ulong)
+ else:
+ value = value.cast(pwndbg.typeinfo.long)
+ return _int(_int(value, *a, **kw))
+
+ builtins.int = xint
+ globals()['int'] = xint
+
+ if sys.version_info >= (3,0):
- builtins.long = int
+ builtins.long = xint
? +
- globals()['long'] = int
+ globals()['long'] = xint
? +
+ |
7629afde2627457b4f4b19e1542a87e695c1837d | tests/events/test_models.py | tests/events/test_models.py | """Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
| """Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
def test_event_slug_gets_updated_on_date_change(db): # noqa: D103
# GIVEN an event
e = EventFactory.create(
date=datetime.date(2018, 1, 1),
venue=None,
)
# WHEN changing the date
assert e.slug.startswith('2018-01-01')
e.date = datetime.date(2018, 1, 2)
e.save()
# THEN the slug changes to reflect the new date
assert e.slug.startswith('2018-01-02')
| Make sure slug gets updated on date change | Make sure slug gets updated on date change
| Python | mit | FlowFX/reggae-cdmx,FlowFX/reggae-cdmx | """Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
+
+ def test_event_slug_gets_updated_on_date_change(db): # noqa: D103
+ # GIVEN an event
+ e = EventFactory.create(
+ date=datetime.date(2018, 1, 1),
+ venue=None,
+ )
+
+ # WHEN changing the date
+ assert e.slug.startswith('2018-01-01')
+ e.date = datetime.date(2018, 1, 2)
+ e.save()
+
+ # THEN the slug changes to reflect the new date
+ assert e.slug.startswith('2018-01-02')
+ | Make sure slug gets updated on date change | ## Code Before:
"""Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
## Instruction:
Make sure slug gets updated on date change
## Code After:
"""Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
def test_event_slug_gets_updated_on_date_change(db): # noqa: D103
# GIVEN an event
e = EventFactory.create(
date=datetime.date(2018, 1, 1),
venue=None,
)
# WHEN changing the date
assert e.slug.startswith('2018-01-01')
e.date = datetime.date(2018, 1, 2)
e.save()
# THEN the slug changes to reflect the new date
assert e.slug.startswith('2018-01-02')
| """Unit tests for events models."""
import datetime
from app.events.factories import EventFactory
from app.events.models import Event
def test_event_factory(db): # noqa: D103
# GIVEN an empty database
assert Event.objects.count() == 0
# WHEN saving a new event instance to the database
EventFactory.create(title='five')
# THEN it's there
assert Event.objects.count() == 1
def test_event_has_all_the_attributes(): # noqa: D103
# GIVEN an event
e = EventFactory.build()
# THEN it has …
assert e.title
assert e.date
assert e.venue
assert e.description
assert e.fb_event_url
def test_event_has_slug(db): # noqa: D103
# GIVEN an event
e = EventFactory.build(
title='One Happy Family',
date=datetime.date(2018, 1, 1),
venue=None,
)
assert e.slug == ''
# WHEN saving the event
e.save()
# THEN it gets a slug generated from its date and title
assert e.slug == '2018-01-01-one-happy-family'
+
+
+ def test_event_slug_gets_updated_on_date_change(db): # noqa: D103
+ # GIVEN an event
+ e = EventFactory.create(
+ date=datetime.date(2018, 1, 1),
+ venue=None,
+ )
+
+ # WHEN changing the date
+ assert e.slug.startswith('2018-01-01')
+ e.date = datetime.date(2018, 1, 2)
+ e.save()
+
+ # THEN the slug changes to reflect the new date
+ assert e.slug.startswith('2018-01-02') |
4d1d2e12d8882084ce8deb80c3b3e162cc71b20b | osmaxx-py/osmaxx/excerptexport/forms/new_excerpt_form.py | osmaxx-py/osmaxx/excerptexport/forms/new_excerpt_form.py | from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'))
| from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'), required=False)
| Allow private excerpts (form validation) | Bugfix: Allow private excerpts (form validation)
| Python | mit | geometalab/osmaxx,geometalab/drf-utm-zone-info,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/drf-utm-zone-info,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend | from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
- new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'))
+ new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'), required=False)
| Allow private excerpts (form validation) | ## Code Before:
from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'))
## Instruction:
Allow private excerpts (form validation)
## Code After:
from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'), required=False)
| from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
- new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'))
+ new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'), required=False)
? ++++++++++++++++
|
50f2acfcfe482c5452a80243b186ec411f672afc | boundaryservice/urls.py | boundaryservice/urls.py | from django.conf.urls.defaults import patterns, include, url
from boundaryservice.views import *
urlpatterns = patterns('',
url(r'^boundary-set/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
url(r'^boundary-set/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
url(r'^boundary/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundary/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundary/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
)
| from django.conf.urls.defaults import patterns, include, url
from boundaryservice.views import *
urlpatterns = patterns('',
url(r'^boundary-sets/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
url(r'^boundary-sets/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
url(r'^boundaries/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundaries/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundaries/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
)
| Use plural names for resource types in URLs | Use plural names for resource types in URLs
| Python | mit | datamade/represent-boundaries,opencorato/represent-boundaries,opencorato/represent-boundaries,datamade/represent-boundaries,datamade/represent-boundaries,opencorato/represent-boundaries | from django.conf.urls.defaults import patterns, include, url
from boundaryservice.views import *
urlpatterns = patterns('',
- url(r'^boundary-set/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
+ url(r'^boundary-sets/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
- url(r'^boundary-set/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
+ url(r'^boundary-sets/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
- url(r'^boundary/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
+ url(r'^boundaries/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
- url(r'^boundary/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
+ url(r'^boundaries/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
- url(r'^boundary/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
+ url(r'^boundaries/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
- url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
+ url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
- url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
+ url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
- url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
+ url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
)
| Use plural names for resource types in URLs | ## Code Before:
from django.conf.urls.defaults import patterns, include, url
from boundaryservice.views import *
urlpatterns = patterns('',
url(r'^boundary-set/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
url(r'^boundary-set/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
url(r'^boundary/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundary/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundary/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
)
## Instruction:
Use plural names for resource types in URLs
## Code After:
from django.conf.urls.defaults import patterns, include, url
from boundaryservice.views import *
urlpatterns = patterns('',
url(r'^boundary-sets/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
url(r'^boundary-sets/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
url(r'^boundaries/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundaries/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundaries/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
)
| from django.conf.urls.defaults import patterns, include, url
from boundaryservice.views import *
urlpatterns = patterns('',
- url(r'^boundary-set/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
+ url(r'^boundary-sets/$', BoundarySetListView.as_view(), name='boundaryservice_set_list'),
? +
- url(r'^boundary-set/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
+ url(r'^boundary-sets/(?P<slug>[\w_-]+)/$', BoundarySetDetailView.as_view(), name='boundaryservice_set_detail'),
? +
- url(r'^boundary/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
? ^
+ url(r'^boundaries/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
? ^^^
- url(r'^boundary/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
? ^
+ url(r'^boundaries/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
? ^^^
- url(r'^boundary/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
? ^
+ url(r'^boundaries/(?P<set_slug>[\w_-]+)/$', BoundaryListView.as_view(), name='boundaryservice_boundary_list'),
? ^^^
- url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
? ^
+ url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryListView.as_view()),
? ^^^
- url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
? ^
+ url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/$', BoundaryDetailView.as_view(), name='boundaryservice_boundary_detail'),
? ^^^
- url(r'^boundary/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
? ^
+ url(r'^boundaries/(?P<set_slug>[\w_-]+)/(?P<slug>[\w_-]+)/(?P<geo_field>shape|simple_shape|centroid)$', BoundaryGeoDetailView.as_view()),
? ^^^
) |
c14bdaf3043cb38571073db7162a0899a35778ed | app/utils.py | app/utils.py | from flask import url_for
def register_template_utils(app):
"""Register Jinja 2 helpers (called from __init__.py)."""
@app.template_test()
def equalto(value, other):
return value == other
@app.template_global()
def is_hidden_field(field):
from wtforms.fields import HiddenField
return isinstance(field, HiddenField)
app.add_template_global(index_for_role)
def index_for_role(role):
return url_for(role.name + '.index')
| from flask import url_for
def register_template_utils(app):
"""Register Jinja 2 helpers (called from __init__.py)."""
@app.template_test()
def equalto(value, other):
return value == other
@app.template_global()
def is_hidden_field(field):
from wtforms.fields import HiddenField
return isinstance(field, HiddenField)
app.add_template_global(index_for_role)
def index_for_role(role):
return url_for(role.index)
| Fix index_for_role function to use index field in Role class. | Fix index_for_role function to use index field in Role class.
| Python | mit | hack4impact/reading-terminal-market,hack4impact/reading-terminal-market,hack4impact/reading-terminal-market | from flask import url_for
def register_template_utils(app):
"""Register Jinja 2 helpers (called from __init__.py)."""
@app.template_test()
def equalto(value, other):
return value == other
@app.template_global()
def is_hidden_field(field):
from wtforms.fields import HiddenField
return isinstance(field, HiddenField)
app.add_template_global(index_for_role)
def index_for_role(role):
- return url_for(role.name + '.index')
+ return url_for(role.index)
| Fix index_for_role function to use index field in Role class. | ## Code Before:
from flask import url_for
def register_template_utils(app):
"""Register Jinja 2 helpers (called from __init__.py)."""
@app.template_test()
def equalto(value, other):
return value == other
@app.template_global()
def is_hidden_field(field):
from wtforms.fields import HiddenField
return isinstance(field, HiddenField)
app.add_template_global(index_for_role)
def index_for_role(role):
return url_for(role.name + '.index')
## Instruction:
Fix index_for_role function to use index field in Role class.
## Code After:
from flask import url_for
def register_template_utils(app):
"""Register Jinja 2 helpers (called from __init__.py)."""
@app.template_test()
def equalto(value, other):
return value == other
@app.template_global()
def is_hidden_field(field):
from wtforms.fields import HiddenField
return isinstance(field, HiddenField)
app.add_template_global(index_for_role)
def index_for_role(role):
return url_for(role.index)
| from flask import url_for
def register_template_utils(app):
"""Register Jinja 2 helpers (called from __init__.py)."""
@app.template_test()
def equalto(value, other):
return value == other
@app.template_global()
def is_hidden_field(field):
from wtforms.fields import HiddenField
return isinstance(field, HiddenField)
app.add_template_global(index_for_role)
def index_for_role(role):
- return url_for(role.name + '.index')
? --------- -
+ return url_for(role.index) |
d301a0635578550ededd1bca7ac34e841366b0ef | devito/foreign/__init__.py | devito/foreign/__init__.py |
# The following used by backends.backendSelector
from devito.function import Constant, Function, TimeFunction, SparseFunction # noqa
from devito.foreign.operator import Operator # noqa
|
# The following used by backends.backendSelector
from devito.function import Constant, Function, TimeFunction, SparseFunction # noqa
from devito.foreign.operator import Operator # noqa
from devito.types import CacheManager # noqa
| Add leftover import due to disfunctional testing | Add leftover import due to disfunctional testing
| Python | mit | opesci/devito,opesci/devito |
# The following used by backends.backendSelector
from devito.function import Constant, Function, TimeFunction, SparseFunction # noqa
from devito.foreign.operator import Operator # noqa
+ from devito.types import CacheManager # noqa
| Add leftover import due to disfunctional testing | ## Code Before:
# The following used by backends.backendSelector
from devito.function import Constant, Function, TimeFunction, SparseFunction # noqa
from devito.foreign.operator import Operator # noqa
## Instruction:
Add leftover import due to disfunctional testing
## Code After:
# The following used by backends.backendSelector
from devito.function import Constant, Function, TimeFunction, SparseFunction # noqa
from devito.foreign.operator import Operator # noqa
from devito.types import CacheManager # noqa
|
# The following used by backends.backendSelector
from devito.function import Constant, Function, TimeFunction, SparseFunction # noqa
from devito.foreign.operator import Operator # noqa
+ from devito.types import CacheManager # noqa |
2611476df6f362cd59e4aad38a243fc8f6cbf8a8 | devincachu/purger.py | devincachu/purger.py | import roan
from django.contrib.flatpages import models
from palestras import models as pmodels
def connect():
flatpages = models.FlatPage.objects.all()
for f in flatpages:
roan.purge(f.url).on_save(models.FlatPage)
palestras = pmodels.Palestra.objects.all()
for p in palestras:
roan.purge(p.get_absolute_url_and_link_title()['url']).on_save(pmodels.Palestra)
roan.purge(p.get_absolute_url_and_link_title()['url']).on_delete(pmodels.Palestra)
| import roan
from django.contrib.flatpages import models
from palestras import models as pmodels
def connect():
flatpages = models.FlatPage.objects.all()
for f in flatpages:
roan.purge(f.url).on_save(models.FlatPage)
palestras = pmodels.Palestra.objects.all()
for p in palestras:
roan.purge(p.get_absolute_url_and_link_title()['url']).on_save(pmodels.Palestra)
roan.purge(p.get_absolute_url_and_link_title()['url']).on_delete(pmodels.Palestra)
roan.purge(p.get_absolute_url_and_link_title()['url']).on_save(pmodels.Palestrante)
roan.purge(p.get_absolute_url_and_link_title()['url']).on_delete(pmodels.Palestrante)
| Purge da página de palestra quando salva palestrante | Purge da página de palestra quando salva palestrante
| Python | bsd-2-clause | devincachu/devincachu-2013,devincachu/devincachu-2013,devincachu/devincachu-2014,devincachu/devincachu-2014,devincachu/devincachu-2014,devincachu/devincachu-2013,devincachu/devincachu-2013 | import roan
from django.contrib.flatpages import models
from palestras import models as pmodels
def connect():
flatpages = models.FlatPage.objects.all()
for f in flatpages:
roan.purge(f.url).on_save(models.FlatPage)
palestras = pmodels.Palestra.objects.all()
for p in palestras:
roan.purge(p.get_absolute_url_and_link_title()['url']).on_save(pmodels.Palestra)
roan.purge(p.get_absolute_url_and_link_title()['url']).on_delete(pmodels.Palestra)
+ roan.purge(p.get_absolute_url_and_link_title()['url']).on_save(pmodels.Palestrante)
+ roan.purge(p.get_absolute_url_and_link_title()['url']).on_delete(pmodels.Palestrante)
| Purge da página de palestra quando salva palestrante | ## Code Before:
import roan
from django.contrib.flatpages import models
from palestras import models as pmodels
def connect():
flatpages = models.FlatPage.objects.all()
for f in flatpages:
roan.purge(f.url).on_save(models.FlatPage)
palestras = pmodels.Palestra.objects.all()
for p in palestras:
roan.purge(p.get_absolute_url_and_link_title()['url']).on_save(pmodels.Palestra)
roan.purge(p.get_absolute_url_and_link_title()['url']).on_delete(pmodels.Palestra)
## Instruction:
Purge da página de palestra quando salva palestrante
## Code After:
import roan
from django.contrib.flatpages import models
from palestras import models as pmodels
def connect():
flatpages = models.FlatPage.objects.all()
for f in flatpages:
roan.purge(f.url).on_save(models.FlatPage)
palestras = pmodels.Palestra.objects.all()
for p in palestras:
roan.purge(p.get_absolute_url_and_link_title()['url']).on_save(pmodels.Palestra)
roan.purge(p.get_absolute_url_and_link_title()['url']).on_delete(pmodels.Palestra)
roan.purge(p.get_absolute_url_and_link_title()['url']).on_save(pmodels.Palestrante)
roan.purge(p.get_absolute_url_and_link_title()['url']).on_delete(pmodels.Palestrante)
| import roan
from django.contrib.flatpages import models
from palestras import models as pmodels
def connect():
flatpages = models.FlatPage.objects.all()
for f in flatpages:
roan.purge(f.url).on_save(models.FlatPage)
palestras = pmodels.Palestra.objects.all()
for p in palestras:
roan.purge(p.get_absolute_url_and_link_title()['url']).on_save(pmodels.Palestra)
roan.purge(p.get_absolute_url_and_link_title()['url']).on_delete(pmodels.Palestra)
+ roan.purge(p.get_absolute_url_and_link_title()['url']).on_save(pmodels.Palestrante)
+ roan.purge(p.get_absolute_url_and_link_title()['url']).on_delete(pmodels.Palestrante) |
03a54ea1920a3716e9c8d326f5c4c408f45b7d08 | apps/api/urls.py | apps/api/urls.py | from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.obtain_jwt_token),
url(r'^token/refresh/$', jwt_views.refresh_jwt_token),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
| from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.ObtainJSONWebToken.as_view()),
url(r'^token/refresh/$', jwt_views.RefreshJSONWebToken.as_view()),
url(r'^token/verify/$', jwt_views.VerifyJSONWebToken.as_view()),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
| Add option to verify jwt token | Add option to verify jwt token
| Python | bsd-3-clause | lucifurtun/myquotes,lucifurtun/myquotes,lucifurtun/myquotes,lucifurtun/myquotes | from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
- url(r'^token/new/$', jwt_views.obtain_jwt_token),
+ url(r'^token/new/$', jwt_views.ObtainJSONWebToken.as_view()),
- url(r'^token/refresh/$', jwt_views.refresh_jwt_token),
+ url(r'^token/refresh/$', jwt_views.RefreshJSONWebToken.as_view()),
+ url(r'^token/verify/$', jwt_views.VerifyJSONWebToken.as_view()),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
| Add option to verify jwt token | ## Code Before:
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.obtain_jwt_token),
url(r'^token/refresh/$', jwt_views.refresh_jwt_token),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
## Instruction:
Add option to verify jwt token
## Code After:
from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
url(r'^token/new/$', jwt_views.ObtainJSONWebToken.as_view()),
url(r'^token/refresh/$', jwt_views.RefreshJSONWebToken.as_view()),
url(r'^token/verify/$', jwt_views.VerifyJSONWebToken.as_view()),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
]
| from django.conf.urls import url, include
from rest_framework.routers import DefaultRouter
from rest_framework_jwt import views as jwt_views
from . import views
router = DefaultRouter()
router.register(r'quotes', views.QuoteViewSet)
router.register(r'authors', views.AuthorViewSet)
router.register(r'categories', views.CategoryViewSet)
router.register(r'tags', views.TagViewSet)
urlpatterns = [
url(r'^docs/$', views.schema_view),
url(r'^', include(router.urls)),
- url(r'^token/new/$', jwt_views.obtain_jwt_token),
? ^ ^^^^^^
+ url(r'^token/new/$', jwt_views.ObtainJSONWebToken.as_view()),
? ^ ^^^^^^^^ ++++++++++
- url(r'^token/refresh/$', jwt_views.refresh_jwt_token),
? ^ ^^^^^^
+ url(r'^token/refresh/$', jwt_views.RefreshJSONWebToken.as_view()),
? ^ ^^^^^^^^ ++++++++++
+ url(r'^token/verify/$', jwt_views.VerifyJSONWebToken.as_view()),
url(r'^filters/$', views.FiltersOptionsView.as_view()),
url(r'^templates/(?P<page>[-\w]+.html)/$', views.AngularTemplateView.as_view()),
] |
d73654fd4d11a2bf5730c6fbf4bc2167593f7cc4 | queue_timings.py | queue_timings.py |
import os.path
import platform
if int(platform.python_version_tuple()[0]) == 2:
import cPickle as pickle
elif int(platform.python_version_tuple()[0]) == 3:
import pickle
else:
raise EnvironmentError("Invalid or Incompatible Python Version: %s" % platform.python_version())
def main():
if os.path.isfile("bodyfetcherQueueTimings.p"):
try:
with open("bodyfetcherQueueTimings.p", "rb") as f:
queue_data = pickle.load(f)
except EOFError:
print("Hit EOFError while reading file. Smokey handles this by deleting the file.")
resp = input("Delete? (y/n)").lower()
if resp == "y":
os.remove("bodyfetcherQueueTimings.p")
return # If we don't return, we run into an error in the for loop below.
for site, times in queue_data.iteritems():
print("{0}: min {1}, max {2}, avg {3}".format(site.split(".")[0], min(times), max(times),
sum(times) / len(times)))
else:
print("bodyfetcherQueueTimings.p doesn't exist. No data to analyse.")
if __name__ == "__main__":
main()
|
import os.path
import cPickle as pickle
def main():
if os.path.isfile("bodyfetcherQueueTimings.p"):
try:
with open("bodyfetcherQueueTimings.p", "rb") as f:
queue_data = pickle.load(f)
except EOFError:
print("Hit EOFError while reading file. Smokey handles this by deleting the file.")
resp = input("Delete? (y/n)").lower()
if resp == "y":
os.remove("bodyfetcherQueueTimings.p")
for site, times in queue_data.iteritems():
print("{0}: min {1}, max {2}, avg {3}".format(site.split(".")[0], min(times), max(times),
sum(times) / len(times)))
else:
print("bodyfetcherQueueTimings.p doesn't exist. No data to analyse.")
if __name__ == "__main__":
main()
| Revert "Python2/3 Reverse Compat functionality, also 'return' if EOFError" | Revert "Python2/3 Reverse Compat functionality, also 'return' if EOFError"
This reverts commit f604590ca7a704ef941db5342bae3cef5c60cf2e.
| Python | apache-2.0 | Charcoal-SE/SmokeDetector,Charcoal-SE/SmokeDetector |
import os.path
- import platform
- if int(platform.python_version_tuple()[0]) == 2:
- import cPickle as pickle
+ import cPickle as pickle
- elif int(platform.python_version_tuple()[0]) == 3:
- import pickle
- else:
- raise EnvironmentError("Invalid or Incompatible Python Version: %s" % platform.python_version())
def main():
if os.path.isfile("bodyfetcherQueueTimings.p"):
try:
with open("bodyfetcherQueueTimings.p", "rb") as f:
queue_data = pickle.load(f)
except EOFError:
print("Hit EOFError while reading file. Smokey handles this by deleting the file.")
resp = input("Delete? (y/n)").lower()
if resp == "y":
os.remove("bodyfetcherQueueTimings.p")
- return # If we don't return, we run into an error in the for loop below.
for site, times in queue_data.iteritems():
print("{0}: min {1}, max {2}, avg {3}".format(site.split(".")[0], min(times), max(times),
sum(times) / len(times)))
else:
print("bodyfetcherQueueTimings.p doesn't exist. No data to analyse.")
if __name__ == "__main__":
main()
| Revert "Python2/3 Reverse Compat functionality, also 'return' if EOFError" | ## Code Before:
import os.path
import platform
if int(platform.python_version_tuple()[0]) == 2:
import cPickle as pickle
elif int(platform.python_version_tuple()[0]) == 3:
import pickle
else:
raise EnvironmentError("Invalid or Incompatible Python Version: %s" % platform.python_version())
def main():
if os.path.isfile("bodyfetcherQueueTimings.p"):
try:
with open("bodyfetcherQueueTimings.p", "rb") as f:
queue_data = pickle.load(f)
except EOFError:
print("Hit EOFError while reading file. Smokey handles this by deleting the file.")
resp = input("Delete? (y/n)").lower()
if resp == "y":
os.remove("bodyfetcherQueueTimings.p")
return # If we don't return, we run into an error in the for loop below.
for site, times in queue_data.iteritems():
print("{0}: min {1}, max {2}, avg {3}".format(site.split(".")[0], min(times), max(times),
sum(times) / len(times)))
else:
print("bodyfetcherQueueTimings.p doesn't exist. No data to analyse.")
if __name__ == "__main__":
main()
## Instruction:
Revert "Python2/3 Reverse Compat functionality, also 'return' if EOFError"
## Code After:
import os.path
import cPickle as pickle
def main():
if os.path.isfile("bodyfetcherQueueTimings.p"):
try:
with open("bodyfetcherQueueTimings.p", "rb") as f:
queue_data = pickle.load(f)
except EOFError:
print("Hit EOFError while reading file. Smokey handles this by deleting the file.")
resp = input("Delete? (y/n)").lower()
if resp == "y":
os.remove("bodyfetcherQueueTimings.p")
for site, times in queue_data.iteritems():
print("{0}: min {1}, max {2}, avg {3}".format(site.split(".")[0], min(times), max(times),
sum(times) / len(times)))
else:
print("bodyfetcherQueueTimings.p doesn't exist. No data to analyse.")
if __name__ == "__main__":
main()
|
import os.path
- import platform
- if int(platform.python_version_tuple()[0]) == 2:
- import cPickle as pickle
? ----
+ import cPickle as pickle
- elif int(platform.python_version_tuple()[0]) == 3:
- import pickle
- else:
- raise EnvironmentError("Invalid or Incompatible Python Version: %s" % platform.python_version())
def main():
if os.path.isfile("bodyfetcherQueueTimings.p"):
try:
with open("bodyfetcherQueueTimings.p", "rb") as f:
queue_data = pickle.load(f)
except EOFError:
print("Hit EOFError while reading file. Smokey handles this by deleting the file.")
resp = input("Delete? (y/n)").lower()
if resp == "y":
os.remove("bodyfetcherQueueTimings.p")
- return # If we don't return, we run into an error in the for loop below.
for site, times in queue_data.iteritems():
print("{0}: min {1}, max {2}, avg {3}".format(site.split(".")[0], min(times), max(times),
sum(times) / len(times)))
else:
print("bodyfetcherQueueTimings.p doesn't exist. No data to analyse.")
if __name__ == "__main__":
main() |
548bdb45796e7e12a1c4294b49dc1ac1fb3fe647 | launch_pyslvs.py | launch_pyslvs.py | from os import _exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
_exit(app.exec())
except:
import logging
logging.exception("Exception Happened.")
_exit(1)
| from sys import exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
ExitCode = app.exec()
except:
import logging
logging.exception("Exception Happened.")
ExitCode = 1
finally: exit(ExitCode)
| Change the way of exit application. | Change the way of exit application.
| Python | agpl-3.0 | 40323230/Pyslvs-PyQt5,KmolYuan/Pyslvs-PyQt5,KmolYuan/Pyslvs-PyQt5 | - from os import _exit
+ from sys import exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
- _exit(app.exec())
+ ExitCode = app.exec()
except:
import logging
logging.exception("Exception Happened.")
- _exit(1)
+ ExitCode = 1
+ finally: exit(ExitCode)
| Change the way of exit application. | ## Code Before:
from os import _exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
_exit(app.exec())
except:
import logging
logging.exception("Exception Happened.")
_exit(1)
## Instruction:
Change the way of exit application.
## Code After:
from sys import exit
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
ExitCode = app.exec()
except:
import logging
logging.exception("Exception Happened.")
ExitCode = 1
finally: exit(ExitCode)
| - from os import _exit
? ^ -
+ from sys import exit
? ^^
if __name__=='__main__':
try:
from core.info.info import show_info, Pyslvs_Splash
args = show_info()
from PyQt5.QtWidgets import QApplication
from core.main import MainWindow
if args.fusion: QApplication.setStyle('fusion')
app = QApplication(list(vars(args).values()))
splash = Pyslvs_Splash()
splash.show()
run = MainWindow(args)
run.show()
splash.finish(run)
- _exit(app.exec())
? ^^ ^ -
+ ExitCode = app.exec()
? ^ ^^^^^^^
except:
import logging
logging.exception("Exception Happened.")
- _exit(1)
+ ExitCode = 1
+ finally: exit(ExitCode) |
2cb406cac1a6faf1f2f79c1376ceac39871fb96e | pony_barn/build-django.py | pony_barn/build-django.py | import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, '../tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests', run_cwd='django')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
| import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, 'tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
| Make it so that django build actually uses it's own code. | Make it so that django build actually uses it's own code.
| Python | mit | ericholscher/pony_barn,ericholscher/pony_barn | import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
- pony.TestCommand([self.context.python, '../tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests', run_cwd='django')
+ pony.TestCommand([self.context.python, 'tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
| Make it so that django build actually uses it's own code. | ## Code Before:
import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, '../tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests', run_cwd='django')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
## Instruction:
Make it so that django build actually uses it's own code.
## Code After:
import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
pony.TestCommand([self.context.python, 'tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv))
| import os
import sys
from base import BaseBuild
from pony_build import client as pony
class DjangoBuild(BaseBuild):
def __init__(self):
super(DjangoBuild, self).__init__()
self.directory = os.path.dirname(os.path.abspath(__file__))
self.repo_url = 'git://github.com/django/django.git'
self.name = "django"
def define_commands(self):
self.commands = [
pony.GitClone(self.repo_url),
- pony.TestCommand([self.context.python, '../tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests', run_cwd='django')
? --- ------------------
+ pony.TestCommand([self.context.python, 'tests/runtests.py', '--settings', 'django_pony_test_settings'], name='run tests')
]
def setup(self):
# Create the settings file
dest_dir = os.path.join(self.context.tempdir, 'lib', self.py_name, 'site-packages')
settings_dest = os.path.join(dest_dir, 'django_pony_test_settings.py')
init_dest = os.path.join(dest_dir, '__init__.py')
open(settings_dest, 'w').write("DATABASE_ENGINE='sqlite3'")
open(init_dest, 'w').write('#OMG')
sys.path.insert(0, dest_dir)
if __name__ == '__main__':
build = DjangoBuild()
sys.exit(build.execute(sys.argv)) |
bbfb09205974efa969fc636b6e1079a84dad3619 | mcstatus/application.py | mcstatus/application.py | from flask import Flask
from minecraft_query import MinecraftQuery
app = Flask(__name__)
@app.route('/mcstatus')
def returnStatus():
try:
query = MinecraftQuery("mc.voltaire.sh", 25565, 10, 3)
basicQuery = query.get_status()
fullQuery = query.get_rules()
except socket.error as e:
if not options.quiet:
return "Server is down or unreachable:\n" + e.message
if not options.quiet:
numOnline = 'The server has %d players filling %d total slots. There are %d free slots.' % (basicQuery['numplayers'], basicQuery['maxplayers'], basicQuery['maxplayers'] - basic_status['numplayers'])
playersOnline = 'Online now: %s' % (fullQuery['players'])
return numOnline + "\n" + playersOnline
return "ermahgerd"
if __name__ == '__main__':
app.run() | from flask import Flask
from minecraft_query import MinecraftQuery
app = Flask(__name__)
@app.route('/mcstatus')
def returnStatus():
query = MinecraftQuery("142.54.162.42", 25565)
basic_status = query.get_status()
all_status = query.get_rules()
server_info = 'The server has %d / %d players.' % (basic_status['numplayers'], basic_status['maxplayers'])
status_info = 'Online now: %s' % (all_status['players'])
return "<pre>" + server_info + "\n" + status_info + "</pre>"
if __name__ == '__main__':
app.run()
| Revert "check for connection failure" | Revert "check for connection failure"
This reverts commit cf4bd49e150f5542a5a7abba908ca81ebe1b9e75.
| Python | bsd-3-clause | voltaire/minecraft-site-old,voltaire/minecraft-site-old | from flask import Flask
from minecraft_query import MinecraftQuery
app = Flask(__name__)
@app.route('/mcstatus')
-
def returnStatus():
+ query = MinecraftQuery("142.54.162.42", 25565)
-
- try:
- query = MinecraftQuery("mc.voltaire.sh", 25565, 10, 3)
- basicQuery = query.get_status()
+ basic_status = query.get_status()
- fullQuery = query.get_rules()
+ all_status = query.get_rules()
+ server_info = 'The server has %d / %d players.' % (basic_status['numplayers'], basic_status['maxplayers'])
+ status_info = 'Online now: %s' % (all_status['players'])
+ return "<pre>" + server_info + "\n" + status_info + "</pre>"
-
- except socket.error as e:
- if not options.quiet:
- return "Server is down or unreachable:\n" + e.message
-
- if not options.quiet:
- numOnline = 'The server has %d players filling %d total slots. There are %d free slots.' % (basicQuery['numplayers'], basicQuery['maxplayers'], basicQuery['maxplayers'] - basic_status['numplayers'])
- playersOnline = 'Online now: %s' % (fullQuery['players'])
- return numOnline + "\n" + playersOnline
-
- return "ermahgerd"
if __name__ == '__main__':
app.run()
+ | Revert "check for connection failure" | ## Code Before:
from flask import Flask
from minecraft_query import MinecraftQuery
app = Flask(__name__)
@app.route('/mcstatus')
def returnStatus():
try:
query = MinecraftQuery("mc.voltaire.sh", 25565, 10, 3)
basicQuery = query.get_status()
fullQuery = query.get_rules()
except socket.error as e:
if not options.quiet:
return "Server is down or unreachable:\n" + e.message
if not options.quiet:
numOnline = 'The server has %d players filling %d total slots. There are %d free slots.' % (basicQuery['numplayers'], basicQuery['maxplayers'], basicQuery['maxplayers'] - basic_status['numplayers'])
playersOnline = 'Online now: %s' % (fullQuery['players'])
return numOnline + "\n" + playersOnline
return "ermahgerd"
if __name__ == '__main__':
app.run()
## Instruction:
Revert "check for connection failure"
## Code After:
from flask import Flask
from minecraft_query import MinecraftQuery
app = Flask(__name__)
@app.route('/mcstatus')
def returnStatus():
query = MinecraftQuery("142.54.162.42", 25565)
basic_status = query.get_status()
all_status = query.get_rules()
server_info = 'The server has %d / %d players.' % (basic_status['numplayers'], basic_status['maxplayers'])
status_info = 'Online now: %s' % (all_status['players'])
return "<pre>" + server_info + "\n" + status_info + "</pre>"
if __name__ == '__main__':
app.run()
| from flask import Flask
from minecraft_query import MinecraftQuery
app = Flask(__name__)
@app.route('/mcstatus')
-
def returnStatus():
+ query = MinecraftQuery("142.54.162.42", 25565)
-
- try:
- query = MinecraftQuery("mc.voltaire.sh", 25565, 10, 3)
- basicQuery = query.get_status()
? ---- ^ ^^^
+ basic_status = query.get_status()
? ^^^^^ ^
- fullQuery = query.get_rules()
? ^^^^^^ ^ ^^^
+ all_status = query.get_rules()
? ^ ^^^^^ ^
+ server_info = 'The server has %d / %d players.' % (basic_status['numplayers'], basic_status['maxplayers'])
+ status_info = 'Online now: %s' % (all_status['players'])
+ return "<pre>" + server_info + "\n" + status_info + "</pre>"
-
- except socket.error as e:
- if not options.quiet:
- return "Server is down or unreachable:\n" + e.message
-
- if not options.quiet:
- numOnline = 'The server has %d players filling %d total slots. There are %d free slots.' % (basicQuery['numplayers'], basicQuery['maxplayers'], basicQuery['maxplayers'] - basic_status['numplayers'])
- playersOnline = 'Online now: %s' % (fullQuery['players'])
- return numOnline + "\n" + playersOnline
-
- return "ermahgerd"
if __name__ == '__main__':
app.run() |
beeae2daf35da275d5f9e1ad01516c917319bf00 | gapipy/resources/geo/state.py | gapipy/resources/geo/state.py | from __future__ import unicode_literals
from ..base import Resource
from ...utils import enforce_string_type
class State(Resource):
_resource_name = 'states'
_as_is_fields = ['id', 'href', 'name']
_resource_fields = [('country', 'Country')]
@enforce_string_type
def __repr__(self):
return '<{}: {}>'.format(self.__class__.__name__, self.name)
| from __future__ import unicode_literals
from ..base import Resource
from ...utils import enforce_string_type
class State(Resource):
_resource_name = 'states'
_as_is_fields = ['id', 'href', 'name']
_resource_fields = [
('country', 'Country'),
('place', 'Place'),
]
@enforce_string_type
def __repr__(self):
return '<{}: {}>'.format(self.__class__.__name__, self.name)
| Add Place reference to State model | Add Place reference to State model
| Python | mit | gadventures/gapipy | from __future__ import unicode_literals
from ..base import Resource
from ...utils import enforce_string_type
class State(Resource):
_resource_name = 'states'
_as_is_fields = ['id', 'href', 'name']
- _resource_fields = [('country', 'Country')]
+ _resource_fields = [
+ ('country', 'Country'),
+ ('place', 'Place'),
+ ]
@enforce_string_type
def __repr__(self):
return '<{}: {}>'.format(self.__class__.__name__, self.name)
| Add Place reference to State model | ## Code Before:
from __future__ import unicode_literals
from ..base import Resource
from ...utils import enforce_string_type
class State(Resource):
_resource_name = 'states'
_as_is_fields = ['id', 'href', 'name']
_resource_fields = [('country', 'Country')]
@enforce_string_type
def __repr__(self):
return '<{}: {}>'.format(self.__class__.__name__, self.name)
## Instruction:
Add Place reference to State model
## Code After:
from __future__ import unicode_literals
from ..base import Resource
from ...utils import enforce_string_type
class State(Resource):
_resource_name = 'states'
_as_is_fields = ['id', 'href', 'name']
_resource_fields = [
('country', 'Country'),
('place', 'Place'),
]
@enforce_string_type
def __repr__(self):
return '<{}: {}>'.format(self.__class__.__name__, self.name)
| from __future__ import unicode_literals
from ..base import Resource
from ...utils import enforce_string_type
class State(Resource):
_resource_name = 'states'
_as_is_fields = ['id', 'href', 'name']
- _resource_fields = [('country', 'Country')]
+ _resource_fields = [
+ ('country', 'Country'),
+ ('place', 'Place'),
+ ]
@enforce_string_type
def __repr__(self):
return '<{}: {}>'.format(self.__class__.__name__, self.name) |
d1008437dcf618700bce53913f3450aceda8a23f | djangoautoconf/auto_conf_admin_utils.py | djangoautoconf/auto_conf_admin_utils.py | from guardian.admin import GuardedModelAdmin
#from django.contrib import admin
import xadmin as admin
def register_to_sys(class_inst, admin_class = None):
if admin_class is None:
admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i) | from guardian.admin import GuardedModelAdmin
from django.contrib import admin
#The following not work with guardian?
#import xadmin as admin
def register_to_sys(class_inst, admin_class=None):
if admin_class is None:
admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
#admin.site.register(class_inst)
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i) | Remove xadmin as it will not work with guardian. | Remove xadmin as it will not work with guardian.
| Python | bsd-3-clause | weijia/djangoautoconf,weijia/djangoautoconf | from guardian.admin import GuardedModelAdmin
- #from django.contrib import admin
+ from django.contrib import admin
+ #The following not work with guardian?
- import xadmin as admin
+ #import xadmin as admin
- def register_to_sys(class_inst, admin_class = None):
+ def register_to_sys(class_inst, admin_class=None):
+
if admin_class is None:
- admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
+ admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
+
+ #admin.site.register(class_inst)
try:
from normal_admin.admin import user_admin_site
+
user_admin_site.register(class_inst, admin_class)
except:
pass
- #register(class_inst)
+ #register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i) | Remove xadmin as it will not work with guardian. | ## Code Before:
from guardian.admin import GuardedModelAdmin
#from django.contrib import admin
import xadmin as admin
def register_to_sys(class_inst, admin_class = None):
if admin_class is None:
admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i)
## Instruction:
Remove xadmin as it will not work with guardian.
## Code After:
from guardian.admin import GuardedModelAdmin
from django.contrib import admin
#The following not work with guardian?
#import xadmin as admin
def register_to_sys(class_inst, admin_class=None):
if admin_class is None:
admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
try:
admin.site.register(class_inst, admin_class)
except:
pass
#admin.site.register(class_inst)
try:
from normal_admin.admin import user_admin_site
user_admin_site.register(class_inst, admin_class)
except:
pass
#register(class_inst)
def register_all(class_list):
for i in class_list:
register_to_sys(i) | from guardian.admin import GuardedModelAdmin
- #from django.contrib import admin
? -
+ from django.contrib import admin
+ #The following not work with guardian?
- import xadmin as admin
+ #import xadmin as admin
? +
- def register_to_sys(class_inst, admin_class = None):
? - -
+ def register_to_sys(class_inst, admin_class=None):
+
if admin_class is None:
- admin_class = type(class_inst.__name__+"Admin", (GuardedModelAdmin, ), {})
+ admin_class = type(class_inst.__name__ + "Admin", (GuardedModelAdmin, ), {})
? + +
try:
admin.site.register(class_inst, admin_class)
except:
pass
+
+ #admin.site.register(class_inst)
try:
from normal_admin.admin import user_admin_site
+
user_admin_site.register(class_inst, admin_class)
except:
pass
- #register(class_inst)
+ #register(class_inst)
? ++++
def register_all(class_list):
for i in class_list:
register_to_sys(i) |
4b88dff3df0c82392314efe9c48379e1ad2b1500 | vinotes/apps/api/serializers.py | vinotes/apps/api/serializers.py | from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Note, Trait, Wine, Winery
class WinerySerializer(serializers.ModelSerializer):
wines = serializers.PrimaryKeyRelatedField(many=True, queryset=Wine.objects.all())
class Meta:
model = Winery
fields = ('id', 'name', 'wines')
class WineSerializer(serializers.ModelSerializer):
class Meta:
model = Wine
fields = ('id', 'winery', 'name', 'vintage')
class TraitSerializer(serializers.ModelSerializer):
class Meta:
model = Trait
fields = ('id', 'name')
class NoteSerializer(serializers.ModelSerializer):
taster = serializers.ReadOnlyField(source='taster.username')
class Meta:
model = Note
fields = ('id', 'taster', 'tasted', 'wine', 'color_traits',
'nose_traits', 'taste_traits', 'finish_traits', 'rating')
class UserSerializer(serializers.ModelSerializer):
notes = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
class Meta:
model = User
fields = ('id', 'username', 'email', 'notes') | from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Note, Trait, Wine, Winery
class WinerySerializer(serializers.ModelSerializer):
wines = serializers.PrimaryKeyRelatedField(many=True, queryset=Wine.objects.all())
class Meta:
model = Winery
fields = ('id', 'name', 'wines')
class WineSerializer(serializers.ModelSerializer):
class Meta:
model = Wine
fields = ('id', 'winery', 'name', 'vintage')
class TraitSerializer(serializers.ModelSerializer):
color_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
nose_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
taste_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
finish_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
class Meta:
model = Trait
fields = ('id', 'name', 'color_traits',
'nose_traits', 'taste_traits', 'finish_traits')
class NoteSerializer(serializers.ModelSerializer):
taster = serializers.ReadOnlyField(source='taster.username')
class Meta:
model = Note
fields = ('id', 'taster', 'tasted', 'wine', 'color_traits',
'nose_traits', 'taste_traits', 'finish_traits', 'rating')
class UserSerializer(serializers.ModelSerializer):
notes = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
class Meta:
model = User
fields = ('id', 'username', 'email', 'notes') | Add trait's wines to serializer. | Add trait's wines to serializer.
| Python | unlicense | rcutmore/vinotes-api,rcutmore/vinotes-api | from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Note, Trait, Wine, Winery
class WinerySerializer(serializers.ModelSerializer):
wines = serializers.PrimaryKeyRelatedField(many=True, queryset=Wine.objects.all())
class Meta:
model = Winery
fields = ('id', 'name', 'wines')
class WineSerializer(serializers.ModelSerializer):
class Meta:
model = Wine
fields = ('id', 'winery', 'name', 'vintage')
class TraitSerializer(serializers.ModelSerializer):
+ color_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
+ nose_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
+ taste_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
+ finish_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
+
+
class Meta:
model = Trait
- fields = ('id', 'name')
+ fields = ('id', 'name', 'color_traits',
+ 'nose_traits', 'taste_traits', 'finish_traits')
class NoteSerializer(serializers.ModelSerializer):
taster = serializers.ReadOnlyField(source='taster.username')
class Meta:
model = Note
fields = ('id', 'taster', 'tasted', 'wine', 'color_traits',
'nose_traits', 'taste_traits', 'finish_traits', 'rating')
class UserSerializer(serializers.ModelSerializer):
notes = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
class Meta:
model = User
fields = ('id', 'username', 'email', 'notes') | Add trait's wines to serializer. | ## Code Before:
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Note, Trait, Wine, Winery
class WinerySerializer(serializers.ModelSerializer):
wines = serializers.PrimaryKeyRelatedField(many=True, queryset=Wine.objects.all())
class Meta:
model = Winery
fields = ('id', 'name', 'wines')
class WineSerializer(serializers.ModelSerializer):
class Meta:
model = Wine
fields = ('id', 'winery', 'name', 'vintage')
class TraitSerializer(serializers.ModelSerializer):
class Meta:
model = Trait
fields = ('id', 'name')
class NoteSerializer(serializers.ModelSerializer):
taster = serializers.ReadOnlyField(source='taster.username')
class Meta:
model = Note
fields = ('id', 'taster', 'tasted', 'wine', 'color_traits',
'nose_traits', 'taste_traits', 'finish_traits', 'rating')
class UserSerializer(serializers.ModelSerializer):
notes = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
class Meta:
model = User
fields = ('id', 'username', 'email', 'notes')
## Instruction:
Add trait's wines to serializer.
## Code After:
from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Note, Trait, Wine, Winery
class WinerySerializer(serializers.ModelSerializer):
wines = serializers.PrimaryKeyRelatedField(many=True, queryset=Wine.objects.all())
class Meta:
model = Winery
fields = ('id', 'name', 'wines')
class WineSerializer(serializers.ModelSerializer):
class Meta:
model = Wine
fields = ('id', 'winery', 'name', 'vintage')
class TraitSerializer(serializers.ModelSerializer):
color_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
nose_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
taste_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
finish_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
class Meta:
model = Trait
fields = ('id', 'name', 'color_traits',
'nose_traits', 'taste_traits', 'finish_traits')
class NoteSerializer(serializers.ModelSerializer):
taster = serializers.ReadOnlyField(source='taster.username')
class Meta:
model = Note
fields = ('id', 'taster', 'tasted', 'wine', 'color_traits',
'nose_traits', 'taste_traits', 'finish_traits', 'rating')
class UserSerializer(serializers.ModelSerializer):
notes = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
class Meta:
model = User
fields = ('id', 'username', 'email', 'notes') | from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Note, Trait, Wine, Winery
class WinerySerializer(serializers.ModelSerializer):
wines = serializers.PrimaryKeyRelatedField(many=True, queryset=Wine.objects.all())
class Meta:
model = Winery
fields = ('id', 'name', 'wines')
class WineSerializer(serializers.ModelSerializer):
class Meta:
model = Wine
fields = ('id', 'winery', 'name', 'vintage')
class TraitSerializer(serializers.ModelSerializer):
+ color_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
+ nose_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
+ taste_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
+ finish_traits = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
+
+
class Meta:
model = Trait
- fields = ('id', 'name')
? ^
+ fields = ('id', 'name', 'color_traits',
? ^^^^^^^^^^^^^^^^^^
+ 'nose_traits', 'taste_traits', 'finish_traits')
class NoteSerializer(serializers.ModelSerializer):
taster = serializers.ReadOnlyField(source='taster.username')
class Meta:
model = Note
fields = ('id', 'taster', 'tasted', 'wine', 'color_traits',
'nose_traits', 'taste_traits', 'finish_traits', 'rating')
class UserSerializer(serializers.ModelSerializer):
notes = serializers.PrimaryKeyRelatedField(many=True, queryset=Note.objects.all())
class Meta:
model = User
fields = ('id', 'username', 'email', 'notes') |
317eaa7dd37638f233d8968fb55ed596bc2b8502 | pycroscopy/io/translators/__init__.py | pycroscopy/io/translators/__init__.py | from . import be_odf
from . import be_odf_relaxation
from . import beps_ndf
from . import general_dynamic_mode
from . import gmode_iv
from . import gmode_line
from . import image
from . import ndata_translator
from . import numpy_translator
from . import oneview
from . import ptychography
from . import sporc
from . import time_series
from . import translator
from . import utils
from . import df_utils
from .be_odf import BEodfTranslator
from .be_odf_relaxation import BEodfRelaxationTranslator
from .beps_ndf import BEPSndfTranslator
from .general_dynamic_mode import GDMTranslator
from .gmode_iv import GIVTranslator
from .gmode_line import GLineTranslator
from .igor_ibw import IgorIBWTranslator
from .image import ImageTranslator
from .ndata_translator import NDataTranslator
from .numpy_translator import NumpyTranslator
from .oneview import OneViewTranslator
from .ptychography import PtychographyTranslator
from .sporc import SporcTranslator
from .time_series import MovieTranslator
from .translator import Translator
__all__ = ['Translator', 'BEodfTranslator', 'BEPSndfTranslator', 'BEodfRelaxationTranslator',
'GIVTranslator', 'GLineTranslator', 'GDMTranslator', 'PtychographyTranslator',
'SporcTranslator', 'MovieTranslator', 'IgorIBWTranslator', 'NumpyTranslator',
'OneViewTranslator', 'ImageTranslator', 'NDataTranslator']
| from . import be_odf
from . import be_odf_relaxation
from . import beps_ndf
from . import general_dynamic_mode
from . import gmode_iv
from . import gmode_line
from . import image
from . import ndata_translator
from . import numpy_translator
from . import igor_ibw
from . import oneview
from . import ptychography
from . import sporc
from . import time_series
from . import translator
from . import utils
from . import df_utils
from .be_odf import BEodfTranslator
from .be_odf_relaxation import BEodfRelaxationTranslator
from .beps_ndf import BEPSndfTranslator
from .general_dynamic_mode import GDMTranslator
from .gmode_iv import GIVTranslator
from .gmode_line import GLineTranslator
from .igor_ibw import IgorIBWTranslator
from .image import ImageTranslator
from .ndata_translator import NDataTranslator
from .numpy_translator import NumpyTranslator
from .oneview import OneViewTranslator
from .ptychography import PtychographyTranslator
from .sporc import SporcTranslator
from .time_series import MovieTranslator
from .translator import Translator
__all__ = ['Translator', 'BEodfTranslator', 'BEPSndfTranslator', 'BEodfRelaxationTranslator',
'GIVTranslator', 'GLineTranslator', 'GDMTranslator', 'PtychographyTranslator',
'SporcTranslator', 'MovieTranslator', 'IgorIBWTranslator', 'NumpyTranslator',
'OneViewTranslator', 'ImageTranslator', 'NDataTranslator']
| Add missing import statement for igor translator | Add missing import statement for igor translator
| Python | mit | anugrah-saxena/pycroscopy,pycroscopy/pycroscopy | from . import be_odf
from . import be_odf_relaxation
from . import beps_ndf
from . import general_dynamic_mode
from . import gmode_iv
from . import gmode_line
from . import image
from . import ndata_translator
from . import numpy_translator
+ from . import igor_ibw
from . import oneview
from . import ptychography
from . import sporc
from . import time_series
from . import translator
from . import utils
from . import df_utils
from .be_odf import BEodfTranslator
from .be_odf_relaxation import BEodfRelaxationTranslator
from .beps_ndf import BEPSndfTranslator
from .general_dynamic_mode import GDMTranslator
from .gmode_iv import GIVTranslator
from .gmode_line import GLineTranslator
from .igor_ibw import IgorIBWTranslator
from .image import ImageTranslator
from .ndata_translator import NDataTranslator
from .numpy_translator import NumpyTranslator
from .oneview import OneViewTranslator
from .ptychography import PtychographyTranslator
from .sporc import SporcTranslator
from .time_series import MovieTranslator
from .translator import Translator
__all__ = ['Translator', 'BEodfTranslator', 'BEPSndfTranslator', 'BEodfRelaxationTranslator',
'GIVTranslator', 'GLineTranslator', 'GDMTranslator', 'PtychographyTranslator',
'SporcTranslator', 'MovieTranslator', 'IgorIBWTranslator', 'NumpyTranslator',
'OneViewTranslator', 'ImageTranslator', 'NDataTranslator']
| Add missing import statement for igor translator | ## Code Before:
from . import be_odf
from . import be_odf_relaxation
from . import beps_ndf
from . import general_dynamic_mode
from . import gmode_iv
from . import gmode_line
from . import image
from . import ndata_translator
from . import numpy_translator
from . import oneview
from . import ptychography
from . import sporc
from . import time_series
from . import translator
from . import utils
from . import df_utils
from .be_odf import BEodfTranslator
from .be_odf_relaxation import BEodfRelaxationTranslator
from .beps_ndf import BEPSndfTranslator
from .general_dynamic_mode import GDMTranslator
from .gmode_iv import GIVTranslator
from .gmode_line import GLineTranslator
from .igor_ibw import IgorIBWTranslator
from .image import ImageTranslator
from .ndata_translator import NDataTranslator
from .numpy_translator import NumpyTranslator
from .oneview import OneViewTranslator
from .ptychography import PtychographyTranslator
from .sporc import SporcTranslator
from .time_series import MovieTranslator
from .translator import Translator
__all__ = ['Translator', 'BEodfTranslator', 'BEPSndfTranslator', 'BEodfRelaxationTranslator',
'GIVTranslator', 'GLineTranslator', 'GDMTranslator', 'PtychographyTranslator',
'SporcTranslator', 'MovieTranslator', 'IgorIBWTranslator', 'NumpyTranslator',
'OneViewTranslator', 'ImageTranslator', 'NDataTranslator']
## Instruction:
Add missing import statement for igor translator
## Code After:
from . import be_odf
from . import be_odf_relaxation
from . import beps_ndf
from . import general_dynamic_mode
from . import gmode_iv
from . import gmode_line
from . import image
from . import ndata_translator
from . import numpy_translator
from . import igor_ibw
from . import oneview
from . import ptychography
from . import sporc
from . import time_series
from . import translator
from . import utils
from . import df_utils
from .be_odf import BEodfTranslator
from .be_odf_relaxation import BEodfRelaxationTranslator
from .beps_ndf import BEPSndfTranslator
from .general_dynamic_mode import GDMTranslator
from .gmode_iv import GIVTranslator
from .gmode_line import GLineTranslator
from .igor_ibw import IgorIBWTranslator
from .image import ImageTranslator
from .ndata_translator import NDataTranslator
from .numpy_translator import NumpyTranslator
from .oneview import OneViewTranslator
from .ptychography import PtychographyTranslator
from .sporc import SporcTranslator
from .time_series import MovieTranslator
from .translator import Translator
__all__ = ['Translator', 'BEodfTranslator', 'BEPSndfTranslator', 'BEodfRelaxationTranslator',
'GIVTranslator', 'GLineTranslator', 'GDMTranslator', 'PtychographyTranslator',
'SporcTranslator', 'MovieTranslator', 'IgorIBWTranslator', 'NumpyTranslator',
'OneViewTranslator', 'ImageTranslator', 'NDataTranslator']
| from . import be_odf
from . import be_odf_relaxation
from . import beps_ndf
from . import general_dynamic_mode
from . import gmode_iv
from . import gmode_line
from . import image
from . import ndata_translator
from . import numpy_translator
+ from . import igor_ibw
from . import oneview
from . import ptychography
from . import sporc
from . import time_series
from . import translator
from . import utils
from . import df_utils
from .be_odf import BEodfTranslator
from .be_odf_relaxation import BEodfRelaxationTranslator
from .beps_ndf import BEPSndfTranslator
from .general_dynamic_mode import GDMTranslator
from .gmode_iv import GIVTranslator
from .gmode_line import GLineTranslator
from .igor_ibw import IgorIBWTranslator
from .image import ImageTranslator
from .ndata_translator import NDataTranslator
from .numpy_translator import NumpyTranslator
from .oneview import OneViewTranslator
from .ptychography import PtychographyTranslator
from .sporc import SporcTranslator
from .time_series import MovieTranslator
from .translator import Translator
__all__ = ['Translator', 'BEodfTranslator', 'BEPSndfTranslator', 'BEodfRelaxationTranslator',
'GIVTranslator', 'GLineTranslator', 'GDMTranslator', 'PtychographyTranslator',
'SporcTranslator', 'MovieTranslator', 'IgorIBWTranslator', 'NumpyTranslator',
'OneViewTranslator', 'ImageTranslator', 'NDataTranslator'] |
6cf8bad4faa15bcbc149db678e2ec232ce82b72a | utils/efushell/SocketDriver.py | utils/efushell/SocketDriver.py | import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
| import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
| Move string formatting onto two lines for readability | Move string formatting onto two lines for readability
| Python | bsd-2-clause | ess-dmsc/event-formation-unit,ess-dmsc/event-formation-unit,ess-dmsc/event-formation-unit,ess-dmsc/event-formation-unit | import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
+ cmd += '\n'
- self.sock.send('{}\n'.format(cmd).encode('utf-8'))
+ self.sock.send(cmd.encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
+ cmd += '\n'
- self.sock.send('{}\n'.format(cmd).encode('utf-8'))
+ self.sock.send(cmd.encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
| Move string formatting onto two lines for readability | ## Code Before:
import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
self.sock.send('{}\n'.format(cmd).encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
## Instruction:
Move string formatting onto two lines for readability
## Code After:
import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
cmd += '\n'
self.sock.send(cmd.encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply
| import string
import socket
import sys
import time
import threading
class SimpleSocket:
def __init__(self, hostname="localhost", port=8888, timeout=2):
self.access_semaphor = threading.Semaphore(1)
try:
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
except socket.error:
sys.stderr.write(
"socket() [Socket connection error] Cannot connect to %s, error: %s\n" % (hostname, socket.error))
sys.exit(1)
self.sock.settimeout(timeout)
try:
self.sock.connect((hostname, port))
except socket.error:
sys.stderr.write("connect() [Socket connection error] Cannot connect to %s:%d, error: %s\n" % (
hostname, port, socket.error))
sys.exit(2)
def SendCommand(self, cmd):
self.access_semaphor.acquire()
+ cmd += '\n'
- self.sock.send('{}\n'.format(cmd).encode('utf-8'))
? -------------- -
+ self.sock.send(cmd.encode('utf-8'))
self.access_semaphor.release()
def Ask(self, cmd):
self.access_semaphor.acquire()
+ cmd += '\n'
- self.sock.send('{}\n'.format(cmd).encode('utf-8'))
? -------------- -
+ self.sock.send(cmd.encode('utf-8'))
reply = self.sock.recv(2048).strip(b'\n')
self.access_semaphor.release()
return reply |
0b797d14a609172d4965320aa30eae9e9c1f892e | tests/test_strutils.py | tests/test_strutils.py |
from boltons import strutils
def test_asciify():
ref = u'Beyoncé'
b = strutils.asciify(ref)
assert len(b) == len(b)
assert b[-1:].decode('ascii') == 'e'
def test_indent():
to_indent = '\nabc\ndef\n\nxyz\n'
ref = '\n abc\n def\n\n xyz\n'
assert strutils.indent(to_indent, ' ') == ref
|
import uuid
from boltons import strutils
def test_asciify():
ref = u'Beyoncé'
b = strutils.asciify(ref)
assert len(b) == len(b)
assert b[-1:].decode('ascii') == 'e'
def test_indent():
to_indent = '\nabc\ndef\n\nxyz\n'
ref = '\n abc\n def\n\n xyz\n'
assert strutils.indent(to_indent, ' ') == ref
def test_is_uuid():
assert strutils.is_uuid(uuid.uuid4()) == True
assert strutils.is_uuid(uuid.uuid4(), version=1) == False
assert strutils.is_uuid(str(uuid.uuid4())) == True
assert strutils.is_uuid(str(uuid.uuid4()), version=1) == False
assert strutils.is_uuid(set('garbage')) == False
| Add is_uuid unit-tests, including garbage types. | Add is_uuid unit-tests, including garbage types.
| Python | bsd-3-clause | zeroSteiner/boltons,doublereedkurt/boltons,markrwilliams/boltons | +
+ import uuid
from boltons import strutils
def test_asciify():
ref = u'Beyoncé'
b = strutils.asciify(ref)
assert len(b) == len(b)
assert b[-1:].decode('ascii') == 'e'
def test_indent():
to_indent = '\nabc\ndef\n\nxyz\n'
ref = '\n abc\n def\n\n xyz\n'
assert strutils.indent(to_indent, ' ') == ref
+
+ def test_is_uuid():
+ assert strutils.is_uuid(uuid.uuid4()) == True
+ assert strutils.is_uuid(uuid.uuid4(), version=1) == False
+ assert strutils.is_uuid(str(uuid.uuid4())) == True
+ assert strutils.is_uuid(str(uuid.uuid4()), version=1) == False
+ assert strutils.is_uuid(set('garbage')) == False
+ | Add is_uuid unit-tests, including garbage types. | ## Code Before:
from boltons import strutils
def test_asciify():
ref = u'Beyoncé'
b = strutils.asciify(ref)
assert len(b) == len(b)
assert b[-1:].decode('ascii') == 'e'
def test_indent():
to_indent = '\nabc\ndef\n\nxyz\n'
ref = '\n abc\n def\n\n xyz\n'
assert strutils.indent(to_indent, ' ') == ref
## Instruction:
Add is_uuid unit-tests, including garbage types.
## Code After:
import uuid
from boltons import strutils
def test_asciify():
ref = u'Beyoncé'
b = strutils.asciify(ref)
assert len(b) == len(b)
assert b[-1:].decode('ascii') == 'e'
def test_indent():
to_indent = '\nabc\ndef\n\nxyz\n'
ref = '\n abc\n def\n\n xyz\n'
assert strutils.indent(to_indent, ' ') == ref
def test_is_uuid():
assert strutils.is_uuid(uuid.uuid4()) == True
assert strutils.is_uuid(uuid.uuid4(), version=1) == False
assert strutils.is_uuid(str(uuid.uuid4())) == True
assert strutils.is_uuid(str(uuid.uuid4()), version=1) == False
assert strutils.is_uuid(set('garbage')) == False
| +
+ import uuid
from boltons import strutils
def test_asciify():
ref = u'Beyoncé'
b = strutils.asciify(ref)
assert len(b) == len(b)
assert b[-1:].decode('ascii') == 'e'
def test_indent():
to_indent = '\nabc\ndef\n\nxyz\n'
ref = '\n abc\n def\n\n xyz\n'
assert strutils.indent(to_indent, ' ') == ref
+
+
+ def test_is_uuid():
+ assert strutils.is_uuid(uuid.uuid4()) == True
+ assert strutils.is_uuid(uuid.uuid4(), version=1) == False
+ assert strutils.is_uuid(str(uuid.uuid4())) == True
+ assert strutils.is_uuid(str(uuid.uuid4()), version=1) == False
+ assert strutils.is_uuid(set('garbage')) == False |
c1b96a3ee94c25cfbe3d66eec76052badacfb38e | udata/tests/organization/test_notifications.py | udata/tests/organization/test_notifications.py | from __future__ import unicode_literals, absolute_import
from udata.models import MembershipRequest, Member
from udata.core.user.factories import UserFactory
from udata.core.organization.factories import OrganizationFactory
from udata.core.organization.notifications import (
membership_request_notifications
)
from .. import TestCase, DBTestMixin
class OrganizationNotificationsTest(TestCase, DBTestMixin):
def test_pending_membership_requests(self):
admin = UserFactory()
editor = UserFactory()
applicant = UserFactory()
request = MembershipRequest(user=applicant, comment='test')
members = [
Member(user=editor, role='editor'),
Member(user=admin, role='admin')
]
org = OrganizationFactory(members=members, requests=[request])
self.assertEqual(len(membership_request_notifications(applicant)), 0)
self.assertEqual(len(membership_request_notifications(editor)), 0)
notifications = membership_request_notifications(admin)
self.assertEqual(len(notifications), 1)
dt, details = notifications[0]
self.assertEqualDates(dt, request.created)
self.assertEqual(details['id'], request.id)
self.assertEqual(details['organization'], org.id)
self.assertEqual(details['user']['id'], applicant.id)
self.assertEqual(details['user']['fullname'], applicant.fullname)
self.assertEqual(details['user']['avatar'], str(applicant.avatar))
| from __future__ import unicode_literals, absolute_import
import pytest
from udata.models import MembershipRequest, Member
from udata.core.user.factories import UserFactory
from udata.core.organization.factories import OrganizationFactory
from udata.core.organization.notifications import (
membership_request_notifications
)
from udata.tests.helpers import assert_equal_dates
@pytest.mark.usefixtures('clean_db')
class OrganizationNotificationsTest:
def test_pending_membership_requests(self):
admin = UserFactory()
editor = UserFactory()
applicant = UserFactory()
request = MembershipRequest(user=applicant, comment='test')
members = [
Member(user=editor, role='editor'),
Member(user=admin, role='admin')
]
org = OrganizationFactory(members=members, requests=[request])
assert len(membership_request_notifications(applicant)) is 0
assert len(membership_request_notifications(editor)) is 0
notifications = membership_request_notifications(admin)
assert len(notifications) is 1
dt, details = notifications[0]
assert_equal_dates(dt, request.created)
assert details['id'] == request.id
assert details['organization'] == org.id
assert details['user']['id'] == applicant.id
assert details['user']['fullname'] == applicant.fullname
assert details['user']['avatar'] == str(applicant.avatar)
| Migrate org notif tests to pytest | Migrate org notif tests to pytest
| Python | agpl-3.0 | opendatateam/udata,etalab/udata,etalab/udata,opendatateam/udata,opendatateam/udata,etalab/udata | from __future__ import unicode_literals, absolute_import
+
+ import pytest
from udata.models import MembershipRequest, Member
from udata.core.user.factories import UserFactory
from udata.core.organization.factories import OrganizationFactory
from udata.core.organization.notifications import (
membership_request_notifications
)
- from .. import TestCase, DBTestMixin
+ from udata.tests.helpers import assert_equal_dates
+ @pytest.mark.usefixtures('clean_db')
- class OrganizationNotificationsTest(TestCase, DBTestMixin):
+ class OrganizationNotificationsTest:
def test_pending_membership_requests(self):
admin = UserFactory()
editor = UserFactory()
applicant = UserFactory()
request = MembershipRequest(user=applicant, comment='test')
members = [
Member(user=editor, role='editor'),
Member(user=admin, role='admin')
]
org = OrganizationFactory(members=members, requests=[request])
- self.assertEqual(len(membership_request_notifications(applicant)), 0)
+ assert len(membership_request_notifications(applicant)) is 0
- self.assertEqual(len(membership_request_notifications(editor)), 0)
+ assert len(membership_request_notifications(editor)) is 0
notifications = membership_request_notifications(admin)
- self.assertEqual(len(notifications), 1)
+ assert len(notifications) is 1
dt, details = notifications[0]
- self.assertEqualDates(dt, request.created)
+ assert_equal_dates(dt, request.created)
- self.assertEqual(details['id'], request.id)
+ assert details['id'] == request.id
- self.assertEqual(details['organization'], org.id)
+ assert details['organization'] == org.id
- self.assertEqual(details['user']['id'], applicant.id)
+ assert details['user']['id'] == applicant.id
- self.assertEqual(details['user']['fullname'], applicant.fullname)
+ assert details['user']['fullname'] == applicant.fullname
- self.assertEqual(details['user']['avatar'], str(applicant.avatar))
+ assert details['user']['avatar'] == str(applicant.avatar)
| Migrate org notif tests to pytest | ## Code Before:
from __future__ import unicode_literals, absolute_import
from udata.models import MembershipRequest, Member
from udata.core.user.factories import UserFactory
from udata.core.organization.factories import OrganizationFactory
from udata.core.organization.notifications import (
membership_request_notifications
)
from .. import TestCase, DBTestMixin
class OrganizationNotificationsTest(TestCase, DBTestMixin):
def test_pending_membership_requests(self):
admin = UserFactory()
editor = UserFactory()
applicant = UserFactory()
request = MembershipRequest(user=applicant, comment='test')
members = [
Member(user=editor, role='editor'),
Member(user=admin, role='admin')
]
org = OrganizationFactory(members=members, requests=[request])
self.assertEqual(len(membership_request_notifications(applicant)), 0)
self.assertEqual(len(membership_request_notifications(editor)), 0)
notifications = membership_request_notifications(admin)
self.assertEqual(len(notifications), 1)
dt, details = notifications[0]
self.assertEqualDates(dt, request.created)
self.assertEqual(details['id'], request.id)
self.assertEqual(details['organization'], org.id)
self.assertEqual(details['user']['id'], applicant.id)
self.assertEqual(details['user']['fullname'], applicant.fullname)
self.assertEqual(details['user']['avatar'], str(applicant.avatar))
## Instruction:
Migrate org notif tests to pytest
## Code After:
from __future__ import unicode_literals, absolute_import
import pytest
from udata.models import MembershipRequest, Member
from udata.core.user.factories import UserFactory
from udata.core.organization.factories import OrganizationFactory
from udata.core.organization.notifications import (
membership_request_notifications
)
from udata.tests.helpers import assert_equal_dates
@pytest.mark.usefixtures('clean_db')
class OrganizationNotificationsTest:
def test_pending_membership_requests(self):
admin = UserFactory()
editor = UserFactory()
applicant = UserFactory()
request = MembershipRequest(user=applicant, comment='test')
members = [
Member(user=editor, role='editor'),
Member(user=admin, role='admin')
]
org = OrganizationFactory(members=members, requests=[request])
assert len(membership_request_notifications(applicant)) is 0
assert len(membership_request_notifications(editor)) is 0
notifications = membership_request_notifications(admin)
assert len(notifications) is 1
dt, details = notifications[0]
assert_equal_dates(dt, request.created)
assert details['id'] == request.id
assert details['organization'] == org.id
assert details['user']['id'] == applicant.id
assert details['user']['fullname'] == applicant.fullname
assert details['user']['avatar'] == str(applicant.avatar)
| from __future__ import unicode_literals, absolute_import
+
+ import pytest
from udata.models import MembershipRequest, Member
from udata.core.user.factories import UserFactory
from udata.core.organization.factories import OrganizationFactory
from udata.core.organization.notifications import (
membership_request_notifications
)
- from .. import TestCase, DBTestMixin
+ from udata.tests.helpers import assert_equal_dates
+ @pytest.mark.usefixtures('clean_db')
- class OrganizationNotificationsTest(TestCase, DBTestMixin):
? -----------------------
+ class OrganizationNotificationsTest:
def test_pending_membership_requests(self):
admin = UserFactory()
editor = UserFactory()
applicant = UserFactory()
request = MembershipRequest(user=applicant, comment='test')
members = [
Member(user=editor, role='editor'),
Member(user=admin, role='admin')
]
org = OrganizationFactory(members=members, requests=[request])
- self.assertEqual(len(membership_request_notifications(applicant)), 0)
? ----- ^^^^^^ ^ -
+ assert len(membership_request_notifications(applicant)) is 0
? ^ ^^^
- self.assertEqual(len(membership_request_notifications(editor)), 0)
? ----- ^^^^^^ ^ -
+ assert len(membership_request_notifications(editor)) is 0
? ^ ^^^
notifications = membership_request_notifications(admin)
- self.assertEqual(len(notifications), 1)
? ----- ^^^^^^ ^ -
+ assert len(notifications) is 1
? ^ ^^^
dt, details = notifications[0]
- self.assertEqualDates(dt, request.created)
? ----- ^ ^
+ assert_equal_dates(dt, request.created)
? ^^ ^^
- self.assertEqual(details['id'], request.id)
? ----- ^^^^^^ ^ -
+ assert details['id'] == request.id
? ^ ^^^
- self.assertEqual(details['organization'], org.id)
? ----- ^^^^^^ ^ -
+ assert details['organization'] == org.id
? ^ ^^^
- self.assertEqual(details['user']['id'], applicant.id)
? ----- ^^^^^^ ^ -
+ assert details['user']['id'] == applicant.id
? ^ ^^^
- self.assertEqual(details['user']['fullname'], applicant.fullname)
? ----- ^^^^^^ ^ -
+ assert details['user']['fullname'] == applicant.fullname
? ^ ^^^
- self.assertEqual(details['user']['avatar'], str(applicant.avatar))
? ----- ^^^^^^ ^ -
+ assert details['user']['avatar'] == str(applicant.avatar)
? ^ ^^^
|
e6d4ca44f3f71468c40842c53e3963b425ac2527 | mss/factory.py | mss/factory.py |
import platform
from typing import TYPE_CHECKING
from .exception import ScreenShotError
if TYPE_CHECKING:
from typing import Any # noqa
from .base import MSSMixin # noqa
def mss(**kwargs):
# type: (Any) -> MSSMixin
""" Factory returning a proper MSS class instance.
It detects the plateform we are running on
and choose the most adapted mss_class to take
screenshots.
It then proxies its arguments to the class for
instantiation.
"""
os_ = platform.system().lower()
if os_ == "darwin":
from . import darwin
return darwin.MSS(**kwargs)
if os_ == "linux":
from . import linux
return linux.MSS(**kwargs)
if os_ == "windows":
from . import windows
return windows.MSS(**kwargs)
raise ScreenShotError("System {!r} not (yet?) implemented.".format(os_))
|
import platform
from typing import TYPE_CHECKING
from .exception import ScreenShotError
if TYPE_CHECKING:
from typing import Any # noqa
from .base import MSSMixin # noqa
def mss(**kwargs):
# type: (Any) -> MSSMixin
""" Factory returning a proper MSS class instance.
It detects the plateform we are running on
and choose the most adapted mss_class to take
screenshots.
It then proxies its arguments to the class for
instantiation.
"""
# pylint: disable=import-outside-toplevel
os_ = platform.system().lower()
if os_ == "darwin":
from . import darwin
return darwin.MSS(**kwargs)
if os_ == "linux":
from . import linux
return linux.MSS(**kwargs)
if os_ == "windows":
from . import windows
return windows.MSS(**kwargs)
raise ScreenShotError("System {!r} not (yet?) implemented.".format(os_))
| Fix pylint: Import outside toplevel (%s) (import-outside-toplevel) | MSS: Fix pylint: Import outside toplevel (%s) (import-outside-toplevel)
| Python | mit | BoboTiG/python-mss |
import platform
from typing import TYPE_CHECKING
from .exception import ScreenShotError
if TYPE_CHECKING:
from typing import Any # noqa
from .base import MSSMixin # noqa
def mss(**kwargs):
# type: (Any) -> MSSMixin
""" Factory returning a proper MSS class instance.
It detects the plateform we are running on
and choose the most adapted mss_class to take
screenshots.
It then proxies its arguments to the class for
instantiation.
"""
+ # pylint: disable=import-outside-toplevel
os_ = platform.system().lower()
if os_ == "darwin":
from . import darwin
return darwin.MSS(**kwargs)
if os_ == "linux":
from . import linux
return linux.MSS(**kwargs)
if os_ == "windows":
from . import windows
return windows.MSS(**kwargs)
raise ScreenShotError("System {!r} not (yet?) implemented.".format(os_))
| Fix pylint: Import outside toplevel (%s) (import-outside-toplevel) | ## Code Before:
import platform
from typing import TYPE_CHECKING
from .exception import ScreenShotError
if TYPE_CHECKING:
from typing import Any # noqa
from .base import MSSMixin # noqa
def mss(**kwargs):
# type: (Any) -> MSSMixin
""" Factory returning a proper MSS class instance.
It detects the plateform we are running on
and choose the most adapted mss_class to take
screenshots.
It then proxies its arguments to the class for
instantiation.
"""
os_ = platform.system().lower()
if os_ == "darwin":
from . import darwin
return darwin.MSS(**kwargs)
if os_ == "linux":
from . import linux
return linux.MSS(**kwargs)
if os_ == "windows":
from . import windows
return windows.MSS(**kwargs)
raise ScreenShotError("System {!r} not (yet?) implemented.".format(os_))
## Instruction:
Fix pylint: Import outside toplevel (%s) (import-outside-toplevel)
## Code After:
import platform
from typing import TYPE_CHECKING
from .exception import ScreenShotError
if TYPE_CHECKING:
from typing import Any # noqa
from .base import MSSMixin # noqa
def mss(**kwargs):
# type: (Any) -> MSSMixin
""" Factory returning a proper MSS class instance.
It detects the plateform we are running on
and choose the most adapted mss_class to take
screenshots.
It then proxies its arguments to the class for
instantiation.
"""
# pylint: disable=import-outside-toplevel
os_ = platform.system().lower()
if os_ == "darwin":
from . import darwin
return darwin.MSS(**kwargs)
if os_ == "linux":
from . import linux
return linux.MSS(**kwargs)
if os_ == "windows":
from . import windows
return windows.MSS(**kwargs)
raise ScreenShotError("System {!r} not (yet?) implemented.".format(os_))
|
import platform
from typing import TYPE_CHECKING
from .exception import ScreenShotError
if TYPE_CHECKING:
from typing import Any # noqa
from .base import MSSMixin # noqa
def mss(**kwargs):
# type: (Any) -> MSSMixin
""" Factory returning a proper MSS class instance.
It detects the plateform we are running on
and choose the most adapted mss_class to take
screenshots.
It then proxies its arguments to the class for
instantiation.
"""
+ # pylint: disable=import-outside-toplevel
os_ = platform.system().lower()
if os_ == "darwin":
from . import darwin
return darwin.MSS(**kwargs)
if os_ == "linux":
from . import linux
return linux.MSS(**kwargs)
if os_ == "windows":
from . import windows
return windows.MSS(**kwargs)
raise ScreenShotError("System {!r} not (yet?) implemented.".format(os_)) |
ebc5831cf8cd3a87c6d663c28afb94a952f4e42f | mint/scripts/db2db/migrate.py | mint/scripts/db2db/migrate.py |
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
destTuple = ('postgresql', 'rbuilder@localhost:5439/mint')
finalTuple = ('pgpool', 'rbuilder@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
|
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
destTuple = ('postgresql', 'postgres@localhost:5439/mint')
finalTuple = ('pgpool', 'postgres@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
| Fix another use of the rbuilder postgres user | Fix another use of the rbuilder postgres user
| Python | apache-2.0 | sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint,sassoftware/mint |
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
- destTuple = ('postgresql', 'rbuilder@localhost:5439/mint')
+ destTuple = ('postgresql', 'postgres@localhost:5439/mint')
- finalTuple = ('pgpool', 'rbuilder@localhost.localdomain:6432/mint')
+ finalTuple = ('pgpool', 'postgres@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
| Fix another use of the rbuilder postgres user | ## Code Before:
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
destTuple = ('postgresql', 'rbuilder@localhost:5439/mint')
finalTuple = ('pgpool', 'rbuilder@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
## Instruction:
Fix another use of the rbuilder postgres user
## Code After:
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
destTuple = ('postgresql', 'postgres@localhost:5439/mint')
finalTuple = ('pgpool', 'postgres@localhost.localdomain:6432/mint')
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig()
|
import logging
from conary import dbstore
from mint.scripts.db2db import db2db
log = logging.getLogger(__name__)
def switchToPostgres(cfg):
if cfg.dbDriver in ('postgresql', 'pgpool'):
return
sourceTuple = (cfg.dbDriver, cfg.dbPath)
- destTuple = ('postgresql', 'rbuilder@localhost:5439/mint')
? ----- ^
+ destTuple = ('postgresql', 'postgres@localhost:5439/mint')
? +++++ ^
- finalTuple = ('pgpool', 'rbuilder@localhost.localdomain:6432/mint')
? ----- ^
+ finalTuple = ('pgpool', 'postgres@localhost.localdomain:6432/mint')
? +++++ ^
log.info("Migrating mint database from %s::%s to %s::%s",
*(sourceTuple + destTuple))
db2db.move_database(sourceTuple, destTuple)
# Update rbuilder-generated.conf
log.info("Changing configured mint database to %s::%s", *finalTuple)
cfg.dbDriver = finalTuple[0]
cfg.dbPath = finalTuple[1]
cfg.writeGeneratedConfig() |
2ace9ce514d7299a8f3e8dca134a6e4eb3284937 | parser2.py | parser2.py | from pprint import pprint
input = open('example_ignition.txt').read()
hands = input.split('\n\n\n')
class Hand:
def __init__(self, se=None, p=None, f=None, t=None, r=None, su=None):
self.seats = se
self.preflop = p
self.flop = f
self.turn = t
self.river = r
self.summary = su
def __repr__(self):
return str(self.__dict__)
for i, h in enumerate(hands):
segments = "seats preflop flop turn river".split()
s = h.split('\n*** ')
hands[i] = Hand()
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
hands[i].__dict__[k] = v
hands[i].summary = s.pop(0).splitlines()
assert len(s) == 0
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
| from pprint import pprint
class Hand:
def __init__(self, string):
segments = "seats preflop flop turn river".split()
self.seats = None
self.preflop = None
self.flop = None
self.turn = None
self.river = None
self.summary = None
## step 2: split each hand into segments
s = string.split('\n*** ')
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
self.__dict__[k] = v
## step 3: split each segment into lines
self.summary = s.pop(0).splitlines()
assert len(s) == 0
def __repr__(self):
return str(self.__dict__)
## main
input = open('example_ignition.txt').read()
## step 1: split flat file into hands
hands = input.split('\n\n\n')
for i, h in enumerate(hands):
hands[i] = Hand(h)
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
| Move parsing loop into the class itself. | Move parsing loop into the class itself.
| Python | mit | zimolzak/Ignition-poker-parser | from pprint import pprint
- input = open('example_ignition.txt').read()
- hands = input.split('\n\n\n')
class Hand:
- def __init__(self, se=None, p=None, f=None, t=None, r=None, su=None):
+ def __init__(self, string):
+ segments = "seats preflop flop turn river".split()
- self.seats = se
+ self.seats = None
- self.preflop = p
+ self.preflop = None
- self.flop = f
+ self.flop = None
- self.turn = t
+ self.turn = None
- self.river = r
+ self.river = None
- self.summary = su
+ self.summary = None
+ ## step 2: split each hand into segments
+ s = string.split('\n*** ')
+ while len(s) > 1:
+ # We don't always have flop, turn, riv, but last element is
+ # always Summary.
+ k = segments.pop(0)
+ v = s.pop(0).splitlines()
+ self.__dict__[k] = v
+ ## step 3: split each segment into lines
+ self.summary = s.pop(0).splitlines()
+ assert len(s) == 0
def __repr__(self):
return str(self.__dict__)
+
+ ## main
+
+ input = open('example_ignition.txt').read()
+
+ ## step 1: split flat file into hands
+ hands = input.split('\n\n\n')
+
for i, h in enumerate(hands):
- segments = "seats preflop flop turn river".split()
- s = h.split('\n*** ')
- hands[i] = Hand()
+ hands[i] = Hand(h)
- while len(s) > 1:
- # We don't always have flop, turn, riv, but last element is
- # always Summary.
- k = segments.pop(0)
- v = s.pop(0).splitlines()
- hands[i].__dict__[k] = v
- hands[i].summary = s.pop(0).splitlines()
- assert len(s) == 0
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
| Move parsing loop into the class itself. | ## Code Before:
from pprint import pprint
input = open('example_ignition.txt').read()
hands = input.split('\n\n\n')
class Hand:
def __init__(self, se=None, p=None, f=None, t=None, r=None, su=None):
self.seats = se
self.preflop = p
self.flop = f
self.turn = t
self.river = r
self.summary = su
def __repr__(self):
return str(self.__dict__)
for i, h in enumerate(hands):
segments = "seats preflop flop turn river".split()
s = h.split('\n*** ')
hands[i] = Hand()
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
hands[i].__dict__[k] = v
hands[i].summary = s.pop(0).splitlines()
assert len(s) == 0
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
## Instruction:
Move parsing loop into the class itself.
## Code After:
from pprint import pprint
class Hand:
def __init__(self, string):
segments = "seats preflop flop turn river".split()
self.seats = None
self.preflop = None
self.flop = None
self.turn = None
self.river = None
self.summary = None
## step 2: split each hand into segments
s = string.split('\n*** ')
while len(s) > 1:
# We don't always have flop, turn, riv, but last element is
# always Summary.
k = segments.pop(0)
v = s.pop(0).splitlines()
self.__dict__[k] = v
## step 3: split each segment into lines
self.summary = s.pop(0).splitlines()
assert len(s) == 0
def __repr__(self):
return str(self.__dict__)
## main
input = open('example_ignition.txt').read()
## step 1: split flat file into hands
hands = input.split('\n\n\n')
for i, h in enumerate(hands):
hands[i] = Hand(h)
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0])
| from pprint import pprint
- input = open('example_ignition.txt').read()
- hands = input.split('\n\n\n')
class Hand:
- def __init__(self, se=None, p=None, f=None, t=None, r=None, su=None):
+ def __init__(self, string):
+ segments = "seats preflop flop turn river".split()
- self.seats = se
? ^
+ self.seats = None
? ^^^
- self.preflop = p
? ^
+ self.preflop = None
? ^^^^
- self.flop = f
? ^
+ self.flop = None
? ^^^^
- self.turn = t
? ^
+ self.turn = None
? ^^^^
- self.river = r
? ^
+ self.river = None
? ^^^^
- self.summary = su
? ^^
+ self.summary = None
? ^^^^
+ ## step 2: split each hand into segments
+ s = string.split('\n*** ')
+ while len(s) > 1:
+ # We don't always have flop, turn, riv, but last element is
+ # always Summary.
+ k = segments.pop(0)
+ v = s.pop(0).splitlines()
+ self.__dict__[k] = v
+ ## step 3: split each segment into lines
+ self.summary = s.pop(0).splitlines()
+ assert len(s) == 0
def __repr__(self):
return str(self.__dict__)
+
+ ## main
+
+ input = open('example_ignition.txt').read()
+
+ ## step 1: split flat file into hands
+ hands = input.split('\n\n\n')
+
for i, h in enumerate(hands):
- segments = "seats preflop flop turn river".split()
- s = h.split('\n*** ')
- hands[i] = Hand()
+ hands[i] = Hand(h)
? +
- while len(s) > 1:
- # We don't always have flop, turn, riv, but last element is
- # always Summary.
- k = segments.pop(0)
- v = s.pop(0).splitlines()
- hands[i].__dict__[k] = v
- hands[i].summary = s.pop(0).splitlines()
- assert len(s) == 0
## [ { s:[] p:[] f:[] s:[] } { s:[] p:[] f:[] t:[] r:[] s:[] } {} {} ]
print(hands[0]) |
b0029cffae96e25611d7387e699774de4d9682d3 | corehq/apps/es/tests/utils.py | corehq/apps/es/tests/utils.py | import json
from nose.plugins.attrib import attr
class ElasticTestMixin(object):
def checkQuery(self, query, json_output, is_raw_query=False):
if is_raw_query:
raw_query = query
else:
raw_query = query.raw_query
msg = "Expected Query:\n{}\nGenerated Query:\n{}".format(
json.dumps(json_output, indent=4),
json.dumps(raw_query, indent=4),
)
# NOTE: This method thinks [a, b, c] != [b, c, a]
self.assertEqual(raw_query, json_output, msg=msg)
def es_test(test):
"""Decorator for tagging ElasticSearch tests
:param test: A test class, method, or function.
"""
return attr(es_test=True)(test)
| import json
from nose.plugins.attrib import attr
from nose.tools import nottest
class ElasticTestMixin(object):
def checkQuery(self, query, json_output, is_raw_query=False):
if is_raw_query:
raw_query = query
else:
raw_query = query.raw_query
msg = "Expected Query:\n{}\nGenerated Query:\n{}".format(
json.dumps(json_output, indent=4),
json.dumps(raw_query, indent=4),
)
# NOTE: This method thinks [a, b, c] != [b, c, a]
self.assertEqual(raw_query, json_output, msg=msg)
@nottest
def es_test(test):
"""Decorator for tagging ElasticSearch tests
:param test: A test class, method, or function.
"""
return attr(es_test=True)(test)
| Mark es_test decorator as nottest | Mark es_test decorator as nottest
Second try... | Python | bsd-3-clause | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | import json
from nose.plugins.attrib import attr
+ from nose.tools import nottest
class ElasticTestMixin(object):
def checkQuery(self, query, json_output, is_raw_query=False):
if is_raw_query:
raw_query = query
else:
raw_query = query.raw_query
msg = "Expected Query:\n{}\nGenerated Query:\n{}".format(
json.dumps(json_output, indent=4),
json.dumps(raw_query, indent=4),
)
# NOTE: This method thinks [a, b, c] != [b, c, a]
self.assertEqual(raw_query, json_output, msg=msg)
+ @nottest
def es_test(test):
"""Decorator for tagging ElasticSearch tests
:param test: A test class, method, or function.
"""
return attr(es_test=True)(test)
| Mark es_test decorator as nottest | ## Code Before:
import json
from nose.plugins.attrib import attr
class ElasticTestMixin(object):
def checkQuery(self, query, json_output, is_raw_query=False):
if is_raw_query:
raw_query = query
else:
raw_query = query.raw_query
msg = "Expected Query:\n{}\nGenerated Query:\n{}".format(
json.dumps(json_output, indent=4),
json.dumps(raw_query, indent=4),
)
# NOTE: This method thinks [a, b, c] != [b, c, a]
self.assertEqual(raw_query, json_output, msg=msg)
def es_test(test):
"""Decorator for tagging ElasticSearch tests
:param test: A test class, method, or function.
"""
return attr(es_test=True)(test)
## Instruction:
Mark es_test decorator as nottest
## Code After:
import json
from nose.plugins.attrib import attr
from nose.tools import nottest
class ElasticTestMixin(object):
def checkQuery(self, query, json_output, is_raw_query=False):
if is_raw_query:
raw_query = query
else:
raw_query = query.raw_query
msg = "Expected Query:\n{}\nGenerated Query:\n{}".format(
json.dumps(json_output, indent=4),
json.dumps(raw_query, indent=4),
)
# NOTE: This method thinks [a, b, c] != [b, c, a]
self.assertEqual(raw_query, json_output, msg=msg)
@nottest
def es_test(test):
"""Decorator for tagging ElasticSearch tests
:param test: A test class, method, or function.
"""
return attr(es_test=True)(test)
| import json
from nose.plugins.attrib import attr
+ from nose.tools import nottest
class ElasticTestMixin(object):
def checkQuery(self, query, json_output, is_raw_query=False):
if is_raw_query:
raw_query = query
else:
raw_query = query.raw_query
msg = "Expected Query:\n{}\nGenerated Query:\n{}".format(
json.dumps(json_output, indent=4),
json.dumps(raw_query, indent=4),
)
# NOTE: This method thinks [a, b, c] != [b, c, a]
self.assertEqual(raw_query, json_output, msg=msg)
+ @nottest
def es_test(test):
"""Decorator for tagging ElasticSearch tests
:param test: A test class, method, or function.
"""
return attr(es_test=True)(test) |
87856b925d436df302eed4a65eac139ee394b427 | setup.py | setup.py |
from distutils.core import setup
from afnumpy import __version__
setup (name = 'afnumpy',
version = __version__,
author = "Filipe Maia",
author_email = "filipe.c.maia@gmail.com",
url = 'https://github.com/FilipeMaia/afnumpy',
download_url = 'https://github.com/afnumpy/tarball/'+__version__,
keywords = ['arrayfire', 'numpy', 'GPU'],
description = """A GPU-ready drop-in replacement for numpy""",
packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"],
install_requires=['arrayfire', 'numpy'],
)
|
from distutils.core import setup
from afnumpy import __version__
setup (name = 'afnumpy',
version = __version__,
author = "Filipe Maia",
author_email = "filipe.c.maia@gmail.com",
url = 'https://github.com/FilipeMaia/afnumpy',
download_url = 'https://github.com/FilipeMaia/afnumpy/archive/'+__version__,
keywords = ['arrayfire', 'numpy', 'GPU'],
description = """A GPU-ready drop-in replacement for numpy""",
packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"],
install_requires=['arrayfire', 'numpy'],
)
| Correct the pip download URL | Correct the pip download URL
| Python | bsd-2-clause | FilipeMaia/afnumpy,daurer/afnumpy |
from distutils.core import setup
from afnumpy import __version__
setup (name = 'afnumpy',
version = __version__,
author = "Filipe Maia",
author_email = "filipe.c.maia@gmail.com",
url = 'https://github.com/FilipeMaia/afnumpy',
- download_url = 'https://github.com/afnumpy/tarball/'+__version__,
+ download_url = 'https://github.com/FilipeMaia/afnumpy/archive/'+__version__,
keywords = ['arrayfire', 'numpy', 'GPU'],
description = """A GPU-ready drop-in replacement for numpy""",
packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"],
install_requires=['arrayfire', 'numpy'],
)
| Correct the pip download URL | ## Code Before:
from distutils.core import setup
from afnumpy import __version__
setup (name = 'afnumpy',
version = __version__,
author = "Filipe Maia",
author_email = "filipe.c.maia@gmail.com",
url = 'https://github.com/FilipeMaia/afnumpy',
download_url = 'https://github.com/afnumpy/tarball/'+__version__,
keywords = ['arrayfire', 'numpy', 'GPU'],
description = """A GPU-ready drop-in replacement for numpy""",
packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"],
install_requires=['arrayfire', 'numpy'],
)
## Instruction:
Correct the pip download URL
## Code After:
from distutils.core import setup
from afnumpy import __version__
setup (name = 'afnumpy',
version = __version__,
author = "Filipe Maia",
author_email = "filipe.c.maia@gmail.com",
url = 'https://github.com/FilipeMaia/afnumpy',
download_url = 'https://github.com/FilipeMaia/afnumpy/archive/'+__version__,
keywords = ['arrayfire', 'numpy', 'GPU'],
description = """A GPU-ready drop-in replacement for numpy""",
packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"],
install_requires=['arrayfire', 'numpy'],
)
|
from distutils.core import setup
from afnumpy import __version__
setup (name = 'afnumpy',
version = __version__,
author = "Filipe Maia",
author_email = "filipe.c.maia@gmail.com",
url = 'https://github.com/FilipeMaia/afnumpy',
- download_url = 'https://github.com/afnumpy/tarball/'+__version__,
? - ^^^^
+ download_url = 'https://github.com/FilipeMaia/afnumpy/archive/'+__version__,
? +++++++++++ ^^^^^
keywords = ['arrayfire', 'numpy', 'GPU'],
description = """A GPU-ready drop-in replacement for numpy""",
packages = ["afnumpy", "afnumpy/core", "afnumpy/lib", "afnumpy/linalg"],
install_requires=['arrayfire', 'numpy'],
) |
ca8349a897c233d72ea74128dabdd1311f00c13c | tests/unittest.py | tests/unittest.py |
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
pass
|
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestCase, self).__init__(*args, **kwargs)
level = getattr(self, "loglevel", NEVER)
orig_setUp = self.setUp
def setUp():
old_level = logging.getLogger().level
if old_level != level:
orig_tearDown = self.tearDown
def tearDown():
ret = orig_tearDown()
logging.getLogger().setLevel(old_level)
return ret
self.tearDown = tearDown
logging.getLogger().setLevel(level)
return orig_setUp()
self.setUp = setUp
| Allow a TestCase to set a 'loglevel' attribute, which overrides the logging level while that testcase runs | Allow a TestCase to set a 'loglevel' attribute, which overrides the logging level while that testcase runs
| Python | apache-2.0 | illicitonion/synapse,TribeMedia/synapse,howethomas/synapse,iot-factory/synapse,howethomas/synapse,TribeMedia/synapse,rzr/synapse,rzr/synapse,illicitonion/synapse,illicitonion/synapse,illicitonion/synapse,TribeMedia/synapse,TribeMedia/synapse,iot-factory/synapse,rzr/synapse,rzr/synapse,matrix-org/synapse,iot-factory/synapse,howethomas/synapse,matrix-org/synapse,howethomas/synapse,iot-factory/synapse,matrix-org/synapse,matrix-org/synapse,illicitonion/synapse,howethomas/synapse,TribeMedia/synapse,rzr/synapse,matrix-org/synapse,matrix-org/synapse,iot-factory/synapse |
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
- pass
+ def __init__(self, *args, **kwargs):
+ super(TestCase, self).__init__(*args, **kwargs)
+ level = getattr(self, "loglevel", NEVER)
+
+ orig_setUp = self.setUp
+
+ def setUp():
+ old_level = logging.getLogger().level
+
+ if old_level != level:
+ orig_tearDown = self.tearDown
+
+ def tearDown():
+ ret = orig_tearDown()
+ logging.getLogger().setLevel(old_level)
+ return ret
+ self.tearDown = tearDown
+
+ logging.getLogger().setLevel(level)
+ return orig_setUp()
+ self.setUp = setUp
+ | Allow a TestCase to set a 'loglevel' attribute, which overrides the logging level while that testcase runs | ## Code Before:
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
pass
## Instruction:
Allow a TestCase to set a 'loglevel' attribute, which overrides the logging level while that testcase runs
## Code After:
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestCase, self).__init__(*args, **kwargs)
level = getattr(self, "loglevel", NEVER)
orig_setUp = self.setUp
def setUp():
old_level = logging.getLogger().level
if old_level != level:
orig_tearDown = self.tearDown
def tearDown():
ret = orig_tearDown()
logging.getLogger().setLevel(old_level)
return ret
self.tearDown = tearDown
logging.getLogger().setLevel(level)
return orig_setUp()
self.setUp = setUp
|
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
- pass
+ def __init__(self, *args, **kwargs):
+ super(TestCase, self).__init__(*args, **kwargs)
+
+ level = getattr(self, "loglevel", NEVER)
+
+ orig_setUp = self.setUp
+
+ def setUp():
+ old_level = logging.getLogger().level
+
+ if old_level != level:
+ orig_tearDown = self.tearDown
+
+ def tearDown():
+ ret = orig_tearDown()
+ logging.getLogger().setLevel(old_level)
+ return ret
+ self.tearDown = tearDown
+
+ logging.getLogger().setLevel(level)
+ return orig_setUp()
+ self.setUp = setUp |
b26a92d1e1480a73de4ce5ebe6ea4630fb3bfbc8 | main.py | main.py | """`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
| """`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
@app.errorhandler(500)
def page_not_found(e):
"""Return a custom 500 error."""
return 'Sorry, unexpected error: {}'.format(e), 500
| Add custom 500 error handler so app handler errors aren't supressed | Add custom 500 error handler so app handler errors aren't supressed
| Python | apache-2.0 | psykidellic/appengine-flask-skeleton,STEMgirlsChina/flask-tools,susnata1981/lendingclub,wink-app/wink,googlearchive/appengine-flask-skeleton,igorg1312/googlepythonsskeleton,lchans/ArcAudit,bruxr/Sirius2,waprin/appengine-flask-skeleton,jonparrott/flask-ferris-example,waprin/appengine-flask-skeleton,jsatch/twitclass,susnata1981/lendingclub,lchans/ArcAudit,igorg1312/googlepythonsskeleton,psykidellic/appengine-flask-skeleton,googlearchive/appengine-flask-skeleton,hefox/ttm,giantoak/memex-cluster-analysis,jholkeboer/tau-graphical-crawler,ThomasMarcel/tom-schneider-flask,klenwell/mushpup-demo,psykidellic/appengine-flask-skeleton,susnata1981/lendingclub,pwojt/beer_app_414,aaleotti-unimore/ComicsScraper,aaleotti-unimore/ComicsScraper,aaleotti-unimore/ComicsScraper,n8henrie/icw,ThomasMarcel/tom-schneider-flask,jonparrott/App-Engine-Flask-Restful-Example,psykidellic/appengine-flask-skeleton,bruxr/Sirius2,wink-app/wink,VipinDevineni/lendingclub,lchans/ArcAudit,kellielu/q,jholkeboer/tau-graphical-crawler,igorg1312/googlepythonsskeleton,kellielu/q,VipinDevineni/lendingclub,VipinDevineni/lendingclub,ThomasMarcel/tom-schneider-flask,wink-app/wink,wd15/rot13,thedataincubator/GAE-Timing,psykidellic/appengine-flask-skeleton,kellielu/q,wink-app/wink,wd15/wiki,jholkeboer/tau-graphical-crawler,hammertoe/didactic-spork,hammertoe/didactic-spork,HeewonLee/asekfubweibfuisdbf,n8henrie/icw,djw8605/GratiaChromeShareApp,googlearchive/appengine-flask-skeleton,hefox/ttm,STEMgirlsChina/flask-tools,ashishthedev/appengine-python-flask-skeleton,hammertoe/didactic-spork,sin111014/asdfasdf,igorg1312/googlepythonsskeleton,googlearchive/appengine-flask-skeleton,welyjesch/gae-flask,hammertoe/didactic-spork,ThomasMarcel/tom-schneider-flask,mnrozhkov/appengine-python-flask-skeleton,jonparrott/flask-ferris-example,n8henrie/icw,jholkeboer/tau-graphical-crawler,n8henrie/icw,jholkeboer/tau-graphical-crawler,welyjesch/gae-flask,n8henrie/icw,klenwell/mushpup-demo,waprin/appengine-flask-ske
leton,klenwell/mushpup-demo,bruxr/Sirius2,STEMgirlsChina/flask-tools,waprin/appengine-flask-skeleton,aaleotti-unimore/ComicsScraper,kellielu/q,aaleotti-unimore/ComicsScraper | """`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
+
+ @app.errorhandler(500)
+ def page_not_found(e):
+ """Return a custom 500 error."""
+ return 'Sorry, unexpected error: {}'.format(e), 500
+ | Add custom 500 error handler so app handler errors aren't supressed | ## Code Before:
"""`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
## Instruction:
Add custom 500 error handler so app handler errors aren't supressed
## Code After:
"""`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
@app.errorhandler(500)
def page_not_found(e):
"""Return a custom 500 error."""
return 'Sorry, unexpected error: {}'.format(e), 500
| """`main` is the top level module for your Flask application."""
# Import the Flask Framework
from flask import Flask
app = Flask(__name__)
# Note: We don't need to call run() since our application is embedded within
# the App Engine WSGI application server.
@app.route('/')
def hello():
"""Return a friendly HTTP greeting."""
return 'Hello World!'
@app.errorhandler(404)
def page_not_found(e):
"""Return a custom 404 error."""
return 'Sorry, Nothing at this URL.', 404
+
+
+ @app.errorhandler(500)
+ def page_not_found(e):
+ """Return a custom 500 error."""
+ return 'Sorry, unexpected error: {}'.format(e), 500 |
90d079928eaf48e370d21417e4d6e649ec0f5f6f | taskwiki/taskwiki.py | taskwiki/taskwiki.py | import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Task is identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.save_tasks()
cache.update_buffer()
if __name__ == '__main__':
update_from_tw()
| import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Task is identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.update_tasks()
cache.save_tasks()
cache.update_buffer()
cache.evaluate_viewports()
if __name__ == '__main__':
update_from_tw()
| Update tasks and evaluate viewports on saving | Taskwiki: Update tasks and evaluate viewports on saving
| Python | mit | phha/taskwiki,Spirotot/taskwiki | import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Task is identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
+ cache.update_tasks()
cache.save_tasks()
cache.update_buffer()
+ cache.evaluate_viewports()
if __name__ == '__main__':
update_from_tw()
| Update tasks and evaluate viewports on saving | ## Code Before:
import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Task is identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.save_tasks()
cache.update_buffer()
if __name__ == '__main__':
update_from_tw()
## Instruction:
Update tasks and evaluate viewports on saving
## Code After:
import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Task is identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
cache.update_tasks()
cache.save_tasks()
cache.update_buffer()
cache.evaluate_viewports()
if __name__ == '__main__':
update_from_tw()
| import sys
import re
import vim
from tasklib.task import TaskWarrior, Task
# Insert the taskwiki on the python path
sys.path.insert(0, vim.eval("s:plugin_path") + '/taskwiki')
from regexp import *
from task import VimwikiTask
from cache import TaskCache
"""
How this plugin works:
1.) On startup, it reads all the tasks and syncs info TW -> Vimwiki file. Task is identified by their
uuid.
2.) When saving, the opposite sync is performed (Vimwiki -> TW direction).
a) if task is marked as subtask by indentation, the dependency is created between
"""
tw = TaskWarrior()
cache = TaskCache(tw)
def update_from_tw():
"""
Updates all the incomplete tasks in the vimwiki file if the info from TW is different.
"""
cache.load_buffer()
cache.update_tasks()
cache.update_buffer()
cache.evaluate_viewports()
def update_to_tw():
"""
Updates all tasks that differ from their TaskWarrior representation.
"""
cache.reset()
cache.load_buffer()
+ cache.update_tasks()
cache.save_tasks()
cache.update_buffer()
+ cache.evaluate_viewports()
if __name__ == '__main__':
update_from_tw() |
0ec6bebb4665185854ccf58c99229bae41ef74d4 | pybtex/tests/bibtex_parser_test.py | pybtex/tests/bibtex_parser_test.py | from pybtex.database import BibliographyData
from pybtex.core import Entry
from pybtex.database.input.bibtex import Parser
from cStringIO import StringIO
test_data = [
(
'''
''',
BibliographyData(),
),
(
'''@ARTICLE{
test,
title={Polluted
with {DDT}.
},
}''',
BibliographyData({u'test': Entry('article', {u'title': 'Polluted with {DDT}.'})}),
),
]
def _test(bibtex_input, correct_result):
parser = Parser(encoding='UTF-8')
parser.parse_stream(StringIO(bibtex_input))
result = parser.data
assert result == correct_result
def test_bibtex_parser():
for bibtex_input, correct_result in test_data:
_test(bibtex_input, correct_result)
| from pybtex.database import BibliographyData
from pybtex.core import Entry
from pybtex.database.input.bibtex import Parser
from cStringIO import StringIO
test_data = [
(
'''
''',
BibliographyData(),
),
(
'''@ARTICLE{
test,
title={Polluted
with {DDT}.
},
}''',
BibliographyData({u'test': Entry('article', {u'title': 'Polluted with {DDT}.'})}),
),
(
'''@ARTICLE{
test,
title="Nested braces and {"quotes"}",
}''',
BibliographyData({u'test': Entry('article', {u'title': 'Nested braces and {"quotes"}'})}),
),
]
def _test(bibtex_input, correct_result):
parser = Parser(encoding='UTF-8')
parser.parse_stream(StringIO(bibtex_input))
result = parser.data
assert result == correct_result
def test_bibtex_parser():
for bibtex_input, correct_result in test_data:
_test(bibtex_input, correct_result)
| Add a test for quoted strings with {"quotes"} in .bib files. | Add a test for quoted strings with {"quotes"} in .bib files.
| Python | mit | live-clones/pybtex | from pybtex.database import BibliographyData
from pybtex.core import Entry
from pybtex.database.input.bibtex import Parser
from cStringIO import StringIO
test_data = [
(
'''
''',
BibliographyData(),
),
(
'''@ARTICLE{
test,
title={Polluted
with {DDT}.
},
}''',
BibliographyData({u'test': Entry('article', {u'title': 'Polluted with {DDT}.'})}),
),
+ (
+ '''@ARTICLE{
+ test,
+ title="Nested braces and {"quotes"}",
+ }''',
+ BibliographyData({u'test': Entry('article', {u'title': 'Nested braces and {"quotes"}'})}),
+ ),
]
def _test(bibtex_input, correct_result):
parser = Parser(encoding='UTF-8')
parser.parse_stream(StringIO(bibtex_input))
result = parser.data
assert result == correct_result
def test_bibtex_parser():
for bibtex_input, correct_result in test_data:
_test(bibtex_input, correct_result)
| Add a test for quoted strings with {"quotes"} in .bib files. | ## Code Before:
from pybtex.database import BibliographyData
from pybtex.core import Entry
from pybtex.database.input.bibtex import Parser
from cStringIO import StringIO
test_data = [
(
'''
''',
BibliographyData(),
),
(
'''@ARTICLE{
test,
title={Polluted
with {DDT}.
},
}''',
BibliographyData({u'test': Entry('article', {u'title': 'Polluted with {DDT}.'})}),
),
]
def _test(bibtex_input, correct_result):
parser = Parser(encoding='UTF-8')
parser.parse_stream(StringIO(bibtex_input))
result = parser.data
assert result == correct_result
def test_bibtex_parser():
for bibtex_input, correct_result in test_data:
_test(bibtex_input, correct_result)
## Instruction:
Add a test for quoted strings with {"quotes"} in .bib files.
## Code After:
from pybtex.database import BibliographyData
from pybtex.core import Entry
from pybtex.database.input.bibtex import Parser
from cStringIO import StringIO
test_data = [
(
'''
''',
BibliographyData(),
),
(
'''@ARTICLE{
test,
title={Polluted
with {DDT}.
},
}''',
BibliographyData({u'test': Entry('article', {u'title': 'Polluted with {DDT}.'})}),
),
(
'''@ARTICLE{
test,
title="Nested braces and {"quotes"}",
}''',
BibliographyData({u'test': Entry('article', {u'title': 'Nested braces and {"quotes"}'})}),
),
]
def _test(bibtex_input, correct_result):
parser = Parser(encoding='UTF-8')
parser.parse_stream(StringIO(bibtex_input))
result = parser.data
assert result == correct_result
def test_bibtex_parser():
for bibtex_input, correct_result in test_data:
_test(bibtex_input, correct_result)
| from pybtex.database import BibliographyData
from pybtex.core import Entry
from pybtex.database.input.bibtex import Parser
from cStringIO import StringIO
test_data = [
(
'''
''',
BibliographyData(),
),
(
'''@ARTICLE{
test,
title={Polluted
with {DDT}.
},
}''',
BibliographyData({u'test': Entry('article', {u'title': 'Polluted with {DDT}.'})}),
),
+ (
+ '''@ARTICLE{
+ test,
+ title="Nested braces and {"quotes"}",
+ }''',
+ BibliographyData({u'test': Entry('article', {u'title': 'Nested braces and {"quotes"}'})}),
+ ),
]
def _test(bibtex_input, correct_result):
parser = Parser(encoding='UTF-8')
parser.parse_stream(StringIO(bibtex_input))
result = parser.data
assert result == correct_result
def test_bibtex_parser():
for bibtex_input, correct_result in test_data:
_test(bibtex_input, correct_result) |
ea3deb560aaddab4d66a84e840e10854cfad581d | nass/__init__.py | nass/__init__.py |
__author__ = 'Nick Frost'
__version__ = '0.1.1'
__license__ = 'MIT'
from .api import NassApi
|
from .api import NassApi
__author__ = 'Nick Frost'
__version__ = '0.1.1'
__license__ = 'MIT'
| Make package-level import at the top (pep8) | Make package-level import at the top (pep8)
| Python | mit | nickfrostatx/nass | +
+ from .api import NassApi
__author__ = 'Nick Frost'
__version__ = '0.1.1'
__license__ = 'MIT'
- from .api import NassApi
- | Make package-level import at the top (pep8) | ## Code Before:
__author__ = 'Nick Frost'
__version__ = '0.1.1'
__license__ = 'MIT'
from .api import NassApi
## Instruction:
Make package-level import at the top (pep8)
## Code After:
from .api import NassApi
__author__ = 'Nick Frost'
__version__ = '0.1.1'
__license__ = 'MIT'
| +
+ from .api import NassApi
__author__ = 'Nick Frost'
__version__ = '0.1.1'
__license__ = 'MIT'
-
- from .api import NassApi |
fab561da9c54e278e7762380bf043a2fe03e39da | xerox/darwin.py | xerox/darwin.py |
import subprocess
import commands
from .base import *
def copy(string):
"""Copy given string into system clipboard."""
try:
subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE).communicate(str(unicode(string)))
except OSError as why:
raise XcodeNotFound
return
def paste():
"""Returns system clipboard contents."""
try:
return unicode(commands.getoutput('pbpaste'))
except OSError as why:
raise XcodeNotFound
|
import subprocess
from .base import *
def copy(string):
"""Copy given string into system clipboard."""
try:
subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE).communicate(str(unicode(string)))
except OSError as why:
raise XcodeNotFound
return
def paste():
"""Returns system clipboard contents."""
try:
return unicode(subprocess.check_output('pbpaste'))
except OSError as why:
raise XcodeNotFound
| Use `subprocess.check_output` rather than `commands.getoutput`. | Use `subprocess.check_output` rather than `commands.getoutput`.
`commands` is deprecated.
| Python | mit | solarce/xerox,kennethreitz/xerox |
import subprocess
- import commands
from .base import *
def copy(string):
"""Copy given string into system clipboard."""
try:
subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE).communicate(str(unicode(string)))
except OSError as why:
raise XcodeNotFound
return
def paste():
"""Returns system clipboard contents."""
try:
- return unicode(commands.getoutput('pbpaste'))
+ return unicode(subprocess.check_output('pbpaste'))
except OSError as why:
raise XcodeNotFound
| Use `subprocess.check_output` rather than `commands.getoutput`. | ## Code Before:
import subprocess
import commands
from .base import *
def copy(string):
"""Copy given string into system clipboard."""
try:
subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE).communicate(str(unicode(string)))
except OSError as why:
raise XcodeNotFound
return
def paste():
"""Returns system clipboard contents."""
try:
return unicode(commands.getoutput('pbpaste'))
except OSError as why:
raise XcodeNotFound
## Instruction:
Use `subprocess.check_output` rather than `commands.getoutput`.
## Code After:
import subprocess
from .base import *
def copy(string):
"""Copy given string into system clipboard."""
try:
subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE).communicate(str(unicode(string)))
except OSError as why:
raise XcodeNotFound
return
def paste():
"""Returns system clipboard contents."""
try:
return unicode(subprocess.check_output('pbpaste'))
except OSError as why:
raise XcodeNotFound
|
import subprocess
- import commands
from .base import *
def copy(string):
"""Copy given string into system clipboard."""
try:
subprocess.Popen(['pbcopy'], stdin=subprocess.PIPE).communicate(str(unicode(string)))
except OSError as why:
raise XcodeNotFound
return
def paste():
"""Returns system clipboard contents."""
try:
- return unicode(commands.getoutput('pbpaste'))
? ^^^^^^ ^ ^
+ return unicode(subprocess.check_output('pbpaste'))
? ++++++ ^^ ^^ ^^^
except OSError as why:
raise XcodeNotFound
|
ecab0066c8ecd63c1aae85ffd04b970539eae71b | genderator/utils.py | genderator/utils.py | from unidecode import unidecode
class Normalizer:
def normalize(text):
text = Normalizer.remove_extra_whitespaces(text)
text = Normalizer.replace_hyphens(text)
# text = Normalizer.remove_accent_marks(text)
return text.lower()
@staticmethod
def replace_hyphens(text):
return text.replace('-', ' ')
@staticmethod
def remove_extra_whitespaces(text):
return ' '.join(text.strip().split());
@staticmethod
def remove_accent_marks(text):
return unidecode(text) | from unidecode import unidecode
class Normalizer:
def normalize(text):
"""
Normalize a given text applying all normalizations.
Params:
text: The text to be processed.
Returns:
The text normalized.
"""
text = Normalizer.remove_extra_whitespaces(text)
text = Normalizer.replace_hyphens(text)
text = Normalizer.remove_accent_marks(text)
return text.lower()
@staticmethod
def replace_hyphens(text):
"""
Remove hyphens from input text.
Params:
text: The text to be processed.
Returns:
The text without hyphens.
"""
return text.replace('-', ' ')
@staticmethod
def remove_extra_whitespaces(text):
"""
Remove extra whitespaces from input text.
This function removes whitespaces from the beginning and the end of
the string, but also duplicated whitespaces between words.
Params:
text: The text to be processed.
Returns:
The text without extra whitespaces.
"""
return ' '.join(text.strip().split());
@staticmethod
def remove_accent_marks(text):
"""
Remove accent marks from input text.
Params:
text: The text to be processed.
Returns:
The text without accent marks.
"""
return unidecode(text) | Add accent marks normalization and missing docstrings | Add accent marks normalization and missing docstrings
| Python | mit | davidmogar/genderator | from unidecode import unidecode
class Normalizer:
def normalize(text):
+ """
+ Normalize a given text applying all normalizations.
+
+ Params:
+ text: The text to be processed.
+
+ Returns:
+ The text normalized.
+ """
text = Normalizer.remove_extra_whitespaces(text)
text = Normalizer.replace_hyphens(text)
- # text = Normalizer.remove_accent_marks(text)
+ text = Normalizer.remove_accent_marks(text)
return text.lower()
@staticmethod
def replace_hyphens(text):
+ """
+ Remove hyphens from input text.
+
+ Params:
+ text: The text to be processed.
+
+ Returns:
+ The text without hyphens.
+ """
return text.replace('-', ' ')
@staticmethod
def remove_extra_whitespaces(text):
+ """
+ Remove extra whitespaces from input text.
+
+ This function removes whitespaces from the beginning and the end of
+ the string, but also duplicated whitespaces between words.
+
+ Params:
+ text: The text to be processed.
+
+ Returns:
+ The text without extra whitespaces.
+ """
return ' '.join(text.strip().split());
@staticmethod
def remove_accent_marks(text):
+ """
+ Remove accent marks from input text.
+
+ Params:
+ text: The text to be processed.
+
+ Returns:
+ The text without accent marks.
+ """
return unidecode(text) | Add accent marks normalization and missing docstrings | ## Code Before:
from unidecode import unidecode
class Normalizer:
def normalize(text):
text = Normalizer.remove_extra_whitespaces(text)
text = Normalizer.replace_hyphens(text)
# text = Normalizer.remove_accent_marks(text)
return text.lower()
@staticmethod
def replace_hyphens(text):
return text.replace('-', ' ')
@staticmethod
def remove_extra_whitespaces(text):
return ' '.join(text.strip().split());
@staticmethod
def remove_accent_marks(text):
return unidecode(text)
## Instruction:
Add accent marks normalization and missing docstrings
## Code After:
from unidecode import unidecode
class Normalizer:
def normalize(text):
"""
Normalize a given text applying all normalizations.
Params:
text: The text to be processed.
Returns:
The text normalized.
"""
text = Normalizer.remove_extra_whitespaces(text)
text = Normalizer.replace_hyphens(text)
text = Normalizer.remove_accent_marks(text)
return text.lower()
@staticmethod
def replace_hyphens(text):
"""
Remove hyphens from input text.
Params:
text: The text to be processed.
Returns:
The text without hyphens.
"""
return text.replace('-', ' ')
@staticmethod
def remove_extra_whitespaces(text):
"""
Remove extra whitespaces from input text.
This function removes whitespaces from the beginning and the end of
the string, but also duplicated whitespaces between words.
Params:
text: The text to be processed.
Returns:
The text without extra whitespaces.
"""
return ' '.join(text.strip().split());
@staticmethod
def remove_accent_marks(text):
"""
Remove accent marks from input text.
Params:
text: The text to be processed.
Returns:
The text without accent marks.
"""
return unidecode(text) | from unidecode import unidecode
class Normalizer:
def normalize(text):
+ """
+ Normalize a given text applying all normalizations.
+
+ Params:
+ text: The text to be processed.
+
+ Returns:
+ The text normalized.
+ """
text = Normalizer.remove_extra_whitespaces(text)
text = Normalizer.replace_hyphens(text)
- # text = Normalizer.remove_accent_marks(text)
? --
+ text = Normalizer.remove_accent_marks(text)
return text.lower()
@staticmethod
def replace_hyphens(text):
+ """
+ Remove hyphens from input text.
+
+ Params:
+ text: The text to be processed.
+
+ Returns:
+ The text without hyphens.
+ """
return text.replace('-', ' ')
@staticmethod
def remove_extra_whitespaces(text):
+ """
+ Remove extra whitespaces from input text.
+
+ This function removes whitespaces from the beginning and the end of
+ the string, but also duplicated whitespaces between words.
+
+ Params:
+ text: The text to be processed.
+
+ Returns:
+ The text without extra whitespaces.
+ """
return ' '.join(text.strip().split());
@staticmethod
def remove_accent_marks(text):
+ """
+ Remove accent marks from input text.
+
+ Params:
+ text: The text to be processed.
+
+ Returns:
+ The text without accent marks.
+ """
return unidecode(text) |
b6139583bf5074c73c0de6626391b6f128ed6e34 | export_jars.py | export_jars.py |
import os
import shutil
from glob import glob
from subprocess import call, check_output
OUTPUT_DIR_NAME = 'jars'
def call_unsafe(*args, **kwargs):
kwargs['shell'] = True
call(*args, **kwargs)
call_unsafe('./gradlew clean javadocRelease jarRelease')
try:
os.mkdir(OUTPUT_DIR_NAME)
except OSError:
pass
os.chdir(OUTPUT_DIR_NAME)
call_unsafe('cp ../beansdk/build/libs/*.jar .')
commit = check_output(['git', 'rev-parse', 'HEAD'])[:7]
for src in glob('*.jar'):
name, ext = os.path.splitext(src)
dest = name + '-' + commit + ext
shutil.move(src, dest)
call_unsafe('open .')
|
import os
import shutil
from glob import glob
from subprocess import call, check_output
OUTPUT_DIR_NAME = 'jars'
def call_unsafe(*args, **kwargs):
kwargs['shell'] = True
call(*args, **kwargs)
call_unsafe('./gradlew clean javadocRelease jarRelease')
try:
os.mkdir(OUTPUT_DIR_NAME)
except OSError:
pass
os.chdir(OUTPUT_DIR_NAME)
call_unsafe('rm *.jar')
call_unsafe('cp ../beansdk/build/libs/*.jar .')
commit = check_output(['git', 'rev-parse', 'HEAD'])[:7]
for src in glob('*.jar'):
name, ext = os.path.splitext(src)
dest = name + '-' + commit + ext
shutil.move(src, dest)
call_unsafe('open .')
| Remove existing JARs before building new ones | Remove existing JARs before building new ones
| Python | mit | swstack/Bean-Android-SDK,PunchThrough/bean-sdk-android,colus001/Bean-Android-SDK,PunchThrough/Bean-Android-SDK,hongbinz/Bean-Android-SDK,androidgrl/Bean-Android-SDK,PunchThrough/Bean-Android-SDK,swstack/Bean-Android-SDK,PunchThrough/bean-sdk-android,androidgrl/Bean-Android-SDK,hongbinz/Bean-Android-SDK,colus001/Bean-Android-SDK |
import os
import shutil
from glob import glob
from subprocess import call, check_output
OUTPUT_DIR_NAME = 'jars'
def call_unsafe(*args, **kwargs):
kwargs['shell'] = True
call(*args, **kwargs)
call_unsafe('./gradlew clean javadocRelease jarRelease')
try:
os.mkdir(OUTPUT_DIR_NAME)
except OSError:
pass
os.chdir(OUTPUT_DIR_NAME)
+ call_unsafe('rm *.jar')
call_unsafe('cp ../beansdk/build/libs/*.jar .')
commit = check_output(['git', 'rev-parse', 'HEAD'])[:7]
for src in glob('*.jar'):
name, ext = os.path.splitext(src)
dest = name + '-' + commit + ext
shutil.move(src, dest)
call_unsafe('open .')
| Remove existing JARs before building new ones | ## Code Before:
import os
import shutil
from glob import glob
from subprocess import call, check_output
OUTPUT_DIR_NAME = 'jars'
def call_unsafe(*args, **kwargs):
kwargs['shell'] = True
call(*args, **kwargs)
call_unsafe('./gradlew clean javadocRelease jarRelease')
try:
os.mkdir(OUTPUT_DIR_NAME)
except OSError:
pass
os.chdir(OUTPUT_DIR_NAME)
call_unsafe('cp ../beansdk/build/libs/*.jar .')
commit = check_output(['git', 'rev-parse', 'HEAD'])[:7]
for src in glob('*.jar'):
name, ext = os.path.splitext(src)
dest = name + '-' + commit + ext
shutil.move(src, dest)
call_unsafe('open .')
## Instruction:
Remove existing JARs before building new ones
## Code After:
import os
import shutil
from glob import glob
from subprocess import call, check_output
OUTPUT_DIR_NAME = 'jars'
def call_unsafe(*args, **kwargs):
kwargs['shell'] = True
call(*args, **kwargs)
call_unsafe('./gradlew clean javadocRelease jarRelease')
try:
os.mkdir(OUTPUT_DIR_NAME)
except OSError:
pass
os.chdir(OUTPUT_DIR_NAME)
call_unsafe('rm *.jar')
call_unsafe('cp ../beansdk/build/libs/*.jar .')
commit = check_output(['git', 'rev-parse', 'HEAD'])[:7]
for src in glob('*.jar'):
name, ext = os.path.splitext(src)
dest = name + '-' + commit + ext
shutil.move(src, dest)
call_unsafe('open .')
|
import os
import shutil
from glob import glob
from subprocess import call, check_output
OUTPUT_DIR_NAME = 'jars'
def call_unsafe(*args, **kwargs):
kwargs['shell'] = True
call(*args, **kwargs)
call_unsafe('./gradlew clean javadocRelease jarRelease')
try:
os.mkdir(OUTPUT_DIR_NAME)
except OSError:
pass
os.chdir(OUTPUT_DIR_NAME)
+ call_unsafe('rm *.jar')
call_unsafe('cp ../beansdk/build/libs/*.jar .')
commit = check_output(['git', 'rev-parse', 'HEAD'])[:7]
for src in glob('*.jar'):
name, ext = os.path.splitext(src)
dest = name + '-' + commit + ext
shutil.move(src, dest)
call_unsafe('open .') |
347853290ebc4f5c47430ffce7d603eb4fead2d9 | cpt/test/integration/update_python_reqs_test.py | cpt/test/integration/update_python_reqs_test.py | import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/testing: 123,234", client.out)
| import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/", client.out)
self.assertIn(": 123,234", client.out)
| Fix pyreq test on Windows | Fix pyreq test on Windows
Signed-off-by: Uilian Ries <d4bad57018205bdda203549c36d3feb0bfe416a7@gmail.com>
| Python | mit | conan-io/conan-package-tools | import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
- self.assertIn("pyreq/1.0.0@user/testing: 123,234", client.out)
+ self.assertIn("pyreq/1.0.0@user/", client.out)
+ self.assertIn(": 123,234", client.out)
| Fix pyreq test on Windows | ## Code Before:
import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/testing: 123,234", client.out)
## Instruction:
Fix pyreq test on Windows
## Code After:
import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
self.assertIn("pyreq/1.0.0@user/", client.out)
self.assertIn(": 123,234", client.out)
| import unittest
from conans.test.utils.tools import TestClient
from cpt.test.test_client.tools import get_patched_multipackager
class PythonRequiresTest(unittest.TestCase):
def test_python_requires(self):
base_conanfile = """from conans import ConanFile
myvar = 123
def myfunct():
return 234
class Pkg(ConanFile):
pass
"""
conanfile = """from conans import ConanFile
class Pkg(ConanFile):
name = "pyreq"
version = "1.0.0"
python_requires = "pyreq_base/0.1@user/channel"
def build(self):
v = self.python_requires["pyreq_base"].module.myvar
f = self.python_requires["pyreq_base"].module.myfunct()
self.output.info("%s,%s" % (v, f))
"""
client = TestClient()
client.save({"conanfile_base.py": base_conanfile})
client.run("export conanfile_base.py pyreq_base/0.1@user/channel")
client.save({"conanfile.py": conanfile})
mulitpackager = get_patched_multipackager(client, username="user",
channel="testing",
exclude_vcvars_precommand=True)
mulitpackager.add({}, {})
mulitpackager.run()
- self.assertIn("pyreq/1.0.0@user/testing: 123,234", client.out)
? ----------------
+ self.assertIn("pyreq/1.0.0@user/", client.out)
+ self.assertIn(": 123,234", client.out) |
fe98a627943c235ba24fc6de781deec69e7fd02e | relayer/__init__.py | relayer/__init__.py | from kafka import KafkaProducer
from .event_emitter import EventEmitter
from .exceptions import ConfigurationError
__version__ = '0.1.3'
class Relayer(object):
def __init__(self, logging_topic, context_handler_class, kafka_hosts=None, topic_prefix='', topic_suffix='', source=''):
self.logging_topic = logging_topic
if not kafka_hosts:
raise ConfigurationError()
if source == '':
self.source = '{0}{1}{2}'.format(topic_prefix, logging_topic, topic_suffix)
else:
self.source = source
producer = KafkaProducer(bootstrap_servers=kafka_hosts)
emitter = EventEmitter(producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
self.context = context_handler_class(emitter)
def emit(self, event_type, event_subtype, payload, partition_key=None):
payload = {
'source': self.source,
'event_type': event_type,
'event_subtype': event_subtype,
'payload': payload
}
self.context.emit(event_type, payload, partition_key)
def emit_raw(self, topic, message, partition_key=None):
self.context.emit(topic, message, partition_key)
def log(self, log_level, payload):
message = {
'log_level': log_level,
'payload': payload
}
self.context.log(message)
def flush(self):
self.emitter.flush()
| from kafka import KafkaProducer
from .event_emitter import EventEmitter
from .exceptions import ConfigurationError
__version__ = '0.1.3'
class Relayer(object):
def __init__(self, logging_topic, context_handler_class, kafka_hosts=None, topic_prefix='', topic_suffix='', source=''):
self.logging_topic = logging_topic
if not kafka_hosts:
raise ConfigurationError()
if source == '':
self.source = '{0}{1}{2}'.format(topic_prefix, logging_topic, topic_suffix)
else:
self.source = source
self._producer = KafkaProducer(bootstrap_servers=kafka_hosts)
self._emitter = EventEmitter(self._producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
self.context = context_handler_class(self._emitter)
def emit(self, event_type, event_subtype, payload, partition_key=None):
payload = {
'source': self.source,
'event_type': event_type,
'event_subtype': event_subtype,
'payload': payload
}
self.context.emit(event_type, payload, partition_key)
def emit_raw(self, topic, message, partition_key=None):
self.context.emit(topic, message, partition_key)
def log(self, log_level, payload):
message = {
'log_level': log_level,
'payload': payload
}
self.context.log(message)
def flush(self):
self._emitter.flush()
| Save event emitter y producer reference in relayer instance | Save event emitter y producer reference in relayer instance
| Python | mit | wizeline/relayer | from kafka import KafkaProducer
from .event_emitter import EventEmitter
from .exceptions import ConfigurationError
__version__ = '0.1.3'
class Relayer(object):
def __init__(self, logging_topic, context_handler_class, kafka_hosts=None, topic_prefix='', topic_suffix='', source=''):
self.logging_topic = logging_topic
if not kafka_hosts:
raise ConfigurationError()
if source == '':
self.source = '{0}{1}{2}'.format(topic_prefix, logging_topic, topic_suffix)
else:
self.source = source
- producer = KafkaProducer(bootstrap_servers=kafka_hosts)
+ self._producer = KafkaProducer(bootstrap_servers=kafka_hosts)
- emitter = EventEmitter(producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
+ self._emitter = EventEmitter(self._producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
- self.context = context_handler_class(emitter)
+ self.context = context_handler_class(self._emitter)
def emit(self, event_type, event_subtype, payload, partition_key=None):
payload = {
'source': self.source,
'event_type': event_type,
'event_subtype': event_subtype,
'payload': payload
}
self.context.emit(event_type, payload, partition_key)
def emit_raw(self, topic, message, partition_key=None):
self.context.emit(topic, message, partition_key)
def log(self, log_level, payload):
message = {
'log_level': log_level,
'payload': payload
}
self.context.log(message)
def flush(self):
- self.emitter.flush()
+ self._emitter.flush()
| Save event emitter y producer reference in relayer instance | ## Code Before:
from kafka import KafkaProducer
from .event_emitter import EventEmitter
from .exceptions import ConfigurationError
__version__ = '0.1.3'
class Relayer(object):
def __init__(self, logging_topic, context_handler_class, kafka_hosts=None, topic_prefix='', topic_suffix='', source=''):
self.logging_topic = logging_topic
if not kafka_hosts:
raise ConfigurationError()
if source == '':
self.source = '{0}{1}{2}'.format(topic_prefix, logging_topic, topic_suffix)
else:
self.source = source
producer = KafkaProducer(bootstrap_servers=kafka_hosts)
emitter = EventEmitter(producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
self.context = context_handler_class(emitter)
def emit(self, event_type, event_subtype, payload, partition_key=None):
payload = {
'source': self.source,
'event_type': event_type,
'event_subtype': event_subtype,
'payload': payload
}
self.context.emit(event_type, payload, partition_key)
def emit_raw(self, topic, message, partition_key=None):
self.context.emit(topic, message, partition_key)
def log(self, log_level, payload):
message = {
'log_level': log_level,
'payload': payload
}
self.context.log(message)
def flush(self):
self.emitter.flush()
## Instruction:
Save event emitter y producer reference in relayer instance
## Code After:
from kafka import KafkaProducer
from .event_emitter import EventEmitter
from .exceptions import ConfigurationError
__version__ = '0.1.3'
class Relayer(object):
def __init__(self, logging_topic, context_handler_class, kafka_hosts=None, topic_prefix='', topic_suffix='', source=''):
self.logging_topic = logging_topic
if not kafka_hosts:
raise ConfigurationError()
if source == '':
self.source = '{0}{1}{2}'.format(topic_prefix, logging_topic, topic_suffix)
else:
self.source = source
self._producer = KafkaProducer(bootstrap_servers=kafka_hosts)
self._emitter = EventEmitter(self._producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
self.context = context_handler_class(self._emitter)
def emit(self, event_type, event_subtype, payload, partition_key=None):
payload = {
'source': self.source,
'event_type': event_type,
'event_subtype': event_subtype,
'payload': payload
}
self.context.emit(event_type, payload, partition_key)
def emit_raw(self, topic, message, partition_key=None):
self.context.emit(topic, message, partition_key)
def log(self, log_level, payload):
message = {
'log_level': log_level,
'payload': payload
}
self.context.log(message)
def flush(self):
self._emitter.flush()
| from kafka import KafkaProducer
from .event_emitter import EventEmitter
from .exceptions import ConfigurationError
__version__ = '0.1.3'
class Relayer(object):
def __init__(self, logging_topic, context_handler_class, kafka_hosts=None, topic_prefix='', topic_suffix='', source=''):
self.logging_topic = logging_topic
if not kafka_hosts:
raise ConfigurationError()
if source == '':
self.source = '{0}{1}{2}'.format(topic_prefix, logging_topic, topic_suffix)
else:
self.source = source
- producer = KafkaProducer(bootstrap_servers=kafka_hosts)
+ self._producer = KafkaProducer(bootstrap_servers=kafka_hosts)
? ++++++
- emitter = EventEmitter(producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
+ self._emitter = EventEmitter(self._producer, topic_prefix=topic_prefix, topic_suffix=topic_suffix)
? ++++++ ++++++
- self.context = context_handler_class(emitter)
+ self.context = context_handler_class(self._emitter)
? ++++++
def emit(self, event_type, event_subtype, payload, partition_key=None):
payload = {
'source': self.source,
'event_type': event_type,
'event_subtype': event_subtype,
'payload': payload
}
self.context.emit(event_type, payload, partition_key)
def emit_raw(self, topic, message, partition_key=None):
self.context.emit(topic, message, partition_key)
def log(self, log_level, payload):
message = {
'log_level': log_level,
'payload': payload
}
self.context.log(message)
def flush(self):
- self.emitter.flush()
+ self._emitter.flush()
? +
|
41c6b1820e8b23079d9098526854c9a60859d128 | gcloud_expenses/test_views.py | gcloud_expenses/test_views.py | import unittest
class ViewTests(unittest.TestCase):
def setUp(self):
from pyramid import testing
self.config = testing.setUp()
def tearDown(self):
from pyramid import testing
testing.tearDown()
def test_my_view(self):
from pyramid import testing
from .views import my_view
request = testing.DummyRequest()
info = my_view(request)
self.assertEqual(info['project'], 'foo')
| import unittest
class ViewTests(unittest.TestCase):
def setUp(self):
from pyramid import testing
self.config = testing.setUp()
def tearDown(self):
from pyramid import testing
testing.tearDown()
def test_home_page(self):
from pyramid import testing
from .views import home_page
request = testing.DummyRequest()
info = home_page(request)
self.assertEqual(info, {})
| Fix test broken in rename. | Fix test broken in rename.
| Python | apache-2.0 | GoogleCloudPlatform/google-cloud-python-expenses-demo,GoogleCloudPlatform/google-cloud-python-expenses-demo | import unittest
class ViewTests(unittest.TestCase):
def setUp(self):
from pyramid import testing
self.config = testing.setUp()
def tearDown(self):
from pyramid import testing
testing.tearDown()
- def test_my_view(self):
+ def test_home_page(self):
from pyramid import testing
- from .views import my_view
+ from .views import home_page
request = testing.DummyRequest()
- info = my_view(request)
+ info = home_page(request)
- self.assertEqual(info['project'], 'foo')
+ self.assertEqual(info, {})
| Fix test broken in rename. | ## Code Before:
import unittest
class ViewTests(unittest.TestCase):
def setUp(self):
from pyramid import testing
self.config = testing.setUp()
def tearDown(self):
from pyramid import testing
testing.tearDown()
def test_my_view(self):
from pyramid import testing
from .views import my_view
request = testing.DummyRequest()
info = my_view(request)
self.assertEqual(info['project'], 'foo')
## Instruction:
Fix test broken in rename.
## Code After:
import unittest
class ViewTests(unittest.TestCase):
def setUp(self):
from pyramid import testing
self.config = testing.setUp()
def tearDown(self):
from pyramid import testing
testing.tearDown()
def test_home_page(self):
from pyramid import testing
from .views import home_page
request = testing.DummyRequest()
info = home_page(request)
self.assertEqual(info, {})
| import unittest
class ViewTests(unittest.TestCase):
def setUp(self):
from pyramid import testing
self.config = testing.setUp()
def tearDown(self):
from pyramid import testing
testing.tearDown()
- def test_my_view(self):
? ^ ^^ -
+ def test_home_page(self):
? ++ ^ ^^^
from pyramid import testing
- from .views import my_view
? ^ ^^ -
+ from .views import home_page
? ++ ^ ^^^
request = testing.DummyRequest()
- info = my_view(request)
? ^ ^^ -
+ info = home_page(request)
? ++ ^ ^^^
- self.assertEqual(info['project'], 'foo')
? ----------- ^^^^^
+ self.assertEqual(info, {})
? ^^
|
20d7c4113a96c92f8353761da2c2a00ed7a35e0e | gym_ple/__init__.py | gym_ple/__init__.py | from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
timestep_limit=10000,
nondeterministic=nondeterministic,
)
| from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
tags={'wrapper_config.TimeLimit.max_episode_steps': 10000},
nondeterministic=nondeterministic,
)
| Replace the timestep_limit call with the new tags api. | Replace the timestep_limit call with the new tags api.
| Python | mit | lusob/gym-ple | from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
- timestep_limit=10000,
+ tags={'wrapper_config.TimeLimit.max_episode_steps': 10000},
nondeterministic=nondeterministic,
)
| Replace the timestep_limit call with the new tags api. | ## Code Before:
from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
timestep_limit=10000,
nondeterministic=nondeterministic,
)
## Instruction:
Replace the timestep_limit call with the new tags api.
## Code After:
from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
tags={'wrapper_config.TimeLimit.max_episode_steps': 10000},
nondeterministic=nondeterministic,
)
| from gym.envs.registration import registry, register, make, spec
from gym_ple.ple_env import PLEEnv
# Pygame
# ----------------------------------------
for game in ['Catcher', 'MonsterKong', 'FlappyBird', 'PixelCopter', 'PuckWorld', 'RaycastMaze', 'Snake', 'WaterWorld']:
nondeterministic = False
register(
id='{}-v0'.format(game),
entry_point='gym_ple:PLEEnv',
kwargs={'game_name': game, 'display_screen':False},
- timestep_limit=10000,
+ tags={'wrapper_config.TimeLimit.max_episode_steps': 10000},
nondeterministic=nondeterministic,
) |
978b9450346f4de687ed3c23bc11c970538e948b | mosecom_air/api/parser.py | mosecom_air/api/parser.py |
from collections import namedtuple
Substance = namedtuple('Substance', 'name alias')
Measurement = namedtuple('Measurement', 'substance unit performed value')
Result = namedtuple('Result', 'measurements substances units station_alias')
|
from collections import namedtuple
Substance = namedtuple('Substance', ('name', 'alias'))
Measurement = namedtuple('Measurement', ('substance', 'unit', 'performed',
'value'))
Result = namedtuple('Result', ('measurements', 'substances', 'units',
'station_alias'))
| Use tuple in namedtuples initization | Use tuple in namedtuples initization
| Python | mit | elsid/mosecom-air,elsid/mosecom-air,elsid/mosecom-air |
from collections import namedtuple
- Substance = namedtuple('Substance', 'name alias')
+ Substance = namedtuple('Substance', ('name', 'alias'))
- Measurement = namedtuple('Measurement', 'substance unit performed value')
+ Measurement = namedtuple('Measurement', ('substance', 'unit', 'performed',
+ 'value'))
- Result = namedtuple('Result', 'measurements substances units station_alias')
+ Result = namedtuple('Result', ('measurements', 'substances', 'units',
+ 'station_alias'))
| Use tuple in namedtuples initization | ## Code Before:
from collections import namedtuple
Substance = namedtuple('Substance', 'name alias')
Measurement = namedtuple('Measurement', 'substance unit performed value')
Result = namedtuple('Result', 'measurements substances units station_alias')
## Instruction:
Use tuple in namedtuples initization
## Code After:
from collections import namedtuple
Substance = namedtuple('Substance', ('name', 'alias'))
Measurement = namedtuple('Measurement', ('substance', 'unit', 'performed',
'value'))
Result = namedtuple('Result', ('measurements', 'substances', 'units',
'station_alias'))
|
from collections import namedtuple
- Substance = namedtuple('Substance', 'name alias')
+ Substance = namedtuple('Substance', ('name', 'alias'))
? + ++ + +
- Measurement = namedtuple('Measurement', 'substance unit performed value')
? ------ ^
+ Measurement = namedtuple('Measurement', ('substance', 'unit', 'performed',
? + ++ + ++ + ^
+ 'value'))
- Result = namedtuple('Result', 'measurements substances units station_alias')
? -------------- ^
+ Result = namedtuple('Result', ('measurements', 'substances', 'units',
? + ++ + ++ + ^
+ 'station_alias')) |
df58b36b6f62c39030d6ff28c6fb67c11f112df0 | pyxrf/gui_module/main_window.py | pyxrf/gui_module/main_window.py | from PyQt5.QtWidgets import QMainWindow
_main_window_geometry = {
"initial_height": 800,
"initial_width": 1000,
"min_height": 400,
"min_width": 500,
}
class MainWindow(QMainWindow):
def __init__(self):
super().__init__()
self.initialize()
def initialize(self):
self.resize(_main_window_geometry["initial_width"],
_main_window_geometry["initial_height"])
self.setMinimumWidth(_main_window_geometry["min_width"])
self.setMinimumHeight(_main_window_geometry["min_height"])
| from PyQt5.QtWidgets import QMainWindow
_main_window_geometry = {
"initial_height": 800,
"initial_width": 1000,
"min_height": 400,
"min_width": 500,
}
class MainWindow(QMainWindow):
def __init__(self):
super().__init__()
self.initialize()
def initialize(self):
self.resize(_main_window_geometry["initial_width"],
_main_window_geometry["initial_height"])
self.setMinimumWidth(_main_window_geometry["min_width"])
self.setMinimumHeight(_main_window_geometry["min_height"])
self.setWindowTitle("PyXRF window title") | Test window title on Mac | Test window title on Mac
| Python | bsd-3-clause | NSLS-II-HXN/PyXRF,NSLS-II/PyXRF,NSLS-II-HXN/PyXRF | from PyQt5.QtWidgets import QMainWindow
_main_window_geometry = {
"initial_height": 800,
"initial_width": 1000,
"min_height": 400,
"min_width": 500,
}
class MainWindow(QMainWindow):
def __init__(self):
super().__init__()
self.initialize()
def initialize(self):
self.resize(_main_window_geometry["initial_width"],
_main_window_geometry["initial_height"])
self.setMinimumWidth(_main_window_geometry["min_width"])
self.setMinimumHeight(_main_window_geometry["min_height"])
+ self.setWindowTitle("PyXRF window title") | Test window title on Mac | ## Code Before:
from PyQt5.QtWidgets import QMainWindow
_main_window_geometry = {
"initial_height": 800,
"initial_width": 1000,
"min_height": 400,
"min_width": 500,
}
class MainWindow(QMainWindow):
def __init__(self):
super().__init__()
self.initialize()
def initialize(self):
self.resize(_main_window_geometry["initial_width"],
_main_window_geometry["initial_height"])
self.setMinimumWidth(_main_window_geometry["min_width"])
self.setMinimumHeight(_main_window_geometry["min_height"])
## Instruction:
Test window title on Mac
## Code After:
from PyQt5.QtWidgets import QMainWindow
_main_window_geometry = {
"initial_height": 800,
"initial_width": 1000,
"min_height": 400,
"min_width": 500,
}
class MainWindow(QMainWindow):
def __init__(self):
super().__init__()
self.initialize()
def initialize(self):
self.resize(_main_window_geometry["initial_width"],
_main_window_geometry["initial_height"])
self.setMinimumWidth(_main_window_geometry["min_width"])
self.setMinimumHeight(_main_window_geometry["min_height"])
self.setWindowTitle("PyXRF window title") | from PyQt5.QtWidgets import QMainWindow
_main_window_geometry = {
"initial_height": 800,
"initial_width": 1000,
"min_height": 400,
"min_width": 500,
}
class MainWindow(QMainWindow):
def __init__(self):
super().__init__()
self.initialize()
def initialize(self):
self.resize(_main_window_geometry["initial_width"],
_main_window_geometry["initial_height"])
self.setMinimumWidth(_main_window_geometry["min_width"])
self.setMinimumHeight(_main_window_geometry["min_height"])
+
+ self.setWindowTitle("PyXRF window title") |
202fba50c287d3df99b22a4f30a96a3d8d9c8141 | tests/test_pypi.py | tests/test_pypi.py | from unittest import TestCase
from semantic_release.pypi import upload_to_pypi
from . import mock
class PypiTests(TestCase):
@mock.patch('semantic_release.pypi.run')
def test_upload_without_arguments(self, mock_run):
upload_to_pypi(username='username', password='password')
self.assertEqual(
mock_run.call_args_list,
[
mock.call('python setup.py sdist bdist_wheel'),
mock.call('twine upload -u username -p password dist/*'),
mock.call('rm -rf build dist')
]
)
| from unittest import TestCase
from semantic_release.pypi import upload_to_pypi
from . import mock
class PypiTests(TestCase):
@mock.patch('semantic_release.pypi.run')
def test_upload_without_arguments(self, mock_run):
upload_to_pypi(username='username', password='password')
self.assertEqual(
mock_run.call_args_list,
[
mock.call('rm -rf build dist'),
mock.call('python setup.py sdist bdist_wheel'),
mock.call('twine upload -u username -p password dist/*'),
mock.call('rm -rf build dist')
]
)
| Update test after adding cleaning of dist | test: Update test after adding cleaning of dist
| Python | mit | relekang/python-semantic-release,relekang/python-semantic-release | from unittest import TestCase
from semantic_release.pypi import upload_to_pypi
from . import mock
class PypiTests(TestCase):
@mock.patch('semantic_release.pypi.run')
def test_upload_without_arguments(self, mock_run):
upload_to_pypi(username='username', password='password')
self.assertEqual(
mock_run.call_args_list,
[
+ mock.call('rm -rf build dist'),
mock.call('python setup.py sdist bdist_wheel'),
mock.call('twine upload -u username -p password dist/*'),
mock.call('rm -rf build dist')
]
)
| Update test after adding cleaning of dist | ## Code Before:
from unittest import TestCase
from semantic_release.pypi import upload_to_pypi
from . import mock
class PypiTests(TestCase):
@mock.patch('semantic_release.pypi.run')
def test_upload_without_arguments(self, mock_run):
upload_to_pypi(username='username', password='password')
self.assertEqual(
mock_run.call_args_list,
[
mock.call('python setup.py sdist bdist_wheel'),
mock.call('twine upload -u username -p password dist/*'),
mock.call('rm -rf build dist')
]
)
## Instruction:
Update test after adding cleaning of dist
## Code After:
from unittest import TestCase
from semantic_release.pypi import upload_to_pypi
from . import mock
class PypiTests(TestCase):
@mock.patch('semantic_release.pypi.run')
def test_upload_without_arguments(self, mock_run):
upload_to_pypi(username='username', password='password')
self.assertEqual(
mock_run.call_args_list,
[
mock.call('rm -rf build dist'),
mock.call('python setup.py sdist bdist_wheel'),
mock.call('twine upload -u username -p password dist/*'),
mock.call('rm -rf build dist')
]
)
| from unittest import TestCase
from semantic_release.pypi import upload_to_pypi
from . import mock
class PypiTests(TestCase):
@mock.patch('semantic_release.pypi.run')
def test_upload_without_arguments(self, mock_run):
upload_to_pypi(username='username', password='password')
self.assertEqual(
mock_run.call_args_list,
[
+ mock.call('rm -rf build dist'),
mock.call('python setup.py sdist bdist_wheel'),
mock.call('twine upload -u username -p password dist/*'),
mock.call('rm -rf build dist')
]
) |
bd0800d46126d963f1ae107924a632752bc94173 | indra/sources/bel/__init__.py | indra/sources/bel/__init__.py | from .api import process_ndex_neighborhood
from .api import process_belrdf
from .api import process_belscript
from .api import process_pybel_graph
from .api import process_json_file
from .api import process_pybel_neighborhood
| from .api import process_ndex_neighborhood, process_belrdf, \
process_belscript, process_pybel_graph, process_json_file, \
process_pybel_neighborhood, process_cbn_jgif_file
| Add all endpoints to BEL API | Add all endpoints to BEL API
| Python | bsd-2-clause | johnbachman/indra,sorgerlab/indra,bgyori/indra,bgyori/indra,johnbachman/indra,sorgerlab/indra,johnbachman/belpy,bgyori/indra,sorgerlab/belpy,sorgerlab/belpy,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,johnbachman/indra,sorgerlab/indra | - from .api import process_ndex_neighborhood
+ from .api import process_ndex_neighborhood, process_belrdf, \
+ process_belscript, process_pybel_graph, process_json_file, \
+ process_pybel_neighborhood, process_cbn_jgif_file
- from .api import process_belrdf
- from .api import process_belscript
- from .api import process_pybel_graph
- from .api import process_json_file
- from .api import process_pybel_neighborhood
| Add all endpoints to BEL API | ## Code Before:
from .api import process_ndex_neighborhood
from .api import process_belrdf
from .api import process_belscript
from .api import process_pybel_graph
from .api import process_json_file
from .api import process_pybel_neighborhood
## Instruction:
Add all endpoints to BEL API
## Code After:
from .api import process_ndex_neighborhood, process_belrdf, \
process_belscript, process_pybel_graph, process_json_file, \
process_pybel_neighborhood, process_cbn_jgif_file
| - from .api import process_ndex_neighborhood
+ from .api import process_ndex_neighborhood, process_belrdf, \
? +++++++++++++++++++
+ process_belscript, process_pybel_graph, process_json_file, \
+ process_pybel_neighborhood, process_cbn_jgif_file
- from .api import process_belrdf
- from .api import process_belscript
- from .api import process_pybel_graph
- from .api import process_json_file
- from .api import process_pybel_neighborhood |
d1bd82008c21942dee0ed29ba6d4f9eb54f2af33 | issues/signals.py | issues/signals.py | from django.dispatch import Signal
#: Signal fired when a new issue is posted via the API.
issue_posted = Signal(providing_args=('request', 'issue'))
| from django.dispatch import Signal
#: Signal fired when a new issue is posted via the API.
issue_posted = Signal() # Provides arguments: ('request', 'issue')
| Remove documenting argument from Signal | Remove documenting argument from Signal
| Python | mit | 6aika/issue-reporting,6aika/issue-reporting,6aika/issue-reporting | from django.dispatch import Signal
#: Signal fired when a new issue is posted via the API.
- issue_posted = Signal(providing_args=('request', 'issue'))
+ issue_posted = Signal() # Provides arguments: ('request', 'issue')
| Remove documenting argument from Signal | ## Code Before:
from django.dispatch import Signal
#: Signal fired when a new issue is posted via the API.
issue_posted = Signal(providing_args=('request', 'issue'))
## Instruction:
Remove documenting argument from Signal
## Code After:
from django.dispatch import Signal
#: Signal fired when a new issue is posted via the API.
issue_posted = Signal() # Provides arguments: ('request', 'issue')
| from django.dispatch import Signal
#: Signal fired when a new issue is posted via the API.
- issue_posted = Signal(providing_args=('request', 'issue'))
? ^ ^^^^ ^ -
+ issue_posted = Signal() # Provides arguments: ('request', 'issue')
? ^^^^^^ ^^^ +++++ ^^
|
226a4c1af180f0bf1924a84c76d1d2b300557e9b | instana/instrumentation/urllib3.py | instana/instrumentation/urllib3.py | from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
raise
else:
span.finish()
return rv
| from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
span.finish()
raise
else:
span.finish()
return rv
| Make sure to finish span when there is an exception | Make sure to finish span when there is an exception
| Python | mit | instana/python-sensor,instana/python-sensor | from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
+ span.finish()
raise
else:
span.finish()
return rv
| Make sure to finish span when there is an exception | ## Code Before:
from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
raise
else:
span.finish()
return rv
## Instruction:
Make sure to finish span when there is an exception
## Code After:
from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
span.finish()
raise
else:
span.finish()
return rv
| from __future__ import absolute_import
import opentracing.ext.tags as ext
import instana
import opentracing
import wrapt
@wrapt.patch_function_wrapper('urllib3', 'PoolManager.urlopen')
def urlopen_with_instana(wrapped, instance, args, kwargs):
try:
span = instana.internal_tracer.start_span("urllib3")
span.set_tag(ext.HTTP_URL, args[1])
span.set_tag(ext.HTTP_METHOD, args[0])
instana.internal_tracer.inject(span.context, opentracing.Format.HTTP_HEADERS, kwargs["headers"])
rv = wrapped(*args, **kwargs)
span.set_tag(ext.HTTP_STATUS_CODE, rv.status)
if 500 <= rv.status <= 599:
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
except Exception as e:
span.log_kv({'message': e})
span.set_tag("error", True)
ec = span.tags.get('ec', 0)
span.set_tag("ec", ec+1)
+ span.finish()
raise
else:
span.finish()
return rv |
e2cecaa99bae3635fcaa58ea57d67bce7dc83768 | src/psd2svg/rasterizer/batik_rasterizer.py | src/psd2svg/rasterizer/batik_rasterizer.py | from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true", "-jar", self.jar_path,
"{}".format(url), "-d", output_file]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
| from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
    """Rasterize SVG documents by shelling out to the Batik rasterizer JAR.

    The JAR location defaults to the module-level ``BATIK_PATH`` (taken
    from the ``BATIK_PATH`` environment variable) unless an explicit path
    is supplied.
    """
    def __init__(self, jar_path=None, **kwargs):
        """
        :param jar_path: path to ``batik-rasterizer.jar``; falls back to
            the module-level ``BATIK_PATH`` when not given.
        """
        self.jar_path = jar_path if jar_path else BATIK_PATH
        assert os.path.exists(self.jar_path)
    def rasterize(self, url, size=None, format="png"):
        """Render the SVG at ``url`` with Batik and return a PIL image.

        :param url: URL (or local path) of the SVG document to render.
        :param size: optional ``(width, height)`` pair; values may be ints.
        :param format: output raster format extension, e.g. ``"png"``.
        :return: ``PIL.Image.Image`` loaded from the rasterized output.
        """
        with temporary_directory() as d:
            output_file = os.path.join(d, "output.{}".format(format))
            cmd = ["java", "-Djava.awt.headless=true",
                   "-jar", self.jar_path,
                   "-bg", "0,255,255,255",  # opaque white background
                   "-d", output_file,
                   "{}".format(url),
                   ]
            if size:
                # BUGFIX: every element of the subprocess arg list must be a
                # string; width/height were previously passed through
                # unconverted, which raises TypeError for int sizes.
                cmd += ["-w", str(size[0]), "-h", str(size[1])]
            subprocess.check_call(cmd, stdout=subprocess.PIPE)
            assert os.path.exists(output_file)
            return Image.open(output_file)
| Add bg option in batik rasterizer | Add bg option in batik rasterizer
| Python | mit | kyamagu/psd2svg | from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
- cmd = ["java", "-Djava.awt.headless=true", "-jar", self.jar_path,
+ cmd = ["java", "-Djava.awt.headless=true",
+ "-jar", self.jar_path,
+ "-bg", "0,255,255,255",
+ "-d", output_file,
- "{}".format(url), "-d", output_file]
+ "{}".format(url),
+ ]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
| Add bg option in batik rasterizer | ## Code Before:
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true", "-jar", self.jar_path,
"{}".format(url), "-d", output_file]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
## Instruction:
Add bg option in batik rasterizer
## Code After:
from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
cmd = ["java", "-Djava.awt.headless=true",
"-jar", self.jar_path,
"-bg", "0,255,255,255",
"-d", output_file,
"{}".format(url),
]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file)
| from __future__ import absolute_import, unicode_literals
from PIL import Image
import logging
import os
import subprocess
from psd2svg.utils import temporary_directory
logger = logging.getLogger(__name__)
BATIK_PATH = os.environ.get(
'BATIK_PATH', "/usr/share/java/batik-rasterizer.jar")
class BatikRasterizer(object):
"""Batik rasterizer."""
def __init__(self, jar_path=None, **kwargs):
self.jar_path = jar_path if jar_path else BATIK_PATH
assert os.path.exists(self.jar_path)
def rasterize(self, url, size=None, format="png"):
with temporary_directory() as d:
output_file = os.path.join(d, "output.{}".format(format))
- cmd = ["java", "-Djava.awt.headless=true", "-jar", self.jar_path,
? -----------------------
+ cmd = ["java", "-Djava.awt.headless=true",
+ "-jar", self.jar_path,
+ "-bg", "0,255,255,255",
+ "-d", output_file,
- "{}".format(url), "-d", output_file]
? -------------------
+ "{}".format(url),
+ ]
if size:
cmd += ["-w", size[0], "-h", size[1]]
subprocess.check_call(cmd, stdout=subprocess.PIPE)
assert os.path.exists(output_file)
return Image.open(output_file) |
371be140dfbecff72d72cda580cd299badc6bc15 | aws_list_all/client.py | aws_list_all/client.py | import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
| import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
    """Given a service name, return a list of region names where this service can have resources,
    restricted by a possible set of regions.

    :param service: boto3 service name, e.g. ``"ec2"``.
    :param regions: optional iterable of region names; when non-empty, the
        result is limited to regions present in this iterable.
    :return: list of region name strings.
    """
    if service == "s3":
        return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
    if service == "route53":
        return ['us-east-1'] # route53 is a global service, but the endpoint is in us-east-1.
    service_regions = boto3.Session().get_available_regions(service)
    if regions:
        # If regions were passed, return the intersection (preserving the
        # caller's ordering of ``regions``).
        return [r for r in regions if r in service_regions]
    else:
        return service_regions
def get_client(service, region=None):
    """Return a boto3 client for ``service`` in ``region``, caching one
    client per (service, region) pair across calls."""
    key = (service, region)
    if key not in _CLIENTS:
        _CLIENTS[key] = boto3.Session(region_name=region).client(service)
    return _CLIENTS[key]
| Use us-east-1 to query route53 | Use us-east-1 to query route53
Route53 is a global service so doesn't belong to a region, but the API endpoint is in us-east-1.
This makes various listings now work, but not record sets.
Updates #4.
| Python | mit | JohannesEbke/aws_list_all | import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
+ if service == "route53":
+ return ['us-east-1'] # route53 is a global service, but the endpoint is in us-east-1.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
| Use us-east-1 to query route53 | ## Code Before:
import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
## Instruction:
Use us-east-1 to query route53
## Code After:
import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
if service == "route53":
return ['us-east-1'] # route53 is a global service, but the endpoint is in us-east-1.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)]
| import boto3
_CLIENTS = {}
def get_regions_for_service(service, regions=()):
"""Given a service name, return a list of region names where this service can have resources,
restricted by a possible set of regions."""
if service == "s3":
return ['us-east-1'] # s3 ListBuckets is a global request, so no region required.
+ if service == "route53":
+ return ['us-east-1'] # route53 is a global service, but the endpoint is in us-east-1.
service_regions = boto3.Session().get_available_regions(service)
if regions:
# If regions were passed, return the intersecion.
return [r for r in regions if r in service_regions]
else:
return service_regions
def get_client(service, region=None):
"""Return (cached) boto3 clients for this service and this region"""
if (service, region) not in _CLIENTS:
_CLIENTS[(service, region)] = boto3.Session(region_name=region).client(service)
return _CLIENTS[(service, region)] |
2094f2ef5a47703a881643b8ca25a632fe54e892 | under_overfitting.py | under_overfitting.py | import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.cross_validation import cross_val_score
def main():
np.random.seed(0)
n_samples = 30
degrees = range(1, 16)
true_fn = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fn(X) + np.random.randn(n_samples) * 0.1
for d in degrees:
poly_features = PolynomialFeatures(degree=d, include_bias=False)
model = LinearRegression()
pipeline = Pipeline([("polynomial_features", poly_features),
("linear_regression", model)])
pipeline.fit(X[:, np.newaxis], y)
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
scoring="mean_squared_error", cv=10)
print("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(
degrees[i], -scores.mean(), scores.std()))
if __name__ == '__main__':
main()
| import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.cross_validation import cross_val_score
def main():
    """Fit polynomial models of increasing degree to noisy cosine samples
    and print the 10-fold cross-validated MSE for each degree, illustrating
    the trade-off between under- and overfitting.
    """
    np.random.seed(0)  # fixed seed so the printed numbers are reproducible
    n_samples = 30
    degrees = range(1, 16)

    # PEP 8 (E731): use a def instead of assigning a lambda to a name.
    def true_fn(X):
        """Noise-free generating function the models try to recover."""
        return np.cos(1.5 * np.pi * X)

    X = np.sort(np.random.rand(n_samples))
    y = true_fn(X) + np.random.randn(n_samples) * 0.1

    for d in degrees:
        poly_features = PolynomialFeatures(degree=d, include_bias=False)
        model = LinearRegression()
        pipeline = Pipeline([('polynomial_features', poly_features),
                             ('linear_regression', model)])
        pipeline.fit(X[:, np.newaxis], y)
        # cross_val_score negates the MSE, hence -scores.mean() below.
        scores = cross_val_score(pipeline, X[:, np.newaxis], y,
                                 scoring='mean_squared_error', cv=10)
        print('Degree {:>2}: mse = {}, std = {}'.format(
            d, -scores.mean(), scores.std()))
if __name__ == '__main__':
main()
| Complete walk of polynomial degrees to find most balance between under and overfitting | Complete walk of polynomial degrees to find most balance between under and overfitting
| Python | mit | noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit,noelevans/sandpit | import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.cross_validation import cross_val_score
def main():
np.random.seed(0)
n_samples = 30
degrees = range(1, 16)
true_fn = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fn(X) + np.random.randn(n_samples) * 0.1
for d in degrees:
poly_features = PolynomialFeatures(degree=d, include_bias=False)
model = LinearRegression()
- pipeline = Pipeline([("polynomial_features", poly_features),
+ pipeline = Pipeline([('polynomial_features', poly_features),
- ("linear_regression", model)])
+ ('linear_regression', model)])
pipeline.fit(X[:, np.newaxis], y)
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
- scoring="mean_squared_error", cv=10)
+ scoring='mean_squared_error', cv=10)
- print("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(
+ print('Degree {:>2}: mse = {}, std = {}'.format(
- degrees[i], -scores.mean(), scores.std()))
+ d, -scores.mean(), scores.std()))
if __name__ == '__main__':
main()
| Complete walk of polynomial degrees to find most balance between under and overfitting | ## Code Before:
import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.cross_validation import cross_val_score
def main():
np.random.seed(0)
n_samples = 30
degrees = range(1, 16)
true_fn = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fn(X) + np.random.randn(n_samples) * 0.1
for d in degrees:
poly_features = PolynomialFeatures(degree=d, include_bias=False)
model = LinearRegression()
pipeline = Pipeline([("polynomial_features", poly_features),
("linear_regression", model)])
pipeline.fit(X[:, np.newaxis], y)
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
scoring="mean_squared_error", cv=10)
print("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(
degrees[i], -scores.mean(), scores.std()))
if __name__ == '__main__':
main()
## Instruction:
Complete walk of polynomial degrees to find most balance between under and overfitting
## Code After:
import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.cross_validation import cross_val_score
def main():
np.random.seed(0)
n_samples = 30
degrees = range(1, 16)
true_fn = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fn(X) + np.random.randn(n_samples) * 0.1
for d in degrees:
poly_features = PolynomialFeatures(degree=d, include_bias=False)
model = LinearRegression()
pipeline = Pipeline([('polynomial_features', poly_features),
('linear_regression', model)])
pipeline.fit(X[:, np.newaxis], y)
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
scoring='mean_squared_error', cv=10)
print('Degree {:>2}: mse = {}, std = {}'.format(
d, -scores.mean(), scores.std()))
if __name__ == '__main__':
main()
| import numpy as np
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import PolynomialFeatures
from sklearn.linear_model import LinearRegression
from sklearn.cross_validation import cross_val_score
def main():
np.random.seed(0)
n_samples = 30
degrees = range(1, 16)
true_fn = lambda X: np.cos(1.5 * np.pi * X)
X = np.sort(np.random.rand(n_samples))
y = true_fn(X) + np.random.randn(n_samples) * 0.1
for d in degrees:
poly_features = PolynomialFeatures(degree=d, include_bias=False)
model = LinearRegression()
- pipeline = Pipeline([("polynomial_features", poly_features),
? ^ ^
+ pipeline = Pipeline([('polynomial_features', poly_features),
? ^ ^
- ("linear_regression", model)])
? ^ ^
+ ('linear_regression', model)])
? ^ ^
pipeline.fit(X[:, np.newaxis], y)
scores = cross_val_score(pipeline, X[:, np.newaxis], y,
- scoring="mean_squared_error", cv=10)
? ^ ^
+ scoring='mean_squared_error', cv=10)
? ^ ^
- print("Degree {}\nMSE = {:.2e}(+/- {:.2e})".format(
+ print('Degree {:>2}: mse = {}, std = {}'.format(
- degrees[i], -scores.mean(), scores.std()))
? ---------
+ d, -scores.mean(), scores.std()))
if __name__ == '__main__':
main() |
a5f60d664e7758b113abc31b405657952dd5eccd | tests/conftest.py | tests/conftest.py | import os
import pytest
from pywatson.watson import Watson
@pytest.fixture
def config():
"""Get Watson configuration from the environment
:return: dict with keys 'url', 'username', and 'password'
"""
try:
return {
'url': os.environ['WATSON_URL'],
'username': os.environ['WATSON_USERNAME'],
'password': os.environ['WATSON_PASSWORD']
}
except KeyError as err:
raise Exception('You must set the environment variable {}'.format(err.args[0]))
@pytest.fixture
def watson(config):
return Watson(url=config['url'], username=config['username'], password=config['password'])
| import json
import os
import pytest
from pywatson.watson import Watson
@pytest.fixture
def config():
    """Get Watson configuration from the environment

    Reads the WATSON_URL, WATSON_USERNAME and WATSON_PASSWORD environment
    variables.

    :return: dict with keys 'url', 'username', and 'password'
    :raises Exception: if any of the three environment variables is unset
    """
    try:
        return {
            'url': os.environ['WATSON_URL'],
            'username': os.environ['WATSON_USERNAME'],
            'password': os.environ['WATSON_PASSWORD']
        }
    except KeyError as err:
        # err.args[0] is the name of the missing environment variable.
        raise Exception('You must set the environment variable {}'.format(err.args[0]))
@pytest.fixture
def watson(config):
    """A Watson client constructed from the ``config`` fixture."""
    return Watson(
        url=config['url'],
        username=config['username'],
        password=config['password'],
    )
@pytest.fixture
def questions():
    """Load every question fixture found under tests/json/questions.

    :return: list of parsed JSON documents, one per file
    :raises ValueError: if any file does not contain valid JSON
    """
    qs = []
    for root, dirs, files in os.walk('tests/json/questions'):
        for filename in files:
            filepath = os.path.join(root, filename)
            # Use a context manager so the file handle is always closed;
            # json.load(open(...)) leaked one handle per file.
            with open(filepath) as f:
                try:
                    qs.append(json.load(f))
                except ValueError:
                    raise ValueError('Expected {} to contain valid JSON'.format(filepath))
    return qs
| Implement test data JSON loader | Implement test data JSON loader
| Python | mit | sherlocke/pywatson | + import json
import os
import pytest
from pywatson.watson import Watson
@pytest.fixture
def config():
"""Get Watson configuration from the environment
:return: dict with keys 'url', 'username', and 'password'
"""
try:
return {
'url': os.environ['WATSON_URL'],
'username': os.environ['WATSON_USERNAME'],
'password': os.environ['WATSON_PASSWORD']
}
except KeyError as err:
raise Exception('You must set the environment variable {}'.format(err.args[0]))
@pytest.fixture
def watson(config):
return Watson(url=config['url'], username=config['username'], password=config['password'])
+
+ @pytest.fixture
+ def questions():
+ qs = []
+
+ for root, dirs, files in os.walk('tests/json/questions'):
+ for filename in files:
+ filepath = os.path.join(root, filename)
+ try:
+ qs.append(json.load(open(filepath)))
+ except ValueError:
+ raise ValueError('Expected {} to contain valid JSON'.format(filepath))
+
+ return qs
+ | Implement test data JSON loader | ## Code Before:
import os
import pytest
from pywatson.watson import Watson
@pytest.fixture
def config():
"""Get Watson configuration from the environment
:return: dict with keys 'url', 'username', and 'password'
"""
try:
return {
'url': os.environ['WATSON_URL'],
'username': os.environ['WATSON_USERNAME'],
'password': os.environ['WATSON_PASSWORD']
}
except KeyError as err:
raise Exception('You must set the environment variable {}'.format(err.args[0]))
@pytest.fixture
def watson(config):
return Watson(url=config['url'], username=config['username'], password=config['password'])
## Instruction:
Implement test data JSON loader
## Code After:
import json
import os
import pytest
from pywatson.watson import Watson
@pytest.fixture
def config():
"""Get Watson configuration from the environment
:return: dict with keys 'url', 'username', and 'password'
"""
try:
return {
'url': os.environ['WATSON_URL'],
'username': os.environ['WATSON_USERNAME'],
'password': os.environ['WATSON_PASSWORD']
}
except KeyError as err:
raise Exception('You must set the environment variable {}'.format(err.args[0]))
@pytest.fixture
def watson(config):
return Watson(url=config['url'], username=config['username'], password=config['password'])
@pytest.fixture
def questions():
qs = []
for root, dirs, files in os.walk('tests/json/questions'):
for filename in files:
filepath = os.path.join(root, filename)
try:
qs.append(json.load(open(filepath)))
except ValueError:
raise ValueError('Expected {} to contain valid JSON'.format(filepath))
return qs
| + import json
import os
import pytest
from pywatson.watson import Watson
@pytest.fixture
def config():
"""Get Watson configuration from the environment
:return: dict with keys 'url', 'username', and 'password'
"""
try:
return {
'url': os.environ['WATSON_URL'],
'username': os.environ['WATSON_USERNAME'],
'password': os.environ['WATSON_PASSWORD']
}
except KeyError as err:
raise Exception('You must set the environment variable {}'.format(err.args[0]))
@pytest.fixture
def watson(config):
return Watson(url=config['url'], username=config['username'], password=config['password'])
+
+
+ @pytest.fixture
+ def questions():
+ qs = []
+
+ for root, dirs, files in os.walk('tests/json/questions'):
+ for filename in files:
+ filepath = os.path.join(root, filename)
+ try:
+ qs.append(json.load(open(filepath)))
+ except ValueError:
+ raise ValueError('Expected {} to contain valid JSON'.format(filepath))
+
+ return qs |
4f3646e07d592d5da214977732298b680b8fdee7 | zou/app/blueprints/crud/task_status.py | zou/app/blueprints/crud/task_status.py | from zou.app.models.task_status import TaskStatus
from zou.app.services import tasks_service
from .base import BaseModelResource, BaseModelsResource
class TaskStatusesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, TaskStatus)
def check_read_permissions(self):
return True
def post_creation(self, instance):
tasks_service.clear_task_status_cache(str(instance.id))
return instance.serialize()
class TaskStatusResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, TaskStatus)
def check_read_permissions(self, instance):
return True
def post_update(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict
def post_delete(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict
| from zou.app.models.task_status import TaskStatus
from zou.app.services import tasks_service
from .base import BaseModelResource, BaseModelsResource
class TaskStatusesResource(BaseModelsResource):
    """Collection resource (list / create) for task statuses."""
    def __init__(self):
        BaseModelsResource.__init__(self, TaskStatus)
    def check_read_permissions(self):
        # Task statuses are readable by every authenticated user.
        return True
    def post_creation(self, instance):
        # Drop the cached entry so subsequent reads see the new status.
        tasks_service.clear_task_status_cache(str(instance.id))
        return instance.serialize()
class TaskStatusResource(BaseModelResource):
    """Single-resource endpoint (read / update / delete) for a task status."""
    def __init__(self):
        BaseModelResource.__init__(self, TaskStatus)
    def check_read_permissions(self, instance):
        # Task statuses are readable by every authenticated user.
        return True
    def pre_update(self, instance_dict, data):
        """Keep at most one status flagged as default.

        When the update sets ``is_default``, clear the flag on the status
        currently holding it (if any) before this one takes it over.
        """
        if data.get("is_default", False):
            status = TaskStatus.get_by(is_default=True)
            # Guard: presumably get_by returns None when no status is
            # currently the default (e.g. the very first time a default is
            # set); calling .update() on None would raise AttributeError.
            # TODO(review): confirm get_by's not-found behavior.
            if status is not None:
                status.update({"is_default": None})
        return instance_dict
    def post_update(self, instance_dict):
        # Invalidate the cache so clients see the updated status.
        tasks_service.clear_task_status_cache(instance_dict["id"])
        return instance_dict
    def post_delete(self, instance_dict):
        # Invalidate the cache so the deleted status disappears.
        tasks_service.clear_task_status_cache(instance_dict["id"])
        return instance_dict
| Allow to modify the is_default flag | [tasks] Allow to modify the is_default flag
| Python | agpl-3.0 | cgwire/zou | from zou.app.models.task_status import TaskStatus
from zou.app.services import tasks_service
from .base import BaseModelResource, BaseModelsResource
class TaskStatusesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, TaskStatus)
def check_read_permissions(self):
return True
def post_creation(self, instance):
tasks_service.clear_task_status_cache(str(instance.id))
return instance.serialize()
class TaskStatusResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, TaskStatus)
def check_read_permissions(self, instance):
return True
+ def pre_update(self, instance_dict, data):
+ if data.get("is_default", False):
+ status = TaskStatus.get_by(is_default=True)
+ status.update({"is_default": None})
+ return instance_dict
+
def post_update(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict
def post_delete(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict
| Allow to modify the is_default flag | ## Code Before:
from zou.app.models.task_status import TaskStatus
from zou.app.services import tasks_service
from .base import BaseModelResource, BaseModelsResource
class TaskStatusesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, TaskStatus)
def check_read_permissions(self):
return True
def post_creation(self, instance):
tasks_service.clear_task_status_cache(str(instance.id))
return instance.serialize()
class TaskStatusResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, TaskStatus)
def check_read_permissions(self, instance):
return True
def post_update(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict
def post_delete(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict
## Instruction:
Allow to modify the is_default flag
## Code After:
from zou.app.models.task_status import TaskStatus
from zou.app.services import tasks_service
from .base import BaseModelResource, BaseModelsResource
class TaskStatusesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, TaskStatus)
def check_read_permissions(self):
return True
def post_creation(self, instance):
tasks_service.clear_task_status_cache(str(instance.id))
return instance.serialize()
class TaskStatusResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, TaskStatus)
def check_read_permissions(self, instance):
return True
def pre_update(self, instance_dict, data):
if data.get("is_default", False):
status = TaskStatus.get_by(is_default=True)
status.update({"is_default": None})
return instance_dict
def post_update(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict
def post_delete(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict
| from zou.app.models.task_status import TaskStatus
from zou.app.services import tasks_service
from .base import BaseModelResource, BaseModelsResource
class TaskStatusesResource(BaseModelsResource):
def __init__(self):
BaseModelsResource.__init__(self, TaskStatus)
def check_read_permissions(self):
return True
def post_creation(self, instance):
tasks_service.clear_task_status_cache(str(instance.id))
return instance.serialize()
class TaskStatusResource(BaseModelResource):
def __init__(self):
BaseModelResource.__init__(self, TaskStatus)
def check_read_permissions(self, instance):
return True
+ def pre_update(self, instance_dict, data):
+ if data.get("is_default", False):
+ status = TaskStatus.get_by(is_default=True)
+ status.update({"is_default": None})
+ return instance_dict
+
def post_update(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict
def post_delete(self, instance_dict):
tasks_service.clear_task_status_cache(instance_dict["id"])
return instance_dict |
b3889bbdab80fb502c74b99b61cf36bae112ce2c | node/node.py | node/node.py | from configparser import ConfigParser
from driver import BTRFSDriver
class Node:
"""
# Dummy config example
[bk1-z3.presslabs.net]
ssd = True
"""
def __init__(self, context):
self._conf_path = context['node']['conf_path']
self._driver = BTRFSDriver(context['volume_path'])
self._name, self._labels = '', {}
config = ConfigParser()
config.read(self._conf_path)
try:
self._name = config.sections()[0]
for label, value in config[self._name].iteritems():
self._labels[label] = value
except IndexError:
pass
def get_subvolumes(self):
return self._driver.get_all()
def name(self):
return self._name
def labels(self):
return self._labels
| from configparser import ConfigParser
from driver import BTRFSDriver
class Node:
    """Storage node described by a small INI config file.

    # Dummy config example
    [bk1-z3.presslabs.net]
    ssd = True

    The single section header becomes the node name and the section's
    options become the node's labels.
    """
    def __init__(self, context):
        """
        :param context: mapping with ``context['node']['conf_path']``
            (path to the INI file) and ``context['volume_path']``.
        """
        self._conf_path = context['node']['conf_path']
        self._driver = BTRFSDriver(context['volume_path'])
        self._name, self._labels = '', {}
        config = ConfigParser()
        config.read(self._conf_path)
        try:
            self._name = config.sections()[0]
            # BUGFIX: Python 3's configparser SectionProxy has no
            # iteritems(); .iteritems() raised AttributeError. Use items().
            for label, value in config[self._name].items():
                self._labels[label] = value
        except IndexError:
            # Config file missing or empty: keep the empty defaults.
            pass
    def get_subvolumes(self):
        """Return all subvolumes known to the BTRFS driver."""
        return self._driver.get_all()
    @property
    def name(self):
        # Node name, i.e. the single section header of the config file.
        return self._name
    @property
    def labels(self):
        # Mapping of label -> value parsed from the node's config section.
        return self._labels
| Add property decorator to getters | Add property decorator to getters
| Python | apache-2.0 | PressLabs/cobalt,PressLabs/cobalt | from configparser import ConfigParser
from driver import BTRFSDriver
class Node:
"""
# Dummy config example
[bk1-z3.presslabs.net]
ssd = True
"""
def __init__(self, context):
self._conf_path = context['node']['conf_path']
self._driver = BTRFSDriver(context['volume_path'])
self._name, self._labels = '', {}
config = ConfigParser()
config.read(self._conf_path)
try:
self._name = config.sections()[0]
for label, value in config[self._name].iteritems():
self._labels[label] = value
except IndexError:
pass
def get_subvolumes(self):
return self._driver.get_all()
+ @property
def name(self):
return self._name
+ @property
def labels(self):
return self._labels
| Add property decorator to getters | ## Code Before:
from configparser import ConfigParser
from driver import BTRFSDriver
class Node:
"""
# Dummy config example
[bk1-z3.presslabs.net]
ssd = True
"""
def __init__(self, context):
self._conf_path = context['node']['conf_path']
self._driver = BTRFSDriver(context['volume_path'])
self._name, self._labels = '', {}
config = ConfigParser()
config.read(self._conf_path)
try:
self._name = config.sections()[0]
for label, value in config[self._name].iteritems():
self._labels[label] = value
except IndexError:
pass
def get_subvolumes(self):
return self._driver.get_all()
def name(self):
return self._name
def labels(self):
return self._labels
## Instruction:
Add property decorator to getters
## Code After:
from configparser import ConfigParser
from driver import BTRFSDriver
class Node:
"""
# Dummy config example
[bk1-z3.presslabs.net]
ssd = True
"""
def __init__(self, context):
self._conf_path = context['node']['conf_path']
self._driver = BTRFSDriver(context['volume_path'])
self._name, self._labels = '', {}
config = ConfigParser()
config.read(self._conf_path)
try:
self._name = config.sections()[0]
for label, value in config[self._name].iteritems():
self._labels[label] = value
except IndexError:
pass
def get_subvolumes(self):
return self._driver.get_all()
@property
def name(self):
return self._name
@property
def labels(self):
return self._labels
| from configparser import ConfigParser
from driver import BTRFSDriver
class Node:
"""
# Dummy config example
[bk1-z3.presslabs.net]
ssd = True
"""
def __init__(self, context):
self._conf_path = context['node']['conf_path']
self._driver = BTRFSDriver(context['volume_path'])
self._name, self._labels = '', {}
config = ConfigParser()
config.read(self._conf_path)
try:
self._name = config.sections()[0]
for label, value in config[self._name].iteritems():
self._labels[label] = value
except IndexError:
pass
def get_subvolumes(self):
return self._driver.get_all()
+ @property
def name(self):
return self._name
+ @property
def labels(self):
return self._labels
|
39461a97ef6e6b988466f41ddfee17687dd59ee1 | notifications/match_score.py | notifications/match_score.py | from consts.notification_type import NotificationType
from helpers.model_to_dict import ModelToDict
from notifications.base_notification import BaseNotification
class MatchScoreNotification(BaseNotification):
def __init__(self, match):
self.match = match
self.event = match.event.get()
self._event_feed = self.event.key_name
self._district_feed = self.event.event_district_enum
@property
def _type(self):
return NotificationType.MATCH_SCORE
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[self._type]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['match'] = ModelToDict.matchConverter(self.match)
return data
| from consts.notification_type import NotificationType
from helpers.model_to_dict import ModelToDict
from notifications.base_notification import BaseNotification
class MatchScoreNotification(BaseNotification):
def __init__(self, match):
self.match = match
self.event = match.event.get()
self._event_feed = self.event.key_name
self._district_feed = self.event.event_district_enum
@property
def _type(self):
return NotificationType.MATCH_SCORE
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[self._type]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['event_key'] = self.event.key_name
data['message_data']['match'] = ModelToDict.matchConverter(self.match)
return data
| Add event key to match score notification | Add event key to match score notification | Python | mit | bdaroz/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,tsteward/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance | from consts.notification_type import NotificationType
from helpers.model_to_dict import ModelToDict
from notifications.base_notification import BaseNotification
class MatchScoreNotification(BaseNotification):
def __init__(self, match):
self.match = match
self.event = match.event.get()
self._event_feed = self.event.key_name
self._district_feed = self.event.event_district_enum
@property
def _type(self):
return NotificationType.MATCH_SCORE
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[self._type]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
+ data['message_data']['event_key'] = self.event.key_name
data['message_data']['match'] = ModelToDict.matchConverter(self.match)
return data
| Add event key to match score notification | ## Code Before:
from consts.notification_type import NotificationType
from helpers.model_to_dict import ModelToDict
from notifications.base_notification import BaseNotification
class MatchScoreNotification(BaseNotification):
def __init__(self, match):
self.match = match
self.event = match.event.get()
self._event_feed = self.event.key_name
self._district_feed = self.event.event_district_enum
@property
def _type(self):
return NotificationType.MATCH_SCORE
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[self._type]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['match'] = ModelToDict.matchConverter(self.match)
return data
## Instruction:
Add event key to match score notification
## Code After:
from consts.notification_type import NotificationType
from helpers.model_to_dict import ModelToDict
from notifications.base_notification import BaseNotification
class MatchScoreNotification(BaseNotification):
def __init__(self, match):
self.match = match
self.event = match.event.get()
self._event_feed = self.event.key_name
self._district_feed = self.event.event_district_enum
@property
def _type(self):
return NotificationType.MATCH_SCORE
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[self._type]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
data['message_data']['event_key'] = self.event.key_name
data['message_data']['match'] = ModelToDict.matchConverter(self.match)
return data
| from consts.notification_type import NotificationType
from helpers.model_to_dict import ModelToDict
from notifications.base_notification import BaseNotification
class MatchScoreNotification(BaseNotification):
def __init__(self, match):
self.match = match
self.event = match.event.get()
self._event_feed = self.event.key_name
self._district_feed = self.event.event_district_enum
@property
def _type(self):
return NotificationType.MATCH_SCORE
def _build_dict(self):
data = {}
data['message_type'] = NotificationType.type_names[self._type]
data['message_data'] = {}
data['message_data']['event_name'] = self.event.name
+ data['message_data']['event_key'] = self.event.key_name
data['message_data']['match'] = ModelToDict.matchConverter(self.match)
return data |
675c7442b6fcee3fd9bd57d7a4ef68c7de23d48c | reddit_adzerk/adzerkkeywords.py | reddit_adzerk/adzerkkeywords.py |
import adzerk_api
import json
from pylons import app_globals as g
def update_global_keywords():
active_flights = adzerk_api.Flight.list(is_active=True)
keyword_target = set()
# Count the number of flights targeting each sub/keyword
for flight in active_flights:
for keyword_list in flight.Keywords.split('\n'):
for keyword in keyword_list.split(','):
ks = keyword.strip()
if ks.startswith('k.') or ks.startswith('!k.'):
keyword_target.add(ks)
# Store results in zookeeper
if g.zookeeper:
g.zookeeper.set("/keyword-targets", json.dumps(list(keyword_target)))
|
import adzerk_api
import json
from pylons import app_globals as g
KEYWORD_NODE = "/keyword-targets"
def update_global_keywords():
active_flights = adzerk_api.Flight.list(is_active=True)
keyword_target = set()
# Count the number of flights targeting each sub/keyword
for flight in active_flights:
for keyword_list in flight.Keywords.split('\n'):
for keyword in keyword_list.split(','):
ks = keyword.strip()
if ks.startswith('k.') or ks.startswith('!k.'):
keyword_target.add(ks)
# Store results in zookeeper
if g.zookeeper:
g.zookeeper.ensure_path(KEYWORD_NODE)
g.zookeeper.set(KEYWORD_NODE, json.dumps(list(keyword_target)))
| Create zookeeper node if it doesn't exist | Create zookeeper node if it doesn't exist
| Python | bsd-3-clause | madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk,madbook/reddit-plugin-adzerk |
import adzerk_api
import json
from pylons import app_globals as g
+
+ KEYWORD_NODE = "/keyword-targets"
def update_global_keywords():
active_flights = adzerk_api.Flight.list(is_active=True)
keyword_target = set()
# Count the number of flights targeting each sub/keyword
for flight in active_flights:
for keyword_list in flight.Keywords.split('\n'):
for keyword in keyword_list.split(','):
ks = keyword.strip()
if ks.startswith('k.') or ks.startswith('!k.'):
keyword_target.add(ks)
# Store results in zookeeper
if g.zookeeper:
+ g.zookeeper.ensure_path(KEYWORD_NODE)
- g.zookeeper.set("/keyword-targets", json.dumps(list(keyword_target)))
+ g.zookeeper.set(KEYWORD_NODE, json.dumps(list(keyword_target)))
| Create zookeeper node if it doesn't exist | ## Code Before:
import adzerk_api
import json
from pylons import app_globals as g
def update_global_keywords():
active_flights = adzerk_api.Flight.list(is_active=True)
keyword_target = set()
# Count the number of flights targeting each sub/keyword
for flight in active_flights:
for keyword_list in flight.Keywords.split('\n'):
for keyword in keyword_list.split(','):
ks = keyword.strip()
if ks.startswith('k.') or ks.startswith('!k.'):
keyword_target.add(ks)
# Store results in zookeeper
if g.zookeeper:
g.zookeeper.set("/keyword-targets", json.dumps(list(keyword_target)))
## Instruction:
Create zookeeper node if it doesn't exist
## Code After:
import adzerk_api
import json
from pylons import app_globals as g
KEYWORD_NODE = "/keyword-targets"
def update_global_keywords():
active_flights = adzerk_api.Flight.list(is_active=True)
keyword_target = set()
# Count the number of flights targeting each sub/keyword
for flight in active_flights:
for keyword_list in flight.Keywords.split('\n'):
for keyword in keyword_list.split(','):
ks = keyword.strip()
if ks.startswith('k.') or ks.startswith('!k.'):
keyword_target.add(ks)
# Store results in zookeeper
if g.zookeeper:
g.zookeeper.ensure_path(KEYWORD_NODE)
g.zookeeper.set(KEYWORD_NODE, json.dumps(list(keyword_target)))
|
import adzerk_api
import json
from pylons import app_globals as g
+
+ KEYWORD_NODE = "/keyword-targets"
def update_global_keywords():
active_flights = adzerk_api.Flight.list(is_active=True)
keyword_target = set()
# Count the number of flights targeting each sub/keyword
for flight in active_flights:
for keyword_list in flight.Keywords.split('\n'):
for keyword in keyword_list.split(','):
ks = keyword.strip()
if ks.startswith('k.') or ks.startswith('!k.'):
keyword_target.add(ks)
# Store results in zookeeper
if g.zookeeper:
+ g.zookeeper.ensure_path(KEYWORD_NODE)
- g.zookeeper.set("/keyword-targets", json.dumps(list(keyword_target)))
? ^^^^^^^^^^^^^^^^^^
+ g.zookeeper.set(KEYWORD_NODE, json.dumps(list(keyword_target)))
? ^^^^^^^^^^^^
|
ab41fe934ce241a4dbe5f73f648858f6f9351d5c | tests/settings.py | tests/settings.py | import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
| import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {},
},
]
| Fix TEMPLATES warning on Django 1.9 | Fix TEMPLATES warning on Django 1.9
| Python | bsd-2-clause | incuna/incuna-test-utils,incuna/incuna-test-utils | import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
+ TEMPLATES = [
+ {
+ 'BACKEND': 'django.template.backends.django.DjangoTemplates',
+ 'DIRS': [],
+ 'APP_DIRS': True,
+ 'OPTIONS': {},
+ },
+ ]
| Fix TEMPLATES warning on Django 1.9 | ## Code Before:
import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
## Instruction:
Fix TEMPLATES warning on Django 1.9
## Code After:
import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {},
},
]
| import dj_database_url
DATABASES = {
'default': dj_database_url.config(
default='postgres://localhost/test_utils',
),
}
INSTALLED_APPS = (
'incuna_test_utils',
'tests',
'feincms.module.page',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
)
MIDDLEWARE_CLASSES = ()
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests.urls'
SECRET_KEY = 'test'
FEINCMS_USE_PAGE_ADMIN = False
+ TEMPLATES = [
+ {
+ 'BACKEND': 'django.template.backends.django.DjangoTemplates',
+ 'DIRS': [],
+ 'APP_DIRS': True,
+ 'OPTIONS': {},
+ },
+ ] |
6b8f66ed0bcaa62b3afd9fea7d749916d768847d | scripts/midnightRun.py | scripts/midnightRun.py | from recover.models import *
from recover.patient_data import *
from datetime import date
def midnightRun():
physicians = User.objects()
for physician in physicians:
patients = physician.patients
for patient in patients:
data = PatientData(patient)
last_synced = patient.date_last_synced.isoformat()
last_synced = last_synced[0:10]
data.get_heart_rate_data_for_date_range(last_synced)
data.get_activity_data_for_date_range(last_synced)
| from recover.models import *
from recover.patient_data import *
import datetime
def midnightRun():
physicians = User.objects()
for physician in physicians:
patients = physician.patients
for patient in patients:
data = PatientData(patient)
last_synced = patient.date_last_synced.isoformat()
last_synced = last_synced[0:10]
data.get_heart_rate_data_for_date_range(last_synced)
data.get_activity_data_for_date_range(last_synced)
patient.date_last_synced = datetime.datetime.now()
| Update 'date_last_synced' field on each patient after midnight fetching | Update 'date_last_synced' field on each patient after midnight fetching
| Python | mit | SLU-Capstone/Recover,SLU-Capstone/Recover,SLU-Capstone/Recover | from recover.models import *
from recover.patient_data import *
- from datetime import date
+ import datetime
+
def midnightRun():
physicians = User.objects()
for physician in physicians:
patients = physician.patients
for patient in patients:
data = PatientData(patient)
last_synced = patient.date_last_synced.isoformat()
last_synced = last_synced[0:10]
data.get_heart_rate_data_for_date_range(last_synced)
data.get_activity_data_for_date_range(last_synced)
+ patient.date_last_synced = datetime.datetime.now()
| Update 'date_last_synced' field on each patient after midnight fetching | ## Code Before:
from recover.models import *
from recover.patient_data import *
from datetime import date
def midnightRun():
physicians = User.objects()
for physician in physicians:
patients = physician.patients
for patient in patients:
data = PatientData(patient)
last_synced = patient.date_last_synced.isoformat()
last_synced = last_synced[0:10]
data.get_heart_rate_data_for_date_range(last_synced)
data.get_activity_data_for_date_range(last_synced)
## Instruction:
Update 'date_last_synced' field on each patient after midnight fetching
## Code After:
from recover.models import *
from recover.patient_data import *
import datetime
def midnightRun():
physicians = User.objects()
for physician in physicians:
patients = physician.patients
for patient in patients:
data = PatientData(patient)
last_synced = patient.date_last_synced.isoformat()
last_synced = last_synced[0:10]
data.get_heart_rate_data_for_date_range(last_synced)
data.get_activity_data_for_date_range(last_synced)
patient.date_last_synced = datetime.datetime.now()
| from recover.models import *
from recover.patient_data import *
- from datetime import date
+ import datetime
+
def midnightRun():
physicians = User.objects()
for physician in physicians:
patients = physician.patients
for patient in patients:
data = PatientData(patient)
last_synced = patient.date_last_synced.isoformat()
last_synced = last_synced[0:10]
data.get_heart_rate_data_for_date_range(last_synced)
data.get_activity_data_for_date_range(last_synced)
+ patient.date_last_synced = datetime.datetime.now() |
e54fa97cb44557454655efd24380da5223a1c5ae | tests/random_object_id/random_object_id_test.py | tests/random_object_id/random_object_id_test.py | import contextlib
import re
import sys
import mock
from six.moves import cStringIO
from random_object_id.random_object_id import \
gen_random_object_id, parse_args, main
@contextlib.contextmanager
def captured_output():
new_out = StringIO()
old_out = sys.stdout
try:
sys.stdout = new_out
yield sys.stdout
finally:
sys.stdout = old_out
def test_gen_random_object_id():
assert re.match('[0-9a-f]{24}', gen_random_object_id())
def test_gen_random_object_id_time():
with mock.patch('time.time') as mock_time:
mock_time.return_value = 1429506585.786924
object_id = gen_random_object_id()
assert re.match('55348a19', object_id)
def test_parse_args():
assert parse_args(['-l']).long_form
def test_main():
with mock.patch('sys.argv', ['random_object_id']):
with captured_output() as output:
main()
assert re.match('[0-9a-f]{24}\n', output.getvalue())
def test_main_l():
with mock.patch('sys.argv', ['random_object_id', '-l']):
with captured_output() as output:
main()
assert re.match('ObjectId\("[0-9a-f]{24}"\)\n', output.getvalue())
| import contextlib
import re
import sys
import mock
import six
from random_object_id.random_object_id import \
gen_random_object_id, parse_args, main
@contextlib.contextmanager
def captured_output():
old_out = sys.stdout
try:
sys.stdout = six.StringIO()
yield sys.stdout
finally:
sys.stdout = old_out
def test_gen_random_object_id():
assert re.match('[0-9a-f]{24}', gen_random_object_id())
def test_gen_random_object_id_time():
with mock.patch('time.time') as mock_time:
mock_time.return_value = 1429506585.786924
object_id = gen_random_object_id()
assert re.match('55348a19', object_id)
def test_parse_args():
assert parse_args(['-l']).long_form
def test_main():
with mock.patch('sys.argv', ['random_object_id']):
with captured_output() as output:
main()
assert re.match('[0-9a-f]{24}\n', output.getvalue())
def test_main_l():
with mock.patch('sys.argv', ['random_object_id', '-l']):
with captured_output() as output:
main()
assert re.match('ObjectId\("[0-9a-f]{24}"\)\n', output.getvalue())
| Change how StringIO is imported | Change how StringIO is imported
| Python | mit | mxr/random-object-id | import contextlib
import re
import sys
import mock
- from six.moves import cStringIO
+ import six
from random_object_id.random_object_id import \
gen_random_object_id, parse_args, main
@contextlib.contextmanager
def captured_output():
- new_out = StringIO()
old_out = sys.stdout
try:
- sys.stdout = new_out
+ sys.stdout = six.StringIO()
yield sys.stdout
finally:
sys.stdout = old_out
def test_gen_random_object_id():
assert re.match('[0-9a-f]{24}', gen_random_object_id())
def test_gen_random_object_id_time():
with mock.patch('time.time') as mock_time:
mock_time.return_value = 1429506585.786924
object_id = gen_random_object_id()
assert re.match('55348a19', object_id)
def test_parse_args():
assert parse_args(['-l']).long_form
def test_main():
with mock.patch('sys.argv', ['random_object_id']):
with captured_output() as output:
main()
assert re.match('[0-9a-f]{24}\n', output.getvalue())
def test_main_l():
with mock.patch('sys.argv', ['random_object_id', '-l']):
with captured_output() as output:
main()
assert re.match('ObjectId\("[0-9a-f]{24}"\)\n', output.getvalue())
| Change how StringIO is imported | ## Code Before:
import contextlib
import re
import sys
import mock
from six.moves import cStringIO
from random_object_id.random_object_id import \
gen_random_object_id, parse_args, main
@contextlib.contextmanager
def captured_output():
new_out = StringIO()
old_out = sys.stdout
try:
sys.stdout = new_out
yield sys.stdout
finally:
sys.stdout = old_out
def test_gen_random_object_id():
assert re.match('[0-9a-f]{24}', gen_random_object_id())
def test_gen_random_object_id_time():
with mock.patch('time.time') as mock_time:
mock_time.return_value = 1429506585.786924
object_id = gen_random_object_id()
assert re.match('55348a19', object_id)
def test_parse_args():
assert parse_args(['-l']).long_form
def test_main():
with mock.patch('sys.argv', ['random_object_id']):
with captured_output() as output:
main()
assert re.match('[0-9a-f]{24}\n', output.getvalue())
def test_main_l():
with mock.patch('sys.argv', ['random_object_id', '-l']):
with captured_output() as output:
main()
assert re.match('ObjectId\("[0-9a-f]{24}"\)\n', output.getvalue())
## Instruction:
Change how StringIO is imported
## Code After:
import contextlib
import re
import sys
import mock
import six
from random_object_id.random_object_id import \
gen_random_object_id, parse_args, main
@contextlib.contextmanager
def captured_output():
old_out = sys.stdout
try:
sys.stdout = six.StringIO()
yield sys.stdout
finally:
sys.stdout = old_out
def test_gen_random_object_id():
assert re.match('[0-9a-f]{24}', gen_random_object_id())
def test_gen_random_object_id_time():
with mock.patch('time.time') as mock_time:
mock_time.return_value = 1429506585.786924
object_id = gen_random_object_id()
assert re.match('55348a19', object_id)
def test_parse_args():
assert parse_args(['-l']).long_form
def test_main():
with mock.patch('sys.argv', ['random_object_id']):
with captured_output() as output:
main()
assert re.match('[0-9a-f]{24}\n', output.getvalue())
def test_main_l():
with mock.patch('sys.argv', ['random_object_id', '-l']):
with captured_output() as output:
main()
assert re.match('ObjectId\("[0-9a-f]{24}"\)\n', output.getvalue())
| import contextlib
import re
import sys
import mock
- from six.moves import cStringIO
+ import six
from random_object_id.random_object_id import \
gen_random_object_id, parse_args, main
@contextlib.contextmanager
def captured_output():
- new_out = StringIO()
old_out = sys.stdout
try:
- sys.stdout = new_out
+ sys.stdout = six.StringIO()
yield sys.stdout
finally:
sys.stdout = old_out
def test_gen_random_object_id():
assert re.match('[0-9a-f]{24}', gen_random_object_id())
def test_gen_random_object_id_time():
with mock.patch('time.time') as mock_time:
mock_time.return_value = 1429506585.786924
object_id = gen_random_object_id()
assert re.match('55348a19', object_id)
def test_parse_args():
assert parse_args(['-l']).long_form
def test_main():
with mock.patch('sys.argv', ['random_object_id']):
with captured_output() as output:
main()
assert re.match('[0-9a-f]{24}\n', output.getvalue())
def test_main_l():
with mock.patch('sys.argv', ['random_object_id', '-l']):
with captured_output() as output:
main()
assert re.match('ObjectId\("[0-9a-f]{24}"\)\n', output.getvalue()) |
8f14e64701fb26da8e4a614da6129964f29be16d | testapp/testapp/testmain/models.py | testapp/testapp/testmain/models.py | from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
| from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
class School(models.Model):
classes = models.ManyToManyField(ClassRoom)
| Add new testing model `School` | Add new testing model `School`
Issue #43
| Python | mit | applegrew/django-select2,dulaccc/django-select2,strongriley/django-select2,Feria/https-github.com-applegrew-django-select2,hobarrera/django-select2,hisie/django-select2,Feria/https-github.com-applegrew-django-select2,hisie/django-select2,bubenkoff/django-select2,pbs/django-select2,dantagg/django-select2,hobarrera/django-select2,applegrew/django-select2,SmithsonianEnterprises/django-select2,bubenkoff/django-select2,dulaccc/django-select2,DMOJ/django-select2,emorozov/django-select2,strongriley/django-select2,pbs/django-select2,DMOJ/django-select2,rizumu/django-select2,emorozov/django-select2,applegrew/django-select2,SmithsonianEnterprises/django-select2,patgmiller/django-select2,hisie/django-select2,anneFly/django-select2,TempoIQ/django-select2,patgmiller/django-select2,bubenkoff/django-select2,patgmiller/django-select2,rizumu/django-select2,DMOJ/django-select2,pbs/django-select2,dantagg/django-select2,Feria/https-github.com-applegrew-django-select2,rizumu/django-select2,anneFly/django-select2,dulaccc/django-select2,TempoIQ/django-select2,SmithsonianEnterprises/django-select2,anneFly/django-select2,TempoIQ/django-select2,hobarrera/django-select2,strongriley/django-select2,dantagg/django-select2,emorozov/django-select2 | from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
+
+ class School(models.Model):
+
+ classes = models.ManyToManyField(ClassRoom)
+ | Add new testing model `School` | ## Code Before:
from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
## Instruction:
Add new testing model `School`
## Code After:
from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
class School(models.Model):
classes = models.ManyToManyField(ClassRoom)
| from django.db import models
class ClassRoom(models.Model):
number = models.CharField(max_length=4)
def __unicode__(self):
return unicode(self.number)
class Lab(models.Model):
name = models.CharField(max_length=10)
def __unicode__(self):
return unicode(self.name)
class Dept(models.Model):
name = models.CharField(max_length=10)
allotted_rooms = models.ManyToManyField(ClassRoom)
allotted_labs = models.ManyToManyField(Lab)
def __unicode__(self):
return unicode(self.name)
class Employee(models.Model):
name = models.CharField(max_length=30)
salary = models.FloatField()
dept = models.ForeignKey(Dept)
manager = models.ForeignKey('Employee', null=True, blank=True)
def __unicode__(self):
return unicode(self.name)
class Word(models.Model):
word = models.CharField(max_length=15)
def __unicode__(self):
return unicode(self.word)
+
+
+ class School(models.Model):
+
+ classes = models.ManyToManyField(ClassRoom) |
ff391fc302b6d4e9fab0653522fa2fe47d8e8faa | beavy_modules/url_extractor/lib.py | beavy_modules/url_extractor/lib.py | import lassie
from pyembed.core import PyEmbed
from beavy.app import cache
pyembed = PyEmbed()
@cache.memoize()
def extract_info(url):
return lassie.fetch(url)
@cache.memoize()
def extract_oembed(url, **kwargs):
return pyembed.embed('http://www.youtube.com/watch?v=_PEdPBEpQfY', **kwargs)
|
from pyembed.core import PyEmbed
from beavy.app import cache
from lassie import Lassie
import re
# lassie by default isn't extensive enough for us
# configure it so that it is.
from lassie.filters import FILTER_MAPS
FILTER_MAPS['meta']['open_graph']['map'].update({
# general
"og:type": "type",
"og:site_name": "site_name",
})
FILTER_MAPS['meta']['generic']['pattern'] = re.compile(r"^(description|keywords|title|author|article:|music:|video:|book:)", re.I)
FILTER_MAPS['meta']['generic']['map'].update({
# articles
"article:published_time": "published_time",
"article:modified_time": "modified_time",
"article:expiration_time": "expiration_time",
"article:section": "section",
"article:section_url": "section_url",
# music
"music:duration": "duration",
"music:release_date": "release_date",
# video
"video:duration": "duration",
"video:release_date": "release_date",
# author
"author": "author",
# book
"book:author": "author",
"book:isbn": "isbn",
"book:release_date": "release_date",
})
# general configuration
pyembed = PyEmbed()
lassie = Lassie()
lassie.request_opts = {
'headers':{
# tell Lassie to tell others it is facebook
'User-Agent': 'facebookexternalhit/1.1'
}
}
@cache.memoize()
def extract_info(url):
return lassie.fetch(url)
@cache.memoize()
def extract_oembed(url, **kwargs):
return pyembed.embed(url, **kwargs)
| Configure Lassie for more information | Configure Lassie for more information
| Python | mpl-2.0 | beavyHQ/beavy,beavyHQ/beavy,beavyHQ/beavy,beavyHQ/beavy | - import lassie
+
from pyembed.core import PyEmbed
from beavy.app import cache
+ from lassie import Lassie
+ import re
+
+ # lassie by default isn't extensive enough for us
+ # configure it so that it is.
+
+ from lassie.filters import FILTER_MAPS
+ FILTER_MAPS['meta']['open_graph']['map'].update({
+ # general
+ "og:type": "type",
+ "og:site_name": "site_name",
+ })
+
+ FILTER_MAPS['meta']['generic']['pattern'] = re.compile(r"^(description|keywords|title|author|article:|music:|video:|book:)", re.I)
+ FILTER_MAPS['meta']['generic']['map'].update({
+ # articles
+ "article:published_time": "published_time",
+ "article:modified_time": "modified_time",
+ "article:expiration_time": "expiration_time",
+ "article:section": "section",
+ "article:section_url": "section_url",
+
+ # music
+ "music:duration": "duration",
+ "music:release_date": "release_date",
+
+ # video
+ "video:duration": "duration",
+ "video:release_date": "release_date",
+
+ # author
+ "author": "author",
+
+ # book
+ "book:author": "author",
+ "book:isbn": "isbn",
+ "book:release_date": "release_date",
+ })
+
+ # general configuration
pyembed = PyEmbed()
+
+ lassie = Lassie()
+ lassie.request_opts = {
+ 'headers':{
+ # tell Lassie to tell others it is facebook
+ 'User-Agent': 'facebookexternalhit/1.1'
+ }
+ }
@cache.memoize()
def extract_info(url):
return lassie.fetch(url)
@cache.memoize()
def extract_oembed(url, **kwargs):
- return pyembed.embed('http://www.youtube.com/watch?v=_PEdPBEpQfY', **kwargs)
+ return pyembed.embed(url, **kwargs)
| Configure Lassie for more information | ## Code Before:
import lassie
from pyembed.core import PyEmbed
from beavy.app import cache
pyembed = PyEmbed()
@cache.memoize()
def extract_info(url):
return lassie.fetch(url)
@cache.memoize()
def extract_oembed(url, **kwargs):
return pyembed.embed('http://www.youtube.com/watch?v=_PEdPBEpQfY', **kwargs)
## Instruction:
Configure Lassie for more information
## Code After:
from pyembed.core import PyEmbed
from beavy.app import cache
from lassie import Lassie
import re
# lassie by default isn't extensive enough for us
# configure it so that it is.
from lassie.filters import FILTER_MAPS
FILTER_MAPS['meta']['open_graph']['map'].update({
# general
"og:type": "type",
"og:site_name": "site_name",
})
FILTER_MAPS['meta']['generic']['pattern'] = re.compile(r"^(description|keywords|title|author|article:|music:|video:|book:)", re.I)
FILTER_MAPS['meta']['generic']['map'].update({
# articles
"article:published_time": "published_time",
"article:modified_time": "modified_time",
"article:expiration_time": "expiration_time",
"article:section": "section",
"article:section_url": "section_url",
# music
"music:duration": "duration",
"music:release_date": "release_date",
# video
"video:duration": "duration",
"video:release_date": "release_date",
# author
"author": "author",
# book
"book:author": "author",
"book:isbn": "isbn",
"book:release_date": "release_date",
})
# general configuration
pyembed = PyEmbed()
lassie = Lassie()
lassie.request_opts = {
'headers':{
# tell Lassie to tell others it is facebook
'User-Agent': 'facebookexternalhit/1.1'
}
}
@cache.memoize()
def extract_info(url):
return lassie.fetch(url)
@cache.memoize()
def extract_oembed(url, **kwargs):
return pyembed.embed(url, **kwargs)
| - import lassie
+
from pyembed.core import PyEmbed
from beavy.app import cache
+ from lassie import Lassie
+ import re
+
+ # lassie by default isn't extensive enough for us
+ # configure it so that it is.
+
+ from lassie.filters import FILTER_MAPS
+ FILTER_MAPS['meta']['open_graph']['map'].update({
+ # general
+ "og:type": "type",
+ "og:site_name": "site_name",
+ })
+
+ FILTER_MAPS['meta']['generic']['pattern'] = re.compile(r"^(description|keywords|title|author|article:|music:|video:|book:)", re.I)
+ FILTER_MAPS['meta']['generic']['map'].update({
+ # articles
+ "article:published_time": "published_time",
+ "article:modified_time": "modified_time",
+ "article:expiration_time": "expiration_time",
+ "article:section": "section",
+ "article:section_url": "section_url",
+
+ # music
+ "music:duration": "duration",
+ "music:release_date": "release_date",
+
+ # video
+ "video:duration": "duration",
+ "video:release_date": "release_date",
+
+ # author
+ "author": "author",
+
+ # book
+ "book:author": "author",
+ "book:isbn": "isbn",
+ "book:release_date": "release_date",
+ })
+
+ # general configuration
pyembed = PyEmbed()
+
+ lassie = Lassie()
+ lassie.request_opts = {
+ 'headers':{
+ # tell Lassie to tell others it is facebook
+ 'User-Agent': 'facebookexternalhit/1.1'
+ }
+ }
@cache.memoize()
def extract_info(url):
return lassie.fetch(url)
@cache.memoize()
def extract_oembed(url, **kwargs):
- return pyembed.embed('http://www.youtube.com/watch?v=_PEdPBEpQfY', **kwargs)
+ return pyembed.embed(url, **kwargs) |
655e741375b3fad7e3b7657662d33ca8017c0220 | test/requests/link_checker.py | test/requests/link_checker.py | import requests
def check_links(args_obj, parser):
print("")
print("Checking links")
print("########################")
print("Not implemented yet.")
print("This is supposed to check all links in the system.")
print("########################")
| from __future__ import print_function
import re
import requests
from lxml.html import parse
from requests.exceptions import ConnectionError
def is_root_link(link):
pattern = re.compile("^/$")
return pattern.match(link)
def is_mailto_link(link):
pattern = re.compile("^mailto:.*")
return pattern.match(link)
def is_internal_link(link):
pattern = re.compile("^/.*")
return pattern.match(link)
def get_links(doc):
return filter(
lambda x: not (
is_root_link(x)
or is_mailto_link(x))
, map(lambda y: y.get("href")
, doc.cssselect("a")))
def verify_link(link):
try:
result = requests.get(link, timeout=20)
if result.status_code == 200:
print(link+" ==> OK")
else:
print("ERROR: link `"+link+"` failed with status "
, result.status_code)
except ConnectionError as ex:
print("ERROR: ", link, ex)
def check_page(host, start_url):
print("")
print("Checking links in page `"+start_url+"`")
doc = parse(start_url).getroot()
links = get_links(doc)
internal_links = filter(is_internal_link, links)
external_links = filter(lambda x: not is_internal_link(x), links)
external_links.append("http://somenon-existentsite.brr")
for link in internal_links:
verify_link(host+link)
for link in external_links:
verify_link(link)
def check_links(args_obj, parser):
print("")
print("Checking links")
host = args_obj.host
# Check the home page
check_page(host, host)
# Check traits page
check_page(
host,
host+"/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P")
| Add tests to check links. | Add tests to check links.
| Python | agpl-3.0 | zsloan/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,DannyArends/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2 | + from __future__ import print_function
+ import re
import requests
+ from lxml.html import parse
+ from requests.exceptions import ConnectionError
+
+ def is_root_link(link):
+ pattern = re.compile("^/$")
+ return pattern.match(link)
+
+ def is_mailto_link(link):
+ pattern = re.compile("^mailto:.*")
+ return pattern.match(link)
+
+ def is_internal_link(link):
+ pattern = re.compile("^/.*")
+ return pattern.match(link)
+
+ def get_links(doc):
+ return filter(
+ lambda x: not (
+ is_root_link(x)
+ or is_mailto_link(x))
+ , map(lambda y: y.get("href")
+ , doc.cssselect("a")))
+
+ def verify_link(link):
+ try:
+ result = requests.get(link, timeout=20)
+ if result.status_code == 200:
+ print(link+" ==> OK")
+ else:
+ print("ERROR: link `"+link+"` failed with status "
+ , result.status_code)
+ except ConnectionError as ex:
+ print("ERROR: ", link, ex)
+
+ def check_page(host, start_url):
+ print("")
+ print("Checking links in page `"+start_url+"`")
+ doc = parse(start_url).getroot()
+ links = get_links(doc)
+ internal_links = filter(is_internal_link, links)
+ external_links = filter(lambda x: not is_internal_link(x), links)
+ external_links.append("http://somenon-existentsite.brr")
+ for link in internal_links:
+ verify_link(host+link)
+
+ for link in external_links:
+ verify_link(link)
def check_links(args_obj, parser):
print("")
print("Checking links")
+ host = args_obj.host
- print("########################")
- print("Not implemented yet.")
- print("This is supposed to check all links in the system.")
- print("########################")
+ # Check the home page
+ check_page(host, host)
+
+ # Check traits page
+ check_page(
+ host,
+ host+"/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P")
+ | Add tests to check links. | ## Code Before:
import requests
def check_links(args_obj, parser):
print("")
print("Checking links")
print("########################")
print("Not implemented yet.")
print("This is supposed to check all links in the system.")
print("########################")
## Instruction:
Add tests to check links.
## Code After:
from __future__ import print_function
import re
import requests
from lxml.html import parse
from requests.exceptions import ConnectionError
def is_root_link(link):
pattern = re.compile("^/$")
return pattern.match(link)
def is_mailto_link(link):
pattern = re.compile("^mailto:.*")
return pattern.match(link)
def is_internal_link(link):
pattern = re.compile("^/.*")
return pattern.match(link)
def get_links(doc):
return filter(
lambda x: not (
is_root_link(x)
or is_mailto_link(x))
, map(lambda y: y.get("href")
, doc.cssselect("a")))
def verify_link(link):
try:
result = requests.get(link, timeout=20)
if result.status_code == 200:
print(link+" ==> OK")
else:
print("ERROR: link `"+link+"` failed with status "
, result.status_code)
except ConnectionError as ex:
print("ERROR: ", link, ex)
def check_page(host, start_url):
print("")
print("Checking links in page `"+start_url+"`")
doc = parse(start_url).getroot()
links = get_links(doc)
internal_links = filter(is_internal_link, links)
external_links = filter(lambda x: not is_internal_link(x), links)
external_links.append("http://somenon-existentsite.brr")
for link in internal_links:
verify_link(host+link)
for link in external_links:
verify_link(link)
def check_links(args_obj, parser):
print("")
print("Checking links")
host = args_obj.host
# Check the home page
check_page(host, host)
# Check traits page
check_page(
host,
host+"/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P")
| + from __future__ import print_function
+ import re
import requests
+ from lxml.html import parse
+ from requests.exceptions import ConnectionError
+
+ def is_root_link(link):
+ pattern = re.compile("^/$")
+ return pattern.match(link)
+
+ def is_mailto_link(link):
+ pattern = re.compile("^mailto:.*")
+ return pattern.match(link)
+
+ def is_internal_link(link):
+ pattern = re.compile("^/.*")
+ return pattern.match(link)
+
+ def get_links(doc):
+ return filter(
+ lambda x: not (
+ is_root_link(x)
+ or is_mailto_link(x))
+ , map(lambda y: y.get("href")
+ , doc.cssselect("a")))
+
+ def verify_link(link):
+ try:
+ result = requests.get(link, timeout=20)
+ if result.status_code == 200:
+ print(link+" ==> OK")
+ else:
+ print("ERROR: link `"+link+"` failed with status "
+ , result.status_code)
+ except ConnectionError as ex:
+ print("ERROR: ", link, ex)
+
+ def check_page(host, start_url):
+ print("")
+ print("Checking links in page `"+start_url+"`")
+ doc = parse(start_url).getroot()
+ links = get_links(doc)
+ internal_links = filter(is_internal_link, links)
+ external_links = filter(lambda x: not is_internal_link(x), links)
+ external_links.append("http://somenon-existentsite.brr")
+ for link in internal_links:
+ verify_link(host+link)
+
+ for link in external_links:
+ verify_link(link)
def check_links(args_obj, parser):
print("")
print("Checking links")
- print("########################")
- print("Not implemented yet.")
- print("This is supposed to check all links in the system.")
- print("########################")
+ host = args_obj.host
+
+ # Check the home page
+ check_page(host, host)
+
+ # Check traits page
+ check_page(
+ host,
+ host+"/show_trait?trait_id=1435395_s_at&dataset=HC_M2_0606_P") |
fd50ce4b22b4f3d948a64ed400340c0fc744de49 | src/waldur_core/core/migrations/0008_changeemailrequest_uuid.py | src/waldur_core/core/migrations/0008_changeemailrequest_uuid.py | from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0007_changeemailrequest'),
]
operations = [
migrations.AddField(
model_name='changeemailrequest', name='uuid', field=models.UUIDField(),
),
]
| from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0007_changeemailrequest'),
]
operations = [
migrations.AddField(
model_name='changeemailrequest',
name='uuid',
field=models.UUIDField(null=True),
),
]
| Allow null values in UUID field. | Allow null values in UUID field.
| Python | mit | opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind,opennode/waldur-mastermind,opennode/nodeconductor-assembly-waldur,opennode/waldur-mastermind | from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0007_changeemailrequest'),
]
operations = [
migrations.AddField(
- model_name='changeemailrequest', name='uuid', field=models.UUIDField(),
+ model_name='changeemailrequest',
+ name='uuid',
+ field=models.UUIDField(null=True),
),
]
| Allow null values in UUID field. | ## Code Before:
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0007_changeemailrequest'),
]
operations = [
migrations.AddField(
model_name='changeemailrequest', name='uuid', field=models.UUIDField(),
),
]
## Instruction:
Allow null values in UUID field.
## Code After:
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0007_changeemailrequest'),
]
operations = [
migrations.AddField(
model_name='changeemailrequest',
name='uuid',
field=models.UUIDField(null=True),
),
]
| from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0007_changeemailrequest'),
]
operations = [
migrations.AddField(
- model_name='changeemailrequest', name='uuid', field=models.UUIDField(),
+ model_name='changeemailrequest',
+ name='uuid',
+ field=models.UUIDField(null=True),
),
] |
d106719a0b7bcbd87989bd36f618f90c4df02c46 | sequana/gui/browser.py | sequana/gui/browser.py | from PyQt5 import QtCore
from PyQt5.QtWebKitWidgets import QWebView
class MyBrowser(QWebView):
closing = QtCore.Signal()
def __init(self):
super().__init__()
self.loadFinished.connec(self._results_available)
def _results_available(self, ok):
print("results")
frame = self.page().mainFrame()
print(unicode(frame.toHtml()).encode('utf-8'))
def closeEvent(self, event):
print("done")
self.closing.emit()
| from PyQt5 import QtCore
from PyQt5.QtWebKitWidgets import QWebView
class MyBrowser(QWebView):
#closing = QtCore.Signal()
def __init(self):
super().__init__()
self.loadFinished.connec(self._results_available)
def _results_available(self, ok):
frame = self.page().mainFrame()
print(unicode(frame.toHtml()).encode('utf-8'))
def closeEvent(self, event):
#print("done")
pass
#self.closing.emit()
| Fix issue with signal on tars | Fix issue with signal on tars
| Python | bsd-3-clause | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana | from PyQt5 import QtCore
-
from PyQt5.QtWebKitWidgets import QWebView
class MyBrowser(QWebView):
- closing = QtCore.Signal()
+ #closing = QtCore.Signal()
+
def __init(self):
super().__init__()
self.loadFinished.connec(self._results_available)
+
def _results_available(self, ok):
- print("results")
frame = self.page().mainFrame()
print(unicode(frame.toHtml()).encode('utf-8'))
def closeEvent(self, event):
- print("done")
+ #print("done")
+ pass
- self.closing.emit()
+ #self.closing.emit()
| Fix issue with signal on tars | ## Code Before:
from PyQt5 import QtCore
from PyQt5.QtWebKitWidgets import QWebView
class MyBrowser(QWebView):
closing = QtCore.Signal()
def __init(self):
super().__init__()
self.loadFinished.connec(self._results_available)
def _results_available(self, ok):
print("results")
frame = self.page().mainFrame()
print(unicode(frame.toHtml()).encode('utf-8'))
def closeEvent(self, event):
print("done")
self.closing.emit()
## Instruction:
Fix issue with signal on tars
## Code After:
from PyQt5 import QtCore
from PyQt5.QtWebKitWidgets import QWebView
class MyBrowser(QWebView):
#closing = QtCore.Signal()
def __init(self):
super().__init__()
self.loadFinished.connec(self._results_available)
def _results_available(self, ok):
frame = self.page().mainFrame()
print(unicode(frame.toHtml()).encode('utf-8'))
def closeEvent(self, event):
#print("done")
pass
#self.closing.emit()
| from PyQt5 import QtCore
-
from PyQt5.QtWebKitWidgets import QWebView
class MyBrowser(QWebView):
- closing = QtCore.Signal()
+ #closing = QtCore.Signal()
? +
+
def __init(self):
super().__init__()
self.loadFinished.connec(self._results_available)
+
def _results_available(self, ok):
- print("results")
frame = self.page().mainFrame()
print(unicode(frame.toHtml()).encode('utf-8'))
def closeEvent(self, event):
- print("done")
+ #print("done")
? +
+ pass
- self.closing.emit()
+ #self.closing.emit()
? +
|
05fc957280fecbc99c8f58897a06e23dcc4b9453 | elections/uk/forms.py | elections/uk/forms.py |
from __future__ import unicode_literals
from django import forms
from django.core.exceptions import ValidationError
from candidates.mapit import BaseMapItException
from popolo.models import Area
from compat import text_type
from .mapit import get_areas_from_postcode
class PostcodeForm(forms.Form):
q = forms.CharField(
label='Enter a candidate name or postcode',
max_length=200,
widget=forms.TextInput(attrs={'placeholder': 'Enter a name'})
)
def clean_postcode(self):
postcode = self.cleaned_data['postcode']
try:
# Go to MapIt to check if this postcode is valid and
# contained in a constituency. (If it's valid then the
# result is cached, so this doesn't cause a double lookup.)
get_areas_from_postcode(postcode)
except BaseMapItException as e:
raise ValidationError(text_type(e))
return postcode
|
from __future__ import unicode_literals
from django import forms
from django.core.exceptions import ValidationError
from candidates.mapit import BaseMapItException
from popolo.models import Area
from compat import text_type
from .mapit import get_areas_from_postcode
class PostcodeForm(forms.Form):
q = forms.CharField(
label='Enter a candidate name or postcode',
max_length=200,
widget=forms.TextInput(attrs={'placeholder': 'Enter a name'})
)
def clean_q(self):
postcode = self.cleaned_data['q']
try:
# Go to MapIt to check if this postcode is valid and
# contained in a constituency. (If it's valid then the
# result is cached, so this doesn't cause a double lookup.)
get_areas_from_postcode(postcode)
except BaseMapItException as e:
raise ValidationError(text_type(e))
return postcode
| Fix the postcode form so that it's actually validating the input | Fix the postcode form so that it's actually validating the input
| Python | agpl-3.0 | DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative |
from __future__ import unicode_literals
from django import forms
from django.core.exceptions import ValidationError
from candidates.mapit import BaseMapItException
from popolo.models import Area
from compat import text_type
from .mapit import get_areas_from_postcode
class PostcodeForm(forms.Form):
q = forms.CharField(
label='Enter a candidate name or postcode',
max_length=200,
widget=forms.TextInput(attrs={'placeholder': 'Enter a name'})
)
- def clean_postcode(self):
+ def clean_q(self):
- postcode = self.cleaned_data['postcode']
+ postcode = self.cleaned_data['q']
try:
# Go to MapIt to check if this postcode is valid and
# contained in a constituency. (If it's valid then the
# result is cached, so this doesn't cause a double lookup.)
get_areas_from_postcode(postcode)
except BaseMapItException as e:
raise ValidationError(text_type(e))
return postcode
| Fix the postcode form so that it's actually validating the input | ## Code Before:
from __future__ import unicode_literals
from django import forms
from django.core.exceptions import ValidationError
from candidates.mapit import BaseMapItException
from popolo.models import Area
from compat import text_type
from .mapit import get_areas_from_postcode
class PostcodeForm(forms.Form):
q = forms.CharField(
label='Enter a candidate name or postcode',
max_length=200,
widget=forms.TextInput(attrs={'placeholder': 'Enter a name'})
)
def clean_postcode(self):
postcode = self.cleaned_data['postcode']
try:
# Go to MapIt to check if this postcode is valid and
# contained in a constituency. (If it's valid then the
# result is cached, so this doesn't cause a double lookup.)
get_areas_from_postcode(postcode)
except BaseMapItException as e:
raise ValidationError(text_type(e))
return postcode
## Instruction:
Fix the postcode form so that it's actually validating the input
## Code After:
from __future__ import unicode_literals
from django import forms
from django.core.exceptions import ValidationError
from candidates.mapit import BaseMapItException
from popolo.models import Area
from compat import text_type
from .mapit import get_areas_from_postcode
class PostcodeForm(forms.Form):
q = forms.CharField(
label='Enter a candidate name or postcode',
max_length=200,
widget=forms.TextInput(attrs={'placeholder': 'Enter a name'})
)
def clean_q(self):
postcode = self.cleaned_data['q']
try:
# Go to MapIt to check if this postcode is valid and
# contained in a constituency. (If it's valid then the
# result is cached, so this doesn't cause a double lookup.)
get_areas_from_postcode(postcode)
except BaseMapItException as e:
raise ValidationError(text_type(e))
return postcode
|
from __future__ import unicode_literals
from django import forms
from django.core.exceptions import ValidationError
from candidates.mapit import BaseMapItException
from popolo.models import Area
from compat import text_type
from .mapit import get_areas_from_postcode
class PostcodeForm(forms.Form):
q = forms.CharField(
label='Enter a candidate name or postcode',
max_length=200,
widget=forms.TextInput(attrs={'placeholder': 'Enter a name'})
)
- def clean_postcode(self):
? ^^^^^^^^
+ def clean_q(self):
? ^
- postcode = self.cleaned_data['postcode']
? ^^^^^^^^
+ postcode = self.cleaned_data['q']
? ^
try:
# Go to MapIt to check if this postcode is valid and
# contained in a constituency. (If it's valid then the
# result is cached, so this doesn't cause a double lookup.)
get_areas_from_postcode(postcode)
except BaseMapItException as e:
raise ValidationError(text_type(e))
return postcode |
f41adb3b11a572251949778ed3fa49cd0c3901c7 | AFQ/tests/test_csd.py | AFQ/tests/test_csd.py | import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data()
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 8]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
| import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data('small_64D')
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 6]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
| Replace the test data set with this one. | Replace the test data set with this one.
| Python | bsd-2-clause | arokem/pyAFQ,arokem/pyAFQ,yeatmanlab/pyAFQ,yeatmanlab/pyAFQ | import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
- fdata, fbval, fbvec = dpd.get_data()
+ fdata, fbval, fbvec = dpd.get_data('small_64D')
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
- for sh_order in [4, 8]:
+ for sh_order in [4, 6]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
| Replace the test data set with this one. | ## Code Before:
import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data()
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 8]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
## Instruction:
Replace the test data set with this one.
## Code After:
import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
fdata, fbval, fbvec = dpd.get_data('small_64D')
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
for sh_order in [4, 6]:
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1]))
| import os.path as op
import numpy as np
import numpy.testing as npt
import nibabel as nib
import nibabel.tmpdirs as nbtmp
import dipy.data as dpd
from dipy.reconst.shm import calculate_max_order
from AFQ import csd
def test_fit_csd():
- fdata, fbval, fbvec = dpd.get_data()
+ fdata, fbval, fbvec = dpd.get_data('small_64D')
? +++++++++++
with nbtmp.InTemporaryDirectory() as tmpdir:
# Convert from npy to txt:
bvals = np.load(fbval)
bvecs = np.load(fbvec)
np.savetxt(op.join(tmpdir, 'bvals.txt'), bvals)
np.savetxt(op.join(tmpdir, 'bvecs.txt'), bvecs)
- for sh_order in [4, 8]:
? ^
+ for sh_order in [4, 6]:
? ^
fname = csd.fit_csd(fdata, op.join(tmpdir, 'bvals.txt'),
op.join(tmpdir, 'bvecs.txt'),
out_dir=tmpdir, sh_order=sh_order)
npt.assert_(op.exists(fname))
sh_coeffs_img = nib.load(fname)
npt.assert_equal(sh_order,
calculate_max_order(sh_coeffs_img.shape[-1])) |
ba84740e7ba0edd709c9cd076a7dce83a6c91a30 | research/mlt_quality_research.py | research/mlt_quality_research.py |
import pprint
from elasticsearch import Elasticsearch
'''Inside ipython:
[(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'])for r in es.mlt(index='bhp10', doc_type='places', id=71433, mlt_fields=related_fields, search_types=['places','personalities','photoUnits','familyNames'], search_size=40)['hits']['hits']]
'''
def get_related(es, doc_id, index, doc_type, mlt_fields, target_collections, limit):
return [(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'])for r in es.mlt(index=index, doc_type=doc_type, id=doc_id, mlt_fields=mlt_fields, search_types=target_collections, search_size=limit)['hits']['hits']]
if __name__ == '__main__':
es = Elasticsearch()
mlt_fields = ['Header.En', 'UnitText1.En', 'Header.He', 'UnitText1.He']
target_collections = ['places','personalities','photoUnits','familyNames']
# For Paris:
pprint.pprint (get_related(es, 72312, 'bhp10', 'places', mlt_fields, target_collections, 40))
|
import pprint
from elasticsearch import Elasticsearch
'''Inside ipython:
[(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'])for r in es.mlt(index='bhp10', doc_type='places', id=71433, mlt_fields=related_fields, search_types=['places','personalities','photoUnits','familyNames'], search_size=40)['hits']['hits']]
'''
'''Try with search:
es.search(doc_type='', size=1, q='UnitText1.En:Einstein')
Or even better:
[(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'], r['_id'])for r in es.search(doc_type=['places'], size=40, q='UnitText1.En:Albert Einstein')['hits']['hits']]
'''
def get_related(es, doc_id, index, doc_type, mlt_fields, target_collections, limit):
return [(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'])for r in es.mlt(index=index, doc_type=doc_type, id=doc_id, mlt_fields=mlt_fields, search_types=target_collections, search_size=limit)['hits']['hits']]
if __name__ == '__main__':
es = Elasticsearch()
mlt_fields = ['Header.En', 'UnitText1.En', 'Header.He', 'UnitText1.He']
target_collections = ['places','personalities','photoUnits','familyNames']
# For Paris:
pprint.pprint (get_related(es, 72312, 'bhp10', 'places', mlt_fields, target_collections, 40))
| Add ES search example, similar to Mongo related FTS | Add ES search example, similar to Mongo related FTS
| Python | agpl-3.0 | Beit-Hatfutsot/dbs-back,Beit-Hatfutsot/dbs-back,Beit-Hatfutsot/dbs-back,Beit-Hatfutsot/dbs-back |
import pprint
from elasticsearch import Elasticsearch
'''Inside ipython:
[(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'])for r in es.mlt(index='bhp10', doc_type='places', id=71433, mlt_fields=related_fields, search_types=['places','personalities','photoUnits','familyNames'], search_size=40)['hits']['hits']]
+ '''
+
+ '''Try with search:
+ es.search(doc_type='', size=1, q='UnitText1.En:Einstein')
+ Or even better:
+ [(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'], r['_id'])for r in es.search(doc_type=['places'], size=40, q='UnitText1.En:Albert Einstein')['hits']['hits']]
'''
def get_related(es, doc_id, index, doc_type, mlt_fields, target_collections, limit):
return [(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'])for r in es.mlt(index=index, doc_type=doc_type, id=doc_id, mlt_fields=mlt_fields, search_types=target_collections, search_size=limit)['hits']['hits']]
if __name__ == '__main__':
es = Elasticsearch()
mlt_fields = ['Header.En', 'UnitText1.En', 'Header.He', 'UnitText1.He']
target_collections = ['places','personalities','photoUnits','familyNames']
# For Paris:
pprint.pprint (get_related(es, 72312, 'bhp10', 'places', mlt_fields, target_collections, 40))
| Add ES search example, similar to Mongo related FTS | ## Code Before:
import pprint
from elasticsearch import Elasticsearch
'''Inside ipython:
[(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'])for r in es.mlt(index='bhp10', doc_type='places', id=71433, mlt_fields=related_fields, search_types=['places','personalities','photoUnits','familyNames'], search_size=40)['hits']['hits']]
'''
def get_related(es, doc_id, index, doc_type, mlt_fields, target_collections, limit):
return [(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'])for r in es.mlt(index=index, doc_type=doc_type, id=doc_id, mlt_fields=mlt_fields, search_types=target_collections, search_size=limit)['hits']['hits']]
if __name__ == '__main__':
es = Elasticsearch()
mlt_fields = ['Header.En', 'UnitText1.En', 'Header.He', 'UnitText1.He']
target_collections = ['places','personalities','photoUnits','familyNames']
# For Paris:
pprint.pprint (get_related(es, 72312, 'bhp10', 'places', mlt_fields, target_collections, 40))
## Instruction:
Add ES search example, similar to Mongo related FTS
## Code After:
import pprint
from elasticsearch import Elasticsearch
'''Inside ipython:
[(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'])for r in es.mlt(index='bhp10', doc_type='places', id=71433, mlt_fields=related_fields, search_types=['places','personalities','photoUnits','familyNames'], search_size=40)['hits']['hits']]
'''
'''Try with search:
es.search(doc_type='', size=1, q='UnitText1.En:Einstein')
Or even better:
[(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'], r['_id'])for r in es.search(doc_type=['places'], size=40, q='UnitText1.En:Albert Einstein')['hits']['hits']]
'''
def get_related(es, doc_id, index, doc_type, mlt_fields, target_collections, limit):
return [(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'])for r in es.mlt(index=index, doc_type=doc_type, id=doc_id, mlt_fields=mlt_fields, search_types=target_collections, search_size=limit)['hits']['hits']]
if __name__ == '__main__':
es = Elasticsearch()
mlt_fields = ['Header.En', 'UnitText1.En', 'Header.He', 'UnitText1.He']
target_collections = ['places','personalities','photoUnits','familyNames']
# For Paris:
pprint.pprint (get_related(es, 72312, 'bhp10', 'places', mlt_fields, target_collections, 40))
|
import pprint
from elasticsearch import Elasticsearch
'''Inside ipython:
[(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'])for r in es.mlt(index='bhp10', doc_type='places', id=71433, mlt_fields=related_fields, search_types=['places','personalities','photoUnits','familyNames'], search_size=40)['hits']['hits']]
+ '''
+
+ '''Try with search:
+ es.search(doc_type='', size=1, q='UnitText1.En:Einstein')
+ Or even better:
+ [(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'], r['_id'])for r in es.search(doc_type=['places'], size=40, q='UnitText1.En:Albert Einstein')['hits']['hits']]
'''
def get_related(es, doc_id, index, doc_type, mlt_fields, target_collections, limit):
return [(r['_score'], r['_source']['Header']['En'], r['_source']['UnitTypeDesc'])for r in es.mlt(index=index, doc_type=doc_type, id=doc_id, mlt_fields=mlt_fields, search_types=target_collections, search_size=limit)['hits']['hits']]
if __name__ == '__main__':
es = Elasticsearch()
mlt_fields = ['Header.En', 'UnitText1.En', 'Header.He', 'UnitText1.He']
target_collections = ['places','personalities','photoUnits','familyNames']
# For Paris:
pprint.pprint (get_related(es, 72312, 'bhp10', 'places', mlt_fields, target_collections, 40))
|
4f6400e9ecf9bbc1cee62567673c619f9a975f95 | lib/python/opendiamond/bundle.py | lib/python/opendiamond/bundle.py |
import os
import subprocess
import zipfile
def make_zipfile(path, manifest, files):
'''manifest is a string, files is a dict of filename => path pairs'''
if os.path.exists(path):
raise Exception("Refusing to clobber destination file")
zip = zipfile.ZipFile(path, mode = 'w', compression = zipfile.ZIP_DEFLATED)
zip.writestr('opendiamond-manifest.txt', manifest)
for name, path in files.items():
zip.write(path, name)
zip.close()
def bundle_python(out, filter, blob = None):
try:
proc = subprocess.Popen(['python', os.path.realpath(filter),
'--get-manifest'], stdout = subprocess.PIPE)
except OSError:
raise Exception("Couldn't execute filter program")
manifest = proc.communicate()[0]
if proc.returncode != 0:
raise Exception("Couldn't generate filter manifest")
files = {'filter': filter}
if blob is not None:
files['blob'] = blob
make_zipfile(out, manifest, files)
|
import os
import subprocess
import zipfile
def make_zipfile(path, manifest, files):
'''manifest is a string, files is a dict of filename => path pairs'''
zip = zipfile.ZipFile(path, mode = 'w', compression = zipfile.ZIP_DEFLATED)
zip.writestr('opendiamond-manifest.txt', manifest)
for name, path in files.items():
zip.write(path, name)
zip.close()
def bundle_python(out, filter, blob = None):
try:
proc = subprocess.Popen(['python', os.path.realpath(filter),
'--get-manifest'], stdout = subprocess.PIPE)
except OSError:
raise Exception("Couldn't execute filter program")
manifest = proc.communicate()[0]
if proc.returncode != 0:
raise Exception("Couldn't generate filter manifest")
files = {'filter': filter}
if blob is not None:
files['blob'] = blob
make_zipfile(out, manifest, files)
| Allow make_zipfile() to clobber the destination file | Allow make_zipfile() to clobber the destination file
| Python | epl-1.0 | cmusatyalab/opendiamond,cmusatyalab/opendiamond,cmusatyalab/opendiamond,cmusatyalab/opendiamond,cmusatyalab/opendiamond |
import os
import subprocess
import zipfile
def make_zipfile(path, manifest, files):
'''manifest is a string, files is a dict of filename => path pairs'''
- if os.path.exists(path):
- raise Exception("Refusing to clobber destination file")
zip = zipfile.ZipFile(path, mode = 'w', compression = zipfile.ZIP_DEFLATED)
zip.writestr('opendiamond-manifest.txt', manifest)
for name, path in files.items():
zip.write(path, name)
zip.close()
def bundle_python(out, filter, blob = None):
try:
proc = subprocess.Popen(['python', os.path.realpath(filter),
'--get-manifest'], stdout = subprocess.PIPE)
except OSError:
raise Exception("Couldn't execute filter program")
manifest = proc.communicate()[0]
if proc.returncode != 0:
raise Exception("Couldn't generate filter manifest")
files = {'filter': filter}
if blob is not None:
files['blob'] = blob
make_zipfile(out, manifest, files)
| Allow make_zipfile() to clobber the destination file | ## Code Before:
import os
import subprocess
import zipfile
def make_zipfile(path, manifest, files):
'''manifest is a string, files is a dict of filename => path pairs'''
if os.path.exists(path):
raise Exception("Refusing to clobber destination file")
zip = zipfile.ZipFile(path, mode = 'w', compression = zipfile.ZIP_DEFLATED)
zip.writestr('opendiamond-manifest.txt', manifest)
for name, path in files.items():
zip.write(path, name)
zip.close()
def bundle_python(out, filter, blob = None):
try:
proc = subprocess.Popen(['python', os.path.realpath(filter),
'--get-manifest'], stdout = subprocess.PIPE)
except OSError:
raise Exception("Couldn't execute filter program")
manifest = proc.communicate()[0]
if proc.returncode != 0:
raise Exception("Couldn't generate filter manifest")
files = {'filter': filter}
if blob is not None:
files['blob'] = blob
make_zipfile(out, manifest, files)
## Instruction:
Allow make_zipfile() to clobber the destination file
## Code After:
import os
import subprocess
import zipfile
def make_zipfile(path, manifest, files):
'''manifest is a string, files is a dict of filename => path pairs'''
zip = zipfile.ZipFile(path, mode = 'w', compression = zipfile.ZIP_DEFLATED)
zip.writestr('opendiamond-manifest.txt', manifest)
for name, path in files.items():
zip.write(path, name)
zip.close()
def bundle_python(out, filter, blob = None):
try:
proc = subprocess.Popen(['python', os.path.realpath(filter),
'--get-manifest'], stdout = subprocess.PIPE)
except OSError:
raise Exception("Couldn't execute filter program")
manifest = proc.communicate()[0]
if proc.returncode != 0:
raise Exception("Couldn't generate filter manifest")
files = {'filter': filter}
if blob is not None:
files['blob'] = blob
make_zipfile(out, manifest, files)
|
import os
import subprocess
import zipfile
def make_zipfile(path, manifest, files):
'''manifest is a string, files is a dict of filename => path pairs'''
- if os.path.exists(path):
- raise Exception("Refusing to clobber destination file")
zip = zipfile.ZipFile(path, mode = 'w', compression = zipfile.ZIP_DEFLATED)
zip.writestr('opendiamond-manifest.txt', manifest)
for name, path in files.items():
zip.write(path, name)
zip.close()
def bundle_python(out, filter, blob = None):
try:
proc = subprocess.Popen(['python', os.path.realpath(filter),
'--get-manifest'], stdout = subprocess.PIPE)
except OSError:
raise Exception("Couldn't execute filter program")
manifest = proc.communicate()[0]
if proc.returncode != 0:
raise Exception("Couldn't generate filter manifest")
files = {'filter': filter}
if blob is not None:
files['blob'] = blob
make_zipfile(out, manifest, files) |
12e924cd617811cb763857a9abf14e8b3487f5a1 | ckanext/nhm/routes/bbcm.py | ckanext/nhm/routes/bbcm.py |
from flask import Blueprint
from ckan.plugins import toolkit
# bbcm = big butterfly count map :)
# create a flask blueprint with a prefix
blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__,
url_prefix=u'/big-butterfly-count-map')
@blueprint.route(u'')
@blueprint.route(u'/')
def bbcm():
'''
Render the big butterfly count map page.
'''
return toolkit.render(u'bbcm.html', {})
|
from flask import Blueprint
from ckan.plugins import toolkit
# bbcm = big butterfly count map :)
# create a flask blueprint
blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__)
@blueprint.route(u'/big-butterfly-count-map')
def bbcm():
'''
Render the big butterfly count map page.
'''
return toolkit.render(u'bbcm.html', {})
| Allow the url to be accessed with or without a / on the end | Allow the url to be accessed with or without a / on the end
| Python | mit | NaturalHistoryMuseum/ckanext-nhm,NaturalHistoryMuseum/ckanext-nhm,NaturalHistoryMuseum/ckanext-nhm |
from flask import Blueprint
from ckan.plugins import toolkit
# bbcm = big butterfly count map :)
- # create a flask blueprint with a prefix
+ # create a flask blueprint
- blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__,
+ blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__)
- url_prefix=u'/big-butterfly-count-map')
+ @blueprint.route(u'/big-butterfly-count-map')
- @blueprint.route(u'')
- @blueprint.route(u'/')
def bbcm():
'''
Render the big butterfly count map page.
'''
return toolkit.render(u'bbcm.html', {})
| Allow the url to be accessed with or without a / on the end | ## Code Before:
from flask import Blueprint
from ckan.plugins import toolkit
# bbcm = big butterfly count map :)
# create a flask blueprint with a prefix
blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__,
url_prefix=u'/big-butterfly-count-map')
@blueprint.route(u'')
@blueprint.route(u'/')
def bbcm():
'''
Render the big butterfly count map page.
'''
return toolkit.render(u'bbcm.html', {})
## Instruction:
Allow the url to be accessed with or without a / on the end
## Code After:
from flask import Blueprint
from ckan.plugins import toolkit
# bbcm = big butterfly count map :)
# create a flask blueprint
blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__)
@blueprint.route(u'/big-butterfly-count-map')
def bbcm():
'''
Render the big butterfly count map page.
'''
return toolkit.render(u'bbcm.html', {})
|
from flask import Blueprint
from ckan.plugins import toolkit
# bbcm = big butterfly count map :)
- # create a flask blueprint with a prefix
? --------------
+ # create a flask blueprint
- blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__,
? ^
+ blueprint = Blueprint(name=u'big-butterfly-count-map', import_name=__name__)
? ^
- url_prefix=u'/big-butterfly-count-map')
+ @blueprint.route(u'/big-butterfly-count-map')
- @blueprint.route(u'')
- @blueprint.route(u'/')
def bbcm():
'''
Render the big butterfly count map page.
'''
return toolkit.render(u'bbcm.html', {}) |
da98272c3b19828dabbbb339f025c9d3dd4a949e | relay_api/core/relay.py | relay_api/core/relay.py | import RPi.GPIO as GPIO
class relay():
def __init__(self, gpio_num):
self.gpio = gpio_num
GPIO.setmode(GPIO.BCM)
try:
GPIO.input(self.gpio)
raise LookupError("Relay is already in use!")
except RuntimeError:
GPIO.setup(self.gpio, GPIO.OUT)
except ValueError:
raise LookupError("Relay number invalid!")
self.off()
def on(self):
GPIO.output(self.gpio, GPIO.HIGH)
self.state = True
def off(self):
GPIO.output(self.gpio, GPIO.LOW)
self.state = False
def get_state(self):
return self.state
def cleanup(self):
GPIO.cleanup(self.gpio)
| import RPi.GPIO as GPIO
MAX_RELAY_GPIO = 27
class relay():
def __init__(self, gpio_num):
if gpio_num not in range(MAX_RELAY_GPIO + 1):
raise LookupError("Relay GPIO invalid! Use one between 0 - " +
str(MAX_RELAY_GPIO))
self.gpio = gpio_num
GPIO.setmode(GPIO.BCM)
try:
GPIO.input(self.gpio)
raise LookupError("Relay GPIO is already in use!")
except RuntimeError:
GPIO.setup(self.gpio, GPIO.OUT)
self.off()
def on(self):
GPIO.output(self.gpio, GPIO.HIGH)
self.state = True
def off(self):
GPIO.output(self.gpio, GPIO.LOW)
self.state = False
def get_state(self):
return self.state
def cleanup(self):
GPIO.cleanup(self.gpio)
| Change the way that GPIO is verified | Change the way that GPIO is verified
| Python | mit | pahumadad/raspi-relay-api | import RPi.GPIO as GPIO
+
+ MAX_RELAY_GPIO = 27
class relay():
def __init__(self, gpio_num):
+ if gpio_num not in range(MAX_RELAY_GPIO + 1):
+ raise LookupError("Relay GPIO invalid! Use one between 0 - " +
+ str(MAX_RELAY_GPIO))
+
self.gpio = gpio_num
GPIO.setmode(GPIO.BCM)
+
try:
GPIO.input(self.gpio)
- raise LookupError("Relay is already in use!")
+ raise LookupError("Relay GPIO is already in use!")
except RuntimeError:
GPIO.setup(self.gpio, GPIO.OUT)
+
- except ValueError:
- raise LookupError("Relay number invalid!")
self.off()
def on(self):
GPIO.output(self.gpio, GPIO.HIGH)
self.state = True
def off(self):
GPIO.output(self.gpio, GPIO.LOW)
self.state = False
def get_state(self):
return self.state
def cleanup(self):
GPIO.cleanup(self.gpio)
| Change the way that GPIO is verified | ## Code Before:
import RPi.GPIO as GPIO
class relay():
def __init__(self, gpio_num):
self.gpio = gpio_num
GPIO.setmode(GPIO.BCM)
try:
GPIO.input(self.gpio)
raise LookupError("Relay is already in use!")
except RuntimeError:
GPIO.setup(self.gpio, GPIO.OUT)
except ValueError:
raise LookupError("Relay number invalid!")
self.off()
def on(self):
GPIO.output(self.gpio, GPIO.HIGH)
self.state = True
def off(self):
GPIO.output(self.gpio, GPIO.LOW)
self.state = False
def get_state(self):
return self.state
def cleanup(self):
GPIO.cleanup(self.gpio)
## Instruction:
Change the way that GPIO is verified
## Code After:
import RPi.GPIO as GPIO
MAX_RELAY_GPIO = 27
class relay():
def __init__(self, gpio_num):
if gpio_num not in range(MAX_RELAY_GPIO + 1):
raise LookupError("Relay GPIO invalid! Use one between 0 - " +
str(MAX_RELAY_GPIO))
self.gpio = gpio_num
GPIO.setmode(GPIO.BCM)
try:
GPIO.input(self.gpio)
raise LookupError("Relay GPIO is already in use!")
except RuntimeError:
GPIO.setup(self.gpio, GPIO.OUT)
self.off()
def on(self):
GPIO.output(self.gpio, GPIO.HIGH)
self.state = True
def off(self):
GPIO.output(self.gpio, GPIO.LOW)
self.state = False
def get_state(self):
return self.state
def cleanup(self):
GPIO.cleanup(self.gpio)
| import RPi.GPIO as GPIO
+
+ MAX_RELAY_GPIO = 27
class relay():
def __init__(self, gpio_num):
+ if gpio_num not in range(MAX_RELAY_GPIO + 1):
+ raise LookupError("Relay GPIO invalid! Use one between 0 - " +
+ str(MAX_RELAY_GPIO))
+
self.gpio = gpio_num
GPIO.setmode(GPIO.BCM)
+
try:
GPIO.input(self.gpio)
- raise LookupError("Relay is already in use!")
+ raise LookupError("Relay GPIO is already in use!")
? +++++
except RuntimeError:
GPIO.setup(self.gpio, GPIO.OUT)
+
- except ValueError:
- raise LookupError("Relay number invalid!")
self.off()
def on(self):
GPIO.output(self.gpio, GPIO.HIGH)
self.state = True
def off(self):
GPIO.output(self.gpio, GPIO.LOW)
self.state = False
def get_state(self):
return self.state
def cleanup(self):
GPIO.cleanup(self.gpio) |
a2d6c32305577640bcd111fa1011bea61d7ca9e7 | packages/mono-llvm-2-10.py | packages/mono-llvm-2-10.py | GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = 'CFLAGS="-m32" CPPFLAGS="-m32" CXXFLAGS="-m32" LDFLAGS="-m32" ./configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
| GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = './configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
| Fix llvm so it doesn't corrupt the env when configuring itself | Fix llvm so it doesn't corrupt the env when configuring itself
| Python | mit | BansheeMediaPlayer/bockbuild,mono/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild | GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
- configure = 'CFLAGS="-m32" CPPFLAGS="-m32" CXXFLAGS="-m32" LDFLAGS="-m32" ./configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
+ configure = './configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
| Fix llvm so it doesn't corrupt the env when configuring itself | ## Code Before:
GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = 'CFLAGS="-m32" CPPFLAGS="-m32" CXXFLAGS="-m32" LDFLAGS="-m32" ./configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
## Instruction:
Fix llvm so it doesn't corrupt the env when configuring itself
## Code After:
GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
configure = './configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
)
| GitHubTarballPackage ('mono', 'llvm', '2.10', '943edbc1a93df204d687d82d34d2b2bdf9978f4e',
- configure = 'CFLAGS="-m32" CPPFLAGS="-m32" CXXFLAGS="-m32" LDFLAGS="-m32" ./configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
? -------------------------------------------------------------
+ configure = './configure --prefix="%{prefix}" --enable-optimized --enable-targets="x86 x86_64" --target=i386-apple-darwin10.8.0',
override_properties = { 'make': 'make' }
) |
1bd344a3ccda43f4ac1d4b94b1a18fc816c9b6ae | slurmscale/jobs/jobs.py | slurmscale/jobs/jobs.py | """Get info about jobs running on this cluster."""
import pyslurm
from job import Job
class Jobs(object):
"""A service object to inspect jobs."""
@property
def _jobs(self):
"""Fetch fresh data."""
return pyslurm.job().get()
def list(self):
"""List the current jobs on the cluster."""
current_jobs = self._jobs
return [Job(current_jobs[j]) for j in current_jobs]
| """Get info about jobs running on this cluster."""
import pyslurm
from job import Job
class Jobs(object):
"""A service object to inspect jobs."""
@property
def _jobs(self):
"""Fetch fresh data."""
return pyslurm.job().get()
def list(self, states=None):
"""
List the current jobs on the cluster.
:type states: List of ``str``
:param states: Filter jobs in the given states. Available states are
``PENDING``, ``RUNNING``, ``CANCELLED``, ``CONFIGURING``,
``COMPLETING``, ``COMPLETED``, ``FAILED``, ``TIMEOUT``,
``PREEMPTED``, ``NODE_FAIL`` and ``SPECIAL_EXIT``.
:rtype: List of ``Job``
:return: A list of current cluster jobs, possibly filtered by supplied
states.
"""
current_jobs = self._jobs
jobs = []
if states:
for i in current_jobs:
if current_jobs[i].get('job_state', '') in states:
jobs.append(Job(current_jobs[i]))
else:
jobs = [Job(current_jobs[j]) for j in current_jobs]
return jobs
| Add ability to filter job list by job state | Add ability to filter job list by job state
| Python | mit | afgane/slurmscale,afgane/slurmscale | """Get info about jobs running on this cluster."""
import pyslurm
from job import Job
class Jobs(object):
"""A service object to inspect jobs."""
@property
def _jobs(self):
"""Fetch fresh data."""
return pyslurm.job().get()
- def list(self):
+ def list(self, states=None):
+ """
- """List the current jobs on the cluster."""
+ List the current jobs on the cluster.
+
+ :type states: List of ``str``
+ :param states: Filter jobs in the given states. Available states are
+ ``PENDING``, ``RUNNING``, ``CANCELLED``, ``CONFIGURING``,
+ ``COMPLETING``, ``COMPLETED``, ``FAILED``, ``TIMEOUT``,
+ ``PREEMPTED``, ``NODE_FAIL`` and ``SPECIAL_EXIT``.
+
+ :rtype: List of ``Job``
+ :return: A list of current cluster jobs, possibly filtered by supplied
+ states.
+ """
current_jobs = self._jobs
+ jobs = []
+ if states:
+ for i in current_jobs:
+ if current_jobs[i].get('job_state', '') in states:
+ jobs.append(Job(current_jobs[i]))
+ else:
- return [Job(current_jobs[j]) for j in current_jobs]
+ jobs = [Job(current_jobs[j]) for j in current_jobs]
+ return jobs
| Add ability to filter job list by job state | ## Code Before:
"""Get info about jobs running on this cluster."""
import pyslurm
from job import Job
class Jobs(object):
"""A service object to inspect jobs."""
@property
def _jobs(self):
"""Fetch fresh data."""
return pyslurm.job().get()
def list(self):
"""List the current jobs on the cluster."""
current_jobs = self._jobs
return [Job(current_jobs[j]) for j in current_jobs]
## Instruction:
Add ability to filter job list by job state
## Code After:
"""Get info about jobs running on this cluster."""
import pyslurm
from job import Job
class Jobs(object):
"""A service object to inspect jobs."""
@property
def _jobs(self):
"""Fetch fresh data."""
return pyslurm.job().get()
def list(self, states=None):
"""
List the current jobs on the cluster.
:type states: List of ``str``
:param states: Filter jobs in the given states. Available states are
``PENDING``, ``RUNNING``, ``CANCELLED``, ``CONFIGURING``,
``COMPLETING``, ``COMPLETED``, ``FAILED``, ``TIMEOUT``,
``PREEMPTED``, ``NODE_FAIL`` and ``SPECIAL_EXIT``.
:rtype: List of ``Job``
:return: A list of current cluster jobs, possibly filtered by supplied
states.
"""
current_jobs = self._jobs
jobs = []
if states:
for i in current_jobs:
if current_jobs[i].get('job_state', '') in states:
jobs.append(Job(current_jobs[i]))
else:
jobs = [Job(current_jobs[j]) for j in current_jobs]
return jobs
| """Get info about jobs running on this cluster."""
import pyslurm
from job import Job
class Jobs(object):
"""A service object to inspect jobs."""
@property
def _jobs(self):
"""Fetch fresh data."""
return pyslurm.job().get()
- def list(self):
+ def list(self, states=None):
+ """
- """List the current jobs on the cluster."""
? --- ---
+ List the current jobs on the cluster.
+
+ :type states: List of ``str``
+ :param states: Filter jobs in the given states. Available states are
+ ``PENDING``, ``RUNNING``, ``CANCELLED``, ``CONFIGURING``,
+ ``COMPLETING``, ``COMPLETED``, ``FAILED``, ``TIMEOUT``,
+ ``PREEMPTED``, ``NODE_FAIL`` and ``SPECIAL_EXIT``.
+
+ :rtype: List of ``Job``
+ :return: A list of current cluster jobs, possibly filtered by supplied
+ states.
+ """
current_jobs = self._jobs
+ jobs = []
+ if states:
+ for i in current_jobs:
+ if current_jobs[i].get('job_state', '') in states:
+ jobs.append(Job(current_jobs[i]))
+ else:
- return [Job(current_jobs[j]) for j in current_jobs]
? ^^^^^^
+ jobs = [Job(current_jobs[j]) for j in current_jobs]
? ^^^^^^^^^^
+ return jobs |
7f0ab829f677a5d91d5b24dc6181a2519e25a934 | notes/managers.py | notes/managers.py |
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
return notes
|
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author, templates=False):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
if not templates:
notes = notes.exclude(tags__name="system:template")
return notes
| Support hiding note and notebook template notes | Support hiding note and notebook template notes
| Python | agpl-3.0 | syskill/snowy,NoUsername/PrivateNotesExperimental,GNOME/snowy,jaredjennings/snowy,jaredjennings/snowy,sandyarmstrong/snowy,jaredjennings/snowy,NoUsername/PrivateNotesExperimental,widox/snowy,syskill/snowy,leonhandreke/snowy,leonhandreke/snowy,jaredjennings/snowy,widox/snowy,GNOME/snowy,sandyarmstrong/snowy |
from django.db import models
class NoteManager(models.Manager):
- def user_viewable(self, request_user, author):
+ def user_viewable(self, request_user, author, templates=False):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
- notes = notes.filter(permissions=1)
+ notes = notes.filter(permissions=1)
+ if not templates:
+ notes = notes.exclude(tags__name="system:template")
return notes
| Support hiding note and notebook template notes | ## Code Before:
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
return notes
## Instruction:
Support hiding note and notebook template notes
## Code After:
from django.db import models
class NoteManager(models.Manager):
def user_viewable(self, request_user, author, templates=False):
notes = self.filter(author=author)
if request_user != author:
# Public notes only
notes = notes.filter(permissions=1)
if not templates:
notes = notes.exclude(tags__name="system:template")
return notes
|
from django.db import models
class NoteManager(models.Manager):
- def user_viewable(self, request_user, author):
+ def user_viewable(self, request_user, author, templates=False):
? +++++++++++++++++
notes = self.filter(author=author)
if request_user != author:
# Public notes only
- notes = notes.filter(permissions=1)
? -
+ notes = notes.filter(permissions=1)
+ if not templates:
+ notes = notes.exclude(tags__name="system:template")
return notes |
fe4bc023d207f219e487badc668f81ce7485ba5a | sympy/utilities/source.py | sympy/utilities/source.py |
from __future__ import print_function, division
import inspect
def source(object):
"""
Prints the source code of a given object.
"""
print('In file: %s' % inspect.getsourcefile(object))
print(inspect.getsource(object))
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(
__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError(
"'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot + 1:]
|
from __future__ import print_function, division
import inspect
def source(object):
"""
Prints the source code of a given object.
"""
print('In file: %s' % inspect.getsourcefile(object))
print(inspect.getsource(object))
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(
__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError(
"'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot + 1:]
| Remove a redundant line from get_class | Remove a redundant line from get_class
| Python | bsd-3-clause | emon10005/sympy,ahhda/sympy,kaichogami/sympy,mafiya69/sympy,Designist/sympy,aktech/sympy,Titan-C/sympy,jerli/sympy,Davidjohnwilson/sympy,sampadsaha5/sympy,hargup/sympy,drufat/sympy,Vishluck/sympy,maniteja123/sympy,wanglongqi/sympy,jaimahajan1997/sympy,yukoba/sympy,AkademieOlympia/sympy,ChristinaZografou/sympy,Titan-C/sympy,souravsingh/sympy,yashsharan/sympy,kaushik94/sympy,VaibhavAgarwalVA/sympy,mcdaniel67/sympy,kevalds51/sympy,emon10005/sympy,rahuldan/sympy,Davidjohnwilson/sympy,AkademieOlympia/sympy,pandeyadarsh/sympy,ga7g08/sympy,Vishluck/sympy,cswiercz/sympy,jaimahajan1997/sympy,kevalds51/sympy,Vishluck/sympy,drufat/sympy,Designist/sympy,moble/sympy,lindsayad/sympy,postvakje/sympy,cswiercz/sympy,debugger22/sympy,mafiya69/sympy,debugger22/sympy,saurabhjn76/sympy,iamutkarshtiwari/sympy,yashsharan/sympy,Curious72/sympy,farhaanbukhsh/sympy,mcdaniel67/sympy,yukoba/sympy,abhiii5459/sympy,sahmed95/sympy,skidzo/sympy,atreyv/sympy,Curious72/sympy,maniteja123/sympy,ga7g08/sympy,shikil/sympy,atreyv/sympy,maniteja123/sympy,saurabhjn76/sympy,skidzo/sympy,ahhda/sympy,Shaswat27/sympy,oliverlee/sympy,Arafatk/sympy,sahmed95/sympy,jbbskinny/sympy,shikil/sympy,abhiii5459/sympy,ahhda/sympy,Titan-C/sympy,rahuldan/sympy,madan96/sympy,debugger22/sympy,Shaswat27/sympy,iamutkarshtiwari/sympy,kaushik94/sympy,kaichogami/sympy,jerli/sympy,mafiya69/sympy,MechCoder/sympy,moble/sympy,pandeyadarsh/sympy,abhiii5459/sympy,Davidjohnwilson/sympy,emon10005/sympy,wanglongqi/sympy,oliverlee/sympy,ga7g08/sympy,kevalds51/sympy,postvakje/sympy,oliverlee/sympy,postvakje/sympy,farhaanbukhsh/sympy,lindsayad/sympy,jaimahajan1997/sympy,yukoba/sympy,kaushik94/sympy,sampadsaha5/sympy,rahuldan/sympy,VaibhavAgarwalVA/sympy,sahmed95/sympy,chaffra/sympy,yashsharan/sympy,Designist/sympy,shikil/sympy,cswiercz/sympy,hargup/sympy,iamutkarshtiwari/sympy,farhaanbukhsh/sympy,mcdaniel67/sympy,pandeyadarsh/sympy,kaichogami/sympy,chaffra/sympy,MechCoder/sympy,atreyv/sympy,drufat/sympy,Arafatk/sympy
,Curious72/sympy,aktech/sympy,Arafatk/sympy,wanglongqi/sympy,saurabhjn76/sympy,jbbskinny/sympy,Shaswat27/sympy,ChristinaZografou/sympy,aktech/sympy,jerli/sympy,madan96/sympy,VaibhavAgarwalVA/sympy,madan96/sympy,moble/sympy,chaffra/sympy,hargup/sympy,souravsingh/sympy,AkademieOlympia/sympy,souravsingh/sympy,MechCoder/sympy,lindsayad/sympy,sampadsaha5/sympy,jbbskinny/sympy,ChristinaZografou/sympy,skidzo/sympy |
from __future__ import print_function, division
import inspect
def source(object):
"""
Prints the source code of a given object.
"""
print('In file: %s' % inspect.getsourcefile(object))
print(inspect.getsource(object))
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
- lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(
__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError(
"'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot + 1:]
| Remove a redundant line from get_class | ## Code Before:
from __future__ import print_function, division
import inspect
def source(object):
"""
Prints the source code of a given object.
"""
print('In file: %s' % inspect.getsourcefile(object))
print(inspect.getsource(object))
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(
__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError(
"'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot + 1:]
## Instruction:
Remove a redundant line from get_class
## Code After:
from __future__ import print_function, division
import inspect
def source(object):
"""
Prints the source code of a given object.
"""
print('In file: %s' % inspect.getsourcefile(object))
print(inspect.getsource(object))
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(
__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError(
"'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot + 1:]
|
from __future__ import print_function, division
import inspect
def source(object):
"""
Prints the source code of a given object.
"""
print('In file: %s' % inspect.getsourcefile(object))
print(inspect.getsource(object))
def get_class(lookup_view):
"""
Convert a string version of a class name to the object.
For example, get_class('sympy.core.Basic') will return
class Basic located in module sympy.core
"""
if isinstance(lookup_view, str):
- lookup_view = lookup_view
mod_name, func_name = get_mod_func(lookup_view)
if func_name != '':
lookup_view = getattr(
__import__(mod_name, {}, {}, ['*']), func_name)
if not callable(lookup_view):
raise AttributeError(
"'%s.%s' is not a callable." % (mod_name, func_name))
return lookup_view
def get_mod_func(callback):
"""
splits the string path to a class into a string path to the module
and the name of the class. For example:
>>> from sympy.utilities.source import get_mod_func
>>> get_mod_func('sympy.core.basic.Basic')
('sympy.core.basic', 'Basic')
"""
dot = callback.rfind('.')
if dot == -1:
return callback, ''
return callback[:dot], callback[dot + 1:] |
0c2305db6c6792f624cf09a9134aaa090c82d5c1 | tasks.py | tasks.py | from invoke import task
import jschema
@task
def pip(ctx):
ctx.run("rm -rf dist jschema.egg-info")
ctx.run("./setup.py sdist")
ctx.run("twine upload dist/jschema-{}.tar.gz".format(jschema.__version__))
@task
def doc(ctx):
ctx.run("./setup.py build_sphinx")
ctx.run("./setup.py upload_sphinx")
| from invoke import task
from invoke.util import cd
import jschema
@task
def pip(ctx):
ctx.run("rm -rf dist jschema.egg-info")
ctx.run("./setup.py sdist")
ctx.run("twine upload dist/jschema-{}.tar.gz".format(jschema.__version__))
@task
def doc(ctx):
ctx.run("./setup.py build_sphinx")
ctx.run("./setup.py upload_sphinx")
@task
def mezzo(ctx):
ctx.run("mkdir -p build/jschema")
ctx.run("cp -R jschema setup.py build/jschema")
with cd("build"):
ctx.run("tar cf jschema.tar.gz jschema")
ctx.run("mv jschema.tar.gz /opt/mezzo/dependencies")
ctx.run("rm -rf jschema")
| Add mezzo task, for copy project to mezzo | Add mezzo task, for copy project to mezzo
| Python | mit | stepan-perlov/jschema,stepan-perlov/jschema | from invoke import task
+ from invoke.util import cd
import jschema
@task
def pip(ctx):
ctx.run("rm -rf dist jschema.egg-info")
ctx.run("./setup.py sdist")
ctx.run("twine upload dist/jschema-{}.tar.gz".format(jschema.__version__))
@task
def doc(ctx):
ctx.run("./setup.py build_sphinx")
ctx.run("./setup.py upload_sphinx")
+ @task
+ def mezzo(ctx):
+ ctx.run("mkdir -p build/jschema")
+ ctx.run("cp -R jschema setup.py build/jschema")
+ with cd("build"):
+ ctx.run("tar cf jschema.tar.gz jschema")
+ ctx.run("mv jschema.tar.gz /opt/mezzo/dependencies")
+ ctx.run("rm -rf jschema")
+ | Add mezzo task, for copy project to mezzo | ## Code Before:
from invoke import task
import jschema
@task
def pip(ctx):
ctx.run("rm -rf dist jschema.egg-info")
ctx.run("./setup.py sdist")
ctx.run("twine upload dist/jschema-{}.tar.gz".format(jschema.__version__))
@task
def doc(ctx):
ctx.run("./setup.py build_sphinx")
ctx.run("./setup.py upload_sphinx")
## Instruction:
Add mezzo task, for copy project to mezzo
## Code After:
from invoke import task
from invoke.util import cd
import jschema
@task
def pip(ctx):
ctx.run("rm -rf dist jschema.egg-info")
ctx.run("./setup.py sdist")
ctx.run("twine upload dist/jschema-{}.tar.gz".format(jschema.__version__))
@task
def doc(ctx):
ctx.run("./setup.py build_sphinx")
ctx.run("./setup.py upload_sphinx")
@task
def mezzo(ctx):
ctx.run("mkdir -p build/jschema")
ctx.run("cp -R jschema setup.py build/jschema")
with cd("build"):
ctx.run("tar cf jschema.tar.gz jschema")
ctx.run("mv jschema.tar.gz /opt/mezzo/dependencies")
ctx.run("rm -rf jschema")
| from invoke import task
+ from invoke.util import cd
import jschema
@task
def pip(ctx):
ctx.run("rm -rf dist jschema.egg-info")
ctx.run("./setup.py sdist")
ctx.run("twine upload dist/jschema-{}.tar.gz".format(jschema.__version__))
@task
def doc(ctx):
ctx.run("./setup.py build_sphinx")
ctx.run("./setup.py upload_sphinx")
+
+ @task
+ def mezzo(ctx):
+ ctx.run("mkdir -p build/jschema")
+ ctx.run("cp -R jschema setup.py build/jschema")
+ with cd("build"):
+ ctx.run("tar cf jschema.tar.gz jschema")
+ ctx.run("mv jschema.tar.gz /opt/mezzo/dependencies")
+ ctx.run("rm -rf jschema") |
393bc0dc82524802c8d548216d4c51b4394e5394 | tests.py | tests.py |
import unittest
class TestFunctions(unittest.TestCase):
def first(self):
assertEqual('test', 'test')
def second(self):
"""second test"""
assertEqual('2','2')
|
import unittest
class TestFunctions(unittest.TestCase):
def first(self):
self.assertEqual('test', 'test')
def second(self):
"""second test"""
self.assertEqual('2','2')
| Add self to test cases | Add self to test cases
Change-Id: Ib8a8fea97fb7390613a5521103f0f9d31615f262
| Python | apache-2.0 | khoser/mini_games |
import unittest
class TestFunctions(unittest.TestCase):
def first(self):
- assertEqual('test', 'test')
+ self.assertEqual('test', 'test')
def second(self):
"""second test"""
- assertEqual('2','2')
+ self.assertEqual('2','2')
| Add self to test cases | ## Code Before:
import unittest
class TestFunctions(unittest.TestCase):
def first(self):
assertEqual('test', 'test')
def second(self):
"""second test"""
assertEqual('2','2')
## Instruction:
Add self to test cases
## Code After:
import unittest
class TestFunctions(unittest.TestCase):
def first(self):
self.assertEqual('test', 'test')
def second(self):
"""second test"""
self.assertEqual('2','2')
|
import unittest
class TestFunctions(unittest.TestCase):
def first(self):
- assertEqual('test', 'test')
+ self.assertEqual('test', 'test')
? +++++
def second(self):
"""second test"""
- assertEqual('2','2')
+ self.assertEqual('2','2')
? +++++
|
3139ae7dceb3605e70db2cbcde0d732dcb68bc2a | serfnode/handler/config.py | serfnode/handler/config.py | import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('ROLE') or 'no_role'
peer = os.environ.get('PEER') or yml.get('PEER')
ip = (os.environ.get('SERF_IP') or yml.get('SERF_IP') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('SERF_PORT') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('SERVICE_IP')
service_port = os.environ.get('SERVICE_PORT') or yml.get('SERVICE_PORT') or 0
| import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('role') or 'no_role'
peer = os.environ.get('PEER') or yml.get('peer')
ip = (os.environ.get('SERF_IP') or yml.get('serf_ip') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('serf_port') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('service_ip')
service_port = os.environ.get('SERVICE_PORT') or yml.get('service_port') or 0
| Make yaml fields lowercase in serfnode section | Make yaml fields lowercase in serfnode section | Python | mit | waltermoreira/serfnode,waltermoreira/serfnode,waltermoreira/serfnode | import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
- role = os.environ.get('ROLE') or yml.get('ROLE') or 'no_role'
+ role = os.environ.get('ROLE') or yml.get('role') or 'no_role'
- peer = os.environ.get('PEER') or yml.get('PEER')
+ peer = os.environ.get('PEER') or yml.get('peer')
- ip = (os.environ.get('SERF_IP') or yml.get('SERF_IP') or
+ ip = (os.environ.get('SERF_IP') or yml.get('serf_ip') or
get_local_ip('8.8.8.8'))
- bind_port = os.environ.get('SERF_PORT') or yml.get('SERF_PORT') or 7946
+ bind_port = os.environ.get('SERF_PORT') or yml.get('serf_port') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
- service = os.environ.get('SERVICE_IP') or yml.get('SERVICE_IP')
+ service = os.environ.get('SERVICE_IP') or yml.get('service_ip')
- service_port = os.environ.get('SERVICE_PORT') or yml.get('SERVICE_PORT') or 0
+ service_port = os.environ.get('SERVICE_PORT') or yml.get('service_port') or 0
| Make yaml fields lowercase in serfnode section | ## Code Before:
import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('ROLE') or 'no_role'
peer = os.environ.get('PEER') or yml.get('PEER')
ip = (os.environ.get('SERF_IP') or yml.get('SERF_IP') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('SERF_PORT') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('SERVICE_IP')
service_port = os.environ.get('SERVICE_PORT') or yml.get('SERVICE_PORT') or 0
## Instruction:
Make yaml fields lowercase in serfnode section
## Code After:
import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
role = os.environ.get('ROLE') or yml.get('role') or 'no_role'
peer = os.environ.get('PEER') or yml.get('peer')
ip = (os.environ.get('SERF_IP') or yml.get('serf_ip') or
get_local_ip('8.8.8.8'))
bind_port = os.environ.get('SERF_PORT') or yml.get('serf_port') or 7946
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
service = os.environ.get('SERVICE_IP') or yml.get('service_ip')
service_port = os.environ.get('SERVICE_PORT') or yml.get('service_port') or 0
| import os
import uuid
from mischief.actors.pipe import get_local_ip
import yaml
def read_serfnode_yml():
with open('/serfnode.yml') as input:
conf = yaml.load(input) or {}
return conf.get('serfnode') or {}
yml = read_serfnode_yml()
- role = os.environ.get('ROLE') or yml.get('ROLE') or 'no_role'
? ^^^^
+ role = os.environ.get('ROLE') or yml.get('role') or 'no_role'
? ^^^^
- peer = os.environ.get('PEER') or yml.get('PEER')
? ^^^^
+ peer = os.environ.get('PEER') or yml.get('peer')
? ^^^^
- ip = (os.environ.get('SERF_IP') or yml.get('SERF_IP') or
? ^^^^ ^^
+ ip = (os.environ.get('SERF_IP') or yml.get('serf_ip') or
? ^^^^ ^^
get_local_ip('8.8.8.8'))
- bind_port = os.environ.get('SERF_PORT') or yml.get('SERF_PORT') or 7946
? ^^^^ ^^^^
+ bind_port = os.environ.get('SERF_PORT') or yml.get('serf_port') or 7946
? ^^^^ ^^^^
node = os.environ.get('NODE_NAME') or uuid.uuid4().hex
rpc_port = os.environ.get('RPC_PORT') or 7373
- service = os.environ.get('SERVICE_IP') or yml.get('SERVICE_IP')
? ^^^^^^^ ^^
+ service = os.environ.get('SERVICE_IP') or yml.get('service_ip')
? ^^^^^^^ ^^
- service_port = os.environ.get('SERVICE_PORT') or yml.get('SERVICE_PORT') or 0
? ^^^^^^^ ^^^^
+ service_port = os.environ.get('SERVICE_PORT') or yml.get('service_port') or 0
? ^^^^^^^ ^^^^
|
955cb0d27ab52348b753c3edea731223e2631f50 | Climate_Police/tests/test_plot_pollutants.py | Climate_Police/tests/test_plot_pollutants.py |
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
result=plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertTrue(result, expected_explanation)
if __name__ == '__main__':
unittest.main() |
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
fig, flag = plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertEqual(flag, expected_explanation)
if __name__ == '__main__':
unittest.main()
| Add flag to plot_pollutant unit test | Add flag to plot_pollutant unit test
also change assertTrue to assertEqual | Python | mit | abhisheksugam/Climate_Police |
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
- result=plot_pollutants(df, year, state)
+ fig, flag = plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
- self.assertTrue(result, expected_explanation)
+ self.assertEqual(flag, expected_explanation)
if __name__ == '__main__':
unittest.main()
+ | Add flag to plot_pollutant unit test | ## Code Before:
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
result=plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertTrue(result, expected_explanation)
if __name__ == '__main__':
unittest.main()
## Instruction:
Add flag to plot_pollutant unit test
## Code After:
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
fig, flag = plot_pollutants(df, year, state)
expected_explanation="Levels of pollutants plotted."
self.assertEqual(flag, expected_explanation)
if __name__ == '__main__':
unittest.main()
|
import unittest
from plot_pollutants import plot_pollutants
import pandas as pd
df = pd.read_csv("../data/pollution_us_2000_2016.csv")
year="2010"
state="Arizona"
class TestPlot(unittest.TestCase):
def testPlotPollutants(self):
- result=plot_pollutants(df, year, state)
? ^^^^ ^
+ fig, flag = plot_pollutants(df, year, state)
? ^^^^^^ ^^^ +
expected_explanation="Levels of pollutants plotted."
- self.assertTrue(result, expected_explanation)
? ^^ ^ ^^^^ ^
+ self.assertEqual(flag, expected_explanation)
? ^^ ^^ ^ ^^
if __name__ == '__main__':
unittest.main() |
893540d492b731b93a31f3c5158c99f4db9fc3e4 | tasks.py | tasks.py | import urlparse
import requests
def purge_fastly_tags(domain, api_key, service_id, tags, max_tries=25):
session = requests.session()
headers = {"X-Fastly-Key": api_key, "Accept": "application/json"}
all_tags = set(tags)
purges = {}
count = 0
while all_tags and not count > max_tries:
try:
for tag in set(all_tags):
# Build the URL
url_path = "/service/%s/purge/%s" % (service_id, tag)
url = urlparse.urljoin(domain, url_path)
# Issue the Purge
resp = session.post(url, headers=headers)
resp.raise_for_status()
# Store the Purge ID so we can track it later
purges[tag] = resp.json()["id"]
# for tag, purge_id in purges.iteritems():
# # Ensure that the purge completed successfully
# url = urlparse.urljoin(domain, "/purge")
# status = session.get(url, params={"id": purge_id})
# status.raise_for_status()
# # If the purge completely successfully remove the tag from
# # our list.
# if status.json().get("results", {}).get("complete", None):
# all_tags.remove(tag)
except Exception:
if count > max_tries:
raise
| import urlparse
import requests
def purge_fastly_tags(domain, api_key, service_id, tags, max_tries=25):
session = requests.session()
headers = {"X-Fastly-Key": api_key, "Accept": "application/json"}
all_tags = set(tags)
purges = {}
count = 0
while all_tags and not count > max_tries:
count += 1
try:
for tag in set(all_tags):
# Build the URL
url_path = "/service/%s/purge/%s" % (service_id, tag)
url = urlparse.urljoin(domain, url_path)
# Issue the Purge
resp = session.post(url, headers=headers)
resp.raise_for_status()
# Store the Purge ID so we can track it later
purges[tag] = resp.json()["id"]
# for tag, purge_id in purges.iteritems():
# # Ensure that the purge completed successfully
# url = urlparse.urljoin(domain, "/purge")
# status = session.get(url, params={"id": purge_id})
# status.raise_for_status()
# # If the purge completely successfully remove the tag from
# # our list.
# if status.json().get("results", {}).get("complete", None):
# all_tags.remove(tag)
except Exception:
if count > max_tries:
raise
| Increase the count so we don't spin forever | Increase the count so we don't spin forever
| Python | bsd-3-clause | pydotorg/pypi,pydotorg/pypi,pydotorg/pypi,pydotorg/pypi | import urlparse
import requests
def purge_fastly_tags(domain, api_key, service_id, tags, max_tries=25):
session = requests.session()
headers = {"X-Fastly-Key": api_key, "Accept": "application/json"}
all_tags = set(tags)
purges = {}
count = 0
while all_tags and not count > max_tries:
+ count += 1
+
try:
for tag in set(all_tags):
# Build the URL
url_path = "/service/%s/purge/%s" % (service_id, tag)
url = urlparse.urljoin(domain, url_path)
# Issue the Purge
resp = session.post(url, headers=headers)
resp.raise_for_status()
# Store the Purge ID so we can track it later
purges[tag] = resp.json()["id"]
# for tag, purge_id in purges.iteritems():
# # Ensure that the purge completed successfully
# url = urlparse.urljoin(domain, "/purge")
# status = session.get(url, params={"id": purge_id})
# status.raise_for_status()
# # If the purge completely successfully remove the tag from
# # our list.
# if status.json().get("results", {}).get("complete", None):
# all_tags.remove(tag)
except Exception:
if count > max_tries:
raise
| Increase the count so we don't spin forever | ## Code Before:
import urlparse
import requests
def purge_fastly_tags(domain, api_key, service_id, tags, max_tries=25):
session = requests.session()
headers = {"X-Fastly-Key": api_key, "Accept": "application/json"}
all_tags = set(tags)
purges = {}
count = 0
while all_tags and not count > max_tries:
try:
for tag in set(all_tags):
# Build the URL
url_path = "/service/%s/purge/%s" % (service_id, tag)
url = urlparse.urljoin(domain, url_path)
# Issue the Purge
resp = session.post(url, headers=headers)
resp.raise_for_status()
# Store the Purge ID so we can track it later
purges[tag] = resp.json()["id"]
# for tag, purge_id in purges.iteritems():
# # Ensure that the purge completed successfully
# url = urlparse.urljoin(domain, "/purge")
# status = session.get(url, params={"id": purge_id})
# status.raise_for_status()
# # If the purge completely successfully remove the tag from
# # our list.
# if status.json().get("results", {}).get("complete", None):
# all_tags.remove(tag)
except Exception:
if count > max_tries:
raise
## Instruction:
Increase the count so we don't spin forever
## Code After:
import urlparse
import requests
def purge_fastly_tags(domain, api_key, service_id, tags, max_tries=25):
session = requests.session()
headers = {"X-Fastly-Key": api_key, "Accept": "application/json"}
all_tags = set(tags)
purges = {}
count = 0
while all_tags and not count > max_tries:
count += 1
try:
for tag in set(all_tags):
# Build the URL
url_path = "/service/%s/purge/%s" % (service_id, tag)
url = urlparse.urljoin(domain, url_path)
# Issue the Purge
resp = session.post(url, headers=headers)
resp.raise_for_status()
# Store the Purge ID so we can track it later
purges[tag] = resp.json()["id"]
# for tag, purge_id in purges.iteritems():
# # Ensure that the purge completed successfully
# url = urlparse.urljoin(domain, "/purge")
# status = session.get(url, params={"id": purge_id})
# status.raise_for_status()
# # If the purge completely successfully remove the tag from
# # our list.
# if status.json().get("results", {}).get("complete", None):
# all_tags.remove(tag)
except Exception:
if count > max_tries:
raise
| import urlparse
import requests
def purge_fastly_tags(domain, api_key, service_id, tags, max_tries=25):
session = requests.session()
headers = {"X-Fastly-Key": api_key, "Accept": "application/json"}
all_tags = set(tags)
purges = {}
count = 0
while all_tags and not count > max_tries:
+ count += 1
+
try:
for tag in set(all_tags):
# Build the URL
url_path = "/service/%s/purge/%s" % (service_id, tag)
url = urlparse.urljoin(domain, url_path)
# Issue the Purge
resp = session.post(url, headers=headers)
resp.raise_for_status()
# Store the Purge ID so we can track it later
purges[tag] = resp.json()["id"]
# for tag, purge_id in purges.iteritems():
# # Ensure that the purge completed successfully
# url = urlparse.urljoin(domain, "/purge")
# status = session.get(url, params={"id": purge_id})
# status.raise_for_status()
# # If the purge completely successfully remove the tag from
# # our list.
# if status.json().get("results", {}).get("complete", None):
# all_tags.remove(tag)
except Exception:
if count > max_tries:
raise |
8386d7372f9ff8bfad651efe43504746aff19b73 | app/models/rooms/rooms.py | app/models/rooms/rooms.py | from models.people.people import Staff, Fellow
from models.rooms.rooms import Office, LivingSpace
import random
class Dojo(object):
def __init__(self):
self.offices = []
self.livingrooms = []
self.staff = []
self.fellows = []
self.all_rooms = []
self.all_people = []
def get_room(self, rooms):
"""A function to generate a list of random rooms with space.
:param rooms:
:return: room_name
"""
# a room is only available if it's capacity is not exceeded
available_rooms = [room for room in rooms if len(room.occupants) < room.room_capacity]
# return False if all rooms are full
if len(available_rooms) < 1:
return False
# choose a room fro the list of available rooms.
chosen_room = random.choice(available_rooms)
return chosen_room.room_name
def create_room(self, room_name, room_type):
if room_type is 'office':
if room_name not in [room.room_name for room in self.offices]:
room = Office(room_name=room_name, room_type=room_type)
self.offices.append(room)
self.all_rooms.append(room)
return 'An office called' + ' ' + room_name + ' ' + 'has been successfully created'
return 'An office with that name already exists'
if room_type is 'livingspace':
if room_name not in [room.room_name for room in self.livingrooms]:
room = LivingSpace(room_name=room_name, room_type=room_type)
# add object to list( has both room_name and room_type)
self.livingrooms.append(room)
self.all_rooms.append(room)
return 'A room called ' + room_name + ' has been successfully created!'
return 'A living room with that name already exists'
| import os
import sys
from os import path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
class Room(object):
"""Models the kind of rooms available at Andela,
It forms the base class Room from which OfficeSpace and LivingRoom inherit"""
def __init__(self, room_name, room_type, room_capacity):
"""Initializes the base class Room
:param room_name: A string representing the name of the room
:param room_type: A string representing the type of room, whether office or residential
:param room_capacity: An integer representing the amount of space per room.
"""
self.room_name = room_name
self.room_type = room_type
self.room_capacity = room_capacity
self.occupants = []
| Implement the Room base class | Implement the Room base class
| Python | mit | Alweezy/alvin-mutisya-dojo-project | - from models.people.people import Staff, Fellow
- from models.rooms.rooms import Office, LivingSpace
- import random
+ import os
+ import sys
+ from os import path
+ sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
- class Dojo(object):
+ class Room(object):
+ """Models the kind of rooms available at Andela,
+ It forms the base class Room from which OfficeSpace and LivingRoom inherit"""
+ def __init__(self, room_name, room_type, room_capacity):
+ """Initializes the base class Room
+ :param room_name: A string representing the name of the room
+ :param room_type: A string representing the type of room, whether office or residential
+ :param room_capacity: An integer representing the amount of space per room.
- def __init__(self):
- self.offices = []
- self.livingrooms = []
- self.staff = []
- self.fellows = []
- self.all_rooms = []
- self.all_people = []
-
- def get_room(self, rooms):
- """A function to generate a list of random rooms with space.
- :param rooms:
- :return: room_name
"""
+ self.room_name = room_name
+ self.room_type = room_type
+ self.room_capacity = room_capacity
+ self.occupants = []
- # a room is only available if it's capacity is not exceeded
- available_rooms = [room for room in rooms if len(room.occupants) < room.room_capacity]
- # return False if all rooms are full
- if len(available_rooms) < 1:
- return False
- # choose a room fro the list of available rooms.
- chosen_room = random.choice(available_rooms)
- return chosen_room.room_name
-
- def create_room(self, room_name, room_type):
- if room_type is 'office':
- if room_name not in [room.room_name for room in self.offices]:
- room = Office(room_name=room_name, room_type=room_type)
- self.offices.append(room)
- self.all_rooms.append(room)
- return 'An office called' + ' ' + room_name + ' ' + 'has been successfully created'
- return 'An office with that name already exists'
- if room_type is 'livingspace':
- if room_name not in [room.room_name for room in self.livingrooms]:
- room = LivingSpace(room_name=room_name, room_type=room_type)
- # add object to list( has both room_name and room_type)
- self.livingrooms.append(room)
- self.all_rooms.append(room)
- return 'A room called ' + room_name + ' has been successfully created!'
- return 'A living room with that name already exists'
| Implement the Room base class | ## Code Before:
from models.people.people import Staff, Fellow
from models.rooms.rooms import Office, LivingSpace
import random
class Dojo(object):
def __init__(self):
self.offices = []
self.livingrooms = []
self.staff = []
self.fellows = []
self.all_rooms = []
self.all_people = []
def get_room(self, rooms):
"""A function to generate a list of random rooms with space.
:param rooms:
:return: room_name
"""
# a room is only available if it's capacity is not exceeded
available_rooms = [room for room in rooms if len(room.occupants) < room.room_capacity]
# return False if all rooms are full
if len(available_rooms) < 1:
return False
# choose a room fro the list of available rooms.
chosen_room = random.choice(available_rooms)
return chosen_room.room_name
def create_room(self, room_name, room_type):
if room_type is 'office':
if room_name not in [room.room_name for room in self.offices]:
room = Office(room_name=room_name, room_type=room_type)
self.offices.append(room)
self.all_rooms.append(room)
return 'An office called' + ' ' + room_name + ' ' + 'has been successfully created'
return 'An office with that name already exists'
if room_type is 'livingspace':
if room_name not in [room.room_name for room in self.livingrooms]:
room = LivingSpace(room_name=room_name, room_type=room_type)
# add object to list( has both room_name and room_type)
self.livingrooms.append(room)
self.all_rooms.append(room)
return 'A room called ' + room_name + ' has been successfully created!'
return 'A living room with that name already exists'
## Instruction:
Implement the Room base class
## Code After:
import os
import sys
from os import path
sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
class Room(object):
"""Models the kind of rooms available at Andela,
It forms the base class Room from which OfficeSpace and LivingRoom inherit"""
def __init__(self, room_name, room_type, room_capacity):
"""Initializes the base class Room
:param room_name: A string representing the name of the room
:param room_type: A string representing the type of room, whether office or residential
:param room_capacity: An integer representing the amount of space per room.
"""
self.room_name = room_name
self.room_type = room_type
self.room_capacity = room_capacity
self.occupants = []
| - from models.people.people import Staff, Fellow
- from models.rooms.rooms import Office, LivingSpace
- import random
+ import os
+ import sys
+ from os import path
+ sys.path.append(path.dirname(path.dirname(path.abspath(__file__))))
- class Dojo(object):
? ^ -
+ class Room(object):
? ^ +
- def __init__(self):
+ """Models the kind of rooms available at Andela,
+ It forms the base class Room from which OfficeSpace and LivingRoom inherit"""
+ def __init__(self, room_name, room_type, room_capacity):
+ """Initializes the base class Room
+ :param room_name: A string representing the name of the room
+ :param room_type: A string representing the type of room, whether office or residential
+ :param room_capacity: An integer representing the amount of space per room.
+ """
+ self.room_name = room_name
+ self.room_type = room_type
+ self.room_capacity = room_capacity
- self.offices = []
? --- ^
+ self.occupants = []
? ^^^^^^
- self.livingrooms = []
- self.staff = []
- self.fellows = []
- self.all_rooms = []
- self.all_people = []
- def get_room(self, rooms):
- """A function to generate a list of random rooms with space.
- :param rooms:
- :return: room_name
- """
- # a room is only available if it's capacity is not exceeded
- available_rooms = [room for room in rooms if len(room.occupants) < room.room_capacity]
- # return False if all rooms are full
- if len(available_rooms) < 1:
- return False
- # choose a room fro the list of available rooms.
- chosen_room = random.choice(available_rooms)
- return chosen_room.room_name
-
- def create_room(self, room_name, room_type):
- if room_type is 'office':
- if room_name not in [room.room_name for room in self.offices]:
- room = Office(room_name=room_name, room_type=room_type)
- self.offices.append(room)
- self.all_rooms.append(room)
- return 'An office called' + ' ' + room_name + ' ' + 'has been successfully created'
- return 'An office with that name already exists'
- if room_type is 'livingspace':
- if room_name not in [room.room_name for room in self.livingrooms]:
- room = LivingSpace(room_name=room_name, room_type=room_type)
- # add object to list( has both room_name and room_type)
- self.livingrooms.append(room)
- self.all_rooms.append(room)
- return 'A room called ' + room_name + ' has been successfully created!'
- return 'A living room with that name already exists'
- |
cbb59747af48ae60473f27b6de976a08a741ab54 | tests/test_test_utils.py | tests/test_test_utils.py | from itertools import product
from unittest import TestCase
from zipline.utils.test_utils import parameter_space
class TestParameterSpace(TestCase):
x_args = [1, 2]
y_args = [3, 4]
@classmethod
def setUpClass(cls):
cls.xy_invocations = []
@classmethod
def tearDownClass(cls):
# This is the only actual test here.
assert cls.xy_invocations == list(product(cls.x_args, cls.y_args))
@parameter_space(x=x_args, y=y_args)
def test_xy(self, x, y):
self.xy_invocations.append((x, y))
def test_nothing(self):
# Ensure that there's at least one "real" test in the class, or else
# our {setUp,tearDown}Class won't be called if, for example,
# `parameter_space` returns None.
pass
| from itertools import product
from unittest import TestCase
from zipline.utils.test_utils import parameter_space
class TestParameterSpace(TestCase):
x_args = [1, 2]
y_args = [3, 4]
@classmethod
def setUpClass(cls):
cls.xy_invocations = []
cls.yx_invocations = []
@classmethod
def tearDownClass(cls):
# This is the only actual test here.
assert cls.xy_invocations == list(product(cls.x_args, cls.y_args))
assert cls.yx_invocations == list(product(cls.y_args, cls.x_args))
@parameter_space(x=x_args, y=y_args)
def test_xy(self, x, y):
self.xy_invocations.append((x, y))
@parameter_space(x=x_args, y=y_args)
def test_yx(self, y, x):
# Ensure that product is called with args in the order that they appear
# in the function's parameter list.
self.yx_invocations.append((y, x))
def test_nothing(self):
# Ensure that there's at least one "real" test in the class, or else
# our {setUp,tearDown}Class won't be called if, for example,
# `parameter_space` returns None.
pass
| Add test for parameter_space ordering. | TEST: Add test for parameter_space ordering.
| Python | apache-2.0 | magne-max/zipline-ja,florentchandelier/zipline,Scapogo/zipline,florentchandelier/zipline,bartosh/zipline,wilsonkichoi/zipline,bartosh/zipline,alphaBenj/zipline,wilsonkichoi/zipline,humdings/zipline,humdings/zipline,umuzungu/zipline,alphaBenj/zipline,enigmampc/catalyst,enigmampc/catalyst,magne-max/zipline-ja,quantopian/zipline,Scapogo/zipline,umuzungu/zipline,quantopian/zipline | from itertools import product
from unittest import TestCase
from zipline.utils.test_utils import parameter_space
class TestParameterSpace(TestCase):
x_args = [1, 2]
y_args = [3, 4]
@classmethod
def setUpClass(cls):
cls.xy_invocations = []
+ cls.yx_invocations = []
@classmethod
def tearDownClass(cls):
# This is the only actual test here.
assert cls.xy_invocations == list(product(cls.x_args, cls.y_args))
+ assert cls.yx_invocations == list(product(cls.y_args, cls.x_args))
@parameter_space(x=x_args, y=y_args)
def test_xy(self, x, y):
self.xy_invocations.append((x, y))
+
+ @parameter_space(x=x_args, y=y_args)
+ def test_yx(self, y, x):
+ # Ensure that product is called with args in the order that they appear
+ # in the function's parameter list.
+ self.yx_invocations.append((y, x))
def test_nothing(self):
# Ensure that there's at least one "real" test in the class, or else
# our {setUp,tearDown}Class won't be called if, for example,
# `parameter_space` returns None.
pass
| Add test for parameter_space ordering. | ## Code Before:
from itertools import product
from unittest import TestCase
from zipline.utils.test_utils import parameter_space
class TestParameterSpace(TestCase):
x_args = [1, 2]
y_args = [3, 4]
@classmethod
def setUpClass(cls):
cls.xy_invocations = []
@classmethod
def tearDownClass(cls):
# This is the only actual test here.
assert cls.xy_invocations == list(product(cls.x_args, cls.y_args))
@parameter_space(x=x_args, y=y_args)
def test_xy(self, x, y):
self.xy_invocations.append((x, y))
def test_nothing(self):
# Ensure that there's at least one "real" test in the class, or else
# our {setUp,tearDown}Class won't be called if, for example,
# `parameter_space` returns None.
pass
## Instruction:
Add test for parameter_space ordering.
## Code After:
from itertools import product
from unittest import TestCase
from zipline.utils.test_utils import parameter_space
class TestParameterSpace(TestCase):
x_args = [1, 2]
y_args = [3, 4]
@classmethod
def setUpClass(cls):
cls.xy_invocations = []
cls.yx_invocations = []
@classmethod
def tearDownClass(cls):
# This is the only actual test here.
assert cls.xy_invocations == list(product(cls.x_args, cls.y_args))
assert cls.yx_invocations == list(product(cls.y_args, cls.x_args))
@parameter_space(x=x_args, y=y_args)
def test_xy(self, x, y):
self.xy_invocations.append((x, y))
@parameter_space(x=x_args, y=y_args)
def test_yx(self, y, x):
# Ensure that product is called with args in the order that they appear
# in the function's parameter list.
self.yx_invocations.append((y, x))
def test_nothing(self):
# Ensure that there's at least one "real" test in the class, or else
# our {setUp,tearDown}Class won't be called if, for example,
# `parameter_space` returns None.
pass
| from itertools import product
from unittest import TestCase
from zipline.utils.test_utils import parameter_space
class TestParameterSpace(TestCase):
x_args = [1, 2]
y_args = [3, 4]
@classmethod
def setUpClass(cls):
cls.xy_invocations = []
+ cls.yx_invocations = []
@classmethod
def tearDownClass(cls):
# This is the only actual test here.
assert cls.xy_invocations == list(product(cls.x_args, cls.y_args))
+ assert cls.yx_invocations == list(product(cls.y_args, cls.x_args))
@parameter_space(x=x_args, y=y_args)
def test_xy(self, x, y):
self.xy_invocations.append((x, y))
+
+ @parameter_space(x=x_args, y=y_args)
+ def test_yx(self, y, x):
+ # Ensure that product is called with args in the order that they appear
+ # in the function's parameter list.
+ self.yx_invocations.append((y, x))
def test_nothing(self):
# Ensure that there's at least one "real" test in the class, or else
# our {setUp,tearDown}Class won't be called if, for example,
# `parameter_space` returns None.
pass |
b51e4e7af7065a487f5ee91697fda8848c209faf | libpasteurize/fixes/fix_newstyle.py | libpasteurize/fixes/fix_newstyle.py |
from lib2to3 import fixer_base
from lib2to3.fixer_util import LParen, RParen, Name
from libfuturize.fixer_util import touch_import_top
def insert_object(node, idx):
node.insert_child(idx, RParen())
node.insert_child(idx, Name(u"object"))
node.insert_child(idx, LParen())
class FixNewstyle(fixer_base.BaseFix):
PATTERN = u"classdef< 'class' NAME ['(' ')'] colon=':' any >"
def transform(self, node, results):
colon = results[u"colon"]
idx = node.children.index(colon)
insert_object(node, idx)
touch_import_top(u'future.builtins', 'object', node)
|
from lib2to3 import fixer_base
from lib2to3.fixer_util import LParen, RParen, Name
from libfuturize.fixer_util import touch_import_top
def insert_object(node, idx):
node.insert_child(idx, RParen())
node.insert_child(idx, Name(u"object"))
node.insert_child(idx, LParen())
class FixNewstyle(fixer_base.BaseFix):
# Match:
# class Blah:
# and:
# class Blah():
PATTERN = u"classdef< 'class' NAME ['(' ')'] colon=':' any >"
def transform(self, node, results):
colon = results[u"colon"]
idx = node.children.index(colon)
if (node.children[idx-2].value == '(' and
node.children[idx-1].value == ')'):
del node.children[idx-2:idx]
idx -= 2
insert_object(node, idx)
touch_import_top(u'future.builtins', 'object', node)
| Generalize fixer for old->new-style classes to accept "class C():" | Generalize fixer for old->new-style classes to accept "class C():"
| Python | mit | michaelpacer/python-future,PythonCharmers/python-future,QuLogic/python-future,krischer/python-future,michaelpacer/python-future,PythonCharmers/python-future,QuLogic/python-future,krischer/python-future |
from lib2to3 import fixer_base
from lib2to3.fixer_util import LParen, RParen, Name
from libfuturize.fixer_util import touch_import_top
def insert_object(node, idx):
node.insert_child(idx, RParen())
node.insert_child(idx, Name(u"object"))
node.insert_child(idx, LParen())
class FixNewstyle(fixer_base.BaseFix):
+ # Match:
+ # class Blah:
+ # and:
+ # class Blah():
+
PATTERN = u"classdef< 'class' NAME ['(' ')'] colon=':' any >"
def transform(self, node, results):
colon = results[u"colon"]
idx = node.children.index(colon)
+ if (node.children[idx-2].value == '(' and
+ node.children[idx-1].value == ')'):
+ del node.children[idx-2:idx]
+ idx -= 2
insert_object(node, idx)
touch_import_top(u'future.builtins', 'object', node)
| Generalize fixer for old->new-style classes to accept "class C():" | ## Code Before:
from lib2to3 import fixer_base
from lib2to3.fixer_util import LParen, RParen, Name
from libfuturize.fixer_util import touch_import_top
def insert_object(node, idx):
node.insert_child(idx, RParen())
node.insert_child(idx, Name(u"object"))
node.insert_child(idx, LParen())
class FixNewstyle(fixer_base.BaseFix):
PATTERN = u"classdef< 'class' NAME ['(' ')'] colon=':' any >"
def transform(self, node, results):
colon = results[u"colon"]
idx = node.children.index(colon)
insert_object(node, idx)
touch_import_top(u'future.builtins', 'object', node)
## Instruction:
Generalize fixer for old->new-style classes to accept "class C():"
## Code After:
from lib2to3 import fixer_base
from lib2to3.fixer_util import LParen, RParen, Name
from libfuturize.fixer_util import touch_import_top
def insert_object(node, idx):
node.insert_child(idx, RParen())
node.insert_child(idx, Name(u"object"))
node.insert_child(idx, LParen())
class FixNewstyle(fixer_base.BaseFix):
# Match:
# class Blah:
# and:
# class Blah():
PATTERN = u"classdef< 'class' NAME ['(' ')'] colon=':' any >"
def transform(self, node, results):
colon = results[u"colon"]
idx = node.children.index(colon)
if (node.children[idx-2].value == '(' and
node.children[idx-1].value == ')'):
del node.children[idx-2:idx]
idx -= 2
insert_object(node, idx)
touch_import_top(u'future.builtins', 'object', node)
|
from lib2to3 import fixer_base
from lib2to3.fixer_util import LParen, RParen, Name
from libfuturize.fixer_util import touch_import_top
def insert_object(node, idx):
node.insert_child(idx, RParen())
node.insert_child(idx, Name(u"object"))
node.insert_child(idx, LParen())
class FixNewstyle(fixer_base.BaseFix):
+ # Match:
+ # class Blah:
+ # and:
+ # class Blah():
+
PATTERN = u"classdef< 'class' NAME ['(' ')'] colon=':' any >"
def transform(self, node, results):
colon = results[u"colon"]
idx = node.children.index(colon)
+ if (node.children[idx-2].value == '(' and
+ node.children[idx-1].value == ')'):
+ del node.children[idx-2:idx]
+ idx -= 2
insert_object(node, idx)
touch_import_top(u'future.builtins', 'object', node) |
b970f230864b40eaddb8e5faa76538c9f8e5c59c | txircd/modules/rfc/cmd_userhost.py | txircd/modules/rfc/cmd_userhost.py | from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, ""):
output += "*"
output += "="
if user.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand() | from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
output += "*"
output += "="
if user.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand() | Add affected users to userhasoperpermission call in USERHOST | Add affected users to userhasoperpermission call in USERHOST
| Python | bsd-3-clause | Heufneutje/txircd,ElementalAlchemist/txircd | from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
- if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, ""):
+ if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
output += "*"
output += "="
if user.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand() | Add affected users to userhasoperpermission call in USERHOST | ## Code Before:
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, ""):
output += "*"
output += "="
if user.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand()
## Instruction:
Add affected users to userhasoperpermission call in USERHOST
## Code After:
from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
output += "*"
output += "="
if user.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand() | from twisted.plugin import IPlugin
from twisted.words.protocols import irc
from txircd.module_interface import Command, ICommand, IModuleData, ModuleData
from zope.interface import implements
class UserhostCommand(ModuleData, Command):
implements(IPlugin, IModuleData, ICommand)
name = "UserhostCommand"
core = True
def userCommands(self):
return [ ("USERHOST", 1, self) ]
def parseParams(self, user, params, prefix, tags):
if not params:
user.sendSingleError("UserhostParams", irc.ERR_NEEDMOREPARAMS, "USERHOST", "Not enough parameters")
return None
return {
"nicks": params[:5]
}
def execute(self, user, data):
userHosts = []
for nick in data["nicks"]:
if nick not in self.ircd.userNicks:
continue
targetUser = self.ircd.users[self.ircd.userNicks[nick]]
output = targetUser.nick
- if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, ""):
+ if self.ircd.runActionUntilValue("userhasoperpermission", targetUser, "", users=[targetUser]):
? ++++++++++++++++++++
output += "*"
output += "="
if user.metadataKeyExists("away"):
output += "-"
else:
output += "+"
output += "{}@{}".format(targetUser.ident, targetUser.host())
userHosts.append(output)
user.sendMessage(irc.RPL_USERHOST, " ".join(userHosts))
return True
userhostCmd = UserhostCommand() |
3d3d6ef8393339f7246e6c6a9693d883ca3246f2 | marconi/__init__.py | marconi/__init__.py | try:
if __MARCONI_SETUP__: # NOQA
import sys as _sys
_sys.stderr.write('Running from marconi source directory.\n')
del _sys
except NameError:
import marconi.queues.bootstrap
Bootstrap = marconi.queues.bootstrap.Bootstrap
import marconi.version
__version__ = marconi.version.version_info.cached_version_string()
|
import marconi.queues.bootstrap
import marconi.version
Bootstrap = marconi.queues.bootstrap.Bootstrap
__version__ = marconi.version.version_info.cached_version_string()
| Remove the __MARCONI_SETUP_ global from init | Remove the __MARCONI_SETUP_ global from init
This was used to know when Marconi was being loaded and avoid
registering configuration options and doing other things. This is not
necessary anymore.
Change-Id: Icf43302581eefb563b10ddec5831eeec0d068872
Partially-Implements: py3k-support
| Python | apache-2.0 | openstack/zaqar,openstack/zaqar,rackerlabs/marconi,openstack/zaqar,openstack/zaqar | - try:
- if __MARCONI_SETUP__: # NOQA
- import sys as _sys
- _sys.stderr.write('Running from marconi source directory.\n')
- del _sys
- except NameError:
- import marconi.queues.bootstrap
- Bootstrap = marconi.queues.bootstrap.Bootstrap
+ import marconi.queues.bootstrap
import marconi.version
+
+ Bootstrap = marconi.queues.bootstrap.Bootstrap
+
__version__ = marconi.version.version_info.cached_version_string()
| Remove the __MARCONI_SETUP_ global from init | ## Code Before:
try:
if __MARCONI_SETUP__: # NOQA
import sys as _sys
_sys.stderr.write('Running from marconi source directory.\n')
del _sys
except NameError:
import marconi.queues.bootstrap
Bootstrap = marconi.queues.bootstrap.Bootstrap
import marconi.version
__version__ = marconi.version.version_info.cached_version_string()
## Instruction:
Remove the __MARCONI_SETUP_ global from init
## Code After:
import marconi.queues.bootstrap
import marconi.version
Bootstrap = marconi.queues.bootstrap.Bootstrap
__version__ = marconi.version.version_info.cached_version_string()
| - try:
- if __MARCONI_SETUP__: # NOQA
- import sys as _sys
- _sys.stderr.write('Running from marconi source directory.\n')
- del _sys
- except NameError:
- import marconi.queues.bootstrap
- Bootstrap = marconi.queues.bootstrap.Bootstrap
+ import marconi.queues.bootstrap
import marconi.version
+ Bootstrap = marconi.queues.bootstrap.Bootstrap
+
+
__version__ = marconi.version.version_info.cached_version_string() |
5e671fe98093cf506ce1cb134c335cabd934ad84 | aioredis/locks.py | aioredis/locks.py | from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
| from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
break
| Fix critical bug with patched Lock | Fix critical bug with patched Lock
| Python | mit | aio-libs/aioredis,aio-libs/aioredis,ymap/aioredis | from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
+ break
| Fix critical bug with patched Lock | ## Code Before:
from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
## Instruction:
Fix critical bug with patched Lock
## Code After:
from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
break
| from asyncio.locks import Lock as _Lock
from asyncio import coroutine
from asyncio import futures
from .util import create_future
# Fixes an issue with all Python versions that leaves pending waiters
# without being awakened when the first waiter is canceled.
# Code adapted from the PR https://github.com/python/cpython/pull/1031
# Waiting once it is merged to make a proper condition to relay on
# the stdlib implementation or this one patched
class Lock(_Lock):
@coroutine
def acquire(self):
"""Acquire a lock.
This method blocks until the lock is unlocked, then sets it to
locked and returns True.
"""
if not self._locked and all(w.cancelled() for w in self._waiters):
self._locked = True
return True
fut = create_future(self._loop)
self._waiters.append(fut)
try:
yield from fut
self._locked = True
return True
except futures.CancelledError:
if not self._locked: # pragma: no cover
self._wake_up_first()
raise
finally:
self._waiters.remove(fut)
def _wake_up_first(self):
"""Wake up the first waiter who isn't cancelled."""
for fut in self._waiters:
if not fut.done():
fut.set_result(True)
+ break |
351c05b6e474b266a7594a775cb48cd7cfe0b833 | shapely/linref.py | shapely/linref.py |
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
try:
assert ob.geom_type in ['LineString', 'MultiLineString']
except AssertionError:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
|
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
if not ob.geom_type in ['LinearRing', 'LineString', 'MultiLineString']:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
| Allow linear referencing on rings. | Allow linear referencing on rings.
Closes #286.
Eliminating the assert is good for optimization reasons, too.
| Python | bsd-3-clause | abali96/Shapely,mouadino/Shapely,mindw/shapely,abali96/Shapely,jdmcbr/Shapely,jdmcbr/Shapely,mindw/shapely,mouadino/Shapely |
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
- try:
- assert ob.geom_type in ['LineString', 'MultiLineString']
+ if not ob.geom_type in ['LinearRing', 'LineString', 'MultiLineString']:
- except AssertionError:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
| Allow linear referencing on rings. | ## Code Before:
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
try:
assert ob.geom_type in ['LineString', 'MultiLineString']
except AssertionError:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
## Instruction:
Allow linear referencing on rings.
## Code After:
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
if not ob.geom_type in ['LinearRing', 'LineString', 'MultiLineString']:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
|
from shapely.topology import Delegating
class LinearRefBase(Delegating):
def _validate_line(self, ob):
super(LinearRefBase, self)._validate(ob)
- try:
- assert ob.geom_type in ['LineString', 'MultiLineString']
? ^^^^^^^^
+ if not ob.geom_type in ['LinearRing', 'LineString', 'MultiLineString']:
? ++ ^^ ++++++++++++++ +
- except AssertionError:
raise TypeError("Only linear types support this operation")
class ProjectOp(LinearRefBase):
def __call__(self, this, other):
self._validate_line(this)
self._validate(other)
return self.fn(this._geom, other._geom)
class InterpolateOp(LinearRefBase):
def __call__(self, this, distance):
self._validate_line(this)
return self.fn(this._geom, distance)
|
f463247198354b0af1d0b8a4ff63c0757d4c2839 | regression.py | regression.py | import subprocess
subprocess.check_call(["coverage", "run", "--source", "toyplot", "-m", "nose"])
subprocess.check_call(["coverage", "run", "--append", "--source", "toyplot", "-m", "behave"])
subprocess.check_call(["coverage", "report"])
subprocess.check_call(["coverage", "html", "--directory", ".cover"])
| import subprocess
subprocess.check_call(["coverage", "run", "--source", "toyplot", "--omit", "toyplot/testing.py", "-m", "nose"])
subprocess.check_call(["coverage", "run", "--append", "--source", "toyplot", "--omit", "toyplot/testing.py", "-m", "behave"])
subprocess.check_call(["coverage", "report"])
subprocess.check_call(["coverage", "html", "--directory", ".cover"])
| Exclude the testing module from coverage results. | Exclude the testing module from coverage results.
| Python | bsd-3-clause | cmorgan/toyplot,cmorgan/toyplot | import subprocess
- subprocess.check_call(["coverage", "run", "--source", "toyplot", "-m", "nose"])
+ subprocess.check_call(["coverage", "run", "--source", "toyplot", "--omit", "toyplot/testing.py", "-m", "nose"])
- subprocess.check_call(["coverage", "run", "--append", "--source", "toyplot", "-m", "behave"])
+ subprocess.check_call(["coverage", "run", "--append", "--source", "toyplot", "--omit", "toyplot/testing.py", "-m", "behave"])
subprocess.check_call(["coverage", "report"])
subprocess.check_call(["coverage", "html", "--directory", ".cover"])
| Exclude the testing module from coverage results. | ## Code Before:
import subprocess
subprocess.check_call(["coverage", "run", "--source", "toyplot", "-m", "nose"])
subprocess.check_call(["coverage", "run", "--append", "--source", "toyplot", "-m", "behave"])
subprocess.check_call(["coverage", "report"])
subprocess.check_call(["coverage", "html", "--directory", ".cover"])
## Instruction:
Exclude the testing module from coverage results.
## Code After:
import subprocess
subprocess.check_call(["coverage", "run", "--source", "toyplot", "--omit", "toyplot/testing.py", "-m", "nose"])
subprocess.check_call(["coverage", "run", "--append", "--source", "toyplot", "--omit", "toyplot/testing.py", "-m", "behave"])
subprocess.check_call(["coverage", "report"])
subprocess.check_call(["coverage", "html", "--directory", ".cover"])
| import subprocess
- subprocess.check_call(["coverage", "run", "--source", "toyplot", "-m", "nose"])
+ subprocess.check_call(["coverage", "run", "--source", "toyplot", "--omit", "toyplot/testing.py", "-m", "nose"])
? ++++++++++++++++++++++++++++++++
- subprocess.check_call(["coverage", "run", "--append", "--source", "toyplot", "-m", "behave"])
+ subprocess.check_call(["coverage", "run", "--append", "--source", "toyplot", "--omit", "toyplot/testing.py", "-m", "behave"])
? ++++++++++++++++++++++++++++++++
subprocess.check_call(["coverage", "report"])
subprocess.check_call(["coverage", "html", "--directory", ".cover"]) |
e5b42db249dd94a0d7652881a8bba8ed78772d3e | examples/turnAndMove.py | examples/turnAndMove.py | import slither, pygame
snakey = slither.Sprite()
snakey.setCostumeByName("costume0")
snakey.goto(0, 0)
slither.slitherStage.setColor(40, 222, 40)
slither.setup() # Begin slither
def handlequit():
print("Quitting...")
return True
slither.registerCallback(pygame.QUIT, handlequit) # This uses the direct call form
@slither.registerCallback(pygame.MOUSEBUTTONUP) # This uses the decorator form
def handlemouseup(event):
print("Mouseup:", event.pos, event.button)
def run_a_frame():
snakey.xpos += 1
snakey.ypos += 1
snakey.direction += 1
slither.runMainLoop(run_a_frame)
| import slither, pygame
snakey = slither.Sprite()
snakey.setCostumeByName("costume0")
snakey.goto(0, 0)
slither.setup() # Begin slither
def handlequit():
print("Quitting...")
return True
slither.registerCallback(pygame.QUIT, handlequit) # This uses the direct call form
@slither.registerCallback(pygame.MOUSEBUTTONUP) # This uses the decorator form
def handlemouseup(event):
print("Mouseup:", event.pos, event.button)
def run_a_frame():
snakey.xpos += 1
snakey.ypos += 1
snakey.direction += 1
slither.runMainLoop(run_a_frame)
| Fix small test problem\nBTW rotation works now, thanks @BookOwl | Fix small test problem\nBTW rotation works now, thanks @BookOwl
| Python | mit | PySlither/Slither,PySlither/Slither | import slither, pygame
snakey = slither.Sprite()
snakey.setCostumeByName("costume0")
snakey.goto(0, 0)
-
- slither.slitherStage.setColor(40, 222, 40)
slither.setup() # Begin slither
def handlequit():
print("Quitting...")
return True
slither.registerCallback(pygame.QUIT, handlequit) # This uses the direct call form
@slither.registerCallback(pygame.MOUSEBUTTONUP) # This uses the decorator form
def handlemouseup(event):
print("Mouseup:", event.pos, event.button)
def run_a_frame():
snakey.xpos += 1
snakey.ypos += 1
snakey.direction += 1
slither.runMainLoop(run_a_frame)
| Fix small test problem\nBTW rotation works now, thanks @BookOwl | ## Code Before:
import slither, pygame
snakey = slither.Sprite()
snakey.setCostumeByName("costume0")
snakey.goto(0, 0)
slither.slitherStage.setColor(40, 222, 40)
slither.setup() # Begin slither
def handlequit():
print("Quitting...")
return True
slither.registerCallback(pygame.QUIT, handlequit) # This uses the direct call form
@slither.registerCallback(pygame.MOUSEBUTTONUP) # This uses the decorator form
def handlemouseup(event):
print("Mouseup:", event.pos, event.button)
def run_a_frame():
snakey.xpos += 1
snakey.ypos += 1
snakey.direction += 1
slither.runMainLoop(run_a_frame)
## Instruction:
Fix small test problem\nBTW rotation works now, thanks @BookOwl
## Code After:
import slither, pygame
snakey = slither.Sprite()
snakey.setCostumeByName("costume0")
snakey.goto(0, 0)
slither.setup() # Begin slither
def handlequit():
print("Quitting...")
return True
slither.registerCallback(pygame.QUIT, handlequit) # This uses the direct call form
@slither.registerCallback(pygame.MOUSEBUTTONUP) # This uses the decorator form
def handlemouseup(event):
print("Mouseup:", event.pos, event.button)
def run_a_frame():
snakey.xpos += 1
snakey.ypos += 1
snakey.direction += 1
slither.runMainLoop(run_a_frame)
| import slither, pygame
snakey = slither.Sprite()
snakey.setCostumeByName("costume0")
snakey.goto(0, 0)
-
- slither.slitherStage.setColor(40, 222, 40)
slither.setup() # Begin slither
def handlequit():
print("Quitting...")
return True
slither.registerCallback(pygame.QUIT, handlequit) # This uses the direct call form
@slither.registerCallback(pygame.MOUSEBUTTONUP) # This uses the decorator form
def handlemouseup(event):
print("Mouseup:", event.pos, event.button)
def run_a_frame():
snakey.xpos += 1
snakey.ypos += 1
snakey.direction += 1
slither.runMainLoop(run_a_frame) |
baedff75f2b86f09368e3bd72b72e27bf887cc88 | rotational-cipher/rotational_cipher.py | rotational-cipher/rotational_cipher.py | import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
| import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
| Use lambda function with method | Use lambda function with method
| Python | agpl-3.0 | CubicComet/exercism-python-solutions | import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
- return "".join(rot_gen(s,n))
+ rules = shift_rules(n)
+ return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
-
- def rot_gen(s, n):
- rules = shift_rules(n)
- for ch in s:
- try:
- yield rules[ch]
- except KeyError:
- yield ch
- | Use lambda function with method | ## Code Before:
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
return "".join(rot_gen(s,n))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
def rot_gen(s, n):
rules = shift_rules(n)
for ch in s:
try:
yield rules[ch]
except KeyError:
yield ch
## Instruction:
Use lambda function with method
## Code After:
import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
rules = shift_rules(n)
return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
| import string
UPPER = string.ascii_uppercase
LOWER = string.ascii_lowercase
def rotate(s, n):
- return "".join(rot_gen(s,n))
+ rules = shift_rules(n)
+ return "".join(map(lambda k: rules.get(k, k), s))
def shift_rules(n):
shifted = UPPER[n:] + UPPER[:n] + LOWER[n:] + LOWER[:n]
return {k:v for k,v in zip(UPPER+LOWER, shifted)}
-
-
- def rot_gen(s, n):
- rules = shift_rules(n)
- for ch in s:
- try:
- yield rules[ch]
- except KeyError:
- yield ch |
9fa3775c78b8c44b503ce1565e2e990644a61da6 | Lib/test/test_lib2to3.py | Lib/test/test_lib2to3.py | from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
| from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest, requires
# Don't run lib2to3 tests by default since they take too long
if __name__ != '__main__':
requires('lib2to3')
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
| Disable lib2to3 by default, unless run explicitly. | Disable lib2to3 by default, unless run explicitly.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
- from test.test_support import run_unittest
+ from test.test_support import run_unittest, requires
+
+ # Don't run lib2to3 tests by default since they take too long
+ if __name__ != '__main__':
+ requires('lib2to3')
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
| Disable lib2to3 by default, unless run explicitly. | ## Code Before:
from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
## Instruction:
Disable lib2to3 by default, unless run explicitly.
## Code After:
from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
from test.test_support import run_unittest, requires
# Don't run lib2to3 tests by default since they take too long
if __name__ != '__main__':
requires('lib2to3')
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main()
| from lib2to3.tests import test_fixers, test_pytree, test_util
import unittest
- from test.test_support import run_unittest
+ from test.test_support import run_unittest, requires
? ++++++++++
+
+ # Don't run lib2to3 tests by default since they take too long
+ if __name__ != '__main__':
+ requires('lib2to3')
def suite():
tests = unittest.TestSuite()
loader = unittest.TestLoader()
for m in (test_fixers,test_pytree,test_util):
tests.addTests(loader.loadTestsFromModule(m))
return tests
def test_main():
run_unittest(suite())
if __name__ == '__main__':
test_main() |
733890e0267d07c4d312427a30f136589a85626e | loom/test/test_benchmark.py | loom/test/test_benchmark.py | import loom.benchmark
DATASET = 'dd-100-100-0.5'
def test_shuffle():
loom.benchmark.shuffle(DATASET, profile=None)
def test_infer():
loom.benchmark.infer(DATASET, profile=None)
def test_checkpoint():
loom.benchmark.load_checkpoint(DATASET)
loom.benchmark.infer_checkpoint(DATASET, profile=None)
def test_generate():
loom.benchmark.generate(profile=None)
| import loom.benchmark
DATASET = 'dd-100-100-0.5'
def test_shuffle():
loom.benchmark.shuffle(DATASET, profile=None)
def test_infer():
loom.benchmark.infer(DATASET, profile=None)
def test_checkpoint():
loom.benchmark.load_checkpoint(DATASET, period_sec=1)
loom.benchmark.infer_checkpoint(DATASET, profile=None)
def test_generate():
loom.benchmark.generate(profile=None)
| Reduce test checkpoint period for faster tests | Reduce test checkpoint period for faster tests
| Python | bsd-3-clause | posterior/loom,priorknowledge/loom,posterior/loom,priorknowledge/loom,fritzo/loom,priorknowledge/loom,posterior/loom,fritzo/loom,fritzo/loom | import loom.benchmark
DATASET = 'dd-100-100-0.5'
def test_shuffle():
loom.benchmark.shuffle(DATASET, profile=None)
def test_infer():
loom.benchmark.infer(DATASET, profile=None)
def test_checkpoint():
- loom.benchmark.load_checkpoint(DATASET)
+ loom.benchmark.load_checkpoint(DATASET, period_sec=1)
loom.benchmark.infer_checkpoint(DATASET, profile=None)
def test_generate():
loom.benchmark.generate(profile=None)
| Reduce test checkpoint period for faster tests | ## Code Before:
import loom.benchmark
DATASET = 'dd-100-100-0.5'
def test_shuffle():
loom.benchmark.shuffle(DATASET, profile=None)
def test_infer():
loom.benchmark.infer(DATASET, profile=None)
def test_checkpoint():
loom.benchmark.load_checkpoint(DATASET)
loom.benchmark.infer_checkpoint(DATASET, profile=None)
def test_generate():
loom.benchmark.generate(profile=None)
## Instruction:
Reduce test checkpoint period for faster tests
## Code After:
import loom.benchmark
DATASET = 'dd-100-100-0.5'
def test_shuffle():
loom.benchmark.shuffle(DATASET, profile=None)
def test_infer():
loom.benchmark.infer(DATASET, profile=None)
def test_checkpoint():
loom.benchmark.load_checkpoint(DATASET, period_sec=1)
loom.benchmark.infer_checkpoint(DATASET, profile=None)
def test_generate():
loom.benchmark.generate(profile=None)
| import loom.benchmark
DATASET = 'dd-100-100-0.5'
def test_shuffle():
loom.benchmark.shuffle(DATASET, profile=None)
def test_infer():
loom.benchmark.infer(DATASET, profile=None)
def test_checkpoint():
- loom.benchmark.load_checkpoint(DATASET)
+ loom.benchmark.load_checkpoint(DATASET, period_sec=1)
? ++++++++++++++
loom.benchmark.infer_checkpoint(DATASET, profile=None)
def test_generate():
loom.benchmark.generate(profile=None) |
e2fbf646b193284fc5d01684193b9c5aeb415efe | generate_html.py | generate_html.py | from jinja2 import Environment, FileSystemLoader
import datetime
import json
env = Environment(loader=FileSystemLoader('templates'), autoescape=True)
names_template = env.get_template('names.html')
area_template = env.get_template('areas.html')
with open("output/templates.js") as templatesjs:
templates = templatesjs.read()
with open("processed/area_matches.json") as area_matches_file:
area_matches = json.load(area_matches_file)
with open('output/areas.html', 'w+') as name_output:
name_output.write(area_template.render(
templates=templates,
area_matches=area_matches,
date=datetime.date.today().isoformat(),
))
with open("processed/interesting_names.json") as interesting_names_file:
interesting_names = json.load(interesting_names_file)
with open('output/names.html', 'w+') as name_output:
name_output.write(names_template.render(
templates=templates,
interesting_names=interesting_names,
interesting_names_json=json.dumps(interesting_names),
date=datetime.date.today().isoformat(),
))
| from jinja2 import Environment, FileSystemLoader
import datetime
import json
env = Environment(loader=FileSystemLoader('templates'), autoescape=True)
names_template = env.get_template('names.html')
area_template = env.get_template('areas.html')
with open("output/templates.js") as templatesjs:
templates = templatesjs.read()
with open("processed/area_matches.json") as area_matches_file:
area_matches = json.load(area_matches_file)
with open('output/areas.html', 'w+') as name_output:
name_output.write(area_template.render(
templates=templates,
area_matches=area_matches,
date=datetime.date.today().isoformat(),
))
with open("processed/interesting_names.json") as interesting_names_file:
interesting_names = json.load(interesting_names_file)
with open('output/names.html', 'w+') as name_output, open("key_field_names.txt") as key_field_names_file:
key_fields = list(set([key_field_name.strip() for key_field_name in key_field_names_file]))
name_output.write(names_template.render(
templates=templates,
interesting_names=interesting_names,
interesting_names_json=json.dumps(interesting_names),
date=datetime.date.today().isoformat(),
key_fields_json=json.dumps(key_fields),
))
| Fix due to merge conflicts | Fix due to merge conflicts
| Python | agpl-3.0 | TalkAboutLocal/local-news-engine,TalkAboutLocal/local-news-engine,TalkAboutLocal/local-news-engine,TalkAboutLocal/local-news-engine | from jinja2 import Environment, FileSystemLoader
import datetime
import json
env = Environment(loader=FileSystemLoader('templates'), autoescape=True)
names_template = env.get_template('names.html')
area_template = env.get_template('areas.html')
with open("output/templates.js") as templatesjs:
templates = templatesjs.read()
with open("processed/area_matches.json") as area_matches_file:
area_matches = json.load(area_matches_file)
with open('output/areas.html', 'w+') as name_output:
name_output.write(area_template.render(
templates=templates,
area_matches=area_matches,
date=datetime.date.today().isoformat(),
))
with open("processed/interesting_names.json") as interesting_names_file:
interesting_names = json.load(interesting_names_file)
- with open('output/names.html', 'w+') as name_output:
+ with open('output/names.html', 'w+') as name_output, open("key_field_names.txt") as key_field_names_file:
+ key_fields = list(set([key_field_name.strip() for key_field_name in key_field_names_file]))
name_output.write(names_template.render(
templates=templates,
interesting_names=interesting_names,
interesting_names_json=json.dumps(interesting_names),
date=datetime.date.today().isoformat(),
+ key_fields_json=json.dumps(key_fields),
))
+ | Fix due to merge conflicts | ## Code Before:
from jinja2 import Environment, FileSystemLoader
import datetime
import json
env = Environment(loader=FileSystemLoader('templates'), autoescape=True)
names_template = env.get_template('names.html')
area_template = env.get_template('areas.html')
with open("output/templates.js") as templatesjs:
templates = templatesjs.read()
with open("processed/area_matches.json") as area_matches_file:
area_matches = json.load(area_matches_file)
with open('output/areas.html', 'w+') as name_output:
name_output.write(area_template.render(
templates=templates,
area_matches=area_matches,
date=datetime.date.today().isoformat(),
))
with open("processed/interesting_names.json") as interesting_names_file:
interesting_names = json.load(interesting_names_file)
with open('output/names.html', 'w+') as name_output:
name_output.write(names_template.render(
templates=templates,
interesting_names=interesting_names,
interesting_names_json=json.dumps(interesting_names),
date=datetime.date.today().isoformat(),
))
## Instruction:
Fix due to merge conflicts
## Code After:
from jinja2 import Environment, FileSystemLoader
import datetime
import json
env = Environment(loader=FileSystemLoader('templates'), autoescape=True)
names_template = env.get_template('names.html')
area_template = env.get_template('areas.html')
with open("output/templates.js") as templatesjs:
templates = templatesjs.read()
with open("processed/area_matches.json") as area_matches_file:
area_matches = json.load(area_matches_file)
with open('output/areas.html', 'w+') as name_output:
name_output.write(area_template.render(
templates=templates,
area_matches=area_matches,
date=datetime.date.today().isoformat(),
))
with open("processed/interesting_names.json") as interesting_names_file:
interesting_names = json.load(interesting_names_file)
with open('output/names.html', 'w+') as name_output, open("key_field_names.txt") as key_field_names_file:
key_fields = list(set([key_field_name.strip() for key_field_name in key_field_names_file]))
name_output.write(names_template.render(
templates=templates,
interesting_names=interesting_names,
interesting_names_json=json.dumps(interesting_names),
date=datetime.date.today().isoformat(),
key_fields_json=json.dumps(key_fields),
))
| from jinja2 import Environment, FileSystemLoader
import datetime
import json
env = Environment(loader=FileSystemLoader('templates'), autoescape=True)
names_template = env.get_template('names.html')
area_template = env.get_template('areas.html')
with open("output/templates.js") as templatesjs:
templates = templatesjs.read()
with open("processed/area_matches.json") as area_matches_file:
area_matches = json.load(area_matches_file)
with open('output/areas.html', 'w+') as name_output:
name_output.write(area_template.render(
templates=templates,
area_matches=area_matches,
date=datetime.date.today().isoformat(),
))
with open("processed/interesting_names.json") as interesting_names_file:
interesting_names = json.load(interesting_names_file)
- with open('output/names.html', 'w+') as name_output:
+ with open('output/names.html', 'w+') as name_output, open("key_field_names.txt") as key_field_names_file:
+ key_fields = list(set([key_field_name.strip() for key_field_name in key_field_names_file]))
name_output.write(names_template.render(
templates=templates,
interesting_names=interesting_names,
interesting_names_json=json.dumps(interesting_names),
date=datetime.date.today().isoformat(),
+ key_fields_json=json.dumps(key_fields),
))
+ |
bd2636db55396cac2ff6766593d5082562d865e2 | lightning/types/decorators.py | lightning/types/decorators.py | from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType | from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
if not hasattr(self, 'session'):
self.create_session()
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType | Create session if one doesn't exist | Create session if one doesn't exist
| Python | mit | peterkshultz/lightning-python,garretstuber/lightning-python,lightning-viz/lightning-python,garretstuber/lightning-python,garretstuber/lightning-python,lightning-viz/lightning-python,peterkshultz/lightning-python,peterkshultz/lightning-python | from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
+ if not hasattr(self, 'session'):
+ self.create_session()
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType | Create session if one doesn't exist | ## Code Before:
from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType
## Instruction:
Create session if one doesn't exist
## Code After:
from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
if not hasattr(self, 'session'):
self.create_session()
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType | from lightning import Lightning
def viztype(VizType):
def plotter(self, *args, **kwargs):
+ if not hasattr(self, 'session'):
+ self.create_session()
viz = VizType.baseplot(self.session, VizType._name, *args, **kwargs)
self.session.visualizations.append(viz)
return viz
if not hasattr(VizType,'_func'):
func = VizType._name
else:
func = VizType._func
setattr(Lightning, func, plotter)
return VizType
def imgtype(ImgType):
def plotter(self, *args, **kwargs):
img = ImgType.baseimage(self.session, ImgType._name, *args, **kwargs)
self.session.visualizations.append(img)
return img
if not hasattr(ImgType, '_func'):
func = ImgType._name
else:
func = ImgType._func
setattr(Lightning, func, plotter)
return ImgType |
74983cc059bc3480331b0815240c579b0b4517fc | bluebottle/assignments/filters.py | bluebottle/assignments/filters.py | from django.db.models import Q
from rest_framework_json_api.django_filters import DjangoFilterBackend
from bluebottle.assignments.transitions import ApplicantTransitions
class ApplicantListFilter(DjangoFilterBackend):
"""
Filter that shows all applicant if user is owner,
otherwise only show accepted applicants.
"""
def filter_queryset(self, request, queryset, view):
if request.user.is_authenticated():
queryset = queryset.filter(
Q(user=request.user) |
Q(activity__owner=request.user) |
Q(status__in=[
ApplicantTransitions.values.new,
ApplicantTransitions.values.succeeded
])
)
else:
queryset = queryset.filter(status__in=[
ApplicantTransitions.values.new,
ApplicantTransitions.values.succeeded
])
return super(ApplicantListFilter, self).filter_queryset(request, queryset, view)
| from django.db.models import Q
from rest_framework_json_api.django_filters import DjangoFilterBackend
from bluebottle.assignments.transitions import ApplicantTransitions
class ApplicantListFilter(DjangoFilterBackend):
"""
Filter that shows all applicant if user is owner,
otherwise only show accepted applicants.
"""
def filter_queryset(self, request, queryset, view):
if request.user.is_authenticated():
queryset = queryset.filter(
Q(user=request.user) |
Q(activity__owner=request.user) |
Q(activity__initiative__activity_manager=request.user) |
Q(status__in=[
ApplicantTransitions.values.active,
ApplicantTransitions.values.accepted,
ApplicantTransitions.values.succeeded
])
)
else:
queryset = queryset.filter(status__in=[
ApplicantTransitions.values.new,
ApplicantTransitions.values.succeeded
])
return super(ApplicantListFilter, self).filter_queryset(request, queryset, view)
| Tweak filtering of applicants on assignment | Tweak filtering of applicants on assignment
| Python | bsd-3-clause | onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle | from django.db.models import Q
from rest_framework_json_api.django_filters import DjangoFilterBackend
from bluebottle.assignments.transitions import ApplicantTransitions
class ApplicantListFilter(DjangoFilterBackend):
"""
Filter that shows all applicant if user is owner,
otherwise only show accepted applicants.
"""
def filter_queryset(self, request, queryset, view):
if request.user.is_authenticated():
queryset = queryset.filter(
Q(user=request.user) |
Q(activity__owner=request.user) |
+ Q(activity__initiative__activity_manager=request.user) |
Q(status__in=[
- ApplicantTransitions.values.new,
+ ApplicantTransitions.values.active,
+ ApplicantTransitions.values.accepted,
ApplicantTransitions.values.succeeded
])
)
else:
queryset = queryset.filter(status__in=[
ApplicantTransitions.values.new,
ApplicantTransitions.values.succeeded
])
return super(ApplicantListFilter, self).filter_queryset(request, queryset, view)
| Tweak filtering of applicants on assignment | ## Code Before:
from django.db.models import Q
from rest_framework_json_api.django_filters import DjangoFilterBackend
from bluebottle.assignments.transitions import ApplicantTransitions
class ApplicantListFilter(DjangoFilterBackend):
"""
Filter that shows all applicant if user is owner,
otherwise only show accepted applicants.
"""
def filter_queryset(self, request, queryset, view):
if request.user.is_authenticated():
queryset = queryset.filter(
Q(user=request.user) |
Q(activity__owner=request.user) |
Q(status__in=[
ApplicantTransitions.values.new,
ApplicantTransitions.values.succeeded
])
)
else:
queryset = queryset.filter(status__in=[
ApplicantTransitions.values.new,
ApplicantTransitions.values.succeeded
])
return super(ApplicantListFilter, self).filter_queryset(request, queryset, view)
## Instruction:
Tweak filtering of applicants on assignment
## Code After:
from django.db.models import Q
from rest_framework_json_api.django_filters import DjangoFilterBackend
from bluebottle.assignments.transitions import ApplicantTransitions
class ApplicantListFilter(DjangoFilterBackend):
"""
Filter that shows all applicant if user is owner,
otherwise only show accepted applicants.
"""
def filter_queryset(self, request, queryset, view):
if request.user.is_authenticated():
queryset = queryset.filter(
Q(user=request.user) |
Q(activity__owner=request.user) |
Q(activity__initiative__activity_manager=request.user) |
Q(status__in=[
ApplicantTransitions.values.active,
ApplicantTransitions.values.accepted,
ApplicantTransitions.values.succeeded
])
)
else:
queryset = queryset.filter(status__in=[
ApplicantTransitions.values.new,
ApplicantTransitions.values.succeeded
])
return super(ApplicantListFilter, self).filter_queryset(request, queryset, view)
| from django.db.models import Q
from rest_framework_json_api.django_filters import DjangoFilterBackend
from bluebottle.assignments.transitions import ApplicantTransitions
class ApplicantListFilter(DjangoFilterBackend):
"""
Filter that shows all applicant if user is owner,
otherwise only show accepted applicants.
"""
def filter_queryset(self, request, queryset, view):
if request.user.is_authenticated():
queryset = queryset.filter(
Q(user=request.user) |
Q(activity__owner=request.user) |
+ Q(activity__initiative__activity_manager=request.user) |
Q(status__in=[
- ApplicantTransitions.values.new,
? ^ -
+ ApplicantTransitions.values.active,
? ^^^^^
+ ApplicantTransitions.values.accepted,
ApplicantTransitions.values.succeeded
])
)
else:
queryset = queryset.filter(status__in=[
ApplicantTransitions.values.new,
ApplicantTransitions.values.succeeded
])
return super(ApplicantListFilter, self).filter_queryset(request, queryset, view) |
ef0d59781fbc9dcd89334843e5b6fc1461aed246 | rollbar/contrib/asgi/__init__.py | rollbar/contrib/asgi/__init__.py | __all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIApp) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
| __all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp as ASGIAppType, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIAppType) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
| Use unique identifier name for ASGIApp type | Use unique identifier name for ASGIApp type
Due to collision with ASGIApp class decorator
| Python | mit | rollbar/pyrollbar | __all__ = ["ASGIMiddleware"]
import rollbar
try:
- from starlette.types import ASGIApp, Receive, Scope, Send
+ from starlette.types import ASGIApp as ASGIAppType, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
- def __init__(self, app: ASGIApp) -> None:
+ def __init__(self, app: ASGIAppType) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
| Use unique identifier name for ASGIApp type | ## Code Before:
__all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIApp) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
## Instruction:
Use unique identifier name for ASGIApp type
## Code After:
__all__ = ["ASGIMiddleware"]
import rollbar
try:
from starlette.types import ASGIApp as ASGIAppType, Receive, Scope, Send
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app: ASGIAppType) -> None:
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook
| __all__ = ["ASGIMiddleware"]
import rollbar
try:
- from starlette.types import ASGIApp, Receive, Scope, Send
+ from starlette.types import ASGIApp as ASGIAppType, Receive, Scope, Send
? +++++++++++++++
except ImportError:
STARLETTE_INSTALLED = False
else:
STARLETTE_INSTALLED = True
# Optional class annotations must be statically declared because
# IDEs cannot infer type hinting for arbitrary dynamic code
def ASGIApp(cls):
async def _asgi_app(self, scope, receive, send):
try:
await self.app(scope, receive, send)
except Exception:
if scope["type"] == "http":
rollbar.report_exc_info()
raise
cls._asgi_app = _asgi_app
return cls
if STARLETTE_INSTALLED is True:
@ASGIApp
class ASGIMiddleware:
- def __init__(self, app: ASGIApp) -> None:
+ def __init__(self, app: ASGIAppType) -> None:
? ++++
self.app = app
async def __call__(self, scope: Scope, receive: Receive, send: Send) -> None:
await self._asgi_app(scope, receive, send)
else:
@ASGIApp
class ASGIMiddleware:
def __init__(self, app):
self.app = app
async def __call__(self, scope, receive, send):
await self._asgi_app(scope, receive, send)
def _hook(request, data):
data["framework"] = "asgi"
rollbar.BASE_DATA_HOOK = _hook |
815c246f1ef185e24991efc4075b2358c7955c6c | onadata/libs/utils/storage.py | onadata/libs/utils/storage.py | import os
import shutil
from django.core.files.storage import get_storage_class
def delete_user_storage(username):
storage = get_storage_class()()
def _recursive_delete(path):
directories, files = storage.listdir(path)
for file_ in files:
storage.delete(os.path.join(path, file_))
for directory in directories:
_recursive_delete(os.path.join(path, directory))
if storage.__class__.__name__ == 'FileSystemStorage':
if storage.exists(username):
shutil.rmtree(storage.path(username))
else:
_recursive_delete(username)
def user_storage_exists(username):
storage = get_storage_class()()
return storage.exists(username)
| import os
import shutil
from django.core.files.storage import FileSystemStorage, get_storage_class
def delete_user_storage(username):
storage = get_storage_class()()
def _recursive_delete(path):
directories, files = storage.listdir(path)
for file_ in files:
storage.delete(os.path.join(path, file_))
for directory in directories:
_recursive_delete(os.path.join(path, directory))
if isinstance(storage, FileSystemStorage):
if storage.exists(username):
shutil.rmtree(storage.path(username))
else:
_recursive_delete(username)
def user_storage_exists(username):
storage = get_storage_class()()
return storage.exists(username)
| Use `isinstance()` at the cost of an extra import | Use `isinstance()` at the cost of an extra import
| Python | bsd-2-clause | kobotoolbox/kobocat,kobotoolbox/kobocat,kobotoolbox/kobocat,kobotoolbox/kobocat | import os
import shutil
- from django.core.files.storage import get_storage_class
+ from django.core.files.storage import FileSystemStorage, get_storage_class
def delete_user_storage(username):
storage = get_storage_class()()
def _recursive_delete(path):
directories, files = storage.listdir(path)
for file_ in files:
storage.delete(os.path.join(path, file_))
for directory in directories:
_recursive_delete(os.path.join(path, directory))
- if storage.__class__.__name__ == 'FileSystemStorage':
+ if isinstance(storage, FileSystemStorage):
if storage.exists(username):
shutil.rmtree(storage.path(username))
else:
_recursive_delete(username)
def user_storage_exists(username):
storage = get_storage_class()()
return storage.exists(username)
| Use `isinstance()` at the cost of an extra import | ## Code Before:
import os
import shutil
from django.core.files.storage import get_storage_class
def delete_user_storage(username):
storage = get_storage_class()()
def _recursive_delete(path):
directories, files = storage.listdir(path)
for file_ in files:
storage.delete(os.path.join(path, file_))
for directory in directories:
_recursive_delete(os.path.join(path, directory))
if storage.__class__.__name__ == 'FileSystemStorage':
if storage.exists(username):
shutil.rmtree(storage.path(username))
else:
_recursive_delete(username)
def user_storage_exists(username):
storage = get_storage_class()()
return storage.exists(username)
## Instruction:
Use `isinstance()` at the cost of an extra import
## Code After:
import os
import shutil
from django.core.files.storage import FileSystemStorage, get_storage_class
def delete_user_storage(username):
storage = get_storage_class()()
def _recursive_delete(path):
directories, files = storage.listdir(path)
for file_ in files:
storage.delete(os.path.join(path, file_))
for directory in directories:
_recursive_delete(os.path.join(path, directory))
if isinstance(storage, FileSystemStorage):
if storage.exists(username):
shutil.rmtree(storage.path(username))
else:
_recursive_delete(username)
def user_storage_exists(username):
storage = get_storage_class()()
return storage.exists(username)
| import os
import shutil
- from django.core.files.storage import get_storage_class
+ from django.core.files.storage import FileSystemStorage, get_storage_class
? +++++++++++++++++++
def delete_user_storage(username):
storage = get_storage_class()()
def _recursive_delete(path):
directories, files = storage.listdir(path)
for file_ in files:
storage.delete(os.path.join(path, file_))
for directory in directories:
_recursive_delete(os.path.join(path, directory))
- if storage.__class__.__name__ == 'FileSystemStorage':
+ if isinstance(storage, FileSystemStorage):
if storage.exists(username):
shutil.rmtree(storage.path(username))
else:
_recursive_delete(username)
def user_storage_exists(username):
storage = get_storage_class()()
return storage.exists(username) |
1e8c094c0f806b624a41447446676c1f2ac3590d | tools/debug_adapter.py | tools/debug_adapter.py | import sys
if 'darwin' in sys.platform:
sys.path.append('/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python')
sys.path.append('.')
import adapter
adapter.main.run_tcp_server()
| import sys
import subprocess
import string
out = subprocess.check_output(['lldb', '-P'])
sys.path.append(string.strip(out))
sys.path.append('.')
import adapter
adapter.main.run_tcp_server()
| Fix adapter debugging on Linux. | Fix adapter debugging on Linux.
| Python | mit | vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb,vadimcn/vscode-lldb | import sys
- if 'darwin' in sys.platform:
- sys.path.append('/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python')
+ import subprocess
+ import string
+
+ out = subprocess.check_output(['lldb', '-P'])
+ sys.path.append(string.strip(out))
sys.path.append('.')
import adapter
adapter.main.run_tcp_server()
| Fix adapter debugging on Linux. | ## Code Before:
import sys
if 'darwin' in sys.platform:
sys.path.append('/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python')
sys.path.append('.')
import adapter
adapter.main.run_tcp_server()
## Instruction:
Fix adapter debugging on Linux.
## Code After:
import sys
import subprocess
import string
out = subprocess.check_output(['lldb', '-P'])
sys.path.append(string.strip(out))
sys.path.append('.')
import adapter
adapter.main.run_tcp_server()
| import sys
- if 'darwin' in sys.platform:
- sys.path.append('/Applications/Xcode.app/Contents/SharedFrameworks/LLDB.framework/Resources/Python')
+ import subprocess
+ import string
+
+ out = subprocess.check_output(['lldb', '-P'])
+ sys.path.append(string.strip(out))
sys.path.append('.')
import adapter
adapter.main.run_tcp_server() |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.