commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
446d19e6e63e110b10cccbe3407cbd86205f2101 | bump version number | aureooms/sak,aureooms/sak | setup.py | setup.py |
try : from setuptools import setup
except ImportError : from distutils.core import setup
setup (
name = 'sak' , version = '0.5.0' ,
description = 'Swiss Army Knife',
long_description = 'sak is a module, submodule and function based tool' ,
author = 'aureooms' ,
author_email = 'aurelien.ooms@gmail.com' ,
url = 'https://github.com/aureooms/sak' ,
license = 'LICENSE' ,
install_requires = [
'lxml' ,
'semantic_version'
] ,
packages = [
'sak' ,
'sak.test' ,
'lib' ,
'lib.nice'
] ,
scripts = [ '$' ]
)
|
try : from setuptools import setup
except ImportError : from distutils.core import setup
setup (
name = 'sak' , version = '0.4.2' ,
description = 'Swiss Army Knife',
long_description = 'sak is a module, submodule and function based tool' ,
author = 'aureooms' ,
author_email = 'aurelien.ooms@gmail.com' ,
url = 'https://github.com/aureooms/sak' ,
license = 'LICENSE' ,
install_requires = [
'lxml' ,
'semantic_version'
] ,
packages = [
'sak' ,
'sak.test' ,
'lib' ,
'lib.nice'
] ,
scripts = [ '$' ]
)
| agpl-3.0 | Python |
047108836154ad9153ae009c8a0576329547a8bf | Update version from 0.4.1 to 0.4.2. | ABI-Software/MeshParser | setup.py | setup.py | from setuptools import setup, find_packages
dependencies = []
long_description = """A Python library that parses different mesh format files into a Python dict
ready for consumption by other libraries.
"""
setup(name=u'meshparser',
version='0.4.2',
description='A Small Python library for parsing files that describe a mesh.',
long_description=long_description,
classifiers=[],
author=u'Hugh Sorby',
author_email='h.sorby@auckland.ac.nz',
url='https://github.com/ABI-Software/MeshParser',
license='Apache',
packages=find_packages('src', exclude=['tests', 'tests.*', ]),
package_dir={'': 'src'},
zip_safe=True,
install_requires=dependencies,
)
| from setuptools import setup, find_packages
dependencies = []
long_description = """A Python library that parses different mesh format files into a Python dict
ready for consumption by other libraries.
"""
setup(name=u'meshparser',
version='0.4.1',
description='A Small Python library for parsing files that describe a mesh.',
long_description=long_description,
classifiers=[],
author=u'Hugh Sorby',
author_email='h.sorby@auckland.ac.nz',
url='https://github.com/ABI-Software/MeshParser',
license='Apache',
packages=find_packages('src', exclude=['tests', 'tests.*', ]),
package_dir={'': 'src'},
zip_safe=True,
install_requires=dependencies,
)
| apache-2.0 | Python |
99356f01c38fe09dc084ad3f54cfdf3beaca63b5 | change packages to py_modules | gmega/sparmap,gmega/parmap | setup.py | setup.py | from setuptools import setup
setup(name='parmap',
version='0.1',
description='Simple parallel map implementation for Python.',
author='Giuliano Mega',
author_email='mega@spaziodati.eu',
py_modules=['parmap'],
license="LICENSE",
install_requires=[
"multiprocessing >= 2.6.2.1"
]
) | from setuptools import setup
setup(name='parmap',
version='0.1',
description='Simple parallel map implementation for Python.',
author='Giuliano Mega',
author_email='mega@spaziodati.eu',
packages=['parmap'],
license="LICENSE",
install_requires=[
"multiprocessing >= 2.6.2.1"
]
) | apache-2.0 | Python |
09c5d6fd0def4fbd909565a5dffb61a04dc0ec86 | Update version to 0.1.25 | edx/edx-val | setup.py | setup.py | #!/usr/bin/env python
import os
import sys
from setuptools import setup
PACKAGES = [
'edxval',
'edxval.migrations',
'edxval.tests',
]
def is_requirement(line):
"""
Return True if the requirement line is a package requirement;
that is, it is not blank, a comment, or editable.
"""
# Remove whitespace at the start/end of the line
line = line.strip()
# Skip blank lines, comments, and editable installs
return not (
line == '' or
line.startswith('-r') or
line.startswith('#') or
line.startswith('-e') or
line.startswith('git+')
)
def load_requirements(*requirements_paths):
"""
Load all requirements from the specified requirements files.
Returns a list of requirement strings.
"""
requirements = set()
for path in requirements_paths:
requirements.update(
line.split('#')[0].strip() for line in open(path).readlines()
if is_requirement(line)
)
return list(requirements)
VERSION = '0.1.25'
if sys.argv[-1] == 'tag':
print("Tagging the version on github:")
os.system("git tag -a v%s -m 'version %s'" % (VERSION, VERSION))
os.system("git push --tags")
sys.exit()
setup(
name='edxval',
version=VERSION,
author='edX',
url='http://github.com/edx/edx-val',
description='edx-val',
license='AGPL',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
packages=PACKAGES,
install_requires=load_requirements('requirements/base.in'),
tests_require=load_requirements('requirements/test.in'),
)
| #!/usr/bin/env python
import os
import sys
from setuptools import setup
PACKAGES = [
'edxval',
'edxval.migrations',
'edxval.tests',
]
def is_requirement(line):
"""
Return True if the requirement line is a package requirement;
that is, it is not blank, a comment, or editable.
"""
# Remove whitespace at the start/end of the line
line = line.strip()
# Skip blank lines, comments, and editable installs
return not (
line == '' or
line.startswith('-r') or
line.startswith('#') or
line.startswith('-e') or
line.startswith('git+')
)
def load_requirements(*requirements_paths):
"""
Load all requirements from the specified requirements files.
Returns a list of requirement strings.
"""
requirements = set()
for path in requirements_paths:
requirements.update(
line.split('#')[0].strip() for line in open(path).readlines()
if is_requirement(line)
)
return list(requirements)
VERSION = '0.1.24'
if sys.argv[-1] == 'tag':
print("Tagging the version on github:")
os.system("git tag -a v%s -m 'version %s'" % (VERSION, VERSION))
os.system("git push --tags")
sys.exit()
setup(
name='edxval',
version=VERSION,
author='edX',
url='http://github.com/edx/edx-val',
description='edx-val',
license='AGPL',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: GNU Affero General Public License v3',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
packages=PACKAGES,
install_requires=load_requirements('requirements/base.in'),
tests_require=load_requirements('requirements/test.in'),
)
| agpl-3.0 | Python |
0f0c42b0d61fb0a3693fc20cd0152128c097a905 | add missing demands requirements | westerncapelabs/uopbmoh-hub,westerncapelabs/uopbmoh-hub,westerncapelabs/uopboh-hub,westerncapelabs/uopbmoh-hub | setup.py | setup.py | from setuptools import setup, find_packages
setup(
name="uopbmoh-hub",
version="0.1",
url='http://github.com/westerncapelabs/uopbmoh-hub',
license='BSD',
author='Western Cape Labs',
author_email='devops@westerncapelabs.com',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django==1.9.1',
'djangorestframework==3.3.2',
'dj-database-url==0.3.0',
'psycopg2==2.6.1',
'raven==5.10.0',
'gunicorn==19.4.5',
'django-filter==0.12.0',
'whitenoise==2.0.6',
'celery==3.1.19',
'django-celery==3.1.17',
'redis==2.10.5',
'pytz==2015.7',
'django-rest-hooks==1.2.1',
'python-dateutil==2.5.3',
'django-bootstrap-form==3.2.1',
'demands==3.0.0'
],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| from setuptools import setup, find_packages
setup(
name="uopbmoh-hub",
version="0.1",
url='http://github.com/westerncapelabs/uopbmoh-hub',
license='BSD',
author='Western Cape Labs',
author_email='devops@westerncapelabs.com',
packages=find_packages(),
include_package_data=True,
install_requires=[
'Django==1.9.1',
'djangorestframework==3.3.2',
'dj-database-url==0.3.0',
'psycopg2==2.6.1',
'raven==5.10.0',
'gunicorn==19.4.5',
'django-filter==0.12.0',
'whitenoise==2.0.6',
'celery==3.1.19',
'django-celery==3.1.17',
'redis==2.10.5',
'pytz==2015.7',
'django-rest-hooks==1.2.1',
'python-dateutil==2.5.3',
'django-bootstrap-form==3.2.1',
],
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| bsd-3-clause | Python |
f3838ac01face8bece084f551481ad4c3a23ac6a | Fix Configurable to look for kwargs in entire MRO | edgedb/edgedb,edgedb/edgedb,edgedb/edgedb | edgedb/lang/common/config/configurable.py | edgedb/lang/common/config/configurable.py | ##
# Copyright (c) 2011 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
import inspect
from .cvalue import cvalue, _no_default
__all__ = 'ConfigurableMeta', 'Configurable'
class ConfigurableMeta(type):
def __new__(mcls, name, bases, dct):
dct['__sx_configurable__'] = True
return super().__new__(mcls, name, bases, dct)
def __init__(cls, name, bases, dct):
super().__init__(name, bases, dct)
for attrname, attrval in dct.items():
if not callable(attrval) and isinstance(attrval, cvalue):
attrval._owner = cls
attrval._set_name(attrname)
if attrval._default is not _no_default:
attrval._validate(attrval._default, attrval.fullname, 'class definition')
class Configurable(metaclass=ConfigurableMeta):
# For compatibility with objects that use __slots__. Zero impact
# on normal objects with __dict__.
#
__slots__ = ()
def __init__(self, *args, **kwargs):
if kwargs:
# Automatically process kwargs and init corresponding configurable
# attributes on the object. However, if the object has __slots__
# defined, you'll need to manually list there which of specified
# cvalues are configurable this way.
cls = self.__class__
to_pop = []
base_name = '{}.{}'.format(cls.__module__, cls.__name__)
for name, value in kwargs.items():
cval = inspect.getattr_static(cls, name, None)
if isinstance(cval, cvalue):
fullname = '{}.{}'.format(base_name, name)
cval._validate(value, fullname)
setattr(self, name, value)
to_pop.append(name)
for name in to_pop:
kwargs.pop(name)
super().__init__(*args, **kwargs)
| ##
# Copyright (c) 2011 Sprymix Inc.
# All rights reserved.
#
# See LICENSE for details.
##
from .cvalue import cvalue, _no_default
__all__ = 'ConfigurableMeta', 'Configurable'
class ConfigurableMeta(type):
def __new__(mcls, name, bases, dct):
dct['__sx_configurable__'] = True
return super().__new__(mcls, name, bases, dct)
def __init__(cls, name, bases, dct):
super().__init__(name, bases, dct)
for attrname, attrval in dct.items():
if not callable(attrval) and isinstance(attrval, cvalue):
attrval._owner = cls
attrval._set_name(attrname)
if attrval._default is not _no_default:
attrval._validate(attrval._default, attrval.fullname, 'class definition')
class Configurable(metaclass=ConfigurableMeta):
# For compatibility with objects that use __slots__. Zero impact
# on normal objects with __dict__.
#
__slots__ = ()
def __init__(self, *args, **kwargs):
if kwargs:
# Automatically process kwargs and init corresponding configurable
# attributes on the object. However, if the object has __slots__
# defined, you'll need to manually list there which of specified
# cvalues are configurable this way.
cls = self.__class__
dct = cls.__dict__
to_pop = []
base_name = '{}.{}'.format(cls.__module__, cls.__name__)
for name, value in kwargs.items():
try:
dct_val = dct[name]
except KeyError:
continue
else:
if isinstance(dct_val, cvalue):
fullname = '{}.{}'.format(base_name, name)
dct_val._validate(value, fullname)
setattr(self, name, value)
to_pop.append(name)
for name in to_pop:
kwargs.pop(name)
super().__init__(*args, **kwargs)
| apache-2.0 | Python |
3f850922436869a8fcff4d3acc9201831b55d49f | Improve readability. | shaurz/devo | new_project_dialog.py | new_project_dialog.py | import os
import wx
from file_picker import DirPicker
class NewProjectDialog(wx.Dialog):
def __init__(self, parent, path=""):
style = wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER
wx.Dialog.__init__(self, parent, title="New Project", style=style)
self.text_name = wx.TextCtrl(self, size=(380, -1))
self.fp_root = DirPicker(self, value=path)
grid = wx.FlexGridSizer(cols=2, vgap=5, hgap=5)
grid.AddGrowableCol(1)
grid.Add(wx.StaticText(self, label="Project Name"), 0, wx.ALIGN_CENTRE_VERTICAL)
grid.Add(self.text_name, 1, wx.EXPAND | wx.ALIGN_CENTRE_VERTICAL)
grid.Add(wx.StaticText(self, label="Directory"), 0, wx.ALIGN_CENTRE_VERTICAL)
grid.Add(self.fp_root, 1, wx.EXPAND | wx.ALIGN_CENTRE_VERTICAL)
btnsizer = wx.StdDialogButtonSizer()
btn_ok = wx.Button(self, wx.ID_OK)
btn_ok.SetDefault()
btnsizer.AddButton(btn_ok)
btnsizer.AddButton(wx.Button(self, wx.ID_CANCEL))
btnsizer.Realize()
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(grid, 0, wx.EXPAND | wx.ALL, 5)
sizer.Add(btnsizer, 0, wx.EXPAND | wx.ALL, 5)
self.SetSizer(sizer)
self.Fit()
self.Centre()
self.text_name.SetFocus()
self.Bind(wx.EVT_UPDATE_UI, self.OnUpdateUI_OK, id=wx.ID_OK)
def OnUpdateUI_OK(self, evt):
evt.Enable(bool(self.GetName()) and os.path.isdir(self.GetRoot()))
def GetName(self):
return self.text_name.GetValue().strip()
def GetRoot(self):
return os.path.expanduser(self.fp_root.GetValue())
if __name__ == "__main__":
app = wx.App()
dlg = NewProjectDialog(None)
dlg.ShowModal()
| import os
import wx
from file_picker import DirPicker
class NewProjectDialog(wx.Dialog):
def __init__(self, parent, path=""):
style = wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER
wx.Dialog.__init__(self, parent, title="New Project", style=style)
self.text_name = wx.TextCtrl(self, size=(380, -1))
self.fp_root = DirPicker(self, value=path)
grid = wx.FlexGridSizer(cols=2, vgap=5, hgap=5)
grid.AddGrowableCol(1)
grid.Add(wx.StaticText(self, label="Project Name"), 0, wx.ALIGN_CENTRE_VERTICAL)
grid.Add(self.text_name, 1, wx.EXPAND | wx.ALIGN_CENTRE_VERTICAL)
grid.Add(wx.StaticText(self, label="Directory"), 0, wx.ALIGN_CENTRE_VERTICAL)
grid.Add(self.fp_root, 1, wx.EXPAND | wx.ALIGN_CENTRE_VERTICAL)
btnsizer = wx.StdDialogButtonSizer()
btn_ok = wx.Button(self, wx.ID_OK)
btn_ok.SetDefault()
btnsizer.AddButton(btn_ok)
btnsizer.AddButton(wx.Button(self, wx.ID_CANCEL))
btnsizer.Realize()
sizer = wx.BoxSizer(wx.VERTICAL)
sizer.Add(grid, 0, wx.EXPAND | wx.ALL, 5)
sizer.Add(btnsizer, 0, wx.EXPAND | wx.ALL, 5)
self.SetSizer(sizer)
self.Fit()
self.Centre()
self.text_name.SetFocus()
self.Bind(wx.EVT_UPDATE_UI, self.__OnUpdateUI_OK, id=wx.ID_OK)
def __OnUpdateUI_OK(self, evt):
evt.Enable(bool(self.GetName()) and os.path.isdir(self.GetRoot()))
def GetName(self):
return self.text_name.GetValue().strip()
def GetRoot(self):
return os.path.expanduser(self.fp_root.GetValue())
if __name__ == "__main__":
app = wx.App()
dlg = NewProjectDialog(None)
dlg.ShowModal()
| mit | Python |
28b62491218c20303268187f3d65602ab20868a5 | Add detailed comments for methods | chetankrishna08/aws-health-tools,chetankrishna08/aws-health-tools,robperc/aws-health-tools,robperc/aws-health-tools | automated-actions/AWS_RISK_CREDENTIALS_EXPOSED/lambda_functions/lookup_cloudtrail_events.py | automated-actions/AWS_RISK_CREDENTIALS_EXPOSED/lambda_functions/lookup_cloudtrail_events.py | import datetime
import collections
import boto3
cloudtrail = boto3.client('cloudtrail')
def lambda_handler(event, context):
account_id = event['account_id']
time_discovered = event['time_discovered']
username = event['username']
deleted_key = event['deleted_key']
endtime = datetime.datetime.now() # Create start and end time for CloudTrail lookup
interval = datetime.timedelta(hours=24)
starttime = endtime - interval
print('Retrieving events...')
events = get_events(username, starttime, endtime)
print('Summarizing events...')
event_names, resource_names, resource_types = get_events_summaries(events)
return {
"account_id": account_id,
"time_discovered": time_discovered,
"username": username,
"deleted_key": deleted_key,
"event_names": event_names,
"resource_names": resource_names,
"resource_types": resource_types
}
def get_events(username, starttime, endtime):
""" Retrieves detailed list of CloudTrail events that occured between the specified time interval.
Args:
username (string): Username to lookup CloudTrail events for.
starttime(datetime): Start of interval to lookup CloudTrail events between.
endtime(datetime): End of interval to lookup CloudTrail events between.
Returns:
(dict)
Dictionary containing list of CloudTrail events occuring between the start and end time with detailed information for each event.
"""
try:
response = cloudtrail.lookup_events(
LookupAttributes=[
{
'AttributeKey': 'Username',
'AttributeValue': username
},
],
StartTime=starttime,
EndTime=endtime,
MaxResults=50
)
except Exception as e:
print(e)
print('Unable to retrieve CloudTrail events for user "{}"'.format(username))
raise(e)
return response
def get_events_summaries(events):
""" Summarizes CloudTrail events list by reducing into counters of occurences for each event, resource name, and resource type in list.
Args:
events (dict): Dictionary containing list of CloudTrail events to be summarized.
Returns:
(list, list, list)
Lists containing name:count tuples of most common occurences of events, resource names, and resource types in events list.
"""
event_name_counter = collections.Counter()
resource_name_counter = collections.Counter()
resource_type_counter = collections.Counter()
for event in events['Events']:
resources = event.get("Resources")
event_name_counter.update([event.get('EventName')])
if resources is not None:
resource_name_counter.update([resource.get("ResourceName") for resource in resources])
resource_type_counter.update([resource.get("ResourceType") for resource in resources])
return event_name_counter.most_common(10), resource_name_counter.most_common(10), resource_type_counter.most_common(10)
| import datetime
import collections
import boto3
cloudtrail = boto3.client('cloudtrail')
def lambda_handler(event, context):
account_id = event['account_id']
time_discovered = event['time_discovered']
username = event['username']
deleted_key = event['deleted_key']
endtime = datetime.datetime.now()
interval = datetime.timedelta(hours=24)
starttime = endtime - interval
print('Retrieving events...')
events = get_events(username, starttime, endtime)
print('Summarizing events...')
event_names, resource_names, resource_types = get_events_summaries(events)
return {
"account_id": account_id,
"time_discovered": time_discovered,
"username": username,
"deleted_key": deleted_key,
"event_names": event_names,
"resource_names": resource_names,
"resource_types": resource_types
}
def get_events(username, starttime, endtime):
try:
response = cloudtrail.lookup_events(
LookupAttributes=[
{
'AttributeKey': 'Username',
'AttributeValue': username
},
],
StartTime=starttime,
EndTime=endtime,
MaxResults=50
)
except Exception as e:
print(e)
print('Unable to retrieve CloudTrail events for user "{}"'.format(username))
raise(e)
return response
def get_events_summaries(events):
event_name_counter = collections.Counter()
resource_name_counter = collections.Counter()
resource_type_counter = collections.Counter()
for event in events['Events']:
resources = event.get("Resources")
event_name_counter.update([event.get('EventName')])
if resources is not None:
resource_name_counter.update([resource.get("ResourceName") for resource in resources])
resource_type_counter.update([resource.get("ResourceType") for resource in resources])
return event_name_counter.most_common(10), resource_name_counter.most_common(10), resource_type_counter.most_common(10)
| apache-2.0 | Python |
0cc7bc679edd0d82bcb7a841c577d49662a9349f | Change path of ee_test.py script | niksu/pax,niksu/pax,niksu/pax | examples/EthernetEcho/mn_ethernet_echo.py | examples/EthernetEcho/mn_ethernet_echo.py | #!/usr/bin/env python
# coding: latin-1
# Mininet testing script for EthernetEcho
# Nik Sultana, February 2017
#
# Use of this source code is governed by the Apache 2.0 license; see LICENSE.
from mininet.net import Mininet, CLI
from scapy.all import *
import os
host_mac = "02:00:00:00:00:02"
echoer_mac = "02:00:00:00:00:01"
echoer_iface_name = "echoer-eth0"
PAX = None
try:
PAX = os.environ['PAX']
except KeyError:
print "PAX environment variable must point to path where Pax repo is cloned"
exit(1)
net = Mininet()
echoer = net.addHost('echoer', mac=echoer_mac)
host = net.addHost('host', mac=host_mac)
switch = net.addSwitch('s0')
controller = net.addController('c0')
net.addLink(echoer, switch)
net.addLink(host, switch)
net.start()
# FIXME use Jonny Shipton's PaxNode node type, to set these up automatically.
echoer.cmd("iptables -A INPUT -p tcp -i " + echoer_iface_name + " -j DROP")
echoer.cmd("arptables -P INPUT DROP")
echoer.cmd("sysctl -w net.ipv4.ip_forward=0")
echoer.cmd("sudo " + PAX + "/Bin/Pax.exe " + PAX + "/examples/EthernetEcho/ethernet_echo.json " + PAX + "/examples/Bin/Examples.dll &")
output = host.cmdPrint("sudo python " + PAX + "/examples/EthernetEcho/mn_ethernet_echo_test.py")
print output
net.stop()
| #!/usr/bin/env python
# coding: latin-1
# Mininet testing script for EthernetEcho
# Nik Sultana, February 2017
#
# Use of this source code is governed by the Apache 2.0 license; see LICENSE.
from mininet.net import Mininet, CLI
from scapy.all import *
import os
host_mac = "02:00:00:00:00:02"
echoer_mac = "02:00:00:00:00:01"
echoer_iface_name = "echoer-eth0"
PAX = None
try:
PAX = os.environ['PAX']
except KeyError:
print "PAX environment variable must point to path where Pax repo is cloned"
exit(1)
net = Mininet()
echoer = net.addHost('echoer', mac=echoer_mac)
host = net.addHost('host', mac=host_mac)
switch = net.addSwitch('s0')
controller = net.addController('c0')
net.addLink(echoer, switch)
net.addLink(host, switch)
net.start()
# FIXME use Jonny Shipton's PaxNode node type, to set these up automatically.
echoer.cmd("iptables -A INPUT -p tcp -i " + echoer_iface_name + " -j DROP")
echoer.cmd("arptables -P INPUT DROP")
echoer.cmd("sysctl -w net.ipv4.ip_forward=0")
echoer.cmd("sudo " + PAX + "/Bin/Pax.exe " + PAX + "/examples/EthernetEcho/ethernet_echo.json " + PAX + "/examples/Bin/Examples.dll &")
output = host.cmdPrint("sudo python " + PAX + "/ee_test.py")
print output
net.stop()
| apache-2.0 | Python |
ad761908537b63c2d262f69a75e7b221f84e8647 | Add stub for multiple posts in school boards | opencivicdata/scrapers-ca,opencivicdata/scrapers-ca | ca_on_school_boards_english_public/__init__.py | ca_on_school_boards_english_public/__init__.py | from utils import CanadianJurisdiction
from opencivicdata.divisions import Division
from pupa.scrape import Organization
class OntarioEnglishPublicSchoolBoards(CanadianJurisdiction):
classification = 'legislature' # just to avoid clash
division_id = 'ocd-division/country:ca/province:on'
division_name = 'Ontario English Public School Board boundary"'
name = 'Ontario English Public School Boards'
url = 'http://www.edu.gov.on.ca/eng/sbinfo/boardList.html'
def get_organizations(self):
organization = Organization(self.name, classification=self.classification)
for division in Division.get(self.division_id).children('school_district'):
for i in range (0, 15): # XXX made-up number
organization.add_post(role='Representative (seat {})'.format(i), label=division.name, division_id=division.id)
yield organization
| from utils import CanadianJurisdiction
from opencivicdata.divisions import Division
from pupa.scrape import Organization
class OntarioEnglishPublicSchoolBoards(CanadianJurisdiction):
classification = 'legislature' # just to avoid clash
division_id = 'ocd-division/country:ca/province:on'
division_name = 'Ontario English Public School Board boundary"'
name = 'Ontario English Public School Boards'
url = 'http://www.edu.gov.on.ca/eng/sbinfo/boardList.html'
def get_organizations(self):
organization = Organization(self.name, classification=self.classification)
for division in Division.get(self.division_id).children('school_district'):
organization.add_post(role='Representative', label=division.name, division_id=division.id)
yield organization
| mit | Python |
e995b1b18cf274f7c90e6d72f345a4c0f0e78ac8 | Fix project name. (#267) | forseti-security/forseti-security,thenenadx/forseti-security,forseti-security/forseti-security,felixbb/forseti-security,thenenadx/forseti-security,forseti-security/forseti-security,felixbb/forseti-security,forseti-security/forseti-security,cschnei3/forseti-security,cschnei3/forseti-security,cschnei3/forseti-security,felixbb/forseti-security,thenenadx/forseti-security | google/cloud/security/common/gcp_type/project.py | google/cloud/security/common/gcp_type/project.py | # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A Project Resource.
See: https://cloud.google.com/resource-manager/reference/rest/v1/projects
"""
from google.cloud.security.common.gcp_api import cloud_resource_manager as crm
from google.cloud.security.common.gcp_type import resource
# pylint: disable=too-few-public-methods
class ProjectLifecycleState(resource.LifecycleState):
"""Project lifecycle state."""
DELETE_REQUESTED = 'DELETE_REQUESTED'
class Project(resource.Resource):
"""Project resource."""
RESOURCE_NAME_FMT = 'projects/%s'
def __init__(
self,
project_id,
project_number=None,
name=None,
display_name=None,
parent=None,
lifecycle_state=ProjectLifecycleState.UNSPECIFIED):
"""Initialize.
Args:
project_id: The project string id.
project_number: The project number.
name: The full unique GCP name, i.e. "projects/{projectId}".
display_name: The display name.
parent: The parent Resource.
lifecycle_state: The project's lifecycle state.
"""
super(Project, self).__init__(
resource_id=project_id,
resource_type=resource.ResourceType.PROJECT,
name=name,
display_name=display_name,
parent=parent,
lifecycle_state=lifecycle_state)
self.project_number = project_number
def get_project_number(self):
"""Returns the project number."""
return self.project_number
def exists(self):
"""Verify that the project exists.
Returns:
True if we can get the project from GCP, otherwise False.
"""
crm_client = crm.CloudResourceManagerClient()
project = crm_client.get_project(self.id)
return project is not None
| # Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A Project Resource.
See: https://cloud.google.com/resource-manager/reference/rest/v1/projects
"""
from google.cloud.security.common.gcp_api import cloud_resource_manager as crm
from google.cloud.security.common.gcp_type import resource
# pylint: disable=too-few-public-methods
class ProjectLifecycleState(resource.LifecycleState):
"""Project lifecycle state."""
DELETE_REQUESTED = 'DELETE_REQUESTED'
class Project(resource.Resource):
"""Project resource."""
RESOURCE_NAME_FMT = 'organizations/%s'
def __init__(
self,
project_id,
project_number=None,
name=None,
display_name=None,
parent=None,
lifecycle_state=ProjectLifecycleState.UNSPECIFIED):
"""Initialize.
Args:
project_id: The project string id.
project_number: The project number.
name: The full unique GCP name, i.e. "projects/{projectId}".
display_name: The display name.
parent: The parent Resource.
lifecycle_state: The project's lifecycle state.
"""
super(Project, self).__init__(
resource_id=project_id,
resource_type=resource.ResourceType.PROJECT,
name=name,
display_name=display_name,
parent=parent,
lifecycle_state=lifecycle_state)
self.project_number = project_number
def get_project_number(self):
"""Returns the project number."""
return self.project_number
def exists(self):
"""Verify that the project exists.
Returns:
True if we can get the project from GCP, otherwise False.
"""
crm_client = crm.CloudResourceManagerClient()
project = crm_client.get_project(self.id)
return project is not None
| apache-2.0 | Python |
06021b4e7f768a1a7d8b5809054883cb58e0d5b3 | hide sd browser notification pop up | liveblog/liveblog,hlmnrmr/liveblog,superdesk/liveblog,superdesk/liveblog,superdesk/liveblog,hlmnrmr/liveblog,liveblog/liveblog,hlmnrmr/liveblog,hlmnrmr/liveblog,liveblog/liveblog,liveblog/liveblog,superdesk/liveblog,liveblog/liveblog | server/liveblog/auth/db.py | server/liveblog/auth/db.py | from apps.auth.db import DbAuthService
from settings import SUBSCRIPTION_LEVEL, ACCESS_SUBSCRIPTIONS_MOBILE
from superdesk.errors import SuperdeskApiError
from superdesk import get_resource_service
from apps.auth.errors import CredentialsAuthError
class AccessAuthService(DbAuthService):
def authenticate(self, credentials):
self._check_subscription_level()
self.disable_sd_desktop_notification(credentials)
return super().authenticate(credentials)
def _check_subscription_level(self):
subscription = SUBSCRIPTION_LEVEL
if subscription not in ACCESS_SUBSCRIPTIONS_MOBILE:
raise SuperdeskApiError.forbiddenError(message='Liveblog mobile can not access on this subscription')
def disable_sd_desktop_notification(self, credentials):
user = get_resource_service('users').find_one(req=None, username=credentials.get('username'))
if not user:
raise CredentialsAuthError(credentials)
user_updates = user
user_updates['user_preferences']['desktop:notification']['enabled'] = False
get_resource_service('users').system_update(user['_id'], user_updates, user)
| from apps.auth.db import DbAuthService
from settings import SUBSCRIPTION_LEVEL, ACCESS_SUBSCRIPTIONS_MOBILE
from superdesk.errors import SuperdeskApiError
class AccessAuthService(DbAuthService):
def authenticate(self, credentials):
self._check_subscription_level()
return super().authenticate(credentials)
def _check_subscription_level(self):
subscription = SUBSCRIPTION_LEVEL
if subscription not in ACCESS_SUBSCRIPTIONS_MOBILE:
raise SuperdeskApiError.forbiddenError(message='Liveblog mobile can not access on this subscription')
| agpl-3.0 | Python |
aaed4427c82fa0af18a79fe51112fb4e5504dcf6 | Add argparse module usage | seleznev/firefox-complete-theme-build-system | src/make-xpi.py | src/make-xpi.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Usage:
./make-xpi.py
./make-xpi.py theme
./make-xpi.py extension
./make-xpi.py clean
"""
import sys
import os
import shutil
import json
import argparse
sys.path.insert(0, "./build")
from themebuilder import ThemeBuilder
from extensionbuilder import ExtensionBuilder
from packagebuilder import PackageBuilder
def main():
parser = argparse.ArgumentParser()
parser.add_argument("action", nargs='?', default="all",
choices=["all", "theme", "extension", "clean"],
help="build theme, extension, package or clean sources")
args = parser.parse_args()
action = args.action
#
# Clean up
#
if action == "clean":
if os.path.isdir(".build"):
shutil.rmtree(".build")
if os.path.isdir("build/__pycache__"):
shutil.rmtree("build/__pycache__")
for name in os.listdir("build"):
if name.endswith(".pyc"):
os.remove(os.path.join("build", name))
sys.exit(0)
#
# Theme building
#
if action in ["theme", "all"]:
builder = ThemeBuilder()
print(":: Starting build theme...")
builder.build()
#
# Extension building
#
if action in ["extension", "all"]:
builder = ExtensionBuilder()
print(":: Starting build extension...")
builder.build()
#
# Package building
#
if action == "all":
builder = PackageBuilder()
print(":: Starting make package...")
builder.build()
if __name__ == "__main__":
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Usage:
./make-xpi.py
./make-xpi.py theme
./make-xpi.py extension
./make-xpi.py clean
"""
import sys
import os
import shutil
import json
sys.path.insert(0, "./build")
from themebuilder import ThemeBuilder
from extensionbuilder import ExtensionBuilder
from packagebuilder import PackageBuilder
def main():
args = sys.argv[1:]
if len(args) == 1:
action = args[0]
else:
action = "all"
if not action in ["all", "theme", "extension", "clean"]:
print(sys.argv[0] + ": uncorrect target")
print("Availible targets: all, theme, extension, clean")
sys.exit(1)
#
# Clean up
#
if action == "clean":
if os.path.isdir(".build"):
shutil.rmtree(".build")
if os.path.isdir("build/__pycache__"):
shutil.rmtree("build/__pycache__")
for name in os.listdir("build"):
if name.endswith(".pyc"):
os.remove(os.path.join("build", name))
sys.exit(0)
#
# Theme building
#
if action in ["theme", "all"]:
builder = ThemeBuilder()
print(":: Starting build theme...")
builder.build()
#
# Extension building
#
if action in ["extension", "all"]:
builder = ExtensionBuilder()
print(":: Starting build extension...")
builder.build()
#
# Package building
#
if action in ["all"]:
builder = PackageBuilder()
print(":: Starting make package...")
builder.build()
if __name__ == "__main__":
main()
| mpl-2.0 | Python |
63dabe19f30807ee3ed0c747360a63c634a29ae7 | Throw exception on failure | buzztroll/unix-agent,enStratus/unix-agent,buzztroll/unix-agent,enStratus/unix-agent,buzztroll/unix-agent,JPWKU/unix-agent,enStratus/unix-agent,JPWKU/unix-agent,buzztroll/unix-agent,enStratus/unix-agent,JPWKU/unix-agent,JPWKU/unix-agent | src/dcm/agent/plugins/builtin/remove_user.py | src/dcm/agent/plugins/builtin/remove_user.py | #
# Copyright (C) 2014 Dell, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import dcm.agent.exceptions as exceptions
import dcm.agent.plugins.api.base as plugin_base
import dcm.agent.plugins.api.utils as plugin_utils
_g_logger = logging.getLogger(__name__)
class RemoveUser(plugin_base.ScriptPlugin):
protocol_arguments = {
"userId":
("The unix account name of the user to remove",
True, str, None)
}
def __init__(self, conf, job_id, items_map, name, arguments):
super(RemoveUser, self).__init__(
conf, job_id, items_map, name, arguments)
self.ordered_param_list = [self.args.userId]
def run(self):
command = [self.conf.get_script_location("removeUser"),
self.args.userId]
(stdout, stderr, rc) = plugin_utils.run_command(self.conf, command)
if rc != 0:
raise exceptions.AgentExecutableException(
command, rc, stdout, stderr)
return plugin_base.PluginReply(rc, message="job removeUser succeeded.")
def load_plugin(conf, job_id, items_map, name, arguments):
return RemoveUser(conf, job_id, items_map, name, arguments)
| #
# Copyright (C) 2014 Dell, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import psutil
import logging
import dcm.agent.plugins.api.base as plugin_base
_g_logger = logging.getLogger(__name__)
class RemoveUser(plugin_base.ScriptPlugin):
protocol_arguments = {
"userId":
("The unix account name of the user to remove",
True, str, None)
}
def __init__(self, conf, job_id, items_map, name, arguments):
super(RemoveUser, self).__init__(
conf, job_id, items_map, name, arguments)
self.ordered_param_list = [self.args.userId]
def run(self):
rc = super(RemoveUser, self).run()
if rc._reply_doc["return_code"] != 0:
return plugin_base.PluginReply(
rc._reply_doc["return_code"], message='', error_message="Remove User Failed rc = %s" % str(rc))
else:
return plugin_base.PluginReply(
0, message="RemoveUser succeeded", error_message='')
def load_plugin(conf, job_id, items_map, name, arguments):
return RemoveUser(conf, job_id, items_map, name, arguments)
| apache-2.0 | Python |
16ba95e246c8ebd75d8904705a429461ce0c561b | fix spaces | jubatus/jubatus-python-client,hirokiky/jubatus-python-client,hirokiky/jubatus-python-client,jubatus/jubatus-python-client | test/jubatus_test/common.py | test/jubatus_test/common.py | import os
import time
import signal
from threading import Timer
import msgpackrpc
from msgpackrpc.error import *
class CommonUtils:
@staticmethod
def start_server(name, port):
try:
pid = os.fork()
if pid < 0:
print 'fork error'
sys.exit(1)
elif pid == 0:
os.execvp(name, [name, "-p", str(port), "-c", "100"])
CommonUtils.wait_server(port)
return pid
except OSError as error:
print 'Unable to fork. Error: %d (%s)' % (error.errno, error.strerror)
sys.exit(1)
@staticmethod
def stop_server(pid):
if os.kill(pid, signal.SIGTERM) != None:
print 'kill error'
t = Timer(3.0, os.kill, [pid, signal.SIGKILL])
t.start()
os.waitpid(pid, 0)
t.cancel()
@staticmethod
def wait_server(port):
sleep_time = 1000 # usec
# 1000 * \sum {i=0..9} 2^i = 1024000 micro sec = 1024 ms
for i in range(10):
# For Python clients, we need to generate instance for each iteration.
cli = msgpackrpc.Client(msgpackrpc.Address("localhost", port))
time.sleep(sleep_time/1000000.0) # from usec to sec
try:
cli.call("dummy")
raise Exception("dummy rpc succeeded")
except RPCError, e:
if e.args[0] == 1: # "no such method"
return # ... means server is fully up
sleep_time *= 2;
raise Exception("cannot connect")
| import os
import time
import signal
from threading import Timer
import msgpackrpc
from msgpackrpc.error import *
class CommonUtils:
@staticmethod
def start_server(name, port):
try:
pid = os.fork()
if pid < 0:
print 'fork error'
sys.exit(1)
elif pid == 0:
os.execvp(name, [name, "-p", str(port), "-c", "100"])
CommonUtils.wait_server(port)
return pid
except OSError as error:
print 'Unable to fork. Error: %d (%s)' % (error.errno, error.strerror)
sys.exit(1)
@staticmethod
def stop_server(pid):
if os.kill(pid, signal.SIGTERM) != None:
print 'kill error'
t = Timer(3.0, os.kill, [pid, signal.SIGKILL])
t.start()
os.waitpid(pid, 0)
t.cancel()
@staticmethod
def wait_server(port):
sleep_time = 1000 # usec
# 1000 * \sum {i=0..9} 2^i = 1024000 micro sec = 1024 ms
for i in range(10):
# For Python clients, we need to generate instance for each iteration.
cli = msgpackrpc.Client(msgpackrpc.Address("localhost", port))
time.sleep(sleep_time/1000000.0) # from usec to sec
try:
cli.call("dummy")
raise Exception("dummy rpc succeeded")
except RPCError, e:
if e.args[0] == 1: # "no such method"
return # ... means server is fully up
sleep_time *= 2;
raise Exception("cannot connect")
| mit | Python |
6f0cead30aad0e21af8b9b46ee23a56280933e73 | Define `__all__` for `view_manipulation.py` | divmain/GitSavvy,divmain/GitSavvy,divmain/GitSavvy | common/commands/view_manipulation.py | common/commands/view_manipulation.py | from sublime_plugin import TextCommand
from ...core.git_command import GitCommand
__all__ = (
"gs_handle_vintageous",
"gs_handle_arrow_keys"
)
class gs_handle_vintageous(TextCommand, GitCommand):
"""
Set the vintageous_friendly view setting if needed.
Enter insert mode if vintageous_enter_insert_mode option is enabled.
"""
def run(self, edit):
if self.savvy_settings.get("vintageous_friendly"):
self.view.settings().set("git_savvy.vintageous_friendly", True)
if self.savvy_settings.get("vintageous_enter_insert_mode"):
self.view.settings().set("vintageous_reset_mode_when_switching_tabs", False)
self.view.run_command("_enter_insert_mode")
class gs_handle_arrow_keys(TextCommand, GitCommand):
"""
Set the arrow_keys_navigation view setting if needed.
It allows navigation by using arrow keys.
"""
def run(self, edit):
if self.savvy_settings.get("arrow_keys_navigation"):
self.view.settings().set("git_savvy.arrow_keys_navigation", True)
| from sublime_plugin import TextCommand
from ...core.git_command import GitCommand
class gs_handle_vintageous(TextCommand, GitCommand):
"""
Set the vintageous_friendly view setting if needed.
Enter insert mode if vintageous_enter_insert_mode option is enabled.
"""
def run(self, edit):
if self.savvy_settings.get("vintageous_friendly"):
self.view.settings().set("git_savvy.vintageous_friendly", True)
if self.savvy_settings.get("vintageous_enter_insert_mode"):
self.view.settings().set("vintageous_reset_mode_when_switching_tabs", False)
self.view.run_command("_enter_insert_mode")
class gs_handle_arrow_keys(TextCommand, GitCommand):
"""
Set the arrow_keys_navigation view setting if needed.
It allows navigation by using arrow keys.
"""
def run(self, edit):
if self.savvy_settings.get("arrow_keys_navigation"):
self.view.settings().set("git_savvy.arrow_keys_navigation", True)
| mit | Python |
f5ccc326fa9715422bf0029a3f334643e247a9e8 | Support CPython with patch from lunar https://trac.torproject.org/projects/tor/ticket/8507 | juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe | ooni/utils/geodata.py | ooni/utils/geodata.py | import re
import os
from twisted.web.client import Agent
from twisted.internet import reactor, defer, protocol
from ooni.utils import log, net
from ooni import config
try:
from pygeoip import GeoIP
except ImportError:
try:
import GeoIP as CGeoIP
def GeoIP(database_path, *args, **kwargs):
return CGeoIP.open(database_path)
except ImportError:
log.err("Unable to import pygeoip or GeoIP. We will not be able to run geo IP related measurements")
class GeoIPDataFilesNotFound(Exception):
pass
def IPToLocation(ipaddr):
city_file = os.path.join(config.advanced.geoip_data_dir, 'GeoLiteCity.dat')
country_file = os.path.join(config.advanced.geoip_data_dir, 'GeoIP.dat')
asn_file = os.path.join(config.advanced.geoip_data_dir, 'GeoIPASNum.dat')
location = {'city': None, 'countrycode': None, 'asn': None}
try:
city_dat = GeoIP(city_file)
location['city'] = city_dat.record_by_addr(ipaddr)['city']
country_dat = GeoIP(country_file)
location['countrycode'] = country_dat.country_code_by_addr(ipaddr)
asn_dat = GeoIP(asn_file)
location['asn'] = asn_dat.org_by_addr(ipaddr)
except IOError:
log.err("Could not find GeoIP data files. Go into data/ "
"and run make geoip")
raise GeoIPDataFilesNotFound
return location
| import re
import os
from twisted.web.client import Agent
from twisted.internet import reactor, defer, protocol
from ooni.utils import log, net
from ooni import config
try:
import pygeoip
except ImportError:
log.err("Unable to import pygeoip. We will not be able to run geo IP related measurements")
class GeoIPDataFilesNotFound(Exception):
pass
def IPToLocation(ipaddr):
city_file = os.path.join(config.advanced.geoip_data_dir, 'GeoLiteCity.dat')
country_file = os.path.join(config.advanced.geoip_data_dir, 'GeoIP.dat')
asn_file = os.path.join(config.advanced.geoip_data_dir, 'GeoIPASNum.dat')
location = {'city': None, 'countrycode': None, 'asn': None}
try:
city_dat = pygeoip.GeoIP(city_file)
location['city'] = city_dat.record_by_addr(ipaddr)['city']
country_dat = pygeoip.GeoIP(country_file)
location['countrycode'] = country_dat.country_code_by_addr(ipaddr)
asn_dat = pygeoip.GeoIP(asn_file)
location['asn'] = asn_dat.org_by_addr(ipaddr)
except IOError:
log.err("Could not find GeoIP data files. Go into data/ "
"and run make geoip")
raise GeoIPDataFilesNotFound
return location
| bsd-2-clause | Python |
891f30385b23dbb59d794603a909535dede7beae | Fix generator test | lmaurits/BEASTling | tests/distribution_tests.py | tests/distribution_tests.py | # -*- encoding: utf-8 -*-
"""Additional tests for the distribution module.
The beastling.distribution module contains doctests for core
functionality, but testing all bad paths would overload the docstring
with un-helpful examples, so they are delegated to here.
"""
import unittest
from nose.tools import raises
import beastling.distributions
@raises(ValueError)
def run_with_string(string):
beastling.distributions.parse_prior_string(
string, string, is_point=True)
def test_various():
for string in [
"0,, 1",
" r_lognormal(1, 1)",
"rlognormal(-1, 1)",
"normal (1-5",
"1 – 5",
"1300>1200",
">12OO",
">1200,",
"normal [1-5]",
"lognormal(1, 1) + 4"]:
run_with_string.description = "Bad distribution {:}".format(
string)
yield run_with_string, string
| # -*- encoding: utf-8 -*-
"""Additional tests for the distribution module.
The beastling.distribution module contains doctests for core
functionality, but testing all bad paths would overload the docstring
with un-helpful examples, so they are delegated to here.
"""
import unittest
from nose.tools import raises
import beastling.distributions
class Tests(unittest.TestCase):
@raises(ValueError)
def run_with_string(self, string):
print(string)
beastling.distributions.parse_prior_string(
string, string, is_point=True)
def test_various(self):
for string in [
"0,, 1",
" r_lognormal(1, 1)",
"rlognormal(-1, 1)",
"normal (1-5",
"1 – 5",
"1300>1200",
">12OO",
">1200,",
"normal [1-5]",
"lognormal(1, 1) + 4"]:
self.run_with_string.description = "Bad distribution {:}".format(
string)
yield self.run_with_string, string
| bsd-2-clause | Python |
4bf218a843c61886c910504a47cbc86c8a4982ae | Fix migrate to ia script | theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs,theonion/django-bulbs | bulbs/content/management/commands/migrate_to_ia.py | bulbs/content/management/commands/migrate_to_ia.py | from django.core.management.base import BaseCommand
from bulbs.content.models import Content, FeatureType
from bulbs.content.tasks import post_to_instant_articles_api
import timezone
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('feature', nargs="+", type=str)
def handle(self, *args, **options):
feature_types = FeatureType.objects.all(instant_article=True)
feature = options['feature'][0]
if feature:
feature_types = feature_types.objects.filter(slug=feature)
for ft in feature_types:
# All published content belonging to feature type
content = Content.objects.filter(
feature_type=ft,
published__isnull=False,
published__lte=timezone.now())
for c in content:
post_to_instant_articles_api.delay(c.id)
| from django.core.management.base import BaseCommand
from bulbs.content.models import Content, FeatureType
from bulbs.content.tasks import post_to_instant_articles_api
import timezone
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('feature', nargs="+", type=str)
def handle(self, *args, **options):
feature_types = FeatureType.objects.all()
feature = options['feature'][0]
if feature:
feature_types.objects.filter(slug=feature)
for ft in feature_types:
if ft.instant_article:
# All published content belonging to feature type
content = Content.objects.filter(
feature_type=ft,
published__isnull=False,
published__lte=timezone.now())
for c in content:
post_to_instant_articles_api.delay(c.id)
| mit | Python |
229be108620149be8e48000fc3494a458d783910 | Enable --no-progress to disable the progress bar | davidfischer/warehouse | warehouse/synchronize/commands.py | warehouse/synchronize/commands.py | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import eventlet
from progress.bar import ShadyBar
from warehouse import create_app, db, script
from warehouse.packages import store
from warehouse.synchronize.fetchers import PyPIFetcher
eventlet.monkey_patch()
class DummyBar(object):
def iter(self, iterable):
for x in iterable:
yield x
def synchronize_project(app, project, fetcher):
with app.test_request_context():
project = store.project(project)
for v in fetcher.versions(project.name):
version = store.version(project, fetcher.release(project.name, v))
for dist in fetcher.distributions(project.name, version.version):
distribution = store.distribution(project, version, dist)
# Check if the stored hash matches what the fetcher says
if (distribution.hashes is None or
dist["md5_digest"] != distribution.hashes.get("md5")):
# The fetcher has a different file
# TODO(dstufft): Verify that this url is HTTPS
store.distribution_file(project, version, distribution,
fetcher.file(dist["url"]))
# Commit our changes
db.session.commit()
def syncer(projects=None, fetcher=None, app=None, pool=None, progress=True):
if pool is None:
pool = eventlet.GreenPool(10)
if app is None:
app = create_app()
if fetcher is None:
fetcher = PyPIFetcher()
if projects is None:
# TODO(dstufft): Determine how to make this do the "since last sync"
projects = fetcher.projects()
if progress:
bar = ShadyBar("Synchronizing", max=len(projects))
else:
bar = DummyBar()
with app.app_context():
for project in bar.iter(projects):
pool.spawn_n(synchronize_project, app, project, fetcher)
@script.option("--no-progress", action="store_false", dest="progress")
def synchronize(progress=True):
syncer(progress=progress)
| from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import eventlet
from progress.bar import ShadyBar
from warehouse import create_app, db, script
from warehouse.packages import store
from warehouse.synchronize.fetchers import PyPIFetcher
eventlet.monkey_patch()
def synchronize_project(app, project, fetcher):
with app.test_request_context():
project = store.project(project)
for v in fetcher.versions(project.name):
version = store.version(project, fetcher.release(project.name, v))
for dist in fetcher.distributions(project.name, version.version):
distribution = store.distribution(project, version, dist)
# Check if the stored hash matches what the fetcher says
if (distribution.hashes is None or
dist["md5_digest"] != distribution.hashes.get("md5")):
# The fetcher has a different file
# TODO(dstufft): Verify that this url is HTTPS
store.distribution_file(project, version, distribution,
fetcher.file(dist["url"]))
# Commit our changes
db.session.commit()
def syncer(projects=None, fetcher=None, app=None, pool=None):
if pool is None:
pool = eventlet.GreenPool(10)
if app is None:
app = create_app()
if fetcher is None:
fetcher = PyPIFetcher()
if projects is None:
# TODO(dstufft): Determine how to make this do the "since last sync"
projects = fetcher.projects()
with app.app_context():
for project in ShadyBar("Syncing", max=len(projects)).iter(projects):
pool.spawn_n(synchronize_project, app, project, fetcher)
@script.command
def synchronize():
syncer()
| bsd-2-clause | Python |
1d53f4cc5766f68b919032bfc0980a017b66c443 | return direct url to gif | isislab/botbot | plugins/gif.py | plugins/gif.py | import random
from util import hook, http
@hook.api_key('giphy')
@hook.command('gif')
@hook.command
def giphy(inp, api_key="dc6zaTOxFJmzC"):
'''.gif/.giphy <query> -- returns first giphy search result'''
url = 'http://api.giphy.com/v1/gifs/search'
try:
response = http.get_json(url, q=inp, limit=10, api_key=api_key)
except http.HTTPError as e:
return e.msg
results = response.get('data')
if results:
return random.choice(results).get('images').get('original').get('url')
else:
return 'no results found'
| import random
from util import hook, http
@hook.api_key('giphy')
@hook.command('gif')
@hook.command
def giphy(inp, api_key="dc6zaTOxFJmzC"):
'''.gif/.giphy <query> -- returns first giphy search result'''
url = 'http://api.giphy.com/v1/gifs/search'
try:
response = http.get_json(url, q=inp, limit=10, api_key=api_key)
except http.HTTPError as e:
return e.msg
results = response.get('data')
if results:
return random.choice(results).get('bitly_gif_url')
else:
return 'no results found'
| unlicense | Python |
6bb21fbf98106520b739b089a8b1a49f01a9cc9e | Fix UI bug. | 40323230/Pyslvs-PyQt5,KmolYuan/Pyslvs-PyQt5,KmolYuan/Pyslvs-PyQt5 | core/dialog/delete.py | core/dialog/delete.py | # -*- coding: utf-8 -*-
from ..QtModules import *
from .Ui_delete import Ui_Dialog as delete_Dialog
class deleteDlg(QDialog, delete_Dialog):
def __init__(self, deleteIcon, icon, table, pos, parent=None):
super(deleteDlg, self).__init__(parent)
self.setupUi(self)
self.setWindowIcon(deleteIcon)
for i in range(table.rowCount()): self.Entity.insertItem(i, icon, table.item(i, 0).text())
self.Entity.setCurrentIndex(pos)
self.on_Entity_currentIndexChanged(0)
@pyqtSlot(int)
def on_Entity_currentIndexChanged(self, index): self.buttonBox.button(QDialogButtonBox.Ok).setEnabled(self.Entity.currentIndex()!=-1)
| # -*- coding: utf-8 -*-
from ..QtModules import *
from .Ui_delete import Ui_Dialog as delete_Dialog
class deleteDlg(QDialog, delete_Dialog):
def __init__(self, deleteIcon, icon, table, pos, parent=None):
super(deleteDlg, self).__init__(parent)
self.setupUi(self)
self.setWindowIcon(deleteIcon)
for i in range(table.rowCount()): self.Entity.insertItem(i, icon, table.item(i, 0).text())
self.Entity.setCurrentIndex(pos)
| agpl-3.0 | Python |
9343dbfa0d822cdf2f00deab8b18cf4d2e809063 | Add id & category to routes list | LoveXanome/urbanbus-rest,LoveXanome/urbanbus-rest | services/display_routes.py | services/display_routes.py | # -*- coding: utf-8 -*-
from database.database_access import get_dao
from gtfslib.model import Route
from services.check_urban import check_urban_category
def get_routes(agency_id):
dao = get_dao(agency_id)
parsedRoutes = list()
for route in dao.routes(fltr=Route.route_type == Route.TYPE_BUS):
print(route)
parsedRoute = dict()
parsedRoute["id"] = route.route_id
parsedRoute["name"] = route.route_long_name
parsedRoute["category"] = check_urban_category(route.trips)
parsedRoutes.append(parsedRoute)
return parsedRoutes
| # -*- coding: utf-8 -*-
from database.database_access import get_dao
from gtfslib.model import Route
from gtfsplugins import decret_2015_1610
from database.database_access import get_dao
def get_routes(agency_id):
dao = get_dao(agency_id)
parsedRoutes = list()
for route in dao.routes(fltr=Route.route_type == Route.TYPE_BUS):
parsedRoute = dict()
parsedRoute["name"] = route.route_long_name
parsedRoutes.append(parsedRoute)
return parsedRoutes
| mit | Python |
4a0fe826143a890365cd1215244346301ebba8df | Remove unused `command` from pluginmanager.register | fbs/fubot | core/pluginmanager.py | core/pluginmanager.py | from core.interface import *
from twisted.python import log
def filter_interface(plugins, interface):
return filter(lambda p: interface.providedBy(p), plugins)
def filter_command(plugins, cmd):
return filter(lambda p: p.accepts_command(cmd), plugins)
class PluginManager(object):
def __init__(self):
self.plugins = []
def load(self, plugins):
# log.msg("Plugins: %s" % plugins)
for plugin in plugins:
__import__(plugin['name'])
def register(self, plugin):
if IPlugin.providedBy(plugin):
self.plugins.append(plugin)
log.msg("Registered plugin: %s, version %s, by %s" %
(plugin.name, plugin.version, plugin.author))
def filter(self, interface=None, command=None):
plugins = self.plugins
if interface:
plugins = filter_interface(plugins, interface)
if command:
plugins = filter_command(plugins, command)
return plugins
plugin_manager = PluginManager()
| from core.interface import *
from twisted.python import log
def filter_interface(plugins, interface):
return filter(lambda p: interface.providedBy(p), plugins)
def filter_command(plugins, cmd):
return filter(lambda p: p.accepts_command(cmd), plugins)
class PluginManager(object):
def __init__(self):
self.plugins = []
def load(self, plugins):
# log.msg("Plugins: %s" % plugins)
for plugin in plugins:
__import__(plugin['name'])
def register(self, plugin, command=''):
if IPlugin.providedBy(plugin):
self.plugins.append(plugin)
log.msg("Registered plugin: %s, version %s, by %s" %
(plugin.name, plugin.version, plugin.author))
def filter(self, interface=None, command=None):
plugins = self.plugins
if interface:
plugins = filter_interface(plugins, interface)
if command:
plugins = filter_command(plugins, command)
return plugins
plugin_manager = PluginManager()
| mit | Python |
bafe5c6a098e5bfff6fa0160c57e7245eccd5188 | Remove stray `django.conf.urls.defaults` import | st8st8/django-organizations,st8st8/django-organizations,DESHRAJ/django-organizations,bennylope/django-organizations,bennylope/django-organizations,arteria/django-ar-organizations,arteria/django-ar-organizations,GauthamGoli/django-organizations,DESHRAJ/django-organizations,GauthamGoli/django-organizations | organizations/urls.py | organizations/urls.py | from django.conf.urls import patterns, url
from django.contrib.auth.decorators import login_required
from organizations.views import (OrganizationList, OrganizationDetail,
OrganizationUpdate, OrganizationDelete, OrganizationCreate,
OrganizationUserList, OrganizationUserDetail, OrganizationUserUpdate,
OrganizationUserCreate, OrganizationUserRemind, OrganizationUserDelete)
urlpatterns = patterns('',
# Organization URLs
url(r'^$', view=login_required(OrganizationList.as_view()),
name="organization_list"),
url(r'^add/$', view=login_required(OrganizationCreate.as_view()),
name="organization_add"),
url(r'^(?P<organization_pk>[\d]+)/$',
view=login_required(OrganizationDetail.as_view()),
name="organization_detail"),
url(r'^(?P<organization_pk>[\d]+)/edit/$',
view=login_required(OrganizationUpdate.as_view()),
name="organization_edit"),
url(r'^(?P<organization_pk>[\d]+)/delete/$',
view=login_required(OrganizationDelete.as_view()),
name="organization_delete"),
# Organization user URLs
url(r'^(?P<organization_pk>[\d]+)/people/$',
view=login_required(OrganizationUserList.as_view()),
name="organization_user_list"),
url(r'^(?P<organization_pk>[\d]+)/people/add/$',
view=login_required(OrganizationUserCreate.as_view()),
name="organization_user_add"),
url(r'^(?P<organization_pk>[\d]+)/people/(?P<user_pk>[\d]+)/remind/$',
view=login_required(OrganizationUserRemind.as_view()),
name="organization_user_remind"),
url(r'^(?P<organization_pk>[\d]+)/people/(?P<user_pk>[\d]+)/$',
view=login_required(OrganizationUserDetail.as_view()),
name="organization_user_detail"),
url(r'^(?P<organization_pk>[\d]+)/people/(?P<user_pk>[\d]+)/edit/$',
view=login_required(OrganizationUserUpdate.as_view()),
name="organization_user_edit"),
url(r'^(?P<organization_pk>[\d]+)/people/(?P<user_pk>[\d]+)/delete/$',
view=login_required(OrganizationUserDelete.as_view()),
name="organization_user_delete"),
)
| from django.conf.urls.defaults import patterns, url
from django.contrib.auth.decorators import login_required
from organizations.views import (OrganizationList, OrganizationDetail,
OrganizationUpdate, OrganizationDelete, OrganizationCreate,
OrganizationUserList, OrganizationUserDetail, OrganizationUserUpdate,
OrganizationUserCreate, OrganizationUserRemind, OrganizationUserDelete)
urlpatterns = patterns('',
# Organization URLs
url(r'^$', view=login_required(OrganizationList.as_view()),
name="organization_list"),
url(r'^add/$', view=login_required(OrganizationCreate.as_view()),
name="organization_add"),
url(r'^(?P<organization_pk>[\d]+)/$',
view=login_required(OrganizationDetail.as_view()),
name="organization_detail"),
url(r'^(?P<organization_pk>[\d]+)/edit/$',
view=login_required(OrganizationUpdate.as_view()),
name="organization_edit"),
url(r'^(?P<organization_pk>[\d]+)/delete/$',
view=login_required(OrganizationDelete.as_view()),
name="organization_delete"),
# Organization user URLs
url(r'^(?P<organization_pk>[\d]+)/people/$',
view=login_required(OrganizationUserList.as_view()),
name="organization_user_list"),
url(r'^(?P<organization_pk>[\d]+)/people/add/$',
view=login_required(OrganizationUserCreate.as_view()),
name="organization_user_add"),
url(r'^(?P<organization_pk>[\d]+)/people/(?P<user_pk>[\d]+)/remind/$',
view=login_required(OrganizationUserRemind.as_view()),
name="organization_user_remind"),
url(r'^(?P<organization_pk>[\d]+)/people/(?P<user_pk>[\d]+)/$',
view=login_required(OrganizationUserDetail.as_view()),
name="organization_user_detail"),
url(r'^(?P<organization_pk>[\d]+)/people/(?P<user_pk>[\d]+)/edit/$',
view=login_required(OrganizationUserUpdate.as_view()),
name="organization_user_edit"),
url(r'^(?P<organization_pk>[\d]+)/people/(?P<user_pk>[\d]+)/delete/$',
view=login_required(OrganizationUserDelete.as_view()),
name="organization_user_delete"),
)
| bsd-2-clause | Python |
641f831ba97a247bc58596f8697e253c2ca98f9d | use java helper for templates (#53) | googleapis/java-logging-logback,googleapis/java-logging-logback,googleapis/java-logging-logback | synth.py | synth.py | # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool.languages.java as java
java.common_templates(excludes=[
'README.md',
])
| # Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This script is used to synthesize generated parts of this library."""
import synthtool as s
import synthtool.gcp as gcp
templates = gcp.CommonTemplates().java_library()
s.copy(templates, excludes=[
'README.md',
]) | apache-2.0 | Python |
588a5ca768816a7052588c6a393288ef7b754434 | add tests task | hhatto/python-hoedown,hoedown/python-hoedown,hoedown/python-hoedown,hhatto/python-hoedown,hhatto/python-hoedown,hoedown/python-hoedown,hhatto/python-hoedown,hoedown/python-hoedown | tasks.py | tasks.py | from invoke import run, task
@task
def clean():
run("python setup.py clean")
@task
def compile_cython():
run("python setup.py compile_cython")
@task
def update_submodule():
run("git submodule init")
run("git submodule sync")
run("git submodule update")
@task
def update():
update_submodule()
run("python setup.py update_vendor")
@task
def install():
run("pip install --upgrade .")
@task
def tests():
run("python tests/hoedown_test.py")
@task
def all():
clean()
update()
compile_cython()
install()
| from invoke import run, task
@task
def clean():
run("python setup.py clean")
@task
def compile_cython():
run("python setup.py compile_cython")
@task
def update_submodule():
run("git submodule init")
run("git submodule sync")
run("git submodule update")
@task
def update():
update_submodule()
run("python setup.py update_vendor")
@task
def install():
run("pip install --upgrade .")
@task
def all():
clean()
update()
compile_cython()
install()
| mit | Python |
82b96a8acb9a43d90bfcfe9fd4d8b0dcf1fd06f5 | Make modules uninstallable | ingadhoc/sale,ingadhoc/sale,ingadhoc/sale,ingadhoc/sale | sale_require_ref/__manifest__.py | sale_require_ref/__manifest__.py | ##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Sale Order Require Reference on Confirmation',
'version': "13.0.1.0.0",
'category': 'Projects & Services',
'sequence': 14,
'summary': '',
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'images': [
],
'depends': [
'sale_exception',
],
'data': [
'data/exception_rule_data.xml',
],
'demo': [
'demo/exception_rule_demo.xml',
'demo/sale_order_demo.xml',
],
'installable': False,
'auto_install': False,
'application': False,
}
| ##############################################################################
#
# Copyright (C) 2015 ADHOC SA (http://www.adhoc.com.ar)
# All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Sale Order Require Reference on Confirmation',
'version': "13.0.1.0.0",
'category': 'Projects & Services',
'sequence': 14,
'summary': '',
'author': 'ADHOC SA',
'website': 'www.adhoc.com.ar',
'license': 'AGPL-3',
'images': [
],
'depends': [
'sale_exception',
],
'data': [
'data/exception_rule_data.xml',
],
'demo': [
'demo/exception_rule_demo.xml',
'demo/sale_order_demo.xml',
],
'installable': True,
'auto_install': False,
'application': False,
}
| agpl-3.0 | Python |
d8794be78567ffc1265e487de8bcfa9933e5ba5d | Include output | BruceEckel/betools | tests/test_cmdline.py | tests/test_cmdline.py | # Test the local one, not the installed one:
import sys
sys.path.insert(0, "..")
# CmdLine appears to be global for all tests,
# so you can't use the same letter flag throughout
# the test suite (is this a py.test bug?)
from betools import CmdLine
@CmdLine("1", "option1")
def test_1():
"""
Description of option1
"""
print("option1")
@CmdLine("2", "option2")
def test_2():
"""
Description of option2
"""
print("option2")
@CmdLine("3", "option3")
def test_3():
"""
Description of option3
"""
print("option3")
@CmdLine("4")
def option4():
"""
Option 4 uses the function name as the word tag.
"""
print("option4")
@CmdLine("5")
def option5():
"""
Option 5 uses the function name as the word tag.
"""
print("option5")
@CmdLine("6")
def option6():
"""
Option 6 uses the function name as the word tag.
"""
print("option6")
def test_main():
CmdLine.run()
if __name__ == '__main__':
test_main()
output = """
usage: test_cmdline.py [-h] [-1] [-2] [-3] [-4] [-5] [-6]
optional arguments:
-h, --help show this help message and exit
-1, --option1 Description of option1
-2, --option2 Description of option2
-3, --option3 Description of option3
-4, --option4 Option 4 uses the function name as the word tag.
-5, --option5 Option 5 uses the function name as the word tag.
-6, --option6 Option 6 uses the function name as the word tag.
"""
| # Test the local one, not the installed one:
import sys
sys.path.insert(0, "..")
from betools import CmdLine
# CmdLine appears to be global for all tests,
# so you can't use the same letter flag throughout
# the test suite (is this a py.test bug?)
@CmdLine("1", "option1")
def test_1():
"""
Description of option1
"""
print("option1")
@CmdLine("2", "option2")
def test_2():
"""
Description of option2
"""
print("option2")
@CmdLine("3", "option3")
def test_3():
"""
Description of option3
"""
print("option3")
@CmdLine("4")
def option4():
"""
Use the function name as the word tag.
"""
print("option4")
@CmdLine("5")
def option5():
"""
Use the function name as the word tag.
"""
print("option5")
@CmdLine("6")
def option6():
"""
Use the function name as the word tag.
"""
print("option6")
def test_main():
CmdLine.run()
if __name__ == '__main__':
test_main()
| mit | Python |
d46197d033958ffda7d434aaf9b95148c96138d7 | Kill SETTABLE_ENV_VARS allow list. (#11743) | pantsbuild/pants,benjyw/pants,pantsbuild/pants,benjyw/pants,pantsbuild/pants,benjyw/pants,pantsbuild/pants,benjyw/pants,benjyw/pants,pantsbuild/pants,pantsbuild/pants,benjyw/pants,pantsbuild/pants,benjyw/pants | src/python/pants/core/util_rules/subprocess_environment.py | src/python/pants/core/util_rules/subprocess_environment.py | # Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from dataclasses import dataclass
from typing import Tuple
from pants.engine.environment import Environment, EnvironmentRequest
from pants.engine.rules import Get, collect_rules, rule
from pants.option.subsystem import Subsystem
from pants.util.frozendict import FrozenDict
# TODO: We may want to support different sets of env vars for different types of process.
# Can be done via scoped subsystems, possibly. However we should only do this if there
# is a real need.
class SubprocessEnvironment(Subsystem):
options_scope = "subprocess-environment"
help = "Environment settings for forked subprocesses."
@classmethod
def register_options(cls, register):
super().register_options(register)
register(
"--env-vars",
type=list,
member_type=str,
default=["LANG", "LC_CTYPE", "LC_ALL"],
advanced=True,
help=(
"Environment variables to set for process invocations. "
"Entries are either strings in the form `ENV_VAR=value` to set an explicit value; "
"or just `ENV_VAR` to copy the value from Pants's own environment."
),
)
@property
def env_vars_to_pass_to_subprocesses(self) -> Tuple[str, ...]:
return tuple(sorted(set(self.options.env_vars)))
@dataclass(frozen=True)
class SubprocessEnvironmentVars:
vars: FrozenDict[str, str]
@rule
async def get_subprocess_environment(
subproc_env: SubprocessEnvironment,
) -> SubprocessEnvironmentVars:
return SubprocessEnvironmentVars(
await Get(Environment, EnvironmentRequest(subproc_env.env_vars_to_pass_to_subprocesses))
)
def rules():
return collect_rules()
| # Copyright 2019 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from dataclasses import dataclass
from typing import Tuple
from pants.engine.environment import Environment, EnvironmentRequest
from pants.engine.rules import Get, collect_rules, rule
from pants.option.subsystem import Subsystem
from pants.util.frozendict import FrozenDict
# Names of env vars that can be set on all subprocesses via config.
SETTABLE_ENV_VARS = (
# Customarily used to control i18n settings.
"LANG",
"LC_CTYPE",
"LC_ALL",
# Customarily used to control proxy settings in various processes.
"http_proxy",
"https_proxy",
"ftp_proxy",
"all_proxy",
"no_proxy",
"HTTP_PROXY",
"HTTPS_PROXY",
"FTP_PROXY",
"ALL_PROXY",
"NO_PROXY",
# Allow Requests to verify SSL certificates for HTTPS requests
# https://requests.readthedocs.io/en/master/user/advanced/#ssl-cert-verification
"REQUESTS_CA_BUNDLE",
)
# TODO: We may want to support different sets of env vars for different types of process.
# Can be done via scoped subsystems, possibly. However we should only do this if there
# is a real need.
class SubprocessEnvironment(Subsystem):
options_scope = "subprocess-environment"
help = "Environment settings for forked subprocesses."
@classmethod
def register_options(cls, register):
super().register_options(register)
register(
"--env-vars",
type=list,
member_type=str,
default=["LANG", "LC_CTYPE", "LC_ALL"],
advanced=True,
help=(
"Environment variables to set for process invocations. "
"Entries are either strings in the form `ENV_VAR=value` to set an explicit value; "
"or just `ENV_VAR` to copy the value from Pants's own environment.\n\nEach ENV_VAR "
f"must be one of {', '.join(f'`{v}`' for v in SETTABLE_ENV_VARS)}."
),
)
@property
def env_vars_to_pass_to_subprocesses(self) -> Tuple[str, ...]:
return tuple(sorted(set(self.options.env_vars)))
@dataclass(frozen=True)
class SubprocessEnvironmentVars:
vars: FrozenDict[str, str]
@rule
async def get_subprocess_environment(
subproc_env: SubprocessEnvironment,
) -> SubprocessEnvironmentVars:
return SubprocessEnvironmentVars(
await Get(
Environment,
EnvironmentRequest(
subproc_env.env_vars_to_pass_to_subprocesses, allowed=SETTABLE_ENV_VARS
),
)
)
def rules():
return collect_rules()
| apache-2.0 | Python |
36f2b9f52524a65d5327b2dd414e7ad874537ff1 | Add tests | tadashi-aikawa/owlmixin | tests/test_OwlObjectEnum.py | tests/test_OwlObjectEnum.py | # coding: utf-8
from __future__ import division, absolute_import, unicode_literals
from owlmixin import OwlMixin
from owlmixin.owlenum import OwlObjectEnum
class Sample(OwlMixin):
def __init__(self, color):
self.color = Color.from_symbol(color)
class Color(OwlObjectEnum):
RED = (
"red",
{"japanese": "赤", "coloring": lambda m: "Red: " + m}
)
GREEN = (
"green",
{"japanese": "緑", "coloring": lambda m: "Green: " + m}
)
BLUE = (
"blue",
{"japanese": "青", "coloring": lambda m: "Blue: " + m}
)
@property
def japanese(self):
return self.object["japanese"]
def coloring(self, message):
return self.object["coloring"](message)
class TestFromSymbol:
def test_normal(self):
assert Color.from_symbol("blue") is Color.BLUE
class TestProperty:
def test_normal(self):
assert Color.BLUE.japanese == "青"
class TestFunction:
def test_normal(self):
assert Color.BLUE.coloring("sky") == "Blue: sky"
class TestOwlMixin:
def test_to_dict(self):
assert Sample.from_dict({"color": "blue"}).to_dict() == {
"color": Color.BLUE
}
def test_to_json(self):
assert Sample.from_dict({"color": "blue"}).to_json() == '{"color": "blue"}'
| # coding: utf-8
from __future__ import division, absolute_import, unicode_literals
from owlmixin.owlenum import OwlObjectEnum
class Color(OwlObjectEnum):
RED = (
"red",
{"japanese": "赤", "coloring": lambda m: "Red: " + m}
)
GREEN = (
"green",
{"japanese": "緑", "coloring": lambda m: "Green: " + m}
)
BLUE = (
"blue",
{"japanese": "青", "coloring": lambda m: "Blue: " + m}
)
@property
def japanese(self):
return self.object["japanese"]
def coloring(self, message):
return self.object["coloring"](message)
class TestFromSymbol:
def test_normal(self):
assert Color.from_symbol("blue") is Color.BLUE
class TestProperty:
def test_normal(self):
assert Color.BLUE.japanese == "青"
class TestFunction:
def test_normal(self):
assert Color.BLUE.coloring("sky") == "Blue: sky"
| mit | Python |
3b83b8715e03b9096f9ae5611019fec4e52ca937 | Add an initial test each for resolve and mk | gratipay/filesystem_tree.py,gratipay/filesystem_tree.py | tests.py | tests.py | import os
from os.path import isdir
import pytest
from filesystem_tree import FilesystemTree
@pytest.yield_fixture
def fs():
fs = FilesystemTree()
yield fs
fs.remove()
def test_it_can_be_instantiated():
assert FilesystemTree().__class__.__name__ == 'FilesystemTree'
def test_args_go_to_mk_not_root():
fs = FilesystemTree('foo', 'bar')
assert fs.root != 'foo'
def test_it_makes_a_directory(fs):
assert isdir(fs.root)
def test_resolve_resolves(fs):
path = fs.resolve('some/dir')
assert path == os.path.realpath(os.sep.join([fs.root, 'some', 'dir']))
def test_mk_makes_a_dir(fs):
fs.mk('some/dir')
assert isdir(fs.resolve('some/dir'))
def test_remove_removes(fs):
assert isdir(fs.root)
fs.remove()
assert not isdir(fs.root)
| from os.path import isdir
import pytest
from filesystem_tree import FilesystemTree
@pytest.yield_fixture
def fs():
fs = FilesystemTree()
yield fs
fs.remove()
def test_it_can_be_instantiated():
assert FilesystemTree().__class__.__name__ == 'FilesystemTree'
def test_args_go_to_mk_not_root():
fs = FilesystemTree('foo', 'bar')
assert fs.root != 'foo'
def test_it_makes_a_directory(fs):
assert isdir(fs.root)
def test_remove_removes(fs):
assert isdir(fs.root)
fs.remove()
assert not isdir(fs.root)
| mit | Python |
f8034181c9dd9b880ee0805fe2c6566260e060e1 | test flask_admin connection | fayazkhan/secret-diary | tests/test_web_interface.py | tests/test_web_interface.py | from unittest.mock import patch, sentinel
from diary.web import application_factory
@patch('diary.web.Flask', return_value=sentinel.app)
def test_webapp_initialization(Flask):
app = application_factory()
Flask.assert_called_once_with('diary.web')
assert app == sentinel.app
@patch('diary.web.Admin')
def test_admin_initialzed(Admin):
app = application_factory()
Admin.assert_called_once_with(app)
| from unittest.mock import patch, sentinel
from diary.web import application_factory
@patch('diary.web.Flask', return_value=sentinel.app)
def test_webapp_initialization(Flask):
app = application_factory()
Flask.assert_called_once_with('diary.web')
assert app == sentinel.app
| agpl-3.0 | Python |
2c667a8a28ed67325d7335246e10dab0d01ffa2c | increase number of tests | inuitwallet/ALP-Server | tests/urls_functionality.py | tests/urls_functionality.py | import json
import logging
import os
from webtest import TestApp
import sys
sys.path.append('../')
os.remove('pool.db')
import pool_server
__author__ = 'sammoth'
log = logging.Logger('ALP_Test')
stream = logging.StreamHandler()
formatter = logging.Formatter(fmt='%(message)s')
stream.setFormatter(formatter)
log.addHandler(stream)
app = TestApp(pool_server.app)
headers = {'Content-Type': 'application/json'}
log.debug('test root url')
resp = app.get('/')
assert resp.json == {'success': True, 'message': 'ALP Server is operational'}
log.debug('test register without correct headers')
resp = app.post('/register')
assert resp.json == {'success': False, 'message': 'Content-Type header must be '
'set to \'application/json\''}
log.debug('test register with no data')
resp = app.post('/register', headers=headers)
assert resp.json == {'success': False, 'message': 'no json found in request'}
log.debug('test register with blank data')
resp = app.post('/register', headers=headers, params={})
log.debug('set test data')
test_data = {'user': 'TEST_USER_1', 'address': 'BMJ2PJ1TNMwnTYUopQVxBrAPmmJjJjhd96',
'exchange': 'test_exchange', 'unit': 'btc'}
log.debug('test register with no user in data')
data = test_data.copy()
del data['user']
resp = app.post('/register', headers=headers, params=json.dumps(data))
assert resp.json == {'success': False, 'message': 'no user provided'}
log.debug('test register with no address in data')
data = test_data.copy()
del data['address']
resp = app.post('/register', headers=headers, params=json.dumps(data))
assert resp.json == {'success': False, 'message': 'no address provided'}
log.debug('test register with no exchange in data')
data = test_data.copy()
del data['exchange']
resp = app.post('/register', headers=headers, params=json.dumps(data))
assert resp.json == {'success': False, 'message': 'no exchange provided'}
log.debug('test register with no unit in data')
data = test_data.copy()
del data['unit']
resp = app.post('/register', headers=headers, params=json.dumps(data))
assert resp.json == {'success': False, 'message': 'no unit provided'}
log.debug('test register with invalid address in data (no B at start)')
data = test_data.copy()
data['address'] = 'JMJ2PJ1TNMwnTYUopQVxBrAPmmJjJjhd96'
resp = app.post('/register', headers=headers, params=json.dumps(data))
assert resp.json == {'success': False, 'message': 'JMJ2PJ1TNMwnTYUopQVxBrAPmmJjJjhd96 '
'is not a valid NBT address. It '
'should start with a \'B\''}
log.debug('test register with invalid address in data (invalid checksum)')
data = test_data.copy()
data['address'] = 'BMJ2PJ1TNMwnTYUopQVxBraPmmJjJjhd95'
resp = app.post('/register', headers=headers, params=json.dumps(data))
assert resp.json == {'success': False, 'message': 'BMJ2PJ1TNMwnTYUopQVxBraPmmJjJjhd95 '
'is not a valid NBT address. The '
'checksum doesn\'t match'}
log.debug('test register with unsupported exchange')
data = test_data.copy()
data['exchange'] = 'bad_exchange'
resp = app.post('/register', headers=headers, params=json.dumps(data))
assert resp.json == {'success': False, 'message': 'bad_exchange is not supported'}
log.debug('test register with unsupported unit')
data = test_data.copy()
data['unit'] = 'bad_unit'
resp = app.post('/register', headers=headers, params=json.dumps(data))
assert resp.json == {'success': False, 'message': 'bad_unit is not supported on '
'test_exchange'}
| import logging
from webtest import TestApp
import sys
sys.path.append('../')
import pool_server
__author__ = 'sammoth'
log = logging.Logger('ALP_Test')
stream = logging.StreamHandler()
formatter = logging.Formatter(fmt='%(message)s')
stream.setFormatter(formatter)
log.addHandler(stream)
app = TestApp(pool_server.app)
headers = {'Content-Type': 'application/json'}
log.debug('test root url')
resp = app.get('/')
assert resp.json == {'success': True, 'message': 'ALP Server is operational'}
log.debug('test register without correct headers')
resp = app.post('/register')
assert resp.json == {'success': False, 'message': 'Content-Type header must be '
'set to \'application/json\''}
log.debug('test register with no data')
resp = app.post('/register', headers=headers)
assert resp.json == {'success': False, 'message': 'no json found in request'}
log.debug('test register with blank data')
resp = app.post('/register', headers=headers, params={})
log.debug('set test data')
test_data = {'user': 'TEST_USER_1', 'address': 'BMJ2PJ1TNMwnTYUopQVxBrAPmmJjJjhd96',
'exchange': 'test_exchange', 'unit': 'btc'}
log.debug('test register with no user in data')
data = test_data.copy()
del data['user']
#print data
#resp = app.post('/register', headers=headers, params=data)
#assert resp.json == {'success': False, 'message': 'no user provided'}
| mit | Python |
243cae812c1b5c41de8064e6fc46d0e018da2586 | Upgrade database version 20 should succeed even for environment not using the versioncontrol subsystem. Closes #5015. | pkdevbox/trac,pkdevbox/trac,pkdevbox/trac,pkdevbox/trac | trac/upgrades/db20.py | trac/upgrades/db20.py | from trac.db import Table, Column, Index, DatabaseManager
from trac.core import TracError
from trac.versioncontrol.cache import CACHE_YOUNGEST_REV
def do_upgrade(env, ver, cursor):
"""Modify the repository cache scheme (if needed)
Now we use the 'youngest_rev' entry in the system table
to explicitly store the youngest rev in the cache.
"""
db = env.get_db_cnx()
try:
repos = env.get_repository(None, sync=False)
youngest = repos.get_youngest_rev_in_cache(db) or ''
# deleting first, for the 0.11dev and 0.10.4dev users
cursor.execute("DELETE FROM system WHERE name=%s",
(CACHE_YOUNGEST_REV,))
cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)",
(CACHE_YOUNGEST_REV, youngest))
except TracError: # no repository available
pass
| from trac.db import Table, Column, Index, DatabaseManager
from trac.versioncontrol.cache import CACHE_YOUNGEST_REV
def do_upgrade(env, ver, cursor):
"""Modify the repository cache scheme.
Now we use the 'youngest_rev' entry in the system table
to explicitly store the youngest rev in the cache.
"""
db = env.get_db_cnx()
repos = env.get_repository(None, sync=False)
youngest = repos.get_youngest_rev_in_cache(db) or ''
# deleting first, for the 0.11dev and 0.10.4dev users
cursor.execute("DELETE FROM system WHERE name=%s", (CACHE_YOUNGEST_REV,))
cursor.execute("INSERT INTO system (name, value) VALUES (%s, %s)",
(CACHE_YOUNGEST_REV, youngest))
| bsd-3-clause | Python |
8e9e9a46c92198cb42f7e498da7dd94f3d23e7b8 | fix config file name | juergh/dwarf,juergh/dwarf,dtroyer/dwarf,dtroyer/dwarf | dwarf/common/config.py | dwarf/common/config.py | #!/usr/bin/python
from __future__ import print_function
import logging
import os
import yaml
LOG = logging.getLogger(__name__)
class Config(object):
def __init__(self):
# Get the config data from file
cfile = '/etc/dwarf.conf'
if not os.path.exists(cfile):
cfile = os.path.join(os.path.dirname(__file__), '../../etc',
'dwarf.conf')
with open(cfile, 'r') as fh:
cfg = yaml.load(fh)
# Handle empty config files
if cfg is None:
cfg = {}
# Add base environment information
cfg['dwarf_dir'] = '/var/lib/dwarf'
cfg['run_dir'] = os.path.join(cfg['dwarf_dir'], 'run')
cfg['instances_dir'] = os.path.join(cfg['dwarf_dir'], 'instances')
cfg['instances_base_dir'] = os.path.join(cfg['instances_dir'], '_base')
cfg['images_dir'] = os.path.join(cfg['dwarf_dir'], 'images')
cfg['dwarf_db'] = os.path.join(cfg['dwarf_dir'], 'dwarf.db')
cfg['dwarf_log'] = os.path.join(cfg['dwarf_dir'], 'dwarf.log')
# Add the config data as attributes to our object
for (key, val) in cfg.iteritems():
setattr(self, key, val)
# Store for later use
self._cfg = cfg
def dump_options(self):
"""
Dump the options to the logfile
"""
for key in sorted(self._cfg.iterkeys()):
LOG.info('%s: %s', key, self._cfg[key])
CONFIG = Config()
| #!/usr/bin/python
from __future__ import print_function
import logging
import os
import yaml
LOG = logging.getLogger(__name__)
class Config(object):
def __init__(self):
# Get the config data from file
cfile = '/etc/default/dwarf'
if not os.path.exists(cfile):
cfile = os.path.join(os.path.dirname(__file__), '../../etc',
'dwarf.conf')
with open(cfile, 'r') as fh:
cfg = yaml.load(fh)
# Handle empty config files
if cfg is None:
cfg = {}
# Add base environment information
cfg['dwarf_dir'] = '/var/lib/dwarf'
cfg['run_dir'] = os.path.join(cfg['dwarf_dir'], 'run')
cfg['instances_dir'] = os.path.join(cfg['dwarf_dir'], 'instances')
cfg['instances_base_dir'] = os.path.join(cfg['instances_dir'], '_base')
cfg['images_dir'] = os.path.join(cfg['dwarf_dir'], 'images')
cfg['dwarf_db'] = os.path.join(cfg['dwarf_dir'], 'dwarf.db')
cfg['dwarf_log'] = os.path.join(cfg['dwarf_dir'], 'dwarf.log')
# Add the config data as attributes to our object
for (key, val) in cfg.iteritems():
setattr(self, key, val)
# Store for later use
self._cfg = cfg
def dump_options(self):
"""
Dump the options to the logfile
"""
for key in sorted(self._cfg.iterkeys()):
LOG.info('%s: %s', key, self._cfg[key])
CONFIG = Config()
| apache-2.0 | Python |
434b9beb73ecdd257c692ade18e083ba1294b9bd | change content | dresl/django_choice_and_question,dresl/django_choice_and_question | polls/views.py | polls/views.py | from django.shortcuts import render
from django.http import HttpResponse
from polls.models import Question
def index(request):
latest_question_list = Question.objects.order_by('-pub_date')[:5]
output = ', '.join([p.question_text for p in latest_question_list])
return HttpResponse(output)
def detail(request, question_id):
return HttpResponse("You're looking at question %s." % question_id)
def results(request, question_id):
response = "You're looking at the results of question %s."
return HttpResponse(response % question_id)
def vote(request, question_id):
return HttpResponse("You're voting on question %s." % question_id)
#def html(request, question_id):
# return HttpResponse("HTML page of %s." % question_id) | from django.shortcuts import render
from django.http import HttpResponse
def index(request):
return HttpResponse("Hello, world. You're at the polls index.")
def detail(request, question_id):
return HttpResponse("You're looking at question %s." % question_id)
def results(request, question_id):
response = "You're looking at the results of question %s."
return HttpResponse(response % question_id)
def vote(request, question_id):
return HttpResponse("You're voting on question %s." % question_id)
#def html(request, question_id):
# return HttpResponse("HTML page of %s." % question_id) | apache-2.0 | Python |
fbe1d8063774dc35765596c88d5cc7b197a8ec6d | fix for our browseable api renderer | izzyalonso/tndata_backend,tndatacommons/tndata_backend,tndatacommons/tndata_backend,izzyalonso/tndata_backend,tndatacommons/tndata_backend,izzyalonso/tndata_backend,tndatacommons/tndata_backend,izzyalonso/tndata_backend | tndata_backend/utils/api.py | tndata_backend/utils/api.py | from rest_framework import exceptions
from rest_framework.renderers import BrowsableAPIRenderer
from rest_framework.throttling import BaseThrottle
from rest_framework.versioning import QueryParameterVersioning
class DefaultQueryParamVersioning(QueryParameterVersioning):
"""This class includes the default version into requests that do not
specify a version.
NOTE: this should be fixed in DRF 3.3.3 whenever it's released:
https://github.com/tomchristie/django-rest-framework/pull/3833
"""
def determine_version(self, request, *args, **kwargs):
version = request.query_params.get(
self.version_param,
self.default_version,
)
if not self.is_allowed_version(version):
raise exceptions.NotFound(self.invalid_version_message)
return version
class BrowsableAPIRendererWithoutForms(BrowsableAPIRenderer):
# NOTE: the template for the browesable api is:
# template = 'rest_framework/api.html'
def get_context(self, *args, **kwargs):
ctx = super().get_context(*args, **kwargs)
ctx['display_edit_forms'] = False
return ctx
def show_form_for_method(self, view, method, request, obj):
return False
def get_rendered_html_form(self, data, view, method, request):
"""
Return a string representing a rendered HTML form, possibly bound to
either the input or output data.
In the absence of the View having an associated form then return None.
See:
https://github.com/tomchristie/django-rest-framework/blob/3.4.4/rest_framework/renderers.py#L438
"""
return ""
class NoThrottle(BaseThrottle):
"""A throttling class to use for testing DRF api endpoints."""
def allow_request(self, request, view):
return True
| from rest_framework import exceptions
from rest_framework.renderers import BrowsableAPIRenderer
from rest_framework.throttling import BaseThrottle
from rest_framework.versioning import QueryParameterVersioning
class DefaultQueryParamVersioning(QueryParameterVersioning):
"""This class includes the default version into requests that do not
specify a version.
NOTE: this should be fixed in DRF 3.3.3 whenever it's released:
https://github.com/tomchristie/django-rest-framework/pull/3833
"""
def determine_version(self, request, *args, **kwargs):
version = request.query_params.get(
self.version_param,
self.default_version,
)
if not self.is_allowed_version(version):
raise exceptions.NotFound(self.invalid_version_message)
return version
class BrowsableAPIRendererWithoutForms(BrowsableAPIRenderer):
# NOTE: the template for the browesable api is:
# template = 'rest_framework/api.html'
def get_context(self, *args, **kwargs):
ctx = super().get_context(*args, **kwargs)
ctx['display_edit_forms'] = False
return ctx
class NoThrottle(BaseThrottle):
"""A throttling class to use for testing DRF api endpoints."""
def allow_request(self, request, view):
return True
| mit | Python |
7d0fc8cf7043bfa9e7202932fccff9b629e8148a | Return by default error status code 500 | CulturePlex/pybossa,geotagx/geotagx-pybossa-archive,OpenNewsLabs/pybossa,inteligencia-coletiva-lsd/pybossa,geotagx/geotagx-pybossa-archive,OpenNewsLabs/pybossa,jean/pybossa,geotagx/pybossa,geotagx/geotagx-pybossa-archive,geotagx/pybossa,Scifabric/pybossa,proyectos-analizo-info/pybossa-analizo-info,jean/pybossa,proyectos-analizo-info/pybossa-analizo-info,stefanhahmann/pybossa,harihpr/tweetclickers,PyBossa/pybossa,CulturePlex/pybossa,stefanhahmann/pybossa,CulturePlex/pybossa,inteligencia-coletiva-lsd/pybossa,proyectos-analizo-info/pybossa-analizo-info,Scifabric/pybossa,geotagx/geotagx-pybossa-archive,harihpr/tweetclickers,PyBossa/pybossa,geotagx/geotagx-pybossa-archive | pybossa/error/__init__.py | pybossa/error/__init__.py | # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""
PyBossa error module for processing error status.
This package adds GET, POST, PUT and DELETE errors for the API:
* applications,
* tasks and
* task_runs
"""
import json
from flask import Response
class ErrorStatus(object):
"""
Class for formatting error status in JSON format.
This class has the following methods:
* format_exception: returns a Flask Response with the error.
"""
error_status = {"Forbidden": 403,
"NotFound": 404,
"Unauthorized": 401,
"TypeError": 415,
"ValueError": 415,
"DataError": 415,
"AttributeError": 415,
"IntegrityError": 415,
"TooManyRequests": 429}
def format_exception(self, e, target, action):
"""
Format the exception to a valid JSON object.
Returns a Flask Response with the error.
"""
exception_cls = e.__class__.__name__
if self.error_status.get(exception_cls):
status = self.error_status.get(exception_cls)
else:
status = 500
error = dict(action=action.upper(),
status="failed",
status_code=status,
target=target,
exception_cls=exception_cls,
exception_msg=e.message)
return Response(json.dumps(error), status=status,
mimetype='application/json')
| # -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2013 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""
PyBossa error module for processing error status.
This package adds GET, POST, PUT and DELETE errors for the API:
* applications,
* tasks and
* task_runs
"""
import json
from flask import Response
class ErrorStatus(object):
"""
Class for formatting error status in JSON format.
This class has the following methods:
* format_exception: returns a Flask Response with the error.
"""
error_status = {"Forbidden": 403,
"NotFound": 404,
"Unauthorized": 401,
"TypeError": 415,
"ValueError": 415,
"DataError": 415,
"AttributeError": 415,
"IntegrityError": 415,
"TooManyRequests": 429}
def format_exception(self, e, target, action):
"""
Format the exception to a valid JSON object.
Returns a Flask Response with the error.
"""
exception_cls = e.__class__.__name__
if self.error_status.get(exception_cls):
status = self.error_status.get(exception_cls)
else:
status = 200
error = dict(action=action.upper(),
status="failed",
status_code=status,
target=target,
exception_cls=exception_cls,
exception_msg=e.message)
return Response(json.dumps(error), status=status,
mimetype='application/json')
| agpl-3.0 | Python |
27d6832c6cb7aef33ff5946f018c79c243119ca9 | Fix typo in credits | usrlocalben/pydux | pydux/thunk_middleware.py | pydux/thunk_middleware.py | """
thunks for pydux
original from https://github.com/gaearon/redux-thunk
"""
def thunk_middleware(store):
dispatch, get_state = store['dispatch'], store['get_state']
def wrapper(next_):
def thunk_dispatch(action):
if hasattr(action, '__call__'):
return action(dispatch, get_state)
return next_(action)
return thunk_dispatch
return wrapper
| """
thunks for pydux
original from https://github.com/aearon/redux-thunk
"""
def thunk_middleware(store):
dispatch, get_state = store['dispatch'], store['get_state']
def wrapper(next_):
def thunk_dispatch(action):
if hasattr(action, '__call__'):
return action(dispatch, get_state)
return next_(action)
return thunk_dispatch
return wrapper
| mit | Python |
cff9d1f299bad28adcab1e4f279d045a28e319d2 | Update pylsy_test.py | gnithin/Pylsy,gnithin/Pylsy,muteness/Pylsy,muteness/Pylsy,huiyi1990/Pylsy,huiyi1990/Pylsy | pylsy/tests/pylsy_test.py | pylsy/tests/pylsy_test.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import unittest
from pylsy import pylsytable
TEST_DIR = os.path.dirname(__file__)
class PylsyTableTests(unittest.TestCase):
def setUp(self):
attributes = ["name", "age"]
self.table = pylsytable(attributes)
def tearDown(self):
self.table = None
def testCreateTable(self):
name = ["a"]
self.table.add_data("name", name)
self.table.append_data("name","b")
age = [1, 2]
self.table.add_data("age", age)
with open(os.path.join(TEST_DIR, "correct.out"), "r") as correct_file:
self.assertEqual(self.table.__str__(), correct_file.read())
if __name__ == '__main__':
unittest.main()
| # -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import unittest
from pylsy import pylsytable
TEST_DIR = os.path.dirname(__file__)
class PylsyTableTests(unittest.TestCase):
def setUp(self):
attributes = ["name", "age"]
self.table = pylsytable(attributes)
def tearDown(self):
self.table = None
def testCreateTable(self):
name = ["a"]
self.table.add_data("name", name)
self.table.append_date("name","b")
age = [1, 2]
self.table.add_data("age", age)
with open(os.path.join(TEST_DIR, "correct.out"), "r") as correct_file:
self.assertEqual(self.table.__str__(), correct_file.read())
if __name__ == '__main__':
unittest.main()
| mit | Python |
88633d0f7cbe8c41e17470ea2366acad6fbf91b5 | Add beeping | umbc-hackafe/sign-drivers,umbc-hackafe/sign-drivers,umbc-hackafe/sign-drivers | python/games/countdown.py | python/games/countdown.py | import graphics
import time
import datetime
import game
class Countdown(game.Game):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.timer = graphics.Rectangle(112, 7, x=0, y=8)
self.text = graphics.TextSprite("1:00:00.000 REMAIN", x=0, y=0, width=5, height=7)
self.end_time = datetime.datetime(2015, 4, 12, 10, 0, 0)
#self.end_time = datetime.datetime(2015, 4, 12, 9, 53, 0)
self.sprites.add(self.timer)
self.sprites.add(self.text)
self.framerate = 100
self.light = False
self.blink = False
def loop(self):
tdiff = self.end_time - datetime.datetime.now()
if (not self.light) and tdiff.seconds <= 60:
self.light = True
self.trigger("alert", "on")
if tdiff.seconds <= 0:
if tdiff.seconds % 2 and not self.blink:
self.sprites.remove(self.text)
self.trigger("beeper", "on")
self.blink = True
elif self.blink:
self.sprites.add(self.text)
self.blink = False
self.trigger("beeper", "on")
else:
h, rem = divmod(tdiff.seconds, 3600)
m, s = divmod(rem, 60)
mls = tdiff.microseconds // 1000
self.text.set_text("{:0>1}:{:0>2}:{:0>2}.{:0>3} REMAIN".format(
h, m, s, mls))
self.timer.width = max(0,int(112 * tdiff.seconds / 3600))
super().loop()
GAME = Countdown
| import graphics
import time
import datetime
import game
class Countdown(game.Game):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.timer = graphics.Rectangle(112, 7, x=0, y=8)
self.text = graphics.TextSprite("1:00:00.000 REMAIN", x=0, y=0, width=5, height=7)
self.end_time = datetime.datetime(2015, 4, 12, 10, 0, 0)
#self.end_time = datetime.datetime(2015, 4, 12, 9, 53, 0)
self.sprites.add(self.timer)
self.sprites.add(self.text)
self.framerate = 100
self.light = False
self.blink = False
def loop(self):
tdiff = self.end_time - datetime.datetime.now()
if not self.light and tdiff.seconds <= 900:
self.light = True
self.trigger("alert", "on")
if tdiff.seconds <= 0:
if tdiff.seconds % 2 and not self.blink:
self.sprites.remove(self.text)
self.sprites.add(self.timer)
self.blink = True
elif self.blink:
self.sprites.add(self.text)
self.sprites.remove(self.timer)
self.blink = False
else:
h, rem = divmod(tdiff.seconds, 3600)
m, s = divmod(rem, 60)
mls = tdiff.microseconds // 1000
self.text.set_text("{:0>1}:{:0>2}:{:0>2}.{:0>3} REMAIN".format(
h, m, s, mls))
self.timer.width = max(0,int(112 * tdiff.seconds / 3600))
super().loop()
GAME = Countdown
| mit | Python |
c54a1b05f3d443efb1bc1267b772262c087c6bfa | remove debug | firemark/quizFactory,firemark/quizFactory | quizfactory/views/game.py | quizfactory/views/game.py | from quizfactory import app
from quizfactory.models import Game
from flask import session, jsonify, request
games = {}
def get_game_from_session(quiz_id):
global games
return games[session[quiz_id]]
@app.fine_route()
def get_game(quiz_id):
try:
game = get_game_from_session(quiz_id)
except KeyError:
return jsonify(error="quiz not found"), 404
return jsonify(**game.to_json())
@app.fine_route()
def post_game(quiz_id):
global games
try:
game = get_game_from_session(quiz_id)
except KeyError:
game = Game(quiz_id)
id_game = id(game)
games[id_game] = game
session[quiz_id] = id_game
return jsonify(**game.to_json())
@app.fine_route()
def put_game(quiz_id):
try:
game = get_game_from_session(quiz_id)
except KeyError:
return jsonify(error="quiz not found"), 404
data = request.json
choice = data.get("choice")
pointer = data.get("pointer")
if pointer is not None:
game.change_pointer(pointer)
if choice is not None:
game.get_game_question().choice = choice
return jsonify(**game.to_json())
@app.fine_route()
def delete_game(quiz_id):
pass
| from quizfactory import app
from quizfactory.models import Game
from flask import session, jsonify, request
games = {}
def get_game_from_session(quiz_id):
global games
return games[session[quiz_id]]
@app.fine_route()
def get_game(quiz_id):
try:
game = get_game_from_session(quiz_id)
except KeyError:
return jsonify(error="quiz not found"), 404
return jsonify(**game.to_json())
@app.fine_route()
def post_game(quiz_id):
global games
try:
game = get_game_from_session(quiz_id)
except KeyError:
game = Game(quiz_id)
id_game = id(game)
games[id_game] = game
session[quiz_id] = id_game
return jsonify(**game.to_json())
@app.fine_route()
def put_game(quiz_id):
try:
game = get_game_from_session(quiz_id)
except KeyError:
return jsonify(error="quiz not found"), 404
data = request.json
print(data)
choice = data.get("choice")
pointer = data.get("pointer")
if pointer is not None:
game.change_pointer(pointer)
if choice is not None:
game.get_game_question().choice = choice
return jsonify(**game.to_json())
@app.fine_route()
def delete_game(quiz_id):
pass
| mit | Python |
afeba0c4c3ab49af092c7c85e026d5ee98ab5706 | work on progress | Phedorabot/phedorabot-python-sdk | phedorabot/webhook.py | phedorabot/webhook.py | #!/usr/bin/env python
#
# Copyright 2017 Phedorabot
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, with_statement
# from phedorabot.api import PhedorabotAPIClient
# from phedorabot import exceptions
import json
import types
import hmac
import hashlib
def flatten_payload(attrs, parent_key=None):
blobs = {}
if attrs is None or not type(attrs) in [types.DictType, types.ListType]:
return blobs
def value_type_check(value):
# Checks a type and returns valid
# string
if type(value) == types.StringType:
return str(value)
elif type(value) == types.UnicodeType:
value = value.encode('utf-8')
return str(value)
elif type(value) in [types.IntType, types.LongType]:
return str(value)
else:
return str(value) if type(value) == types.NoneType else ''
if type(attrs) == types.ListType:
for i,val in enumerate(attrs):
k = compute_valid_key(i, parent_key)
if type(val) in [types.DictType,types.ListType]:
children = flatten_payload(val, k)
for m1,n1 in children.iteritems():
blobs[m1] = value_type_check(n1)
else:
blobs[k] = value_type_check(val)
if type(attrs) == types.DictType:
for i,j in attrs.iteritems():
k = compute_valid_key(i, parent_key)
if type(j) in [types.DictType, types.ListType]:
children = flatten_payload(j, k)
for m1,n1 in children.iteritems():
blobs[m1] = value_type_check(n1)
return blobs
def compute_valid_key(current_key, parent_key=None):
if parent_key is not None:
return '{0}_{1}'.format(str(current_key), str(parent_key))
else:
return str(current_key)
class PhedorabotWebHookException(Exception):
# The exception error
def __init__(self, what, reason):
self.what = what or None
self.reason = reason or None
def get_what(self):
return self.what
def get_reason(self):
return self.reason
class PhedorabotWebHook(object):
def __init__(self):
# stuff goes here
self.target_key = 'api_key'
self.target_hmac_key = 'phedorabot_notification_digest'
self.response = {}
self.headers = {}
self.checksum = None
self.payload = {}
self.apiKey = None
self.apiSecret = None
self.error = None
self.errorDescription = None
self.result = None
self.body = None
self.rawHeaderDict = {}
if __name__ == '__main__':
maps = [{'name':'Christian','Age':'38'},{'name':'Vikky','age':'36'}]
print(flatten_payload(maps))
| #!/usr/bin/env python
#
# Copyright 2017 Phedorabot
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, with_statement
from phedorabot.api import PhedorabotAPIClient
from phedorabot import exceptions
import json
import types
import hmac
import hashlib
class PhedorabotWebHookException(Exception):
# The exception error
def __init__(self, what, reason):
self.what = what or None
self.reason = reason or None
def get_what(self):
return self.what
def get_reason(self):
return self.reason
class PhedorabotWebHook(object):
def __init__(self):
# stuff goes here
self.target_key = 'api_key'
self.target_hmac_key = 'phedorabot_notification_digest'
self.response = {}
self.headers = {}
self.checksum = None
self.payload = {}
self.apiKey = None
self.apiSecret = None
self.error = None
self.errorDescription = None
self.result = None
self.body = None
self.rawHeaderDict = {}
| apache-2.0 | Python |
2b7223196b59dc82649fb8a4218fb0772e39aef4 | add NullHandler in the case there is no top one specified | kratsg/optimization,kratsg/optimization,kratsg/optimization | utils.py | utils.py | import csv
import re
import logging
logger = logging.getLogger("optimize")
logger.addHandler(logging.NullHandler())
def load_mass_windows(filename):
with open(filename, 'r') as f:
return {l[0]: tuple(l[1:4]) for l in csv.reader(f, delimiter='\t')}
#@echo(write=logger.debug)
did_regex = re.compile('\.?(?:00)?(\d{6,8})\.?')
def get_did(filename):
global did_regex
m = did_regex.search(filename.split("/")[-1])
if m is None:
logger.warning('Can\'t figure out the DID! Using input filename: {0}'.format(filename))
return filename.split("/")[-1]
return m.group(1)
| import csv
import re
import logging
logger = logging.getLogger("optimize")
def load_mass_windows(filename):
with open(filename, 'r') as f:
return {l[0]: tuple(l[1:4]) for l in csv.reader(f, delimiter='\t')}
#@echo(write=logger.debug)
did_regex = re.compile('\.?(?:00)?(\d{6,8})\.?')
def get_did(filename):
global did_regex
m = did_regex.search(filename.split("/")[-1])
if m is None:
logger.warning('Can\'t figure out the DID! Using input filename: {0}'.format(filename))
return filename.split("/")[-1]
return m.group(1)
| mit | Python |
a7e757bc1aadcd09ea080b719bfd40edf13c05dd | Replace new-bootstrap with bootstrap | allevato/swift,gribozavr/swift,parkera/swift,jckarter/swift,roambotics/swift,aschwaighofer/swift,glessard/swift,rudkx/swift,harlanhaskins/swift,glessard/swift,gribozavr/swift,tkremenek/swift,apple/swift,parkera/swift,benlangmuir/swift,ahoppen/swift,rudkx/swift,xwu/swift,atrick/swift,nathawes/swift,ahoppen/swift,roambotics/swift,tkremenek/swift,xwu/swift,aschwaighofer/swift,stephentyrone/swift,benlangmuir/swift,atrick/swift,gregomni/swift,aschwaighofer/swift,nathawes/swift,CodaFi/swift,roambotics/swift,nathawes/swift,atrick/swift,jmgc/swift,aschwaighofer/swift,hooman/swift,jmgc/swift,gribozavr/swift,harlanhaskins/swift,atrick/swift,allevato/swift,hooman/swift,jmgc/swift,CodaFi/swift,ahoppen/swift,gregomni/swift,glessard/swift,rudkx/swift,airspeedswift/swift,stephentyrone/swift,jckarter/swift,apple/swift,parkera/swift,harlanhaskins/swift,glessard/swift,jmgc/swift,hooman/swift,harlanhaskins/swift,benlangmuir/swift,apple/swift,hooman/swift,jckarter/swift,allevato/swift,airspeedswift/swift,hooman/swift,aschwaighofer/swift,stephentyrone/swift,ahoppen/swift,gribozavr/swift,jckarter/swift,jckarter/swift,aschwaighofer/swift,allevato/swift,harlanhaskins/swift,ahoppen/swift,gribozavr/swift,parkera/swift,xwu/swift,JGiola/swift,aschwaighofer/swift,JGiola/swift,atrick/swift,atrick/swift,hooman/swift,roambotics/swift,JGiola/swift,ahoppen/swift,parkera/swift,rudkx/swift,nathawes/swift,glessard/swift,harlanhaskins/swift,tkremenek/swift,airspeedswift/swift,airspeedswift/swift,gregomni/swift,parkera/swift,hooman/swift,airspeedswift/swift,parkera/swift,nathawes/swift,jckarter/swift,xwu/swift,rudkx/swift,allevato/swift,CodaFi/swift,JGiola/swift,stephentyrone/swift,jckarter/swift,tkremenek/swift,roambotics/swift,tkremenek/swift,JGiola/swift,benlangmuir/swift,glessard/swift,xwu/swift,jmgc/swift,gribozavr/swift,airspeedswift/swift,nathawes/swift,CodaFi/swift,allevato/swift,gregomni/swift,CodaFi/swift,airspeed
swift/swift,jmgc/swift,apple/swift,JGiola/swift,jmgc/swift,gregomni/swift,xwu/swift,gregomni/swift,apple/swift,benlangmuir/swift,allevato/swift,parkera/swift,gribozavr/swift,roambotics/swift,rudkx/swift,xwu/swift,CodaFi/swift,gribozavr/swift,stephentyrone/swift,stephentyrone/swift,tkremenek/swift,benlangmuir/swift,nathawes/swift,harlanhaskins/swift,stephentyrone/swift,tkremenek/swift,apple/swift,CodaFi/swift | utils/swift_build_support/swift_build_support/products/swiftpm.py | utils/swift_build_support/swift_build_support/products/swiftpm.py | # swift_build_support/products/swiftpm.py -----------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2019 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
import os
from . import product
from .. import shell
class SwiftPM(product.Product):
@classmethod
def product_source_name(cls):
return "swiftpm"
@classmethod
def is_build_script_impl_product(cls):
return False
def should_build(self, host_target):
return True
def run_bootstrap_script(self, action, host_target, additional_params=[]):
script_path = os.path.join(
self.source_dir, 'Utilities', 'bootstrap')
toolchain_path = self.install_toolchain_path()
swiftc = os.path.join(toolchain_path, "usr", "bin", "swiftc")
# FIXME: We require llbuild build directory in order to build. Is
# there a better way to get this?
build_root = os.path.dirname(self.build_dir)
llbuild_build_dir = os.path.join(
build_root, '%s-%s' % ("llbuild", host_target))
helper_cmd = [script_path, action]
if self.is_release():
helper_cmd.append("--release")
helper_cmd += [
"--swiftc-path", swiftc,
"--clang-path", self.toolchain.cc,
"--cmake-path", self.toolchain.cmake,
"--ninja-path", self.toolchain.ninja,
"--build-dir", self.build_dir,
"--llbuild-build-dir", llbuild_build_dir
]
helper_cmd.extend(additional_params)
shell.call(helper_cmd)
def build(self, host_target):
self.run_bootstrap_script('build', host_target)
def should_test(self, host_target):
return self.args.test_swiftpm
def test(self, host_target):
self.run_bootstrap_script('test', host_target)
def should_install(self, host_target):
return self.args.install_swiftpm
def install(self, host_target):
install_prefix = self.args.install_destdir + self.args.install_prefix
self.run_bootstrap_script('install', host_target, [
'--prefix', install_prefix
])
| # swift_build_support/products/swiftpm.py -----------------------*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2019 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See https://swift.org/LICENSE.txt for license information
# See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
import os
from . import product
from .. import shell
class SwiftPM(product.Product):
@classmethod
def product_source_name(cls):
return "swiftpm"
@classmethod
def is_build_script_impl_product(cls):
return False
def should_build(self, host_target):
return True
def run_bootstrap_script(self, action, host_target, additional_params=[]):
script_path = os.path.join(
self.source_dir, 'Utilities', 'new-bootstrap')
toolchain_path = self.install_toolchain_path()
swiftc = os.path.join(toolchain_path, "usr", "bin", "swiftc")
# FIXME: We require llbuild build directory in order to build. Is
# there a better way to get this?
build_root = os.path.dirname(self.build_dir)
llbuild_build_dir = os.path.join(
build_root, '%s-%s' % ("llbuild", host_target))
helper_cmd = [script_path, action]
if self.is_release():
helper_cmd.append("--release")
helper_cmd += [
"--swiftc-path", swiftc,
"--clang-path", self.toolchain.cc,
"--cmake-path", self.toolchain.cmake,
"--ninja-path", self.toolchain.ninja,
"--build-dir", self.build_dir,
"--llbuild-build-dir", llbuild_build_dir
]
helper_cmd.extend(additional_params)
shell.call(helper_cmd)
def build(self, host_target):
self.run_bootstrap_script('build', host_target)
def should_test(self, host_target):
return self.args.test_swiftpm
def test(self, host_target):
self.run_bootstrap_script('test', host_target)
def should_install(self, host_target):
return self.args.install_swiftpm
def install(self, host_target):
install_prefix = self.args.install_destdir + self.args.install_prefix
self.run_bootstrap_script('install', host_target, [
'--prefix', install_prefix
])
| apache-2.0 | Python |
3fc05761dde38927924ec0bbee637d3fa917538b | add method _to_str(value) --> string | rlowrance/python_lib,rlowrance/python_lib | HpSpec.py | HpSpec.py | from abc import ABCMeta, abstractmethod
import unittest
class HpSpec(object):
'specification of a model name and its associated hyperparamters'
__metaclass__ = ABCMeta
@abstractmethod
def __str__(self):
'return parsable string representation'
pass
@staticmethod
@abstractmethod
def make_from_str(s):
'parse the representation returned by str(s) to create an instance'
pass
@abstractmethod
def iteritems(self):
'yield each (hyparameter name:str, hyperparameter value)'
pass
@abstractmethod
def __eq__(self, other):
pass
@abstractmethod
def __hash__(self):
pass
@abstractmethod
def __lt__(self, other):
pass
def _to_str(self, value):
def remove_trailing_zeroes(s):
return (
s if s[-1] != '0' else
remove_trailing_zeroes(s[:-1])
)
if value is None:
return ''
elif isinstance(value, float):
return remove_trailing_zeroes(('%f' % value).replace('.', '_'))
elif isinstance(value, int):
return '%d' % value
else:
return str(value)
class TestHpSpeC(unittest.TestCase):
def test_construction(self):
self.assertRaises(Exception, HpSpec, None)
if __name__ == '__main__':
unittest.main()
| from abc import ABCMeta, abstractmethod
import unittest
class HpSpec(object):
'specification of a model name and its associated hyperparamters'
__metaclass__ = ABCMeta
@abstractmethod
def __str__(self):
'return parsable string representation'
pass
@staticmethod
@abstractmethod
def make_from_str(s):
'parse the representation returned by str(s) to create an instance'
pass
@abstractmethod
def iteritems(self):
'yield each (hyparameter name:str, hyperparameter value)'
pass
@abstractmethod
def __eq__(self, other):
pass
@abstractmethod
def __hash__(self):
pass
@abstractmethod
def __lt__(self, other):
pass
class TestHpSpeC(unittest.TestCase):
def test_construction(self):
self.assertRaises(Exception, HpSpec, None)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | Python |
e14e6fbbd2bc5c262f6f7df0df5732edff8d7de6 | add new version (#22836) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/py-importlib-metadata/package.py | var/spack/repos/builtin/packages/py-importlib-metadata/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyImportlibMetadata(PythonPackage):
"""Read metadata from Python packages."""
homepage = "https://importlib-metadata.readthedocs.io/"
pypi = "importlib_metadata/importlib_metadata-1.2.0.tar.gz"
version('3.10.0', sha256='c9db46394197244adf2f0b08ec5bc3cf16757e9590b02af1fca085c16c0d600a')
version('2.0.0', sha256='77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da')
version('1.2.0', sha256='41e688146d000891f32b1669e8573c57e39e5060e7f5f647aa617cd9a9568278')
version('0.23', sha256='aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26')
version('0.19', sha256='23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8')
version('0.18', sha256='cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db')
depends_on('python@3.6:', type=('build', 'run'), when='@3:')
depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-setuptools-scm', type='build')
depends_on('py-setuptools-scm@3.4.1:+toml', type='build', when='@3:')
depends_on('py-zipp@0.5:', type=('build', 'run'))
depends_on('py-pathlib2', when='^python@:2', type=('build', 'run'))
depends_on('py-contextlib2', when='^python@:2', type=('build', 'run'))
depends_on('py-configparser@3.5:', when='^python@:2', type=('build', 'run'))
depends_on('py-typing-extensions@3.6.4:', type=('build', 'run'), when='@3: ^python@:3.7.999')
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyImportlibMetadata(PythonPackage):
"""Read metadata from Python packages."""
homepage = "https://importlib-metadata.readthedocs.io/"
pypi = "importlib_metadata/importlib_metadata-1.2.0.tar.gz"
version('2.0.0', sha256='77a540690e24b0305878c37ffd421785a6f7e53c8b5720d211b211de8d0e95da')
version('1.2.0', sha256='41e688146d000891f32b1669e8573c57e39e5060e7f5f647aa617cd9a9568278')
version('0.23', sha256='aa18d7378b00b40847790e7c27e11673d7fed219354109d0e7b9e5b25dc3ad26')
version('0.19', sha256='23d3d873e008a513952355379d93cbcab874c58f4f034ff657c7a87422fa64e8')
version('0.18', sha256='cb6ee23b46173539939964df59d3d72c3e0c1b5d54b84f1d8a7e912fe43612db')
depends_on('python@2.7:2.8,3.5:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('py-setuptools-scm', type='build')
depends_on('py-zipp@0.5:', type=('build', 'run'))
depends_on('py-pathlib2', when='^python@:2', type=('build', 'run'))
depends_on('py-contextlib2', when='^python@:2', type=('build', 'run'))
depends_on('py-configparser@3.5:', when='^python@:2', type=('build', 'run'))
| lgpl-2.1 | Python |
d02ed340c275ccd008645b280716aa0d33067022 | Add cleanup for measurements that use tracing | SummerLW/Perf-Insight-Report,benschmaus/catapult,benschmaus/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,sahiljain/catapult,SummerLW/Perf-Insight-Report,benschmaus/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,benschmaus/catapult,catapult-project/catapult-csm,sahiljain/catapult,catapult-project/catapult-csm,sahiljain/catapult,sahiljain/catapult,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult-csm,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,catapult-project/catapult,catapult-project/catapult,sahiljain/catapult,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult,sahiljain/catapult,catapult-project/catapult-csm | telemetry/telemetry/page/page_measurement_unittest_base.py | telemetry/telemetry/page/page_measurement_unittest_base.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry.core import util
from telemetry.page import page_runner
from telemetry.page import page as page_module
from telemetry.page import page_set
from telemetry.page import test_expectations
from telemetry.unittest import options_for_unittests
class PageMeasurementUnitTestBase(unittest.TestCase):
"""unittest.TestCase-derived class to help in the construction of unit tests
for a measurement."""
def CreatePageSetFromFileInUnittestDataDir(self, test_filename):
return self.CreatePageSet('file://' + test_filename)
def CreatePageSet(self, test_filename):
base_dir = util.GetUnittestDataDir()
ps = page_set.PageSet(file_path=base_dir)
page = page_module.Page(test_filename, ps, base_dir=base_dir)
setattr(page, 'RunSmoothness', {'action': 'scroll'})
setattr(page, 'RunRepaint',
{ "action": "repaint_continuously", "seconds": 2 })
ps.pages.append(page)
return ps
def RunMeasurement(self, measurement, ps,
expectations=test_expectations.TestExpectations(),
options=None):
"""Runs a measurement against a pageset, returning the rows its outputs."""
if options is None:
options = options_for_unittests.GetCopy()
assert options
temp_parser = options.CreateParser()
page_runner.AddCommandLineArgs(temp_parser)
measurement.AddCommandLineArgs(temp_parser)
measurement.SetArgumentDefaults(temp_parser)
defaults = temp_parser.get_default_values()
for k, v in defaults.__dict__.items():
if hasattr(options, k):
continue
setattr(options, k, v)
measurement.CustomizeBrowserOptions(options)
options.output_file = None
options.output_format = 'none'
options.output_trace_tag = None
page_runner.ProcessCommandLineArgs(temp_parser, options)
measurement.ProcessCommandLineArgs(temp_parser, options)
return page_runner.Run(measurement, ps, expectations, options)
def TestTracingCleanedUp(self, measurement_class, options=None):
ps = self.CreatePageSetFromFileInUnittestDataDir('blank.html')
start_tracing_called = [False]
stop_tracing_called = [False]
class BuggyMeasurement(measurement_class):
def __init__(self, *args, **kwargs):
measurement_class.__init__(self, *args, **kwargs)
# Inject fake tracing methods to browser
def TabForPage(self, page, browser):
ActualStartTracing = browser.StartTracing
def FakeStartTracing(*args, **kwargs):
ActualStartTracing(*args, **kwargs)
start_tracing_called[0] = True
raise Exception('Intentional exception')
browser.StartTracing = FakeStartTracing
ActualStopTracing = browser.StopTracing
def FakeStopTracing(*args, **kwargs):
ActualStopTracing(*args, **kwargs)
stop_tracing_called[0] = True
browser.StopTracing = FakeStopTracing
return measurement_class.TabForPage(self, page, browser)
measurement = BuggyMeasurement()
self.RunMeasurement(measurement, ps, options=options)
if start_tracing_called[0]:
self.assertTrue(stop_tracing_called[0])
| # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from telemetry.core import util
from telemetry.page import page_runner
from telemetry.page import page as page_module
from telemetry.page import page_set
from telemetry.page import test_expectations
from telemetry.unittest import options_for_unittests
class PageMeasurementUnitTestBase(unittest.TestCase):
"""unittest.TestCase-derived class to help in the construction of unit tests
for a measurement."""
def CreatePageSetFromFileInUnittestDataDir(self, test_filename):
return self.CreatePageSet('file://' + test_filename)
def CreatePageSet(self, test_filename):
base_dir = util.GetUnittestDataDir()
ps = page_set.PageSet(file_path=base_dir)
page = page_module.Page(test_filename, ps, base_dir=base_dir)
setattr(page, 'RunSmoothness', {'action': 'scroll'})
setattr(page, 'RunRepaint',
{ "action": "repaint_continuously", "seconds": 2 })
ps.pages.append(page)
return ps
def RunMeasurement(self, measurement, ps,
expectations=test_expectations.TestExpectations(),
options=None):
"""Runs a measurement against a pageset, returning the rows its outputs."""
if options is None:
options = options_for_unittests.GetCopy()
assert options
temp_parser = options.CreateParser()
page_runner.AddCommandLineArgs(temp_parser)
measurement.AddCommandLineArgs(temp_parser)
measurement.SetArgumentDefaults(temp_parser)
defaults = temp_parser.get_default_values()
for k, v in defaults.__dict__.items():
if hasattr(options, k):
continue
setattr(options, k, v)
measurement.CustomizeBrowserOptions(options)
options.output_file = None
options.output_format = 'none'
options.output_trace_tag = None
page_runner.ProcessCommandLineArgs(temp_parser, options)
measurement.ProcessCommandLineArgs(temp_parser, options)
return page_runner.Run(measurement, ps, expectations, options)
| bsd-3-clause | Python |
32a2a256eb792b3e7e5501e0f41e117cff8c1fdb | Fix ranks in CTFTime scoreboard feed | fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver,fausecteam/ctf-gameserver | web/ctf_gameserver/web/scoring/views.py | web/ctf_gameserver/web/scoring/views.py | from django.http import JsonResponse
from django.shortcuts import render
from . import models, calculations
from .decorators import competition_started_required
@competition_started_required
def scoreboard(request):
game_control = models.GameControl.objects.get()
if game_control.competition_over():
to_tick = game_control.current_tick
else:
to_tick = game_control.current_tick - 1
scores = calculations.scores()
statuses = calculations.team_statuses(to_tick, to_tick)
return render(request, 'scoreboard.html', {
'scores': scores,
'services': models.Service.objects.all(),
'statuses': statuses,
'tick': to_tick
})
def scoreboard_json(request):
"""
View which returns the scoreboard in CTFTime scoreboard feed format,
see https://ctftime.org/json-scoreboard-feed.
"""
game_control = models.GameControl.objects.get()
if not game_control.competition_running() and not game_control.competition_over():
return JsonResponse({'error': 'Scoreboard is not available yet'}, status=404)
tasks = ['Offense', 'Defense', 'SLA']
standings = []
scores = calculations.scores()
for rank, (team, points) in enumerate(scores.items(), start=1):
standings.append({
'pos': rank,
'team': team.user.username,
'score': points['total'],
'taskStats': {
'Offense': {'points': points['offense'][1]},
'Defense': {'points': points['defense'][1]},
'SLA': {'points': points['sla'][1]}
}
})
return JsonResponse({'tasks': tasks, 'standings': standings})
@competition_started_required
def service_status(request):
game_control = models.GameControl.objects.get()
to_tick = game_control.current_tick
from_tick = to_tick - 4
if from_tick < 0:
from_tick = 0
statuses = calculations.team_statuses(from_tick, to_tick)
return render(request, 'service_status.html', {
'statuses': statuses,
'ticks': range(from_tick, to_tick+1),
'services': models.Service.objects.all().order_by('name')
})
| from django.http import JsonResponse
from django.shortcuts import render
from . import models, calculations
from .decorators import competition_started_required
@competition_started_required
def scoreboard(request):
game_control = models.GameControl.objects.get()
if game_control.competition_over():
to_tick = game_control.current_tick
else:
to_tick = game_control.current_tick - 1
scores = calculations.scores()
statuses = calculations.team_statuses(to_tick, to_tick)
return render(request, 'scoreboard.html', {
'scores': scores,
'services': models.Service.objects.all(),
'statuses': statuses,
'tick': to_tick
})
def scoreboard_json(request):
"""
View which returns the scoreboard in CTFTime scoreboard feed format,
see https://ctftime.org/json-scoreboard-feed.
"""
game_control = models.GameControl.objects.get()
if not game_control.competition_running() and not game_control.competition_over():
return JsonResponse({'error': 'Scoreboard is not available yet'}, status=404)
tasks = ['Offense', 'Defense', 'SLA']
standings = []
scores = calculations.scores()
for rank, (team, points) in enumerate(scores.items()):
standings.append({
'pos': rank,
'team': team.user.username,
'score': points['total'],
'taskStats': {
'Offense': {'points': points['offense'][1]},
'Defense': {'points': points['defense'][1]},
'SLA': {'points': points['sla'][1]}
}
})
return JsonResponse({'tasks': tasks, 'standings': standings})
@competition_started_required
def service_status(request):
game_control = models.GameControl.objects.get()
to_tick = game_control.current_tick
from_tick = to_tick - 4
if from_tick < 0:
from_tick = 0
statuses = calculations.team_statuses(from_tick, to_tick)
return render(request, 'service_status.html', {
'statuses': statuses,
'ticks': range(from_tick, to_tick+1),
'services': models.Service.objects.all().order_by('name')
})
| isc | Python |
8450580b6d79979b015f8fd61cef71d909bc3db0 | correct url | edx/edx-notifications,edx/edx-notifications,edx/edx-notifications,edx/edx-notifications | testserver/test/acceptance/pages/__init__.py | testserver/test/acceptance/pages/__init__.py | base_url = 'http://127.0.0.1:8000/'
user_name = 'testuser1'
user_email = 'testuser1@edx.org'
password = 'ARbi12.,' | base_url = 'http://71c52982.ngrok.com'
user_name = 'testuser3'
user_email = 'testuser1@edx.org'
password = 'ARbi12.,' | agpl-3.0 | Python |
266aecd7e1cbca4a9add43fbaa53e0af5c6ab400 | Reformat score table | amalshehu/exercism-python | scrabble-score/scrabble_score.py | scrabble-score/scrabble_score.py | # File: scrabble_score.py
# Purpose: Write a program that, given a word, computes the scrabble score for that word.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Tuesday 27 September 2016, 01:27 PM
score_table = {
'a': 1, 'b': 3, 'c': 3, 'd': 2, 'e': 1,
'f': 4, 'g': 2, 'h': 4, 'i': 1, 'j': 8,
'k': 5, 'l': 1, 'm': 3, 'n': 1, 'o': 1,
'p': 3, 'q': 10, 'r': 1, 's': 1, 't': 1,
'u': 1, 'v': 4, 'w': 4, 'x': 8, 'y': 4,
'z': 10
}
| # File: sscrabble_score.py
# Purpose: Write a program that, given a word, computes the scrabble score for that word.
# Programmer: Amal Shehu
# Course: Exercism
# Date: Tuesday 27 September 2016, 01:27 PM
score_table = {
1: "A, E, I, O, U, L, N, R, S, T",
2: "D, G",
3: "B, C, M, P",
4: "F, H, V, W, Y",
5: "K",
8: "J, X",
10: "Q, Z"
}
| mit | Python |
8a75cc4626bd38faeec102aea894d4e7ac08646c | Update description of collection viewer example | almarklein/scikit-image,juliusbierk/scikit-image,paalge/scikit-image,Hiyorimi/scikit-image,chriscrosscutler/scikit-image,oew1v07/scikit-image,bennlich/scikit-image,paalge/scikit-image,warmspringwinds/scikit-image,chriscrosscutler/scikit-image,bennlich/scikit-image,warmspringwinds/scikit-image,rjeli/scikit-image,vighneshbirodkar/scikit-image,ajaybhat/scikit-image,emon10005/scikit-image,pratapvardhan/scikit-image,newville/scikit-image,chintak/scikit-image,SamHames/scikit-image,emon10005/scikit-image,rjeli/scikit-image,SamHames/scikit-image,SamHames/scikit-image,Britefury/scikit-image,WarrenWeckesser/scikits-image,ClinicalGraphics/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,jwiggins/scikit-image,bsipocz/scikit-image,michaelpacer/scikit-image,blink1073/scikit-image,WarrenWeckesser/scikits-image,chintak/scikit-image,ofgulban/scikit-image,ClinicalGraphics/scikit-image,oew1v07/scikit-image,dpshelio/scikit-image,michaelpacer/scikit-image,bsipocz/scikit-image,vighneshbirodkar/scikit-image,Midafi/scikit-image,newville/scikit-image,ofgulban/scikit-image,michaelaye/scikit-image,paalge/scikit-image,rjeli/scikit-image,keflavich/scikit-image,blink1073/scikit-image,almarklein/scikit-image,Midafi/scikit-image,dpshelio/scikit-image,jwiggins/scikit-image,michaelaye/scikit-image,chintak/scikit-image,ajaybhat/scikit-image,ofgulban/scikit-image,SamHames/scikit-image,Britefury/scikit-image,robintw/scikit-image,GaZ3ll3/scikit-image,youprofit/scikit-image,chintak/scikit-image,almarklein/scikit-image,youprofit/scikit-image,Hiyorimi/scikit-image,keflavich/scikit-image,juliusbierk/scikit-image,GaZ3ll3/scikit-image,robintw/scikit-image,almarklein/scikit-image | viewer_examples/viewers/collection_viewer.py | viewer_examples/viewers/collection_viewer.py | """
=====================
CollectionViewer demo
=====================
Demo of CollectionViewer for viewing collections of images. This demo uses
the different layers of the gaussian pyramid as image collection.
You can scroll through images with the slider, or you can interact with the
viewer using your keyboard:
left/right arrows
Previous/next image in collection.
number keys, 0--9
0% to 90% of collection. For example, "5" goes to the image in the
middle (i.e. 50%) of the collection.
home/end keys
First/last image in collection.
"""
import numpy as np
from skimage import data
from skimage.viewer import CollectionViewer
from skimage.transform import build_gaussian_pyramid
img = data.lena()
img_collection = tuple(build_gaussian_pyramid(img))
view = CollectionViewer(img_collection)
view.show()
| """
=====================
CollectionViewer demo
=====================
Demo of CollectionViewer for viewing collections of images. This demo uses
successively darker versions of the same image to fake an image collection.
You can scroll through images with the slider, or you can interact with the
viewer using your keyboard:
left/right arrows
Previous/next image in collection.
number keys, 0--9
0% to 90% of collection. For example, "5" goes to the image in the
middle (i.e. 50%) of the collection.
home/end keys
First/last image in collection.
"""
import numpy as np
from skimage import data
from skimage.viewer import CollectionViewer
from skimage.transform import build_gaussian_pyramid
img = data.lena()
img_collection = tuple(build_gaussian_pyramid(img))
view = CollectionViewer(img_collection)
view.show()
| bsd-3-clause | Python |
023e60ccca10fd7149a5af20aa14623dc0855fe2 | Remove comma | GoogleCloudPlatform/python-docs-samples,GoogleCloudPlatform/python-docs-samples,GoogleCloudPlatform/python-docs-samples,GoogleCloudPlatform/python-docs-samples | vision/snippets/face_detection/faces_test.py | vision/snippets/face_detection/faces_test.py | # Copyright 2016 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from PIL import Image
from faces import main
RESOURCES = os.path.join(os.path.dirname(__file__), 'resources')
def test_main(tmpdir):
out_file = os.path.join(tmpdir.dirname, 'face-output.jpg')
in_file = os.path.join(RESOURCES, 'face-input.jpg')
# Make sure there isn't already a green box
im = Image.open(in_file)
pixels = im.getdata()
greens = sum(1 for (r, g, b) in pixels if r == 0 and g == 255 and b == 0)
assert greens < 1
main(in_file, out_file, 10)
# Make sure there now is some green drawn
im = Image.open(out_file)
pixels = im.getdata()
greens = sum(1 for (r, g, b) in pixels if r == 0 and g == 255 and b == 0)
assert greens > 10
| # Copyright 2016, Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from PIL import Image
from faces import main
RESOURCES = os.path.join(os.path.dirname(__file__), 'resources')
def test_main(tmpdir):
out_file = os.path.join(tmpdir.dirname, 'face-output.jpg')
in_file = os.path.join(RESOURCES, 'face-input.jpg')
# Make sure there isn't already a green box
im = Image.open(in_file)
pixels = im.getdata()
greens = sum(1 for (r, g, b) in pixels if r == 0 and g == 255 and b == 0)
assert greens < 1
main(in_file, out_file, 10)
# Make sure there now is some green drawn
im = Image.open(out_file)
pixels = im.getdata()
greens = sum(1 for (r, g, b) in pixels if r == 0 and g == 255 and b == 0)
assert greens > 10
| apache-2.0 | Python |
f93fcd5cee878c201dd1be2102a2a9433a63c4b5 | Make streamable artist updates as they happen, rather than commiting at the end of all artists | foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm,foocorp/gnu-fm | scripts/set-artist-streamable.py | scripts/set-artist-streamable.py | #!/usr/bin/env python
import psycopg2 as ordbms
import urllib, urllib2
import xml.etree.cElementTree as ElementTree
class SetArtistStreamable:
def __init__(self):
self.conn = ordbms.connect ("dbname='librefm'")
self.cursor = self.conn.cursor()
def updateAll(self):
"""Sets artists streamable property if they have streamable tracks already in the database"""
self.cursor.execute("SELECT DISTINCT(artist.name) FROM artist INNER JOIN track on artist.name=artist_name WHERE track.streamable = 1")
for artist in self.cursor.fetchall():
name = artist[0]
print "marking %s as streamable... " % name
self.cursor.execute("UPDATE artist SET streamable = 1 WHERE name = %s", (name,))
self.conn.commit()
if __name__ == '__main__':
sas = SetArtistStreamable()
sas.updateAll()
| #!/usr/bin/env python
import psycopg2 as ordbms
import urllib, urllib2
import xml.etree.cElementTree as ElementTree
class SetArtistStreamable:
def __init__(self):
self.conn = ordbms.connect ("dbname='librefm'")
self.cursor = self.conn.cursor()
def updateAll(self):
"""Sets artists streamable property if they have streamable tracks already in the database"""
self.cursor.execute("SELECT DISTINCT(artist.name) FROM artist INNER JOIN track on artist.name=artist_name WHERE track.streamable = 1")
for artist in self.cursor.fetchall():
name = artist[0]
print "marking %s as streamable... " % name
self.cursor.execute("UPDATE artist SET streamable = 1 WHERE name = %s", (name,))
print "Applying changes... ",
self.conn.commit()
print "done."
if __name__ == '__main__':
sas = SetArtistStreamable()
sas.updateAll()
| agpl-3.0 | Python |
564d2eedf6e2b62152869c60bf1f3ba18287d8c0 | Add extra tag which displays the occurrence duration in a smart way | jonge-democraten/mezzanine-fullcalendar | fullcalendar/templatetags/fullcalendar.py | fullcalendar/templatetags/fullcalendar.py | from django import template
from django.utils import timezone
from fullcalendar.models import Occurrence
register = template.Library()
@register.inclusion_tag('events/agenda_tag.html')
def show_agenda(*args, **kwargs):
qs = Occurrence.objects.upcoming()
if 'limit' in kwargs:
qs = qs[:int(kwargs['limit'])]
return {
'occurrences': qs,
'all_sites': True,
}
@register.assignment_tag
def get_agenda(*args, **kwargs):
qs = Occurrence.site_related.upcoming()
if 'limit' in kwargs:
return qs[:int(kwargs['limit'])]
return qs
@register.inclusion_tag('events/agenda_tag.html')
def show_site_agenda(*args, **kwargs):
qs = Occurrence.site_related.upcoming()
if 'limit' in kwargs:
qs = qs[:int(kwargs['limit'])]
return {
'occurrences': qs
}
@register.assignment_tag
def get_site_agenda(*args, **kwargs):
qs = Occurrence.site_related.upcoming()
if 'limit' in kwargs:
return qs[:int(kwargs['limit'])]
return qs
@register.simple_tag
def occurrence_duration(occurrence):
start = timezone.localtime(occurrence.start_time)
end = timezone.localtime(occurrence.end_time)
result = start.strftime('%A, %d %B %Y %H:%M')
if (start.day == end.day and start.month == end.month and
start.year == end.year):
result += ' - {:%H:%M}'.format(end)
else:
result += ' - {:%A, %d %B %Y %H:%M}'.format(end)
return result
| from django import template
from fullcalendar.models import Occurrence
register = template.Library()
@register.inclusion_tag('events/agenda_tag.html')
def show_agenda(*args, **kwargs):
qs = Occurrence.objects.upcoming()
if 'limit' in kwargs:
qs = qs[:int(kwargs['limit'])]
return {
'occurrences': qs,
'all_sites': True,
}
@register.assignment_tag
def get_agenda(*args, **kwargs):
qs = Occurrence.site_related.upcoming()
if 'limit' in kwargs:
return qs[:int(kwargs['limit'])]
return qs
@register.inclusion_tag('events/agenda_tag.html')
def show_site_agenda(*args, **kwargs):
qs = Occurrence.site_related.upcoming()
if 'limit' in kwargs:
qs = qs[:int(kwargs['limit'])]
return {
'occurrences': qs
}
@register.assignment_tag
def get_site_agenda(*args, **kwargs):
qs = Occurrence.site_related.upcoming()
if 'limit' in kwargs:
return qs[:int(kwargs['limit'])]
return qs
| mit | Python |
c0af8c4e598b4ae3a8c260048c8468186af42009 | switch back to lab.kmol.info | kmolab/kmolab.github.io,kmolab/kmolab.github.io,kmolab/kmolab.github.io,kmolab/kmolab.github.io | publishconf.py | publishconf.py | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
# 因為 publishconf.py 在 pelicanconf.py 之後, 因此若兩處有相同變數的設定, 將以較後讀入的 publishconf.py 中的設定為主.
# 將所有靜態 html 檔案移到 blog 子目錄
# for github pages under kmolab at github
SITEURL = 'http://lab.kmol.info/blog'
# for fossil scm under 53
#SITEURL = 'https://kmol.info/lab/doc/trunk/blog'
# 此設定用於將資料送到 gh-pages, 因此使用絕對 URL 設定
RELATIVE_URLS = False
THEME = 'theme/attila'
BOOTSTRAP_THEME = 'united'
COLOR_SCHEME_CSS = 'tomorrow_night.css'
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = "kmolab-github"
DISQUS_DISPLAY_COUNTS = False
#GOOGLE_ANALYTICS = ""
# 設定網誌以 md 檔案建立的 file system date 為準, 無需自行設定
DEFAULT_DATE = 'fs'
# 遠端的 code hightlight
#MD_EXTENSIONS = ['fenced_code', 'extra', 'codehilite(linenums=True)']
MARKDOWN = {
'extension_configs': {
'markdown.extensions.codehilite': {'css_class': 'highlight'},
'markdown.extensions.extra': {},
'markdown.extensions.meta': {},
},
'output_format': 'html5',
}
# 若要依照日期存檔呼叫
#ARTICLE_URL = 'posts/{date:%Y}/{date:%m}/{date:%d}/{slug}/'
#ARTICLE_SAVE_AS = 'posts/{date:%Y}/{date:%m}/{date:%d}/{slug}/index.html'
PAGE_URL = 'pages/{slug}/'
PAGE_SAVE_AS = 'pages/{slug}/index.html'
SHOW_ARTICLE_AUTHOR = True | #!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
# This file is only used if you use `make publish` or
# explicitly specify it as your config file.
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
# 因為 publishconf.py 在 pelicanconf.py 之後, 因此若兩處有相同變數的設定, 將以較後讀入的 publishconf.py 中的設定為主.
# 將所有靜態 html 檔案移到 blog 子目錄
# for github pages under kmolab at github
SITEURL = 'https://kmol.info/lab/doc/trunk/blog'
# for fossil scm under 53
#SITEURL = 'https://kmol.info/lab/doc/trunk/blog'
# 此設定用於將資料送到 gh-pages, 因此使用絕對 URL 設定
RELATIVE_URLS = False
THEME = 'theme/attila'
BOOTSTRAP_THEME = 'united'
COLOR_SCHEME_CSS = 'tomorrow_night.css'
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = "kmolab-github"
DISQUS_DISPLAY_COUNTS = False
#GOOGLE_ANALYTICS = ""
# 設定網誌以 md 檔案建立的 file system date 為準, 無需自行設定
DEFAULT_DATE = 'fs'
# 遠端的 code hightlight
#MD_EXTENSIONS = ['fenced_code', 'extra', 'codehilite(linenums=True)']
MARKDOWN = {
'extension_configs': {
'markdown.extensions.codehilite': {'css_class': 'highlight'},
'markdown.extensions.extra': {},
'markdown.extensions.meta': {},
},
'output_format': 'html5',
}
# 若要依照日期存檔呼叫
#ARTICLE_URL = 'posts/{date:%Y}/{date:%m}/{date:%d}/{slug}/'
#ARTICLE_SAVE_AS = 'posts/{date:%Y}/{date:%m}/{date:%d}/{slug}/index.html'
PAGE_URL = 'pages/{slug}/'
PAGE_SAVE_AS = 'pages/{slug}/index.html'
SHOW_ARTICLE_AUTHOR = True | agpl-3.0 | Python |
5af8b02d8603500481af03483a2a3debeb01e586 | Update models.py | sterlingbaldwin/acme_workbench,sterlingbaldwin/acme_workbench,sterlingbaldwin/acme_workbench,sterlingbaldwin/acme_workbench,sterlingbaldwin/acme_workbench | workbench-backend/index/models.py | workbench-backend/index/models.py | from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from enum import Enum
class DataType(Enum):
"""
Enumeration of valid file types
"""
pass
# Leaving this blank until enum values are determined.
#NC=1
#DAT=2
#JSON=3
class BaseModel(models.Model):
"""
A simple base model to subclass from when we want to keep track of create and modify dates
"""
created_date = models.DateTimeField(auto_now_add=True)
modified_date = models.DateTimeField(auto_now_add=True)
class Meta:
"""
Declare this class abstract
"""
abstract = True
# Create your models here.
class Notification(models.Model):
user = models.CharField(max_length=100)
notification_list = models.TextField()
class DataFile(BaseModel):
"""
Model of data stored by the users
Inherit date fields from BaseModel
"""
path = models.CharField(max_length=255)
display_name = models.CharField(max_length=255)
owner = models.ForeignKey(User)
allowed_access = models.ManyToManyField(User)
data_type = models.IntegerField(DataType)
| from __future__ import unicode_literals
from django.db import models
from django.contrib.auth.models import User
from enum import Enum
class DataType(Enum):
"""
Enumeration of valid file types
"""
pass
# Leaving this blank until enum values are determined.
#NC=1
#DAT=2
#JSON=3
class BaseModel(models.Model):
"""
A simple base model to subclass from when we want to keep track of create and modify dates
"""
created_date = models.DateTimeField(auto_now_add=True)
modified_date = models.DateTimeField(auto_now_add=True)
class Meta:
"""
Declare this class abstract
"""
abstract = True
# Create your models here.
class Notification(models.Model):
user = models.CharField(max_length=100)
notification_list = models.TextField()
class DataFile(BaseModel):
"""
Model of data stored by the users
Inherit date fields from BaseModel
"""
path = models.CharField(max_length=255)
display_name = models.CharField(max_length=255)
owner = models.ForeignKey('User')
allowed_access = models.ManyToManyField(User)
data_type = models.IntegerField(DataType) | bsd-2-clause | Python |
52375daaced4df191b6c253458d47e13f76c00a4 | update comment in TableFileNameComposer | alphatwirl/alphatwirl,alphatwirl/alphatwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl | AlphaTwirl/Configure/TableFileNameComposer.py | AlphaTwirl/Configure/TableFileNameComposer.py | # Tai Sakuma <tai.sakuma@cern.ch>
##__________________________________________________________________||
class TableFileNameComposer(object):
"""Compose a name of a file to store the table from the column names and indices.
For example, if column names are 'var1', 'var2', and 'var3' and
indices are 1, None and 2, the file name will be
'tbl_component_var1_1_var2_var3_2.txt'
"""
def __call__(self, columnNames, indices, prefix = 'tbl_n_component', suffix = '.txt'):
# for example, if columnNames = ('var1', 'var2', 'var3') and indices = (1, None, 2),
# l will be ['var1', '1', 'var2', 'var3', '2']
if indices is not None:
indices = [None if i == '*' else i for i in indices]
indices = [None if i == '(*)' else i for i in indices]
indices = [None if isinstance(i, basestring) and i.startswith('\\') else i for i in indices]
l = columnNames if indices is None else [str(e) for sublist in zip(columnNames, indices) for e in sublist if e is not None]
if l:
ret = prefix + '_' + '_'.join(l) + suffix # e.g. "tbl_n_component_var1_1_var2_var3_2.txt"
else:
ret = prefix + suffix # e.g. "tbl_n_component.txt"
return ret
##__________________________________________________________________||
| # Tai Sakuma <tai.sakuma@cern.ch>
##__________________________________________________________________||
class TableFileNameComposer(object):
"""Compose a name of a file to store the table from the column names and indices.
For example, if column names are 'var1', 'var2', and 'var3' and
indices are 1, None and 2, the file name will be
'tbl_component_var1_1_var2_var3_2.txt'
"""
def __call__(self, columnNames, indices, prefix = 'tbl_n_component', suffix = '.txt'):
# for example, if columnNames = ('var1', 'var2', 'var3') and indices = (1, None, 2),
# l will be ['var1', '1', 'var2', 'var3', '2']
if indices is not None:
indices = [None if i == '*' else i for i in indices]
indices = [None if i == '(*)' else i for i in indices]
indices = [None if isinstance(i, basestring) and i.startswith('\\') else i for i in indices]
l = columnNames if indices is None else [str(e) for sublist in zip(columnNames, indices) for e in sublist if e is not None]
if l:
ret = prefix + '_' + '_'.join(l) + suffix # e.g. "tbl_component_var1_1_var2_var3_2.txt"
else:
ret = prefix + suffix # e.g. "tbl_component.txt"
return ret
##__________________________________________________________________||
| bsd-3-clause | Python |
c59ea54efef2fcd1763988a535f3a265935b3afe | Mask in float format. | openconnectome/m2g,neurodata/ndgrutedb,openconnectome/m2g,openconnectome/m2g,neurodata/ndgrutedb,neurodata/ndgrutedb,openconnectome/m2g,openconnectome/m2g,neurodata/ndgrutedb,neurodata/ndgrutedb,openconnectome/m2g,openconnectome/m2g,neurodata/ndgrutedb,neurodata/ndmg,neurodata/ndgrutedb,openconnectome/m2g,neurodata/ndgrutedb | python/mask.py | python/mask.py | import sys
import os
import xml.dom.minidom
import numpy as np
#
# Makes a ton of assumptions about the XML data.
# Totally customize for MRCAP data. Needs to be checked with
# anything that's not the original 109 data files on braingraph1
#
class MaskXML:
""""Class to read the *.xml file and pull out important parameters"""
def __init__(self,filename):
# Parse the XML document
self._xmldoc = xml.dom.minidom.parse(filename)
# There are three extents. These are the dimensions.
def getShape ( self ):
# RBTODO should probably assert on the assumptions. Endianess what else?
dims = self._xmldoc.getElementsByTagName ( 'Extents' )
return [ int(dims[0].firstChild.nodeValue), int(dims[1].firstChild.nodeValue), int(dims[2].firstChild.nodeValue) ]
class MaskData:
"""Class to read mask data derived from MRCAP."""
# Get the dimension
def __init__(self, filename, dim):
self._filename = filename
self._fileobj = open(self._filename, mode='rb')
# file is a list of bytes
self.data = np.fromfile(self._fileobj, dtype='f', count=dim[0]*dim[1]*dim[2])
self.data = np.reshape ( self.data, dim, order='F' )
# Is the location in the brain or not?
def get ( self, index ):
"""Is the location in the brain or not?"""
if index[0] >= self.data.shape[0] or index[1] >= self.data.shape[1] or index[2] >= self.data.shape[2]:
return 0
return self.data [ index[0], index[1], index[2] ]
| import sys
import os
import xml.dom.minidom
import numpy as np
#
# Makes a ton of assumptions about the XML data.
# Totally customize for MRCAP data. Needs to be checked with
# anything that's not the original 109 data files on braingraph1
#
class MaskXML:
""""Class to read the *.xml file and pull out important parameters"""
def __init__(self,filename):
# Parse the XML document
self._xmldoc = xml.dom.minidom.parse(filename)
# There are three extents. These are the dimensions.
def getShape ( self ):
# RBTODO should probably assert on the assumptions. Endianess what else?
dims = self._xmldoc.getElementsByTagName ( 'Extents' )
return [ int(dims[0].firstChild.nodeValue), int(dims[1].firstChild.nodeValue), int(dims[2].firstChild.nodeValue) ]
class MaskData:
"""Class to read mask data derived from MRCAP."""
# Get the dimension
def __init__(self, filename, dim):
self._filename = filename
self._fileobj = open(self._filename, mode='rb')
# file is a list of bytes
self.data = np.fromfile(self._fileobj, dtype='b', count=dim[0]*dim[1]*dim[2])
self.data = np.reshape ( self.data, dim, order='F' )
# Is the location in the brain or not?
def get ( self, index ):
"""Is the location in the brain or not?"""
if index[0] >= self.data.shape[0] or index[1] >= self.data.shape[1] or index[2] >= self.data.shape[2]:
return 0
return self.data [ index[0], index[1], index[2] ]
| apache-2.0 | Python |
4ab814734454361c42560bae5d3331a26dcaad01 | Use correct matrix method | DarkAce65/rpi-led-matrix,DarkAce65/rpi-led-matrix | python/test.py | python/test.py | #!/usr/bin/env python
from rgbmatrix import RGBMatrix
from random import randint
import time
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
for i in range(100):
ledMatrix.SetPixel(randint(0, width), randint(0, height), randint(0, 255), randint(0, 255), randint(0, 255))
time.sleep(0.05)
time.sleep(5)
ledMatrix.Clear() | #!/usr/bin/env python
from rgbmatrix import RGBMatrix
from random import randint
import time
rows = 16
chains = 1
parallel = 1
ledMatrix = RGBMatrix(rows, chains, parallel)
height = ledMatrix.height
width = ledMatrix.width
for i in range(100):
ledMatrix.setPixel(randint(0, width), randint(0, height), randint(0, 255), randint(0, 255), randint(0, 255))
time.sleep(0.05)
time.sleep(5)
ledMatrix.Clear() | mit | Python |
071efd48020467792a6b147b283edc9235f9b4d5 | indent error | danielfalck/FCparametric | python/vise.py | python/vise.py | import Draft,Part
import FreeCAD, FreeCADGui
import FreeCADGui as Gui
from FreeCAD import Base
from PySide import QtGui, QtCore
from math import fabs
import utils
class Vise:
def __init__ (self, obj):
'''create a milling machine vise '''
obj.addProperty("App::PropertyFloat", "JawOpening", "Parallel", "How wide the jaws of the vise are open")
obj.JawOpening = 0.0
obj.Proxy = self
def execute(self, fp):
self.base = Part.Shape()
vise_base_url = "https://raw.githubusercontent.com/danielfalck/FCparametric/master/partfiles/step/vise_base.stp"
vise_base = utils.download(vise_base_url,force = True)
sel.base.read(vise_base)
self.jaw = Part.Shape()
vise_jaw_url = "https://raw.githubusercontent.com/danielfalck/FCparametric/master/partfiles/step/vise_jaw.stp"
vise_jaw = utils.download(vise_jaw_url,force = True)
self.jaw.read(vise_jaw)
if 0<=fabs(fp.JawOpening)<= 223.52:
self.jaw.Placement.Base.y = -(fabs(fp.JawOpening))
else:
self.jaw.Placement.Base.y = -223.52
fp.Shape =Part.makeCompound([self.base,self.jaw])
class ViewProviderVise:
def __init__(self, obj):
"Set this object to the proxy object of the actual view provider"
obj.Proxy = self
def getIcon(self):
vise_icon_url = "https://raw.githubusercontent.com/danielfalck/FCparametric/master/icons/vise.svg"
vise_icon = utils.download(vise_icon_url, force = True)
i =QtGui.QIcon(vise_icon)
p = i.pixmap(128,128)
a = QtCore.QByteArray()
b = QtCore.QBuffer(a)
b.open(QtCore.QIODevice.WriteOnly)
p.save(b,"XPM")
b.close()
return str(a)
'''how to use:
obj =FreeCAD.ActiveDocument.addObject("Part::FeaturePython",'Vise')
Vise(obj)
ViewProviderVise(obj.ViewObject)
FreeCAD.ActiveDocument.recompute()
Once you have a vise in the view-
change how much the jaw opening is in the Data tab of the Property panel.
'''
| import Draft,Part
import FreeCAD, FreeCADGui
import FreeCADGui as Gui
from FreeCAD import Base
from PySide import QtGui, QtCore
from math import fabs
import utils
class Vise:
'''
how to use:
obj =FreeCAD.ActiveDocument.addObject("Part::FeaturePython",'Vise')
Vise(obj)
ViewProviderVise(obj.ViewObject)
FreeCAD.ActiveDocument.recompute()
Once you have a vise in the view-
change how much the jaw opening is in the Data tab of the Property panel.
'''
def __init__ (self, obj):
'''create a milling machine vise '''
obj.addProperty("App::PropertyFloat", "JawOpening", "Parallel", "How wide the jaws of the vise are open")
obj.JawOpening = 0.0
obj.Proxy = self
def execute(self, fp):
self.base = Part.Shape()
vise_base_url = "https://raw.githubusercontent.com/danielfalck/FCparametric/master/partfiles/step/vise_base.stp"
vise_base = utils.download(vise_base_url,force = True)
sel.base.read(vise_base)
self.jaw = Part.Shape()
vise_jaw_url = "https://raw.githubusercontent.com/danielfalck/FCparametric/master/partfiles/step/vise_jaw.stp"
vise_jaw = utils.download(vise_jaw_url,force = True)
self.jaw.read(vise_jaw)
if 0<=fabs(fp.JawOpening)<= 223.52:
self.jaw.Placement.Base.y = -(fabs(fp.JawOpening))
else:
self.jaw.Placement.Base.y = -223.52
fp.Shape =Part.makeCompound([self.base,self.jaw])
class ViewProviderVise:
def __init__(self, obj):
"Set this object to the proxy object of the actual view provider"
obj.Proxy = self
def getIcon(self):
vise_icon_url = "https://raw.githubusercontent.com/danielfalck/FCparametric/master/icons/vise.svg"
vise_icon = utils.download(vise_icon_url, force = True)
i =QtGui.QIcon(vise_icon)
p = i.pixmap(128,128)
a = QtCore.QByteArray()
b = QtCore.QBuffer(a)
b.open(QtCore.QIODevice.WriteOnly)
p.save(b,"XPM")
b.close()
return str(a)
| lgpl-2.1 | Python |
8d391d0820e8e78cba504cfc447fbc2b1c48ecf4 | use a different shuffle algo | dylanaraps/pywal,dylanaraps/pywal,dylanaraps/pywal | pywal/image.py | pywal/image.py | """
Get the image file.
"""
import os
import random
import sys
from .settings import CACHE_DIR
from . import util
from . import wallpaper
def get_random_image(img_dir):
    """Return the path of a randomly chosen image file in ``img_dir``.

    The wallpaper that is currently in use is excluded, so a pick never
    repeats the active image.  Exits the program when nothing qualifies.
    """
    active = os.path.basename(wallpaper.get())
    extensions = (".png", ".jpg", ".jpeg", ".jpe", ".gif",
                  ".PNG", ".JPG", ".JPEG", ".JPE", ".GIF")
    candidates = []
    for entry in os.scandir(img_dir):
        if entry.name.endswith(extensions) and entry.name != active:
            candidates.append(entry)
    if not candidates:
        print("image: No new images found (nothing to do), exiting...")
        sys.exit(1)
    random.shuffle(candidates)
    return os.path.join(img_dir, candidates[0].name)
def get(img, cache_dir=CACHE_DIR):
    """Resolve ``img`` to a concrete image file, cache its path, return it.

    ``img`` may be a file (used as-is) or a directory (a random image
    inside it is picked).  Anything else is a fatal error.
    """
    if os.path.isdir(img):
        chosen = get_random_image(img)
    elif os.path.isfile(img):
        chosen = img
    else:
        print("error: No valid image file found.")
        sys.exit(1)
    chosen = os.path.abspath(chosen)
    # Remember the wallpaper path for later runs.
    util.save_file(chosen, os.path.join(cache_dir, "wal"))
    print("image: Using image", chosen)
    return chosen
| """
Get the image file.
"""
import os
import random
import sys
from .settings import CACHE_DIR
from . import util
from . import wallpaper
def get_random_image(img_dir):
    """Pick a random image file from a directory."""
    # Exclude the wallpaper that is currently set, so the "random" pick
    # never repeats the active image.
    current_wall = wallpaper.get()
    current_wall = os.path.basename(current_wall)
    file_types = (".png", ".jpg", ".jpeg", ".jpe", ".gif",
                  ".PNG", ".JPG", ".JPEG", ".JPE", ".GIF")
    images = [img for img in os.scandir(img_dir)
              if img.name.endswith(file_types) and img.name != current_wall]
    if not images:
        # Nothing usable in the directory: exit with a non-zero status.
        print("image: No new images found (nothing to do), exiting...")
        sys.exit(1)
    return os.path.join(img_dir, random.choice(images).name)
def get(img, cache_dir=CACHE_DIR):
    """Validate image input."""
    # ``img`` may be a single file (used directly) or a directory
    # (a random image inside it is chosen); anything else is fatal.
    if os.path.isfile(img):
        wal_img = img
    elif os.path.isdir(img):
        wal_img = get_random_image(img)
    else:
        print("error: No valid image file found.")
        sys.exit(1)
    wal_img = os.path.abspath(wal_img)
    # Cache the image file path.
    util.save_file(wal_img, os.path.join(cache_dir, "wal"))
    print("image: Using image", wal_img)
    return wal_img
| mit | Python |
473ea77a54e0b85403ceb00cfd78bb540cd8e2e3 | Remove pyqtVariants | goanpeca/qtpy,davvid/qtpy,goanpeca/qtpy,davvid/qtpy,spyder-ide/qtpy | qtpy/QtCore.py | qtpy/QtCore.py | # -*- coding: utf-8 -*-
#
# Copyright © 2014-2015 Colin Duquesnoy
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
"""
Provides QtCore classes and functions.
"""
from qtpy import PYQT5, PYQT4, PYSIDE, PythonQtError

if PYQT5:
    from PyQt5.QtCore import *
    # Alias the PyQt-specific names to their PySide-style equivalents so
    # client code can stay binding-agnostic.
    from PyQt5.QtCore import pyqtSignal as Signal
    from PyQt5.QtCore import pyqtSlot as Slot
    from PyQt5.QtCore import pyqtProperty as Property
    from PyQt5.QtCore import QT_VERSION_STR as __version__
    # Those are imported from `import *`
    # (drop the PyQt names so only the PySide-style aliases are exposed)
    del pyqtSignal, pyqtSlot, pyqtProperty, QT_VERSION_STR
elif PYQT4:
    from PyQt4.QtCore import *
    from PyQt4.QtCore import QCoreApplication
    from PyQt4.QtCore import Qt
    from PyQt4.QtCore import pyqtSignal as Signal
    from PyQt4.QtCore import pyqtSlot as Slot
    from PyQt4.QtCore import pyqtProperty as Property
    # In Qt4 these item classes live in QtGui; re-export them from here
    # to mirror the Qt5 QtCore layout.
    from PyQt4.QtGui import (QItemSelection, QItemSelectionModel,
                             QItemSelectionRange, QSortFilterProxyModel)
    from PyQt4.QtCore import QT_VERSION_STR as __version__
    # Those are imported from `import *`
    del pyqtSignal, pyqtSlot, pyqtProperty, QT_VERSION_STR
elif PYSIDE:
    from PySide.QtCore import *
    from PySide.QtGui import (QItemSelection, QItemSelectionModel,
                              QItemSelectionRange, QSortFilterProxyModel)
    import PySide.QtCore
    __version__ = PySide.QtCore.__version__
else:
    raise PythonQtError('No Qt bindings could be found')
| # -*- coding: utf-8 -*-
#
# Copyright © 2014-2015 Colin Duquesnoy
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
"""
Provides QtCore classes and functions.
"""
from qtpy import PYQT5, PYQT4, PYSIDE, PythonQtError

if PYQT5:
    from PyQt5.QtCore import *
    # Alias the PyQt-specific names to their PySide-style equivalents so
    # client code can stay binding-agnostic.
    from PyQt5.QtCore import pyqtSignal as Signal
    from PyQt5.QtCore import pyqtSlot as Slot
    from PyQt5.QtCore import pyqtProperty as Property
    from PyQt5.QtCore import QT_VERSION_STR as __version__
elif PYQT4:
    from PyQt4.QtCore import *
    from PyQt4.QtCore import QCoreApplication
    from PyQt4.QtCore import Qt
    from PyQt4.QtCore import pyqtSignal as Signal
    from PyQt4.QtCore import pyqtSlot as Slot
    from PyQt4.QtCore import pyqtProperty as Property
    # In Qt4 these item classes live in QtGui; re-export them from here
    # to mirror the Qt5 QtCore layout.
    from PyQt4.QtGui import (QItemSelection, QItemSelectionModel,
                             QItemSelectionRange, QSortFilterProxyModel)
    from PyQt4.QtCore import QT_VERSION_STR as __version__
elif PYSIDE:
    from PySide.QtCore import *
    from PySide.QtGui import (QItemSelection, QItemSelectionModel,
                              QItemSelectionRange, QSortFilterProxyModel)
    import PySide.QtCore
    __version__ = PySide.QtCore.__version__
else:
    # None of the supported Qt bindings could be imported.
    raise PythonQtError('No Qt bindings could be found')
| mit | Python |
74577faa2468a0b944cef3c88c9b8a82a4881ff1 | Change results page title to include query (or "Error" on error). | cdubz/rdap-explorer,cdubz/rdap-explorer | query/views.py | query/views.py | """
Views for the rdap_explorer project, query app.
"""
import ipwhois
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.urls import reverse
from django.views.decorators.cache import cache_page
from json import dumps
from .forms import QueryForm
def index(request):
    """Render the query form; on a valid POST, redirect to the results page."""
    if request.method == 'POST':
        form = QueryForm(request.POST)
        if form.is_valid():
            # Redirect so the query becomes part of the URL (bookmarkable,
            # and the per-URL cache on the results view can apply).
            return HttpResponseRedirect(reverse(
                'query:results',
                args=(form['query'].value(),)
            ))
    else:
        form = QueryForm()
    return render(request, 'query/index.html', {
        'title': 'Query',
        'form': form
    })
@cache_page(86400)  # RDAP data changes slowly; cache each query's page for a day
def results(request, query):
    """Look up ``query`` via RDAP and render the result (or the error)."""
    title = 'Results'
    error = None
    result = {}
    form = QueryForm(initial={"query": query})
    try:
        ip = ipwhois.IPWhois(query)
        result = ip.lookup_rdap(retry_count=1, depth=2, inc_raw=True)
        # On success, use the parsed address string as the page title.
        title = ip.address_str
    except (ValueError, ipwhois.exceptions.IPDefinedError) as e:
        # Bad input or a reserved/defined address: show the error instead.
        error = e
        title = 'Error'
    return render(request, 'query/index.html', {
        'title': title,
        'error': error,
        'form': form,
        'result': dumps(result)
    })
| """
Views for the rdap_explorer project, query app.
"""
import ipwhois
from django.http import HttpResponseRedirect
from django.shortcuts import render
from django.urls import reverse
from django.views.decorators.cache import cache_page
from json import dumps
from .forms import QueryForm
def index(request):
    """Show the query form; on a valid POST, redirect to the results URL."""
    if request.method == 'POST':
        form = QueryForm(request.POST)
        if form.is_valid():
            # Encode the query in the URL so results are shareable and the
            # results view's page cache can key on it.
            return HttpResponseRedirect(reverse(
                'query:results',
                args=(form['query'].value(),)
            ))
    else:
        form = QueryForm()
    return render(request, 'query/index.html', {
        'title': 'Query',
        'form': form
    })
@cache_page(86400)  # RDAP data changes slowly; cache each query's page for a day
def results(request, query):
    """Look up ``query`` via RDAP and render the result (or the error)."""
    error = None
    result = {}
    form = QueryForm(initial={"query": query})
    try:
        ip = ipwhois.IPWhois(query)
        result = ip.lookup_rdap(retry_count=1, depth=2, inc_raw=True)
    except (ValueError, ipwhois.exceptions.IPDefinedError) as e:
        # Bad input or a reserved/defined address: show the error instead.
        error = e
    return render(request, 'query/index.html', {
        'title': 'Results',
        'error': error,
        'form': form,
        'result': dumps(result)
    })
| mit | Python |
27f47ef27654dfa9c68bb90d3b8fae2e3a281396 | Move out app setup to setup file to finish cleaning up the init file | rackerlabs/pitchfork,oldarmyc/pitchfork,oldarmyc/pitchfork,rackerlabs/pitchfork,rackerlabs/pitchfork,oldarmyc/pitchfork | pitchfork/__init__.py | pitchfork/__init__.py | # Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setup_application
app, db = setup_application.create_app()
| # Copyright 2014 Dave Kludt
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from flask import Flask, g
from happymongo import HapPyMongo
from config import config
from adminbp import bp as admin_bp
from manage_globals import bp as manage_bp
from engine import bp as engine_bp
from inspect import getmembers, isfunction
import context_functions
import views
import template_filters
# Build the Flask application and wire up all sub-components at import time.
app = Flask(__name__)
app.config.from_object(config)

# Mount the admin, global-management and engine blueprints.
app.register_blueprint(admin_bp, url_prefix='/admin')
app.register_blueprint(manage_bp, url_prefix='/manage')
app.register_blueprint(engine_bp, url_prefix='/engine')

# Setup DB based on the app name
mongo, db = HapPyMongo(config)

# Register every function defined in template_filters as a Jinja filter.
custom_filters = {
    name: function for name, function in getmembers(template_filters)
    if isfunction(function)
}
app.jinja_env.filters.update(custom_filters)
app.context_processor(context_functions.utility_processor)

views.ProductsView.register(app)
views.MiscView.register(app)

@app.before_request
def before_request():
    # Expose the Mongo handle to request handlers via flask.g.
    g.db = db
| apache-2.0 | Python |
9a98c6256a8e7c16f6b4194f5fa34447605022cd | Make `3_2_make_json_` really fast by using subprocess and `grep -v` | statgen/pheweb,statgen/pheweb,statgen/pheweb,statgen/pheweb,statgen/pheweb | data/3_2_make_json_for_each_pheno.py | data/3_2_make_json_for_each_pheno.py | #!/usr/bin/env python2
from __future__ import print_function, division, absolute_import
import glob
import heapq
import re
import os.path
import os
import json
import subprocess
def parse_marker_id(marker_id):
    """Split an EPACTS marker id like ``1:123_A/G_1:123`` into
    ``(chrom, pos, ref, alt)``; the duplicated chrom:pos halves must agree."""
    pattern = r'([^:]+):([0-9]+)_([-ATCG]+)/([-ATCG]+)_([^:]+):([0-9]+)'
    chrom, pos, ref, alt, chrom_check, pos_check = re.match(pattern, marker_id).groups()
    assert chrom == chrom_check
    assert pos == pos_check
    return chrom, int(pos), ref, alt
tmp_file = '/var/pheweb_data/tmp_3_2.vcf'

# For each per-phenotype VCF, keep the 2000 most significant variants and
# write them out as JSON for the web frontend.
files_to_convert = glob.glob('/var/pheweb_data/gwas-one-pheno/*.vcf.gz')
for filename in files_to_convert:
    basename = os.path.basename(filename)
    # BUG FIX: `basename.rstrip('.vcf.gz')` strips a *set of characters*
    # (any of ".vcfgz") from the right, so stems ending in one of those
    # characters were over-truncated.  Slice the literal suffix off instead.
    pheno_code = basename[:-len('.vcf.gz')] if basename.endswith('.vcf.gz') else basename
    dest_filename = '/var/pheweb_data/gwas-json/{}.json'.format(pheno_code)
    if os.path.exists(dest_filename):
        continue
    print('{} -> {}'.format(filename, dest_filename))
    # Decompress and pre-filter with grep -v: drop rows whose 5th column
    # (p-value) is 1 or has a nonzero digit in the first three decimals,
    # i.e. keep roughly p < 0.001.  Equivalent to:
    # pigz -dc file | perl -nale 'print if $F[4] < 0.001'
    script = '''/net/mario/cluster/bin/pigz -dc '{}' |'''.format(filename) + \
             r'''grep -Pv '^([^\t]*\t){4}(1\t|0\.0{0,2}[1-9])' > ''' + \
             ''' '{}' '''.format(tmp_file)
    subprocess.call(script, shell=True)
    with open(tmp_file) as f:
        # Sanity-check the expected 6-column EPACTS header:
        # CHROM, BEG, MARKER_ID, MAF, <pheno>.P, <pheno>.B
        header = f.readline().rstrip('\n').split('\t')
        assert len(header) == 6
        assert header[:4] == ['#CHROM', 'BEG', 'MARKER_ID', 'MAF']
        assert re.match(r'[0-9]+(?:\.[0-9]+)?\.P', header[4])
        assert re.match(r'[0-9]+(?:\.[0-9]+)?\.B', header[5])
        variants = (line.rstrip('\n').split('\t') for line in f)
        # Keep the 2000 variants with the smallest p-values (column 5).
        top_variants = heapq.nsmallest(2000, variants, key=lambda v: float(v[4]))
    os.remove(tmp_file)
    rv = []
    for variant in top_variants:
        chrom1, pos1, marker_id, maf, pval, beta = variant[0], int(variant[1]), variant[2], float(variant[3]), float(variant[4]), float(variant[5])
        chrom2, pos2, ref, alt = parse_marker_id(variant[2])
        # The marker id embeds chrom/pos again; they must agree.
        assert chrom1 == chrom2
        assert pos1 == pos2
        rv.append({
            'chrom': chrom1,
            'pos': pos1,
            'ref': ref,
            'alt': alt,
            'maf': maf,
            'pval': pval,
            # TODO: include beta
        })
    with open(dest_filename, 'w') as f:
        json.dump(rv, f, sort_keys=True, indent=0)
| #!/usr/bin/env python2
from __future__ import print_function, division, absolute_import
import gzip
import glob
import heapq
import re
import os.path
import json
def parse_marker_id(marker_id):
    # Parse an EPACTS marker id of the form "chrom:pos_ref/alt_chrom:pos"
    # into (chrom, pos, ref, alt); chrom:pos appears twice and must match.
    chr1, pos1, ref, alt, chr2, pos2 = re.match(r'([^:]+):([0-9]+)_([-ATCG]+)/([-ATCG]+)_([^:]+):([0-9]+)', marker_id).groups()
    assert chr1 == chr2
    assert pos1 == pos2
    return chr1, int(pos1), ref, alt
# For each per-phenotype VCF, keep the 2000 most significant variants and
# write them out as JSON for the web frontend.
files_to_convert = glob.glob('/var/pheweb_data/gwas-one-pheno/*.vcf.gz')
for filename in files_to_convert:
    basename = os.path.basename(filename)
    # NOTE(review): str.rstrip strips a *set of characters*, not a suffix,
    # so any stem ending in one of ".vcfgz" gets over-truncated here.
    dest_filename = '/var/pheweb_data/gwas-json/{}.json'.format(basename.rstrip('.vcf.gz'))
    if os.path.exists(dest_filename):
        continue
    print('{} -> {}'.format(filename, dest_filename))
    with gzip.open(filename) as f:
        # Sanity-check the expected 6-column EPACTS header:
        # CHROM, BEG, MARKER_ID, MAF, <pheno>.P, <pheno>.B
        header = f.readline().rstrip('\n').split('\t')
        assert len(header) == 6
        assert header[:4] == ['#CHROM', 'BEG', 'MARKER_ID', 'MAF']
        assert re.match(r'[0-9]+(?:\.[0-9]+)?\.P', header[4])
        assert re.match(r'[0-9]+(?:\.[0-9]+)?\.B', header[5])
        variants = (line.rstrip('\n').split('\t') for line in f)
        # Keep the 2000 variants with the smallest p-values (column 5).
        top_variants = heapq.nsmallest(2000, variants, key=lambda v:float(v[4]))
    rv = []
    for variant in top_variants:
        chrom1, pos1, marker_id, maf, pval, beta = variant[0], int(variant[1]), variant[2], float(variant[3]), float(variant[4]), float(variant[5])
        chrom2, pos2, ref, alt = parse_marker_id(variant[2])
        # The marker id embeds chrom/pos again; they must agree.
        assert chrom1 == chrom2
        assert pos1 == pos2
        rv.append({
            'chrom': chrom1,
            'pos': pos1,
            'ref': ref,
            'alt': alt,
            'maf': maf,
            'pval': pval,
            # TODO: include beta
        })
    with open(dest_filename, 'w') as f:
        json.dump(rv, f, sort_keys=True, indent=0)
| agpl-3.0 | Python |
42a76adff446b79fec589394e2e6dc289f048151 | Update __version__.py | cvium/irc_bot | irc_bot/__version__.py | irc_bot/__version__.py | __version__ = '1.0.35'
| __version__ = '1.0.34'
| mit | Python |
13a8a702722047d0db727b5fdb57a5506bf33179 | Update random_walk.py | sdelaughter/misc | random_walk.py | random_walk.py | """Author: Samuel DeLaughter
12/6/14
This program graphs the probability of landing on each integer in a given interval for a 1D random-walk simulation
If called with a -i flag, it will prompt for user input on the following variables:
Boundary numbers, starting position, number of steps per simulation, number of simulations
"""
import sys
import random
import numpy as np
import matplotlib.pyplot as plt
#Set default values for the variables
MIN_POSITION = -10
MAX_POSITION = 10
START_POSITION = 0
NSTEPS = 12
NSIMS = 1000

if('-i' in sys.argv):
    #Get user input for variables if called with the -i argument
    # (pressing Enter returns '' which is falsy, so the `or` keeps the default)
    MIN_POSITION = int(raw_input("Enter lower boundary: ") or MIN_POSITION)
    MAX_POSITION = int(raw_input("Enter upper boundary: ") or MAX_POSITION)
    #If the upper boundary is smaller than the lower boundary, swap them
    sorted_bounds = sorted((MIN_POSITION, MAX_POSITION))
    MIN_POSITION = sorted_bounds[0]
    MAX_POSITION = sorted_bounds[1]
    #If the two boundaries are equivalent, increment the upper bound
    if(MIN_POSITION == MAX_POSITION):
        MAX_POSITION+=1
    START_POSITION = int(raw_input("Enter starting position: ") or START_POSITION)
    #Make sure the starting position is within the min/max bounds
    # (sorting the triple and taking the middle element clamps it)
    START_POSITION=sorted((MIN_POSITION, START_POSITION, MAX_POSITION))[1]
    NSTEPS = int(raw_input("Enter number of steps for each simulation: ") or NSTEPS)
    NSIMS = int(raw_input("Enter number of simulations to run: ") or NSIMS)
def move(position):
    """Take one random step from ``position``, respecting the boundaries.

    At the upper boundary the walker may only stay or step down; at the
    lower boundary only stay or step up; elsewhere it steps -1, 0 or +1
    with equal probability.
    """
    if position == MAX_POSITION:
        delta = random.randint(-1, 0)
    elif position == MIN_POSITION:
        delta = random.randint(0, 1)
    else:
        delta = random.randint(-1, 1)
    return position + delta
def sim():
    """Run one walk of NSTEPS steps from START_POSITION; return the endpoint."""
    where = START_POSITION
    for _ in range(NSTEPS):
        where = move(where)
    return where
def main():
    # Run NSIMS independent walks and record where each one ended.
    results=[]
    for i in range(NSIMS):
        results.append(sim())
    # counts[j] = number of walks that finished at position MIN_POSITION + j.
    counts=[]
    for i in range(MIN_POSITION, (MAX_POSITION+1)):
        counts.append(results.count(i))
    plt.plot(range(MIN_POSITION, (MAX_POSITION+1)), counts, 'ro')
    #Set axis intervals
    plt.xticks(np.arange(MIN_POSITION, (MAX_POSITION + 1), 1.0))
    #plt.yticks(np.arange(0, max(counts), 100))
    #Display gridlines
    plt.grid(b=True, which=u'major', axis=u'both')
    plt.show()
    #Make the plot window visible
    # NOTE(review): plt.show() blocks until the window is closed, so the
    # two lines below only run afterwards -- confirm this is intended.
    wm = plt.get_current_fig_manager()
    wm.window.attributes('-topmost', 0)

if __name__ == '__main__':
    main()
| #Author: Samuel DeLaughter
#12/6/14
#This program graphs the probability of landing on each integer in a given interval for a 1D random-walk simulation
#If called with a -i flag, it will prompt for user input on the following variables:
#Boundary numbers, starting position, number of steps per simulation, number of simulations
import sys
import random
import numpy as np
import matplotlib.pyplot as plt
#Set default values for the variables
MIN_POSITION = -10
MAX_POSITION = 10
START_POSITION = 0
NSTEPS = 12
NSIMS = 1000
if('-i' in sys.argv):
#Get user input for variables if called with the -i argument
MIN_POSITION = int(raw_input("Enter lower boundary: ") or MIN_POSITION)
MAX_POSITION = int(raw_input("Enter upper boundary: ") or MAX_POSITION)
#If the upper boundary is smaller than the lower boundary, swap them
sorted_bounds = sorted((MIN_POSITION, MAX_POSITION))
MIN_POSITION = sorted_bounds[0]
MAX_POSITION = sorted_bounds[1]
#If the two boundaries are equivalent, increment the upper bound
if(MIN_POSITION == MAX_POSITION):
MAX_POSITION+=1
START_POSITION = int(raw_input("Enter starting position: ") or START_POSITION)
#Make sure the starting position is within the min/max bounds
START_POSITION=sorted((MIN_POSITION, START_POSITION, MAX_POSITION))[1]
NSTEPS = int(raw_input("Enter number of steps for each simulation: ") or NSTEPS)
NSIMS = int(raw_input("Enter number of simulations to run: ") or NSIMS)
def move(position):
    #Change the current position
    # One random step of -1, 0 or +1; at a boundary only the steps that
    # stay inside [MIN_POSITION, MAX_POSITION] are allowed.
    if(position == MAX_POSITION):
        #If you're at the upper boundary, either stay put or decrease
        step=random.randint(-1, 0)
    elif(position == MIN_POSITION):
        #If you're at the lower boundary, either stay put or increase
        step=random.randint(0, 1)
    else:
        #If you're within the boundaries, either stay put, decrease, or increase
        step=random.randint(-1, 1)
    position += step
    return position
def sim():
    #Run a simulation
    # One walk: NSTEPS random moves starting from START_POSITION;
    # returns the final position.
    position=START_POSITION
    for i in range(NSTEPS):
        position=move(position)
    return position
def main():
    # Run NSIMS independent walks and record where each one ended.
    results=[]
    for i in range(NSIMS):
        results.append(sim())
    # counts[j] = number of walks that finished at position MIN_POSITION + j.
    counts=[]
    for i in range(MIN_POSITION, (MAX_POSITION+1)):
        counts.append(results.count(i))
    plt.plot(range(MIN_POSITION, (MAX_POSITION+1)), counts, 'ro')
    #Set axis intervals
    plt.xticks(np.arange(MIN_POSITION, (MAX_POSITION + 1), 1.0))
    #plt.yticks(np.arange(0, max(counts), 100))
    #Display gridlines
    plt.grid(b=True, which=u'major', axis=u'both')
    plt.show()
    #Make the plot window visible
    # NOTE(review): plt.show() blocks until the window is closed, so the
    # two lines below only run afterwards -- confirm this is intended.
    wm = plt.get_current_fig_manager()
    wm.window.attributes('-topmost', 0)

if __name__ == '__main__':
    main()
| mit | Python |
63cb8dc1449f6cab87bd7910276d0e06dfd0b228 | Set up basic webhook for Messenger | jabagawee/playing-with-kubernetes | rasdoor/app.py | rasdoor/app.py | from flask import Flask, abort, request
app = Flask(__name__)

# Token Facebook must echo back during the webhook verification (GET)
# handshake.  TODO(review): load this secret from config/env, not source.
VERIFY_TOKEN = 'temp_token_to_replace_with_secret'

@app.route('/')
def hello_world():
    # Trivial liveness endpoint.
    return 'Hello World'
@app.route('/webhook/facebook_messenger', methods=['GET', 'POST'])
def facebook_webhook():
    """Facebook Messenger webhook.

    POST: receives page events and acknowledges them with 'EVENT_RECEIVED'.
    GET: subscription verification handshake -- echoes the challenge back
    when the verify token matches.
    """
    if request.method == 'POST':
        body = request.get_json()
        if body['object'] == 'page':
            for entry in body['entry']:
                # For now just log the first messaging event of each entry.
                print(entry['messaging'][0])
            return 'EVENT_RECEIVED'
        # Not a page subscription event: nothing we handle.
        abort(404)
    else:
        mode = request.args.get('hub.mode')
        token = request.args.get('hub.verify_token')
        challenge = request.args.get('hub.challenge')
        if mode == 'subscribe' and token == VERIFY_TOKEN:
            return challenge
        # Wrong or missing verify token.
        abort(403)

if __name__ == '__main__':
    app.run(debug=True, host='0.0.0.0')
| from flask import Flask
app = Flask(__name__)

@app.route('/')
def hello_world():
    # Minimal placeholder endpoint.
    return 'Hello World'

if __name__ == '__main__':
    # Bind to all interfaces so container/VM port mapping works.
    app.run(debug=True, host='0.0.0.0')
| mit | Python |
a41dde152d0cecc9ee9ece77959f51536f9a9ec7 | make sure schema exists as part of database creation process | fedspendingtransparency/data-act-core,chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend,fedspendingtransparency/data-act-broker-backend,chambers-brian/SIG_Digital-Strategy_SI_ODP_Backend,fedspendingtransparency/data-act-broker-backend,fedspendingtransparency/data-act-core | dataactcore/scripts/databaseSetup.py | dataactcore/scripts/databaseSetup.py | import sqlalchemy
from sqlalchemy.exc import OperationalError
from sqlalchemy.schema import CreateSchema
from sqlalchemy.exc import ProgrammingError
from dataactcore.config import CONFIG_DB
def createDatabase(dbName):
    """Create the specified database (and its public schema) if missing.

    Connects to ``dbName`` directly; on success, ensures the ``public``
    schema exists.  If the database itself is absent (OperationalError),
    reconnects to the configured base database and issues CREATE DATABASE.
    """
    connectString = "postgresql://{}:{}@{}:{}/{}".format(CONFIG_DB["username"],
        CONFIG_DB["password"], CONFIG_DB["host"], CONFIG_DB["port"],
        dbName)
    db = sqlalchemy.create_engine(
        connectString, isolation_level="AUTOCOMMIT")
    try:
        connect = db.connect()
        try:
            connect.execute(CreateSchema('public'))
        except ProgrammingError as e:
            # Schema already present: nothing to see here.  Use str(e)
            # instead of the Python-2-only e.message attribute, and
            # re-raise anything that is NOT the "already exists" case
            # (the old code silently swallowed every ProgrammingError).
            if "already exists" not in str(e):
                raise
    except OperationalError:
        # Database doesn't exist, so connect to the base DB and create it.
        connectString = connectString.replace(dbName, CONFIG_DB["base_db_name"])
        db = sqlalchemy.create_engine(
            connectString, isolation_level="AUTOCOMMIT")
        connect = db.connect()
        connect.execute(
            "CREATE DATABASE {}".format(dbName))
| import sqlalchemy
from sqlalchemy.exc import OperationalError
from dataactcore.config import CONFIG_DB
def createDatabase(dbName):
"""Create specified database if it doesn't exist."""
connectString = "postgresql://{}:{}@{}:{}/{}".format(CONFIG_DB["username"],
CONFIG_DB["password"], CONFIG_DB["host"], CONFIG_DB["port"],
dbName)
db = sqlalchemy.create_engine(
connectString, isolation_level="AUTOCOMMIT")
try:
connect = db.connect()
except OperationalError as e:
# Database doesn't exist, so create it
connectString = connectString.replace(dbName, CONFIG_DB["base_db_name"])
db = sqlalchemy.create_engine(
connectString, isolation_level="AUTOCOMMIT")
connect = db.connect()
connect.execute(
"CREATE DATABASE {}".format(dbName))
| cc0-1.0 | Python |
3db3d7b74080635a7475a9fc556e5c8577f58aa2 | Fix eta message slightly | nttks/edx-platform,xuxiao19910803/edx-platform,ZLLab-Mooc/edx-platform,louyihua/edx-platform,halvertoluke/edx-platform,waheedahmed/edx-platform,bigdatauniversity/edx-platform,ubc/edx-platform,UXE/local-edx,teltek/edx-platform,y12uc231/edx-platform,jazkarta/edx-platform-for-isc,jelugbo/tundex,dcosentino/edx-platform,leansoft/edx-platform,eduNEXT/edunext-platform,rismalrv/edx-platform,abdoosh00/edx-rtl-final,jazkarta/edx-platform-for-isc,devs1991/test_edx_docmode,gsehub/edx-platform,zerobatu/edx-platform,cecep-edu/edx-platform,chauhanhardik/populo,ubc/edx-platform,polimediaupv/edx-platform,xuxiao19910803/edx-platform,y12uc231/edx-platform,unicri/edx-platform,chauhanhardik/populo_2,jbassen/edx-platform,nagyistoce/edx-platform,fintech-circle/edx-platform,peterm-itr/edx-platform,polimediaupv/edx-platform,nanolearningllc/edx-platform-cypress-2,shubhdev/openedx,simbs/edx-platform,amir-qayyum-khan/edx-platform,adoosii/edx-platform,4eek/edx-platform,AkA84/edx-platform,lduarte1991/edx-platform,Softmotions/edx-platform,arifsetiawan/edx-platform,zerobatu/edx-platform,nttks/jenkins-test,longmen21/edx-platform,kamalx/edx-platform,arbrandes/edx-platform,appsembler/edx-platform,Lektorium-LLC/edx-platform,halvertoluke/edx-platform,nanolearning/edx-platform,vikas1885/test1,lduarte1991/edx-platform,arifsetiawan/edx-platform,mitocw/edx-platform,edx-solutions/edx-platform,xingyepei/edx-platform,fly19890211/edx-platform,IITBinterns13/edx-platform-dev,SivilTaram/edx-platform,UOMx/edx-platform,nanolearningllc/edx-platform-cypress,PepperPD/edx-pepper-platform,TsinghuaX/edx-platform,jswope00/griffinx,BehavioralInsightsTeam/edx-platform,jbassen/edx-platform,alu042/edx-platform,rationalAgent/edx-platform-custom,hmcmooc/muddx-platform,devs1991/test_edx_docmode,morenopc/edx-platform,mcgachey/edx-platform,kursitet/edx-platform,nanolearningllc/edx-platform-cypress-2,kmoocdev2/edx-platform,chand3040/cloud_that,arifsetiawan/edx-pl
atform,waheedahmed/edx-platform,SravanthiSinha/edx-platform,IndonesiaX/edx-platform,jonathan-beard/edx-platform,ampax/edx-platform-backup,IITBinterns13/edx-platform-dev,bitifirefly/edx-platform,doismellburning/edx-platform,ampax/edx-platform,a-parhom/edx-platform,hkawasaki/kawasaki-aio8-1,mcgachey/edx-platform,synergeticsedx/deployment-wipro,hkawasaki/kawasaki-aio8-0,jzoldak/edx-platform,fly19890211/edx-platform,benpatterson/edx-platform,naresh21/synergetics-edx-platform,wwj718/ANALYSE,deepsrijit1105/edx-platform,IndonesiaX/edx-platform,proversity-org/edx-platform,jazztpt/edx-platform,dcosentino/edx-platform,sameetb-cuelogic/edx-platform-test,teltek/edx-platform,simbs/edx-platform,dkarakats/edx-platform,leansoft/edx-platform,itsjeyd/edx-platform,xuxiao19910803/edx-platform,mahendra-r/edx-platform,eemirtekin/edx-platform,mushtaqak/edx-platform,procangroup/edx-platform,analyseuc3m/ANALYSE-v1,knehez/edx-platform,mahendra-r/edx-platform,appliedx/edx-platform,sameetb-cuelogic/edx-platform-test,morenopc/edx-platform,motion2015/a3,zubair-arbi/edx-platform,bigdatauniversity/edx-platform,prarthitm/edxplatform,EDUlib/edx-platform,shashank971/edx-platform,IndonesiaX/edx-platform,jbzdak/edx-platform,a-parhom/edx-platform,CredoReference/edx-platform,kxliugang/edx-platform,jjmiranda/edx-platform,cpennington/edx-platform,pepeportela/edx-platform,atsolakid/edx-platform,mahendra-r/edx-platform,xuxiao19910803/edx,stvstnfrd/edx-platform,appliedx/edx-platform,bitifirefly/edx-platform,ahmedaljazzar/edx-platform,kursitet/edx-platform,adoosii/edx-platform,xinjiguaike/edx-platform,jbzdak/edx-platform,synergeticsedx/deployment-wipro,Edraak/edraak-platform,vasyarv/edx-platform,LICEF/edx-platform,waheedahmed/edx-platform,ahmadiga/min_edx,jruiperezv/ANALYSE,benpatterson/edx-platform,motion2015/edx-platform,hkawasaki/kawasaki-aio8-0,BehavioralInsightsTeam/edx-platform,deepsrijit1105/edx-platform,kxliugang/edx-platform,simbs/edx-platform,edry/edx-platform,Endika/edx-platform,openfun/edx-platform
,Edraak/edx-platform,vismartltd/edx-platform,AkA84/edx-platform,mjg2203/edx-platform-seas,EDUlib/edx-platform,IndonesiaX/edx-platform,nttks/jenkins-test,hkawasaki/kawasaki-aio8-2,rismalrv/edx-platform,TsinghuaX/edx-platform,shubhdev/edx-platform,utecuy/edx-platform,chudaol/edx-platform,syjeon/new_edx,mushtaqak/edx-platform,solashirai/edx-platform,Stanford-Online/edx-platform,chrisndodge/edx-platform,ESOedX/edx-platform,eduNEXT/edx-platform,UXE/local-edx,stvstnfrd/edx-platform,jonathan-beard/edx-platform,rhndg/openedx,UOMx/edx-platform,xuxiao19910803/edx,Edraak/circleci-edx-platform,cecep-edu/edx-platform,olexiim/edx-platform,Stanford-Online/edx-platform,4eek/edx-platform,bdero/edx-platform,abdoosh00/edraak,hamzehd/edx-platform,dkarakats/edx-platform,ZLLab-Mooc/edx-platform,martynovp/edx-platform,cyanna/edx-platform,shashank971/edx-platform,ak2703/edx-platform,jelugbo/tundex,dsajkl/reqiop,nikolas/edx-platform,tiagochiavericosta/edx-platform,vasyarv/edx-platform,ovnicraft/edx-platform,ampax/edx-platform,chauhanhardik/populo,hmcmooc/muddx-platform,RPI-OPENEDX/edx-platform,auferack08/edx-platform,solashirai/edx-platform,devs1991/test_edx_docmode,playm2mboy/edx-platform,Semi-global/edx-platform,nttks/jenkins-test,valtech-mooc/edx-platform,zubair-arbi/edx-platform,ahmedaljazzar/edx-platform,ampax/edx-platform-backup,shurihell/testasia,vismartltd/edx-platform,shurihell/testasia,ak2703/edx-platform,chand3040/cloud_that,philanthropy-u/edx-platform,proversity-org/edx-platform,torchingloom/edx-platform,a-parhom/edx-platform,shashank971/edx-platform,procangroup/edx-platform,Edraak/circleci-edx-platform,AkA84/edx-platform,shashank971/edx-platform,stvstnfrd/edx-platform,pabloborrego93/edx-platform,IITBinterns13/edx-platform-dev,xinjiguaike/edx-platform,chrisndodge/edx-platform,itsjeyd/edx-platform,motion2015/a3,caesar2164/edx-platform,y12uc231/edx-platform,CredoReference/edx-platform,MakeHer/edx-platform,J861449197/edx-platform,WatanabeYasumasa/edx-platform,shubhdev/openedx,kmooc
dev/edx-platform,defance/edx-platform,LICEF/edx-platform,bdero/edx-platform,jbzdak/edx-platform,halvertoluke/edx-platform,shurihell/testasia,Softmotions/edx-platform,motion2015/a3,kmoocdev2/edx-platform,Kalyzee/edx-platform,CourseTalk/edx-platform,shabab12/edx-platform,beacloudgenius/edx-platform,mjg2203/edx-platform-seas,chrisndodge/edx-platform,cselis86/edx-platform,CourseTalk/edx-platform,devs1991/test_edx_docmode,morpheby/levelup-by,procangroup/edx-platform,UOMx/edx-platform,zofuthan/edx-platform,sameetb-cuelogic/edx-platform-test,JCBarahona/edX,antonve/s4-project-mooc,ferabra/edx-platform,jswope00/GAI,kmoocdev/edx-platform,alu042/edx-platform,benpatterson/edx-platform,defance/edx-platform,ahmadiga/min_edx,longmen21/edx-platform,hamzehd/edx-platform,kalebhartje/schoolboost,mjg2203/edx-platform-seas,shubhdev/edx-platform,edx/edx-platform,don-github/edx-platform,kmoocdev2/edx-platform,jazkarta/edx-platform,pelikanchik/edx-platform,EDUlib/edx-platform,prarthitm/edxplatform,EduPepperPDTesting/pepper2013-testing,bitifirefly/edx-platform,Unow/edx-platform,yokose-ks/edx-platform,nikolas/edx-platform,unicri/edx-platform,pdehaye/theming-edx-platform,jjmiranda/edx-platform,SravanthiSinha/edx-platform,lduarte1991/edx-platform,abdoosh00/edraak,zadgroup/edx-platform,rationalAgent/edx-platform-custom,itsjeyd/edx-platform,ZLLab-Mooc/edx-platform,nagyistoce/edx-platform,unicri/edx-platform,DNFcode/edx-platform,nanolearningllc/edx-platform-cypress,antonve/s4-project-mooc,mitocw/edx-platform,DNFcode/edx-platform,shubhdev/openedx,rismalrv/edx-platform,mahendra-r/edx-platform,Edraak/edx-platform,WatanabeYasumasa/edx-platform,utecuy/edx-platform,jbassen/edx-platform,edx/edx-platform,hmcmooc/muddx-platform,zhenzhai/edx-platform,ahmadio/edx-platform,pabloborrego93/edx-platform,jbassen/edx-platform,nanolearning/edx-platform,morenopc/edx-platform,morpheby/levelup-by,shabab12/edx-platform,dsajkl/reqiop,fintech-circle/edx-platform,MakeHer/edx-platform,IndonesiaX/edx-platform,ampax/edx-pla
tform-backup,J861449197/edx-platform,Unow/edx-platform,tanmaykm/edx-platform,pepeportela/edx-platform,AkA84/edx-platform,eemirtekin/edx-platform,romain-li/edx-platform,dsajkl/123,hamzehd/edx-platform,10clouds/edx-platform,procangroup/edx-platform,kxliugang/edx-platform,10clouds/edx-platform,CourseTalk/edx-platform,motion2015/edx-platform,defance/edx-platform,bigdatauniversity/edx-platform,nanolearningllc/edx-platform-cypress,abdoosh00/edx-rtl-final,nttks/edx-platform,rismalrv/edx-platform,mcgachey/edx-platform,zhenzhai/edx-platform,nttks/jenkins-test,kursitet/edx-platform,rhndg/openedx,andyzsf/edx,hastexo/edx-platform,jazztpt/edx-platform,inares/edx-platform,J861449197/edx-platform,mtlchun/edx,gymnasium/edx-platform,jswope00/GAI,kalebhartje/schoolboost,bigdatauniversity/edx-platform,Unow/edx-platform,apigee/edx-platform,Shrhawk/edx-platform,gsehub/edx-platform,beni55/edx-platform,Shrhawk/edx-platform,leansoft/edx-platform,xuxiao19910803/edx-platform,Shrhawk/edx-platform,nttks/edx-platform,wwj718/edx-platform,chudaol/edx-platform,Edraak/circleci-edx-platform,praveen-pal/edx-platform,philanthropy-u/edx-platform,UOMx/edx-platform,eemirtekin/edx-platform,tiagochiavericosta/edx-platform,Unow/edx-platform,kalebhartje/schoolboost,atsolakid/edx-platform,playm2mboy/edx-platform,cyanna/edx-platform,abdoosh00/edraak,halvertoluke/edx-platform,rationalAgent/edx-platform-custom,angelapper/edx-platform,mtlchun/edx,prarthitm/edxplatform,LearnEra/LearnEraPlaftform,synergeticsedx/deployment-wipro,eemirtekin/edx-platform,eestay/edx-platform,nttks/jenkins-test,vasyarv/edx-platform,chand3040/cloud_that,martynovp/edx-platform,chudaol/edx-platform,4eek/edx-platform,prarthitm/edxplatform,SravanthiSinha/edx-platform,vikas1885/test1,marcore/edx-platform,gymnasium/edx-platform,chauhanhardik/populo,utecuy/edx-platform,4eek/edx-platform,nanolearningllc/edx-platform-cypress-2,a-parhom/edx-platform,Kalyzee/edx-platform,philanthropy-u/edx-platform,auferack08/edx-platform,beacloudgenius/edx-platfor
m,IONISx/edx-platform,jazkarta/edx-platform,nagyistoce/edx-platform,jamiefolsom/edx-platform,kmoocdev/edx-platform,shubhdev/edxOnBaadal,jruiperezv/ANALYSE,philanthropy-u/edx-platform,JCBarahona/edX,cselis86/edx-platform,alu042/edx-platform,eduNEXT/edx-platform,ampax/edx-platform,dsajkl/123,zhenzhai/edx-platform,kalebhartje/schoolboost,marcore/edx-platform,Semi-global/edx-platform,auferack08/edx-platform,DNFcode/edx-platform,EduPepperPD/pepper2013,vikas1885/test1,shubhdev/edxOnBaadal,mushtaqak/edx-platform,RPI-OPENEDX/edx-platform,torchingloom/edx-platform,msegado/edx-platform,andyzsf/edx,hkawasaki/kawasaki-aio8-1,beacloudgenius/edx-platform,ESOedX/edx-platform,zubair-arbi/edx-platform,franosincic/edx-platform,simbs/edx-platform,wwj718/ANALYSE,ferabra/edx-platform,IONISx/edx-platform,Endika/edx-platform,jolyonb/edx-platform,playm2mboy/edx-platform,bdero/edx-platform,peterm-itr/edx-platform,alu042/edx-platform,dcosentino/edx-platform,msegado/edx-platform,hkawasaki/kawasaki-aio8-2,10clouds/edx-platform,shubhdev/edx-platform,synergeticsedx/deployment-wipro,knehez/edx-platform,antonve/s4-project-mooc,pku9104038/edx-platform,wwj718/edx-platform,mbareta/edx-platform-ft,nanolearningllc/edx-platform-cypress-2,arbrandes/edx-platform,kamalx/edx-platform,Edraak/edraak-platform,xinjiguaike/edx-platform,motion2015/edx-platform,Livit/Livit.Learn.EdX,solashirai/edx-platform,zofuthan/edx-platform,praveen-pal/edx-platform,SivilTaram/edx-platform,AkA84/edx-platform,doganov/edx-platform,fly19890211/edx-platform,etzhou/edx-platform,xingyepei/edx-platform,ovnicraft/edx-platform,jamesblunt/edx-platform,mcgachey/edx-platform,proversity-org/edx-platform,teltek/edx-platform,hkawasaki/kawasaki-aio8-2,mtlchun/edx,shurihell/testasia,morpheby/levelup-by,chauhanhardik/populo,torchingloom/edx-platform,Ayub-Khan/edx-platform,dcosentino/edx-platform,amir-qayyum-khan/edx-platform,zerobatu/edx-platform,TeachAtTUM/edx-platform,ahmedaljazzar/edx-platform,JCBarahona/edX,TeachAtTUM/edx-platform,cselis86/e
dx-platform,shubhdev/edxOnBaadal,EDUlib/edx-platform,jonathan-beard/edx-platform,tanmaykm/edx-platform,motion2015/a3,fly19890211/edx-platform,rationalAgent/edx-platform-custom,eduNEXT/edx-platform,SivilTaram/edx-platform,ahmadiga/min_edx,shabab12/edx-platform,edx/edx-platform,shubhdev/openedx,Ayub-Khan/edx-platform,stvstnfrd/edx-platform,OmarIthawi/edx-platform,kmoocdev2/edx-platform,mahendra-r/edx-platform,solashirai/edx-platform,TsinghuaX/edx-platform,pku9104038/edx-platform,RPI-OPENEDX/edx-platform,don-github/edx-platform,jjmiranda/edx-platform,EduPepperPDTesting/pepper2013-testing,10clouds/edx-platform,angelapper/edx-platform,Kalyzee/edx-platform,sudheerchintala/LearnEraPlatForm,MSOpenTech/edx-platform,eemirtekin/edx-platform,etzhou/edx-platform,rhndg/openedx,playm2mboy/edx-platform,EduPepperPD/pepper2013,inares/edx-platform,dkarakats/edx-platform,4eek/edx-platform,dcosentino/edx-platform,longmen21/edx-platform,romain-li/edx-platform,JCBarahona/edX,unicri/edx-platform,antoviaque/edx-platform,franosincic/edx-platform,pku9104038/edx-platform,mjirayu/sit_academy,eduNEXT/edunext-platform,IONISx/edx-platform,don-github/edx-platform,yokose-ks/edx-platform,edx/edx-platform,jelugbo/tundex,antoviaque/edx-platform,LearnEra/LearnEraPlaftform,jamesblunt/edx-platform,sameetb-cuelogic/edx-platform-test,PepperPD/edx-pepper-platform,motion2015/edx-platform,ubc/edx-platform,morpheby/levelup-by,abdoosh00/edx-rtl-final,devs1991/test_edx_docmode,unicri/edx-platform,EduPepperPD/pepper2013,jazztpt/edx-platform,kamalx/edx-platform,xuxiao19910803/edx,openfun/edx-platform,caesar2164/edx-platform,doismellburning/edx-platform,Edraak/edx-platform,Livit/Livit.Learn.EdX,zubair-arbi/edx-platform,syjeon/new_edx,longmen21/edx-platform,ak2703/edx-platform,pelikanchik/edx-platform,DNFcode/edx-platform,beacloudgenius/edx-platform,doganov/edx-platform,bitifirefly/edx-platform,cecep-edu/edx-platform,etzhou/edx-platform,sudheerchintala/LearnEraPlatForm,franosincic/edx-platform,jamiefolsom/edx-platfor
m,mjg2203/edx-platform-seas,zerobatu/edx-platform,vikas1885/test1,shabab12/edx-platform,yokose-ks/edx-platform,miptliot/edx-platform,naresh21/synergetics-edx-platform,ak2703/edx-platform,jjmiranda/edx-platform,ZLLab-Mooc/edx-platform,hamzehd/edx-platform,nanolearning/edx-platform,B-MOOC/edx-platform,cpennington/edx-platform,devs1991/test_edx_docmode,dsajkl/123,appliedx/edx-platform,polimediaupv/edx-platform,jswope00/GAI,antoviaque/edx-platform,appsembler/edx-platform,deepsrijit1105/edx-platform,romain-li/edx-platform,raccoongang/edx-platform,adoosii/edx-platform,jolyonb/edx-platform,carsongee/edx-platform,ferabra/edx-platform,doganov/edx-platform,Edraak/edraak-platform,rationalAgent/edx-platform-custom,Shrhawk/edx-platform,tiagochiavericosta/edx-platform,apigee/edx-platform,ovnicraft/edx-platform,tiagochiavericosta/edx-platform,mbareta/edx-platform-ft,eestay/edx-platform,jzoldak/edx-platform,kamalx/edx-platform,alexthered/kienhoc-platform,iivic/BoiseStateX,angelapper/edx-platform,eduNEXT/edunext-platform,franosincic/edx-platform,xuxiao19910803/edx,bdero/edx-platform,wwj718/edx-platform,analyseuc3m/ANALYSE-v1,teltek/edx-platform,jamesblunt/edx-platform,xinjiguaike/edx-platform,msegado/edx-platform,mjirayu/sit_academy,PepperPD/edx-pepper-platform,zofuthan/edx-platform,xuxiao19910803/edx,mjirayu/sit_academy,MSOpenTech/edx-platform,mitocw/edx-platform,valtech-mooc/edx-platform,atsolakid/edx-platform,bitifirefly/edx-platform,beni55/edx-platform,Softmotions/edx-platform,edx-solutions/edx-platform,MakeHer/edx-platform,amir-qayyum-khan/edx-platform,chauhanhardik/populo_2,rue89-tech/edx-platform,iivic/BoiseStateX,Edraak/edraak-platform,jamesblunt/edx-platform,eduNEXT/edx-platform,ampax/edx-platform-backup,jazztpt/edx-platform,DefyVentures/edx-platform,carsongee/edx-platform,zofuthan/edx-platform,DefyVentures/edx-platform,ubc/edx-platform,hastexo/edx-platform,TsinghuaX/edx-platform,valtech-mooc/edx-platform,apigee/edx-platform,gymnasium/edx-platform,gsehub/edx-platform,edry/e
dx-platform,kxliugang/edx-platform,arbrandes/edx-platform,nikolas/edx-platform,chauhanhardik/populo,ovnicraft/edx-platform,ak2703/edx-platform,jazkarta/edx-platform,xingyepei/edx-platform,Ayub-Khan/edx-platform,xuxiao19910803/edx-platform,olexiim/edx-platform,torchingloom/edx-platform,romain-li/edx-platform,eestay/edx-platform,atsolakid/edx-platform,SravanthiSinha/edx-platform,EduPepperPD/pepper2013,sameetb-cuelogic/edx-platform-test,pepeportela/edx-platform,cecep-edu/edx-platform,morenopc/edx-platform,jbassen/edx-platform,halvertoluke/edx-platform,shubhdev/edx-platform,nikolas/edx-platform,jswope00/griffinx,kmoocdev/edx-platform,ahmadiga/min_edx,ahmadiga/min_edx,IONISx/edx-platform,alexthered/kienhoc-platform,jbzdak/edx-platform,atsolakid/edx-platform,ahmadio/edx-platform,leansoft/edx-platform,simbs/edx-platform,defance/edx-platform,hmcmooc/muddx-platform,rue89-tech/edx-platform,DefyVentures/edx-platform,kamalx/edx-platform,mtlchun/edx,Livit/Livit.Learn.EdX,gymnasium/edx-platform,chand3040/cloud_that,cpennington/edx-platform,torchingloom/edx-platform,ahmadio/edx-platform,praveen-pal/edx-platform,vismartltd/edx-platform,gsehub/edx-platform,appsembler/edx-platform,Stanford-Online/edx-platform,hkawasaki/kawasaki-aio8-1,louyihua/edx-platform,mushtaqak/edx-platform,eestay/edx-platform,miptliot/edx-platform,apigee/edx-platform,olexiim/edx-platform,hkawasaki/kawasaki-aio8-2,chudaol/edx-platform,CourseTalk/edx-platform,tanmaykm/edx-platform,BehavioralInsightsTeam/edx-platform,martynovp/edx-platform,marcore/edx-platform,PepperPD/edx-pepper-platform,beni55/edx-platform,marcore/edx-platform,zadgroup/edx-platform,vikas1885/test1,nttks/edx-platform,don-github/edx-platform,alexthered/kienhoc-platform,Lektorium-LLC/edx-platform,ampax/edx-platform-backup,jonathan-beard/edx-platform,jazkarta/edx-platform,OmarIthawi/edx-platform,rue89-tech/edx-platform,WatanabeYasumasa/edx-platform,chudaol/edx-platform,edry/edx-platform,wwj718/edx-platform,EduPepperPDTesting/pepper2013-testing,UXE/l
ocal-edx,B-MOOC/edx-platform,jruiperezv/ANALYSE,hkawasaki/kawasaki-aio8-0,nikolas/edx-platform,dkarakats/edx-platform,don-github/edx-platform,y12uc231/edx-platform,DefyVentures/edx-platform,Lektorium-LLC/edx-platform,openfun/edx-platform,IITBinterns13/edx-platform-dev,antonve/s4-project-mooc,pomegranited/edx-platform,edry/edx-platform,iivic/BoiseStateX,chauhanhardik/populo_2,Kalyzee/edx-platform,msegado/edx-platform,hamzehd/edx-platform,EduPepperPD/pepper2013,itsjeyd/edx-platform,hastexo/edx-platform,appsembler/edx-platform,jswope00/griffinx,rhndg/openedx,Softmotions/edx-platform,pelikanchik/edx-platform,appliedx/edx-platform,raccoongang/edx-platform,polimediaupv/edx-platform,motion2015/a3,ahmedaljazzar/edx-platform,andyzsf/edx,MakeHer/edx-platform,iivic/BoiseStateX,B-MOOC/edx-platform,knehez/edx-platform,motion2015/edx-platform,xingyepei/edx-platform,zadgroup/edx-platform,mbareta/edx-platform-ft,lduarte1991/edx-platform,bigdatauniversity/edx-platform,Shrhawk/edx-platform,MakeHer/edx-platform,ubc/edx-platform,angelapper/edx-platform,LICEF/edx-platform,dsajkl/reqiop,tanmaykm/edx-platform,zhenzhai/edx-platform,dsajkl/123,kxliugang/edx-platform,LICEF/edx-platform,sudheerchintala/LearnEraPlatForm,shashank971/edx-platform,jswope00/GAI,kmoocdev2/edx-platform,zerobatu/edx-platform,nanolearning/edx-platform,edx-solutions/edx-platform,pomegranited/edx-platform,kmoocdev/edx-platform,JioEducation/edx-platform,B-MOOC/edx-platform,knehez/edx-platform,fintech-circle/edx-platform,carsongee/edx-platform,doganov/edx-platform,kursitet/edx-platform,rue89-tech/edx-platform,Endika/edx-platform,doganov/edx-platform,doismellburning/edx-platform,jamesblunt/edx-platform,zubair-arbi/edx-platform,y12uc231/edx-platform,SivilTaram/edx-platform,MSOpenTech/edx-platform,chrisndodge/edx-platform,zadgroup/edx-platform,jswope00/griffinx,inares/edx-platform,jzoldak/edx-platform,jruiperezv/ANALYSE,arifsetiawan/edx-platform,shubhdev/openedx,adoosii/edx-platform,yokose-ks/edx-platform,auferack08/edx-plat
form,dkarakats/edx-platform,antonve/s4-project-mooc,hkawasaki/kawasaki-aio8-0,jolyonb/edx-platform,jruiperezv/ANALYSE,nanolearning/edx-platform,Lektorium-LLC/edx-platform,utecuy/edx-platform,MSOpenTech/edx-platform,cyanna/edx-platform,RPI-OPENEDX/edx-platform,deepsrijit1105/edx-platform,beni55/edx-platform,DNFcode/edx-platform,BehavioralInsightsTeam/edx-platform,jazztpt/edx-platform,Edraak/circleci-edx-platform,amir-qayyum-khan/edx-platform,JioEducation/edx-platform,longmen21/edx-platform,alexthered/kienhoc-platform,benpatterson/edx-platform,valtech-mooc/edx-platform,louyihua/edx-platform,TeachAtTUM/edx-platform,Edraak/edx-platform,franosincic/edx-platform,WatanabeYasumasa/edx-platform,romain-li/edx-platform,OmarIthawi/edx-platform,RPI-OPENEDX/edx-platform,Ayub-Khan/edx-platform,ahmadio/edx-platform,utecuy/edx-platform,TeachAtTUM/edx-platform,vasyarv/edx-platform,pomegranited/edx-platform,etzhou/edx-platform,devs1991/test_edx_docmode,appliedx/edx-platform,rue89-tech/edx-platform,jazkarta/edx-platform,cognitiveclass/edx-platform,jzoldak/edx-platform,EduPepperPDTesting/pepper2013-testing,Ayub-Khan/edx-platform,jelugbo/tundex,waheedahmed/edx-platform,cselis86/edx-platform,martynovp/edx-platform,LearnEra/LearnEraPlaftform,SravanthiSinha/edx-platform,martynovp/edx-platform,jazkarta/edx-platform-for-isc,JCBarahona/edX,wwj718/ANALYSE,mtlchun/edx,jamiefolsom/edx-platform,openfun/edx-platform,waheedahmed/edx-platform,doismellburning/edx-platform,raccoongang/edx-platform,naresh21/synergetics-edx-platform,ampax/edx-platform,mjirayu/sit_academy,CredoReference/edx-platform,mjirayu/sit_academy,etzhou/edx-platform,peterm-itr/edx-platform,shubhdev/edxOnBaadal,shubhdev/edx-platform,nagyistoce/edx-platform,hkawasaki/kawasaki-aio8-1,xinjiguaike/edx-platform,louyihua/edx-platform,edry/edx-platform,ESOedX/edx-platform,CredoReference/edx-platform,pku9104038/edx-platform,zhenzhai/edx-platform,Semi-global/edx-platform,cyanna/edx-platform,pomegranited/edx-platform,olexiim/edx-platform,nares
h21/synergetics-edx-platform,pdehaye/theming-edx-platform,eestay/edx-platform,wwj718/edx-platform,nanolearningllc/edx-platform-cypress,Edraak/circleci-edx-platform,jamiefolsom/edx-platform,chauhanhardik/populo_2,cognitiveclass/edx-platform,rismalrv/edx-platform,ESOedX/edx-platform,alexthered/kienhoc-platform,pepeportela/edx-platform,dsajkl/123,msegado/edx-platform,inares/edx-platform,nagyistoce/edx-platform,pelikanchik/edx-platform,antoviaque/edx-platform,mushtaqak/edx-platform,wwj718/ANALYSE,raccoongang/edx-platform,dsajkl/reqiop,syjeon/new_edx,solashirai/edx-platform,ferabra/edx-platform,J861449197/edx-platform,andyzsf/edx,PepperPD/edx-pepper-platform,fly19890211/edx-platform,J861449197/edx-platform,morenopc/edx-platform,inares/edx-platform,kursitet/edx-platform,peterm-itr/edx-platform,edx-solutions/edx-platform,chauhanhardik/populo_2,EduPepperPDTesting/pepper2013-testing,praveen-pal/edx-platform,yokose-ks/edx-platform,polimediaupv/edx-platform,proversity-org/edx-platform,kalebhartje/schoolboost,arbrandes/edx-platform,abdoosh00/edx-rtl-final,analyseuc3m/ANALYSE-v1,jamiefolsom/edx-platform,Livit/Livit.Learn.EdX,JioEducation/edx-platform,shurihell/testasia,cecep-edu/edx-platform,mcgachey/edx-platform,Kalyzee/edx-platform,jazkarta/edx-platform-for-isc,cognitiveclass/edx-platform,pabloborrego93/edx-platform,jonathan-beard/edx-platform,cpennington/edx-platform,rhndg/openedx,vismartltd/edx-platform,nanolearningllc/edx-platform-cypress-2,playm2mboy/edx-platform,sudheerchintala/LearnEraPlatForm,SivilTaram/edx-platform,ZLLab-Mooc/edx-platform,iivic/BoiseStateX,olexiim/edx-platform,benpatterson/edx-platform,cognitiveclass/edx-platform,OmarIthawi/edx-platform,carsongee/edx-platform,arifsetiawan/edx-platform,jolyonb/edx-platform,pabloborrego93/edx-platform,UXE/local-edx,nanolearningllc/edx-platform-cypress,caesar2164/edx-platform,abdoosh00/edraak,hastexo/edx-platform,tiagochiavericosta/edx-platform,LearnEra/LearnEraPlaftform,MSOpenTech/edx-platform,EduPepperPDTesting/pepper20
13-testing,B-MOOC/edx-platform,chand3040/cloud_that,ferabra/edx-platform,eduNEXT/edunext-platform,cyanna/edx-platform,openfun/edx-platform,zofuthan/edx-platform,pomegranited/edx-platform,beacloudgenius/edx-platform,Edraak/edx-platform,jswope00/griffinx,adoosii/edx-platform,beni55/edx-platform,Endika/edx-platform,cognitiveclass/edx-platform,zadgroup/edx-platform,cselis86/edx-platform,Semi-global/edx-platform,jbzdak/edx-platform,nttks/edx-platform,DefyVentures/edx-platform,mbareta/edx-platform-ft,jelugbo/tundex,fintech-circle/edx-platform,miptliot/edx-platform,miptliot/edx-platform,Softmotions/edx-platform,jazkarta/edx-platform-for-isc,Semi-global/edx-platform,vismartltd/edx-platform,LICEF/edx-platform,wwj718/ANALYSE,IONISx/edx-platform,JioEducation/edx-platform,devs1991/test_edx_docmode,Stanford-Online/edx-platform,syjeon/new_edx,vasyarv/edx-platform,xingyepei/edx-platform,knehez/edx-platform,leansoft/edx-platform,valtech-mooc/edx-platform,analyseuc3m/ANALYSE-v1,pdehaye/theming-edx-platform,ovnicraft/edx-platform,caesar2164/edx-platform,pdehaye/theming-edx-platform,ahmadio/edx-platform,mitocw/edx-platform,doismellburning/edx-platform,shubhdev/edxOnBaadal | lms/djangoapps/open_ended_grading/open_ended_grading_util.py | lms/djangoapps/open_ended_grading/open_ended_grading_util.py | def convert_seconds_to_human_readable(seconds):
if seconds < 60:
human_string = "{0} seconds".format(seconds)
elif seconds < 60 * 60:
human_string = "{0} minutes".format(round(seconds/60,1))
elif seconds < (24*60*60):
human_string = "{0} hours".format(round(seconds/(60*60),1))
else:
human_string = "{0} days".format(round(seconds/(60*60*24),1))
eta_string = "{0}".format(human_string)
return eta_string | def convert_seconds_to_human_readable(seconds):
if seconds < 60:
human_string = "{0} seconds".format(seconds)
elif seconds < 60 * 60:
human_string = "{0} minutes".format(round(seconds/60,1))
elif seconds < (24*60*60):
human_string = "{0} hours".format(round(seconds/(60*60),1))
else:
human_string = "{0} days".format(round(seconds/(60*60*24),1))
eta_string = "In {0}.".format(human_string)
return eta_string | agpl-3.0 | Python |
eed1e42e2a8b37621760b013c53af2a83d441a71 | Use the right model in migration (#190) | MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager,MozillaSecurity/FuzzManager | server/crashmanager/migrations/0013_init_cachedcrashinfo.py | server/crashmanager/migrations/0013_init_cachedcrashinfo.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
from django.db import migrations
def create_migration_tool(apps, schema_editor):
CrashEntry = apps.get_model("crashmanager", "CrashEntry")
for entry in CrashEntry.objects.filter(cachedCrashInfo=None):
entry.save(update_fields=['cachedCrashInfo'])
class Migration(migrations.Migration):
dependencies = [
('crashmanager', '0012_crashentry_cachedcrashinfo'),
]
operations = [
migrations.RunPython(
create_migration_tool,
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals, print_function
from django.db import migrations
from crashmanager.models import CrashEntry
def create_migration_tool(apps, schema_editor):
for entry in CrashEntry.objects.filter(cachedCrashInfo=None):
entry.save(update_fields=['cachedCrashInfo'])
class Migration(migrations.Migration):
dependencies = [
('crashmanager', '0012_crashentry_cachedcrashinfo'),
]
operations = [
migrations.RunPython(
create_migration_tool,
),
]
| mpl-2.0 | Python |
44a290cb4c541c98179dcecd0053beffec5c394c | Disable slow test. | PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge | spec/puzzle/examples/msp/msp2017_06_21_pride_parade_spec.py | spec/puzzle/examples/msp/msp2017_06_21_pride_parade_spec.py | from data import warehouse
from puzzle.examples.msp import msp2017_06_21_pride_parade
from puzzle.problems import logic_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with _description('msp2017_06_21_pride_parade'):
with before.all:
warehouse.save()
prod_config.init()
self.puzzle = msp2017_06_21_pride_parade.get()
with after.all:
prod_config.reset()
warehouse.restore()
with description('solution'):
with it('scores the source as a LogicProblem'):
expect(logic_problem.LogicProblem.score(
msp2017_06_21_pride_parade.SOURCE.split('\n'))).to(equal(1))
with it('identifies puzzle type'):
problems = self.puzzle.problems()
expect(problems).to(have_len(1))
problem = problems[0]
expect(problem).to(be_a(logic_problem.LogicProblem))
with _it('models puzzle'):
model = logic_problem._model(msp2017_06_21_pride_parade.SOURCE.split('\n'))
print(str(model))
with it('exports a solution'):
problem = self.puzzle.problems()[0]
expect(problem.solution).to(look_like(
msp2017_06_21_pride_parade.SOLUTION))
solutions = problem.solutions()
if len(solutions) > 1:
for solution in solutions:
print(solution)
expect(solutions).to(have_len(1))
| from data import warehouse
from puzzle.examples.msp import msp2017_06_21_pride_parade
from puzzle.problems import logic_problem
from puzzle.puzzlepedia import prod_config
from spec.mamba import *
with description('msp2017_06_21_pride_parade'):
with before.all:
warehouse.save()
prod_config.init()
self.puzzle = msp2017_06_21_pride_parade.get()
with after.all:
prod_config.reset()
warehouse.restore()
with description('solution'):
with it('scores the source as a LogicProblem'):
expect(logic_problem.LogicProblem.score(
msp2017_06_21_pride_parade.SOURCE.split('\n'))).to(equal(1))
with it('identifies puzzle type'):
problems = self.puzzle.problems()
expect(problems).to(have_len(1))
problem = problems[0]
expect(problem).to(be_a(logic_problem.LogicProblem))
with _it('models puzzle'):
model = logic_problem._model(msp2017_06_21_pride_parade.SOURCE.split('\n'))
print(str(model))
with it('exports a solution'):
problem = self.puzzle.problems()[0]
expect(problem.solution).to(look_like(
msp2017_06_21_pride_parade.SOLUTION))
solutions = problem.solutions()
if len(solutions) > 1:
for solution in solutions:
print(solution)
expect(solutions).to(have_len(1))
| mit | Python |
11bf9b0e286df595961d17b121d01237b69be85d | Use django.utils.six.iteritems in wagtail.utils.utils.deep_update. | rsalmaso/wagtail,mixxorz/wagtail,thenewguy/wagtail,nimasmi/wagtail,kaedroho/wagtail,nealtodd/wagtail,torchbox/wagtail,mixxorz/wagtail,mixxorz/wagtail,jnns/wagtail,timorieber/wagtail,torchbox/wagtail,zerolab/wagtail,kaedroho/wagtail,mikedingjan/wagtail,zerolab/wagtail,gasman/wagtail,kaedroho/wagtail,gasman/wagtail,thenewguy/wagtail,jnns/wagtail,rsalmaso/wagtail,kaedroho/wagtail,thenewguy/wagtail,zerolab/wagtail,thenewguy/wagtail,nimasmi/wagtail,rsalmaso/wagtail,zerolab/wagtail,jnns/wagtail,takeflight/wagtail,jnns/wagtail,FlipperPA/wagtail,torchbox/wagtail,mixxorz/wagtail,nimasmi/wagtail,nealtodd/wagtail,gasman/wagtail,thenewguy/wagtail,wagtail/wagtail,wagtail/wagtail,FlipperPA/wagtail,zerolab/wagtail,mikedingjan/wagtail,nimasmi/wagtail,kaedroho/wagtail,takeflight/wagtail,timorieber/wagtail,takeflight/wagtail,torchbox/wagtail,wagtail/wagtail,nealtodd/wagtail,mikedingjan/wagtail,takeflight/wagtail,gasman/wagtail,wagtail/wagtail,gasman/wagtail,FlipperPA/wagtail,timorieber/wagtail,mixxorz/wagtail,mikedingjan/wagtail,timorieber/wagtail,rsalmaso/wagtail,rsalmaso/wagtail,wagtail/wagtail,FlipperPA/wagtail,nealtodd/wagtail | wagtail/utils/utils.py | wagtail/utils/utils.py | from __future__ import absolute_import, unicode_literals
import collections
from django.utils import six
def deep_update(source, overrides):
"""Update a nested dictionary or similar mapping.
Modify ``source`` in place.
"""
items = six.iteritems(overrides)
for key, value in items:
if isinstance(value, collections.Mapping) and value:
returned = deep_update(source.get(key, {}), value)
source[key] = returned
else:
source[key] = overrides[key]
return source
| from __future__ import absolute_import, unicode_literals
import collections
import sys
def deep_update(source, overrides):
"""Update a nested dictionary or similar mapping.
Modify ``source`` in place.
"""
if sys.version_info >= (3, 0):
items = overrides.items()
else:
items = overrides.iteritems()
for key, value in items:
if isinstance(value, collections.Mapping) and value:
returned = deep_update(source.get(key, {}), value)
source[key] = returned
else:
source[key] = overrides[key]
return source
| bsd-3-clause | Python |
ffc6e870672784d3514631d99d5c68c6ddd8556b | bump version to 0.2.3 | Kaggle/learntools,Kaggle/learntools | learntools/__init__.py | learntools/__init__.py | from . import advanced_pandas, core, deep_learning, gans, machine_learning, python, ml_insights
__version__ = '0.2.3'
| from . import advanced_pandas, core, deep_learning, gans, machine_learning, python
__version__ = '0.2.2'
| apache-2.0 | Python |
3591a4422a34b718cd3400266ac6f92c8421e82a | Bump to version 0.1.3 | 3YOURMIND/django-migration-linter | django_migration_linter/constants.py | django_migration_linter/constants.py | # Copyright 2019 3YOURMIND GmbH
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from appdirs import user_cache_dir
__version__ = "0.1.3"
MIGRATION_FOLDER_NAME = "migrations"
DEFAULT_CACHE_PATH = user_cache_dir("django-migration-linter", version=__version__)
| # Copyright 2019 3YOURMIND GmbH
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from appdirs import user_cache_dir
__version__ = "0.1.2"
MIGRATION_FOLDER_NAME = "migrations"
DEFAULT_CACHE_PATH = user_cache_dir("django-migration-linter", version=__version__)
| apache-2.0 | Python |
243056d9f6f61ceb84fca2ea2f578f7a4d9eae68 | add @parse_with flask-restful utility | fusic-com/flask-todo,fusic-com/flask-todo,paulvisen/flask-todo,paulvisen/flask-todo,paulvisen/flask-todo | utils/flaskutils/restful.py | utils/flaskutils/restful.py | from functools import wraps
from flask import request
from flask.ext.restful import Api
from flask.ext.restful.reqparse import RequestParser
def patched_to_marshallable_type(obj):
"""adds __marshallable__ support; see https://github.com/twilio/flask-restful/pull/32"""
if obj is None:
return None # make it idempotent for None
if hasattr(obj, '__getitem__'):
return obj # it is indexable it is ok
if hasattr(obj, '__marshallable__'):
return obj.__marshallable__()
return dict(obj.__dict__)
class BetterErrorHandlingApi(Api):
# HACK: see https://github.com/twilio/flask-restful/issues/8
# and https://github.com/twilio/flask-restful/pull/29
def __init__(self, app, prefix='', default_mediatype='application/json',
decorators=None):
self.saved_handle_exception = app.handle_exception
self.saved_handle_user_exception = app.handle_user_exception
super(BetterErrorHandlingApi, self).__init__(app, prefix, default_mediatype, decorators)
app.handle_exception = self.handle_exception
app.handle_user_exception = self.handle_user_exception
self.endpoints = set()
def add_resource(self, resource, *urls, **kwargs):
endpoint = kwargs.setdefault('endpoint', resource.__name__.lower())
self.endpoints.add(endpoint)
return super(BetterErrorHandlingApi, self).add_resource(resource, *urls, **kwargs)
def handle_exception(self, e):
return self.handle_error(self.saved_handle_exception, e)
def handle_user_exception(self, e):
return self.handle_error(self.saved_handle_user_exception, e)
def handle_error(self, original, e):
rv = original(e) # call original error handler, so any side-effect causing handling (sentry, etc) will happen
if not request.url_rule or request.url_rule.endpoint not in self.endpoints:
return rv
return super(BetterErrorHandlingApi, self).handle_error(e)
def parse_with(*arguments, **kwargs):
"""This decorator allows you to easily augment any method (typically a
view method) to access reqparse based arguments, i.e.:
class Users(Resource):
@parse_with(Argument('profession'))
def post(self, params, username):
create_new_user(username, params.profession)
return 'CREATED', 201
api.add_resource(Users, '/<username>', endpoint='users')
"""
parser = kwargs.pop('parser', RequestParser())
if kwargs: # mimic py3k style named-arguments after *args, i.e., def f(a, *b, c=1)
raise TypeError("unexpected keyword argument '%s'" % (kwargs.popitem()[0],))
for argument in arguments:
parser.args.append(argument)
def decor(func):
@wraps(func)
def inner(self, *args, **kwargs):
return func(self, parser.parse_args(), *args, **kwargs)
return inner
return decor
| from flask import request
from flask.ext.restful import Api
def patched_to_marshallable_type(obj):
"""adds __marshallable__ support; see https://github.com/twilio/flask-restful/pull/32"""
if obj is None:
return None # make it idempotent for None
if hasattr(obj, '__getitem__'):
return obj # it is indexable it is ok
if hasattr(obj, '__marshallable__'):
return obj.__marshallable__()
return dict(obj.__dict__)
class BetterErrorHandlingApi(Api):
# HACK: see https://github.com/twilio/flask-restful/issues/8
# and https://github.com/twilio/flask-restful/pull/29
def __init__(self, app, prefix='', default_mediatype='application/json',
decorators=None):
self.saved_handle_exception = app.handle_exception
self.saved_handle_user_exception = app.handle_user_exception
super(BetterErrorHandlingApi, self).__init__(app, prefix, default_mediatype, decorators)
app.handle_exception = self.handle_exception
app.handle_user_exception = self.handle_user_exception
self.endpoints = set()
def add_resource(self, resource, *urls, **kwargs):
endpoint = kwargs.setdefault('endpoint', resource.__name__.lower())
self.endpoints.add(endpoint)
return super(BetterErrorHandlingApi, self).add_resource(resource, *urls, **kwargs)
def handle_exception(self, e):
return self.handle_error(self.saved_handle_exception, e)
def handle_user_exception(self, e):
return self.handle_error(self.saved_handle_user_exception, e)
def handle_error(self, original, e):
rv = original(e) # call original error handler, so any side-effect causing handling (sentry, etc) will happen
if not request.url_rule or request.url_rule.endpoint not in self.endpoints:
return rv
return super(BetterErrorHandlingApi, self).handle_error(e)
| mit | Python |
bce6bc91779fe35d5194d224508294387c417b1b | Complete common prefix bit sol | bowen0701/algorithms_data_structures | lc0201_bitwise_and_of_numbers_range.py | lc0201_bitwise_and_of_numbers_range.py | """Leetcode 201. Bitwise AND of Numbers Range
Medium
URL: https://leetcode.com/problems/bitwise-and-of-numbers-range/
Given a range [m, n] where 0 <= m <= n <= 2147483647,
return the bitwise AND of all numbers in this range, inclusive.
Example 1:
Input: [5,7]
Output: 4
Example 2:
Input: [0,1]
Output: 0
"""
class SolutionBruteForce(object):
def rangeBitwiseAnd(self, m, n):
"""
:type m: int
:type n: int
:rtype: int
Time limit exceeded.
Time complexity: O(n-m).
Space complexity: O(1).
"""
# Edge case when m = 0.
if m == 0:
return 0
# Apply brute force method.
result = m
for i in range(m + 1, n + 1):
result &= i
return result
class SolutionCommonPrefixBit(object):
def rangeBitwiseAnd(self, m, n):
"""
:type m: int
:type n: int
:rtype: int
Time complexity: O(logn).
Space complexity: O(1).
"""
# Edge case.
if m == 0:
return 0
# Find common prefix binary code by right-shifting m & n.
n_shifts = 0
while m != n:
m >>= 1
n >>= 1
n_shifts += 1
# Left-shift m by n_shifts digits.
return m <<= n_shifts
def main():
# Output: 4
m, n = 5, 7
print SolutionBruteForce().rangeBitwiseAnd(m, n)
print SolutionCommonPrefixBit().rangeBitwiseAnd(m, n)
# Output: 0
m, n = 0, 1
print SolutionBruteForce().rangeBitwiseAnd(m, n)
print SolutionCommonPrefixBit().rangeBitwiseAnd(m, n)
if __name__ == '__main__':
main()
| """Leetcode 201. Bitwise AND of Numbers Range
Medium
URL: https://leetcode.com/problems/bitwise-and-of-numbers-range/
Given a range [m, n] where 0 <= m <= n <= 2147483647,
return the bitwise AND of all numbers in this range, inclusive.
Example 1:
Input: [5,7]
Output: 4
Example 2:
Input: [0,1]
Output: 0
"""
class SolutionBruteForce(object):
def rangeBitwiseAnd(self, m, n):
"""
:type m: int
:type n: int
:rtype: int
Time limit exceeded.
Time complexity: O(n-m).
Space complexity: O(1).
"""
if m == 0:
return 0
result = m
for i in range(m + 1, n + 1):
result &= i
return result
def main():
# Output: 4
m, n = 5, 7
print SolutionBruteForce().rangeBitwiseAnd(m, n)
# Output: 0
m, n = 0, 1
print SolutionBruteForce().rangeBitwiseAnd(m, n)
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
4ded6343e8c28a427af757e9dda2e6a10be40657 | enable custom envs for executed commands | dirk-thomas/vcstool,dirk-thomas/vcstool | vcstool/clients/vcs_base.py | vcstool/clients/vcs_base.py | import os
import subprocess
class VcsClientBase(object):
type = None
def __init__(self, path):
self.path = path
def __getattribute__(self, name):
if name == 'import':
try:
return self.import_
except AttributeError:
pass
return super(VcsClientBase, self).__getattribute__(name)
def _not_applicable(self, command):
return {
'cmd': '%s.%s(%s)' % (self.__class__.type, 'push', command.__class__.command),
'output': "Command '%s' not applicable for client '%s'" % (command.__class__.command, self.__class__.type),
'returncode': NotImplemented
}
def _run_command(self, cmd, env=None):
return run_command(cmd, os.path.abspath(self.path), env=env)
def _create_path(self):
if not os.path.exists(self.path):
try:
os.makedirs(self.path)
except os.error as e:
return {
'cmd': 'os.makedirs(%s)' % self.path,
'cwd': self.path,
'output': "Could not create directory '%s': %s" % (self.path, e),
'returncode': 1
}
return None
def find_executable(file_name):
for path in os.getenv('PATH').split(os.path.pathsep):
file_path = os.path.join(path, file_name)
if os.path.isfile(file_path):
return file_path
return None
def run_command(cmd, cwd, env=None):
result = {'cmd': ' '.join(cmd), 'cwd': cwd}
try:
proc = subprocess.Popen(cmd, cwd=cwd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env)
result['output'], _ = proc.communicate()
result['returncode'] = 0
except subprocess.CalledProcessError as e:
result['output'] = e.output
result['returncode'] = e.returncode
return result
| import os
import subprocess
class VcsClientBase(object):
type = None
def __init__(self, path):
self.path = path
def __getattribute__(self, name):
if name == 'import':
try:
return self.import_
except AttributeError:
pass
return super(VcsClientBase, self).__getattribute__(name)
def _not_applicable(self, command):
return {
'cmd': '%s.%s(%s)' % (self.__class__.type, 'push', command.__class__.command),
'output': "Command '%s' not applicable for client '%s'" % (command.__class__.command, self.__class__.type),
'returncode': NotImplemented
}
def _run_command(self, cmd):
return run_command(cmd, os.path.abspath(self.path))
def _create_path(self):
if not os.path.exists(self.path):
try:
os.makedirs(self.path)
except os.error as e:
return {
'cmd': 'os.makedirs(%s)' % self.path,
'cwd': self.path,
'output': "Could not create directory '%s': %s" % (self.path, e),
'returncode': 1
}
return None
def find_executable(file_name):
for path in os.getenv('PATH').split(os.path.pathsep):
file_path = os.path.join(path, file_name)
if os.path.isfile(file_path):
return file_path
return None
def run_command(cmd, cwd):
result = {'cmd': ' '.join(cmd), 'cwd': cwd}
try:
result['output'] = subprocess.check_output(cmd, cwd=cwd, stderr=subprocess.STDOUT).rstrip()
result['returncode'] = 0
except subprocess.CalledProcessError as e:
result['output'] = e.output
result['returncode'] = e.returncode
return result
| apache-2.0 | Python |
ef5305a23b953765cc3b55bdb764487e4b5b180d | Allow for specifying not using the random module (#32763) | saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt | salt/utils/pycrypto.py | salt/utils/pycrypto.py |
# -*- coding: utf-8 -*-
'''
Use pycrypto to generate random passwords on the fly.
'''
# Import python libraries
from __future__ import absolute_import
import re
import string
import random
# Import 3rd-party libs
try:
import Crypto.Random # pylint: disable=E0611
HAS_RANDOM = True
except ImportError:
HAS_RANDOM = False
try:
# Windows does not have the crypt module
import crypt
HAS_CRYPT = True
except ImportError:
HAS_CRYPT = False
# Import salt libs
from salt.exceptions import SaltInvocationError
def secure_password(length=20, use_random=True):
'''
Generate a secure password.
'''
length = int(length)
pw = ''
while len(pw) < length:
if HAS_RANDOM and use_random:
pw += re.sub(r'\W', '', Crypto.Random.get_random_bytes(1))
else:
pw += random.SystemRandom().choice(string.ascii_letters + string.digits)
return pw
def gen_hash(crypt_salt=None, password=None, algorithm='sha512'):
'''
Generate /etc/shadow hash
'''
if not HAS_CRYPT:
raise SaltInvocationError('No crypt module for windows')
hash_algorithms = dict(
md5='$1$', blowfish='$2a$', sha256='$5$', sha512='$6$'
)
if algorithm not in hash_algorithms:
raise SaltInvocationError(
'Algorithm \'{0}\' is not supported'.format(algorithm)
)
if password is None:
password = secure_password()
if crypt_salt is None:
crypt_salt = secure_password(8)
crypt_salt = hash_algorithms[algorithm] + crypt_salt
return crypt.crypt(password, crypt_salt)
|
# -*- coding: utf-8 -*-
'''
Use pycrypto to generate random passwords on the fly.
'''
# Import python libraries
from __future__ import absolute_import
import re
import string
import random
# Import 3rd-party libs
try:
import Crypto.Random # pylint: disable=E0611
HAS_RANDOM = True
except ImportError:
HAS_RANDOM = False
try:
# Windows does not have the crypt module
import crypt
HAS_CRYPT = True
except ImportError:
HAS_CRYPT = False
# Import salt libs
from salt.exceptions import SaltInvocationError
def secure_password(length=20):
'''
Generate a secure password.
'''
length = int(length)
pw = ''
while len(pw) < length:
if HAS_RANDOM:
pw += re.sub(r'\W', '', Crypto.Random.get_random_bytes(1))
else:
pw += random.SystemRandom().choice(string.ascii_letters + string.digits)
return pw
def gen_hash(crypt_salt=None, password=None, algorithm='sha512'):
'''
Generate /etc/shadow hash
'''
if not HAS_CRYPT:
raise SaltInvocationError('No crypt module for windows')
hash_algorithms = dict(
md5='$1$', blowfish='$2a$', sha256='$5$', sha512='$6$'
)
if algorithm not in hash_algorithms:
raise SaltInvocationError(
'Algorithm \'{0}\' is not supported'.format(algorithm)
)
if password is None:
password = secure_password()
if crypt_salt is None:
crypt_salt = secure_password(8)
crypt_salt = hash_algorithms[algorithm] + crypt_salt
return crypt.crypt(password, crypt_salt)
| apache-2.0 | Python |
5e21a1f8f1c1543faabe65fc9b7272ce53bc4e3c | Update withings endpoints | python-social-auth/social-core,python-social-auth/social-core | social_core/backends/withings.py | social_core/backends/withings.py | from .oauth import BaseOAuth1
class WithingsOAuth(BaseOAuth1):
name = 'withings'
AUTHORIZATION_URL = 'https://developer.health.nokia.com/account/authorize'
REQUEST_TOKEN_URL = 'https://developer.health.nokia.com/account/request_token'
ACCESS_TOKEN_URL = 'https://developer.health.nokia.com/account/access_token'
ID_KEY = 'userid'
def get_user_details(self, response):
"""Return user details from Withings account"""
return {'userid': response['access_token']['userid'],
'email': ''}
| from .oauth import BaseOAuth1
class WithingsOAuth(BaseOAuth1):
name = 'withings'
AUTHORIZATION_URL = 'https://oauth.withings.com/account/authorize'
REQUEST_TOKEN_URL = 'https://oauth.withings.com/account/request_token'
ACCESS_TOKEN_URL = 'https://oauth.withings.com/account/access_token'
ID_KEY = 'userid'
def get_user_details(self, response):
"""Return user details from Withings account"""
return {'userid': response['access_token']['userid'],
'email': ''}
| bsd-3-clause | Python |
d5b09f5beb5162fcb7d9751abfe699da53009351 | rewrite plot2rst example. | matteoicardi/mpltools,tonysyu/mpltools | doc/examples/sphinx/plot_plot2rst.py | doc/examples/sphinx/plot_plot2rst.py | #!/usr/bin/env python
"""
====================
`plot2rst` extension
====================
`plot2rst` is a sphinx extension that converts a normal python file into
reStructuredText. All strings in the python file are converted into regular
reStructuredText, while all python code is converted into code blocks.
This extension is named `plot2rst` because the conversion also intelligently
handles plots. In particular, you can write a block of code that creates
a plot, and then follow that up with a discussion that has the plot added
inline. To do so, you just need to add a call to the Sphinx image directive and
set the image link to a special tag::
.. image:: PLOT2RST. current_figure
**Note** that there shouldn't be a space after the period in a real call---it's
added here to prevent `plot2rst` from replacing the tag with an image path.
All the code that runs before this call will be executed, the current figure
will be saved, and the tag will be replaced with the path to that figure.
So here's a line plot:
"""
import numpy as np
import matplotlib.pyplot as plt
plt.figure(figsize=(4, 3))
x = np.linspace(0, 2*np.pi)
plt.plot(x, np.sin(x))
"""
This plot can be displayed inline with a call the ``current_figure`` tag:
.. image:: PLOT2RST.current_figure
And here's a second plot
"""
plt.figure(figsize=(3, 3))
plt.imshow(np.random.random(size=(20, 20)))
"""
which gets displayed as:
.. image:: PLOT2RST.current_figure
You can also add to plots created in previous code blocks:
"""
x = np.linspace(0, 19)
plt.plot(x, 5 * np.sin(x) + 10, alpha=0.5, lw=5)
plt.margins(0)
"""
.. image:: PLOT2RST.current_figure
There's some subtle differences between strings and comments which I'll
demonstrate below. (Some of this only makes sense if you look at the raw python
file.)
# Comments in text blocks remain nested in the text.
"""
def dummy():
"""Dummy function to make sure docstrings don't get rendered as text"""
pass
# Code comments are not strings and are left in code blocks.
"Any string that's not saved to a variable is converted to text"
string = """
Triple-quoted string which tries to break parser but doesn't.
"""
"""
Finally, I'll call ``show`` at the end just so someone running the python code
directly will see the plots; this is not necessary for creating the docs.
"""
plt.show()
| #!/usr/bin/env python
"""
================
Tutorial example
================
Here's a line plot:
"""
import numpy as np
import matplotlib.pyplot as plt
'normal string'
x = np.linspace(0, 2*np.pi)
plt.plot(x, np.sin(x))
def dummy():
"""Dummy docstring"""
pass
"""
.. image:: PLOT2RST.current_figure
Here's an image plot:
"""
# code comment
plt.figure()
plt.imshow(np.random.random(size=(20, 20)))
"""
.. image:: PLOT2RST.current_figure
# docstring comment
"""
string = """
Triple-quoted string which tries to break parser.
"""
plt.show()
| bsd-3-clause | Python |
ebd9ae7f0ed83a328555a330b6565343454d8e4f | Bump to final version 0.7.0 | learningequality/ricecooker,learningequality/ricecooker,learningequality/ricecooker,learningequality/ricecooker | ricecooker/__init__.py | ricecooker/__init__.py | # -*- coding: utf-8 -*-
__author__ = "Learning Equality"
__email__ = "info@learningequality.org"
__version__ = "0.7.0"
import sys
if sys.version_info < (3, 6, 0):
raise RuntimeError("Ricecooker only supports Python 3.6+")
| # -*- coding: utf-8 -*-
__author__ = "Learning Equality"
__email__ = "info@learningequality.org"
__version__ = "0.7.0b6"
import sys
if sys.version_info < (3, 6, 0):
raise RuntimeError("Ricecooker only supports Python 3.6+")
| mit | Python |
16d6dd0ba2b5218d211c25e3e197d65fe163b09a | Fix broken Helsinki OIDC provider links | City-of-Helsinki/django-helusers,City-of-Helsinki/django-helusers | helusers/providers/helsinki_oidc/views.py | helusers/providers/helsinki_oidc/views.py | import requests
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter, OAuth2LoginView, OAuth2CallbackView
)
from .provider import HelsinkiOIDCProvider
class HelsinkiOIDCOAuth2Adapter(OAuth2Adapter):
provider_id = HelsinkiOIDCProvider.id
access_token_url = 'https://api.hel.fi/sso/openid/token/'
authorize_url = 'https://api.hel.fi/sso/openid/authorize/'
profile_url = 'https://api.hel.fi/sso/openid/userinfo/'
def complete_login(self, request, app, token, **kwargs):
headers = {'Authorization': 'Bearer {0}'.format(token.token)}
resp = requests.get(self.profile_url, headers=headers)
assert resp.status_code == 200
extra_data = resp.json()
return self.get_provider().sociallogin_from_response(request,
extra_data)
oauth2_login = OAuth2LoginView.adapter_view(HelsinkiOIDCOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(HelsinkiOIDCOAuth2Adapter)
| import requests
from allauth.socialaccount.providers.oauth2.views import (
OAuth2Adapter, OAuth2LoginView, OAuth2CallbackView
)
from .provider import HelsinkiOIDCProvider
class HelsinkiOIDCOAuth2Adapter(OAuth2Adapter):
provider_id = HelsinkiOIDCProvider.id
access_token_url = 'https://api.hel.fi/sso-test/openid/token/'
authorize_url = 'https://api.hel.fi/sso-test/openid/authorize/'
profile_url = 'https://api.hel.fi/sso-test/openid/userinfo/'
def complete_login(self, request, app, token, **kwargs):
headers = {'Authorization': 'Bearer {0}'.format(token.token)}
resp = requests.get(self.profile_url, headers=headers)
assert resp.status_code == 200
extra_data = resp.json()
return self.get_provider().sociallogin_from_response(request,
extra_data)
oauth2_login = OAuth2LoginView.adapter_view(HelsinkiOIDCOAuth2Adapter)
oauth2_callback = OAuth2CallbackView.adapter_view(HelsinkiOIDCOAuth2Adapter)
| bsd-2-clause | Python |
84db360ec3542daa63c93011215c341ba047ed62 | remove answers extension line | GEM-benchmark/NL-Augmenter | transformations/redundant_context_for_qa/transformation.py | transformations/redundant_context_for_qa/transformation.py | from typing import Tuple, List
from interfaces.QuestionAnswerOperation import QuestionAnswerOperation
from tasks.TaskTypes import TaskType
"""
Simple perturbation to demonstrate a question answering perturbation. This perturbation repeats the context blindly
and expects the answers still to be the same. Note that this perturbation might not apply for event related tasks.
"""
class RedundantContextForQa(QuestionAnswerOperation):
tasks = [TaskType.QUESTION_ANSWERING, TaskType.QUESTION_GENERATION]
languages = ["en"]
def __init__(self, seed=0, max_outputs=1):
super().__init__(seed, max_outputs=max_outputs)
def generate(
self, context: str, question: str, answers: [str]
) -> List[Tuple[str, str, List[str]]]:
context = context.rstrip() + " " + context.lstrip()
return [(context, question, answers)]
class QuestionInCaps(QuestionAnswerOperation):
tasks = [TaskType.QUESTION_ANSWERING, TaskType.QUESTION_GENERATION]
languages = ["en"]
def __init__(self, seed=0, max_outputs=1):
super().__init__(seed, max_outputs=max_outputs)
def generate(
self, context: str, question: str, answers: [str]
) -> List[Tuple[str, str, List[str]]]:
return [(context, question.upper(), answers)]
"""
# Sample code to demonstrate adding test cases.
if __name__ == '__main__':
import json
from TestRunner import convert_to_snake_case
tf = RedundantContextForQa()
test_cases = []
context = "Steam engines are external combustion engines, where the working fluid is separate from the combustion products. " \
"Non-combustion heat sources such as solar power, nuclear power or geothermal energy may be used."
question = "Along with geothermal and nuclear, what is a notable non-combustion heat source?"
answers = [
"solar",
"solar power",
"solar power, nuclear power or geothermal energy"
]
perturbs = tf.generate(context, question, answers)
test_cases.append({
"class": tf.name(),
"inputs": {"context": context, "question": question, "answers": answers},
"outputs": []}
)
for p_context, p_question, p_answers in perturbs:
test_cases[0]["outputs"].append({"context": p_context, "question": p_question, "answers": p_answers})
json_file = {"type": convert_to_snake_case(tf.name()), "test_cases": test_cases}
print(json.dumps(json_file))
"""
| from typing import Tuple, List
from interfaces.QuestionAnswerOperation import QuestionAnswerOperation
from tasks.TaskTypes import TaskType
"""
Simple perturbation to demonstrate a question answering perturbation. This perturbation repeats the context blindly
and expects the answers still to be the same. Note that this perturbation might not apply for event related tasks.
"""
class RedundantContextForQa(QuestionAnswerOperation):
tasks = [TaskType.QUESTION_ANSWERING, TaskType.QUESTION_GENERATION]
languages = ["en"]
def __init__(self, seed=0, max_outputs=1):
super().__init__(seed, max_outputs=max_outputs)
def generate(
self, context: str, question: str, answers: [str]
) -> List[Tuple[str, str, List[str]]]:
context = context.rstrip() + " " + context.lstrip()
return [(context, question, answers)]
class QuestionInCaps(QuestionAnswerOperation):
tasks = [TaskType.QUESTION_ANSWERING, TaskType.QUESTION_GENERATION]
languages = ["en"]
def __init__(self, seed=0, max_outputs=1):
super().__init__(seed, max_outputs=max_outputs)
def generate(
self, context: str, question: str, answers: [str]
) -> List[Tuple[str, str, List[str]]]:
answers.extend([answer.upper() for answer in answers])
return [(context, question.upper(), answers)]
"""
# Sample code to demonstrate adding test cases.
if __name__ == '__main__':
import json
from TestRunner import convert_to_snake_case
tf = RedundantContextForQa()
test_cases = []
context = "Steam engines are external combustion engines, where the working fluid is separate from the combustion products. " \
"Non-combustion heat sources such as solar power, nuclear power or geothermal energy may be used."
question = "Along with geothermal and nuclear, what is a notable non-combustion heat source?"
answers = [
"solar",
"solar power",
"solar power, nuclear power or geothermal energy"
]
perturbs = tf.generate(context, question, answers)
test_cases.append({
"class": tf.name(),
"inputs": {"context": context, "question": question, "answers": answers},
"outputs": []}
)
for p_context, p_question, p_answers in perturbs:
test_cases[0]["outputs"].append({"context": p_context, "question": p_question, "answers": p_answers})
json_file = {"type": convert_to_snake_case(tf.name()), "test_cases": test_cases}
print(json.dumps(json_file))
"""
| mit | Python |
efe8c878d2bf2d31c67427bbc040f58d142458e3 | Use popen method to add user vmmaster and fix copying of vmmaster/home files | sh0ked/vmmaster,2gis/vmmaster,2gis/vmmaster,sh0ked/vmmaster,2gis/vmmaster | vmmaster/core/utils/init.py | vmmaster/core/utils/init.py | import subprocess
import crypt
import os
from os.path import expanduser
from .print_utils import cin, cout, OKGREEN, WARNING, FAIL
from vmmaster import package_dir
from .system_utils import run_command
from .utils import change_user_vmmaster
def files(path):
for path, subdirs, filenames in os.walk(path):
for filename in filenames:
yield os.path.join(path, filename)
def useradd(home):
password = 'vmmaster'
encrypted_password = crypt.crypt(password, "22")
shell = '/bin/bash'
group = 'libvirtd'
user_add = subprocess.Popen(
["sudo", "useradd",
"--create-home", "--home-dir=%s" % home,
"--groups=%s" % group,
"--shell=%s" % shell,
"-p", encrypted_password,
"vmmaster"], stdin=subprocess.PIPE
)
output, err = user_add.communicate()
if err:
cout(repr(err), color=FAIL)
exit(1)
def copy_files_to_home(home):
copy = ["/bin/cp", "-r", package_dir() + "home" + os.sep + ".", home]
return_code, output = run_command(copy)
change_user_vmmaster()
if return_code != 0:
cout("\nFailed to copy files to home dir: %s\n" % home_dir(), color=FAIL)
exit(output)
def home_dir():
user_path = "~%s" % "vmmaster"
home = expanduser(user_path)
if user_path == home:
return None
return home
def init():
home = '/var/lib/vmmaster'
cout("Please input absolute path to home directory for 'vmmaster'\n")
cout("[default:%s]:" % home, color=WARNING)
abspath = cin()
abspath = abspath.strip()
if abspath:
home = abspath
useradd(home)
copy_files_to_home(home)
cout("\nvmmaster successfully inited in %s\n" % home_dir(), color=OKGREEN)
| import subprocess
import crypt
import os
from os.path import expanduser
from .print_utils import cin, cout, OKGREEN, WARNING, FAIL
from vmmaster import package_dir
from .system_utils import run_command
from .utils import change_user_vmmaster
def files(path):
for path, subdirs, filenames in os.walk(path):
for filename in filenames:
yield os.path.join(path, filename)
def useradd(home):
password = 'vmmaster'
encrypted_password = crypt.crypt(password, "22")
shell = '/bin/bash'
group = 'libvirtd'
subprocess.call(
["sudo", "/usr/sbin/useradd",
"--create-home", "--home-dir=%s" % home,
"--groups=%s" % group,
"--shell=%s" % shell,
"-p", encrypted_password,
"vmmaster"]
)
def copy_files_to_home(home):
copy = ["/bin/cp", "-r", package_dir() + "home" + os.sep + ".", home]
change_user_vmmaster()
return_code, output = run_command(copy)
if return_code != 0:
cout("\nFailed to copy files to home dir: %s\n" % home_dir(), color=FAIL)
exit(output)
def home_dir():
user_path = "~%s" % "vmmaster"
home = expanduser(user_path)
if user_path == home:
return None
return home
def init():
home = '/var/lib/vmmaster'
cout("Please input absolute path to home directory for 'vmmaster'\n")
cout("[default:%s]:" % home, color=WARNING)
abspath = cin()
abspath = abspath.strip()
if abspath:
home = abspath
useradd(home)
copy_files_to_home(home)
cout("\nvmmaster successfully inited in %s\n" % home_dir(), color=OKGREEN)
| mit | Python |
bbbeeb0099138730746ce539174d806ab172351f | remove wsgi service | vmthunder/virtman | vmthunder/cmd/vmthunderd.py | vmthunder/cmd/vmthunderd.py | #!/usr/bin/env python
import sys
import threading
import time
from oslo.config import cfg
from vmthunder import compute
from vmthunder.openstack.common import log as logging
#TODO: Auto determine host ip if not filled in conf file
host_opts = [
cfg.StrOpt('host_ip',
default='10.107.14.170',
help='localhost ip provide VMThunder service'),
cfg.StrOpt('host_port',
default='8001',
help='localhost port to provide VMThunder service'),
cfg.IntOpt('heartbeat_interval',
default=20,
help='localhost heartbeat interval'),
]
CONF = cfg.CONF
CONF.register_opts(host_opts)
def start():
cn = compute.Compute()
class HeartBeater(threading.Thread):
def __init__(self, thread_name):
super(HeartBeater, self).__init__(name=thread_name)
def run(self):
def clock():
LOG = logging.getLogger(__name__)
LOG.debug("At %s heartbeat once" % time.asctime())
cn.heartbeat()
time.sleep(CONF.heartbeat_interval)
#TODO: the max depth of recursion
clock()
clock()
heartbeat = HeartBeater('heartbeat')
heartbeat.start()
#TODO:!!!
server = wsgi.Server('vmthunder-api', path='/root/packages/VMThunder/etc/vmthunder/api-paste.ini') #or path = ${a specified path} like '/root/VMThunder/etc/api-paste.ini'
server.start()
server.wait()
if __name__ == '__main__':
CONF(sys.argv[1:], project='vmthunder',
default_config_files = ['/root/packages/VMThunder/etc/vmthunder/vmthunder.conf'])
logging.setup('vmthunder')
start()
| #!/usr/bin/env python
import sys
import threading
import time
from oslo.config import cfg
from vmthunder import compute
from vmthunder.common import wsgi
from vmthunder.openstack.common import log as logging
#TODO: Auto determine host ip if not filled in conf file
host_opts = [
cfg.StrOpt('host_ip',
default='10.107.14.170',
help='localhost ip provide VMThunder service'),
cfg.StrOpt('host_port',
default='8001',
help='localhost port to provide VMThunder service'),
cfg.IntOpt('heartbeat_interval',
default=20,
help='localhost heartbeat interval'),
]
CONF = cfg.CONF
CONF.register_opts(host_opts)
def start():
cn = compute.Compute()
class HeartBeater(threading.Thread):
def __init__(self, thread_name):
super(HeartBeater, self).__init__(name=thread_name)
def run(self):
def clock():
LOG = logging.getLogger(__name__)
LOG.debug("At %s heartbeat once" % time.asctime())
cn.heartbeat()
time.sleep(CONF.heartbeat_interval)
#TODO: the max depth of recursion
clock()
clock()
heartbeat = HeartBeater('heartbeat')
heartbeat.start()
#TODO:!!!
server = wsgi.Server('vmthunder-api', path='/root/packages/VMThunder/etc/vmthunder/api-paste.ini') #or path = ${a specified path} like '/root/VMThunder/etc/api-paste.ini'
server.start()
server.wait()
if __name__ == '__main__':
CONF(sys.argv[1:], project='vmthunder',
default_config_files = ['/root/packages/VMThunder/etc/vmthunder/vmthunder.conf'])
logging.setup('vmthunder')
start()
| apache-2.0 | Python |
cde2ef098ee5eb444c16ab96aa00d5ef6390d936 | rename test case | yngcan/patentprocessor,yngcan/patentprocessor,nikken1/patentprocessor,nikken1/patentprocessor,funginstitute/patentprocessor,nikken1/patentprocessor,funginstitute/patentprocessor,funginstitute/patentprocessor,yngcan/patentprocessor | lib/new_xml_parsing/test_xml_driver.py | lib/new_xml_parsing/test_xml_driver.py | #!/usr/bin/env python
import os
import re
import unittest
from xml_driver import XMLElement, XMLHandler
from xml.sax import make_parser, handler
# Directory of test files
xml_files = [x for x in os.listdir('test_xml_files')
if re.match(r"2012_\d.xml", x) != None] # Match fixtures
parsed_xml = []
for xf in xml_files:
parser = make_parser()
xmlhandler = XMLHandler()
parser.setContentHandler(xmlhandler)
parser.setFeature(handler.feature_external_ges, False)
parser.parse('test_xml_files/'+xf)
parsed_xml.append(xmlhandler.root)
class Test_XMLElement(unittest.TestCase):
def setUp(self):
# sanity check
self.assertTrue(xml_files)
def test_basic_xml_tag_counts(self):
xmlhandler = XMLHandler()
parser.setContentHandler(xmlhandler)
parser.setFeature(handler.feature_external_ges, False)
parser.parse('test_xml_files/basic.xml')
self.assertTrue(xmlhandler.root)
root = xmlhandler.root
self.assertTrue(len(root.a.b) == 2)
unittest.main()
| #!/usr/bin/env python
import os
import re
import unittest
from xml_driver import XMLElement, XMLHandler
from xml.sax import make_parser, handler
# Directory of test files
xml_files = [x for x in os.listdir('test_xml_files')
if re.match(r"2012_\d.xml", x) != None] # Match fixtures
parsed_xml = []
for xf in xml_files:
parser = make_parser()
xmlhandler = XMLHandler()
parser.setContentHandler(xmlhandler)
parser.setFeature(handler.feature_external_ges, False)
parser.parse('test_xml_files/'+xf)
parsed_xml.append(xmlhandler.root)
class Test_XMLElement(unittest.TestCase):
def setUp(self):
# sanity check
self.assertTrue(xml_files)
def test_basic_xml(self):
xmlhandler = XMLHandler()
parser.setContentHandler(xmlhandler)
parser.setFeature(handler.feature_external_ges, False)
parser.parse('test_xml_files/basic.xml')
self.assertTrue(xmlhandler.root)
root = xmlhandler.root
self.assertTrue(len(root.a.b) == 2)
unittest.main()
| bsd-2-clause | Python |
5dc9f2f376b5ac918c1872e1270a782a9ef45ac9 | Make sure that auto-detected task only has one sub-task | CKrawczyk/python-reducers-for-caesar | panoptes_aggregation/extractors/workflow_extractor_config.py | panoptes_aggregation/extractors/workflow_extractor_config.py | def workflow_extractor_config(tasks):
extractor_config = {}
for task_key, task in tasks.items():
if task['type'] == 'drawing':
tools_config = {}
for tdx, tool in enumerate(task['tools']):
if ((tool['type'] == 'polygon') and
(len(tool['details']) == 1) and
(tool['details'][0]['type'] == 'text')):
# this is very ugly but I can't think of a better way to auto detect this
tools_config.setdefault('poly_line_text_extractor'.format(tool['type']), []).append(tdx)
else:
tools_config.setdefault('{0}_extractor'.format(tool['type']), []).append(tdx)
extractor_config[task_key] = tools_config
elif task['type'] in ['single', 'multiple']:
extractor_config[task_key] = 'question_extractor'
elif task['type'] == 'survey':
extractor_config[task_key] = 'survey_extractor'
return extractor_config
| def workflow_extractor_config(tasks):
extractor_config = {}
for task_key, task in tasks.items():
if task['type'] == 'drawing':
tools_config = {}
for tdx, tool in enumerate(task['tools']):
if ((tool['type'] == 'polygon') and
(len(tool['details']) > 0) and
(tool['details'][0]['type'] == 'text')):
# this is very ugly but I can't think of a better way to auto detect this
tools_config.setdefault('poly_line_text_extractor'.format(tool['type']), []).append(tdx)
else:
tools_config.setdefault('{0}_extractor'.format(tool['type']), []).append(tdx)
extractor_config[task_key] = tools_config
elif task['type'] in ['single', 'multiple']:
extractor_config[task_key] = 'question_extractor'
elif task['type'] == 'survey':
extractor_config[task_key] = 'survey_extractor'
return extractor_config
| apache-2.0 | Python |
401db4ee8c67d065b4383e36d7921f6614e4e2c4 | fix tabs blowing up unit tests that use a test client.. we agree not to have perfect tabs during testing | caktus/rapidsms,lsgunth/rapidsms,eHealthAfrica/rapidsms,dimagi/rapidsms,lsgunth/rapidsms,eHealthAfrica/rapidsms,eHealthAfrica/rapidsms,peterayeni/rapidsms,unicefuganda/edtrac,unicefuganda/edtrac,catalpainternational/rapidsms,catalpainternational/rapidsms,dimagi/rapidsms,ken-muturi/rapidsms,unicefuganda/edtrac,catalpainternational/rapidsms,caktus/rapidsms,lsgunth/rapidsms,lsgunth/rapidsms,caktus/rapidsms,catalpainternational/rapidsms,peterayeni/rapidsms,peterayeni/rapidsms,ken-muturi/rapidsms,peterayeni/rapidsms,ehealthafrica-ci/rapidsms,ehealthafrica-ci/rapidsms,ehealthafrica-ci/rapidsms,ken-muturi/rapidsms | lib/rapidsms/templatetags/tabs_tags.py | lib/rapidsms/templatetags/tabs_tags.py | #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import types
import threading
from functools import wraps
from django import template
from django.conf import settings
from django.core.urlresolvers import get_resolver, reverse, RegexURLPattern
from django.utils.importlib import import_module
from django.template import Variable
register = template.Library()
class Tab(object):
def __init__(self, view, caption=None):
self._caption = caption
self._view = view
def _auto_caption(self):
func_name = self._view.split('.')[-1] # my_view
return func_name.replace("_", " ").title() # My View
@property
def url(self):
"""
Return the URL of this tab.
Warning: If this tab's view function cannot be reversed, Django
will silently ignore the exception, and return the value of the
TEMPLATE_STRING_IF_INVALID setting.
"""
return reverse(self._view)
@property
def caption(self):
return self._caption or self._auto_caption()
# adapted from ubernostrum's django-template-utils. it didn't seem
# substantial enough to add a dependency, so i've just pasted it.
class TabsNode(template.Node):
def __init__(self, tabs, varname):
self.tabs = tabs
self.varname = varname
def render(self, context):
# try to find a request variable, but don't blow up entirely if we don't find it
# (this no blow up property is mostly used during testing)
try:
request = Variable("request").resolve(context)
except Exception as e:
return ""
for tab in self.tabs:
tab.is_active = tab.url == request.get_full_path()
context[self.varname] = self.tabs
return ""
@register.tag
def get_tabs(parser, token):
"""
Retrive a list of the tabs for this project, and store them in a
named context variable. Returns nothing, via `ContextUpdatingNode`.
Syntax::
{% get_tabs as [varname] %}
Example::
{% get_tabs as tabs %}
"""
args = token.contents.split()
tag_name = args.pop(0)
if len(args) != 2:
raise template.TemplateSyntaxError(
"The {%% %s %%} tag requires two arguments" % (tag_name))
if args[0] != "as":
raise template.TemplateSyntaxError(
'The second argument to the {%% %s %%} tag must be "as"' %
(tag_name))
tabs = [Tab(view, caption) for view, caption in settings.RAPIDSMS_TABS]
return TabsNode(tabs, str(args[1]))
| #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import types
import threading
from functools import wraps
from django import template
from django.conf import settings
from django.core.urlresolvers import get_resolver, reverse, RegexURLPattern
from django.utils.importlib import import_module
from django.template import Variable
register = template.Library()
class Tab(object):
def __init__(self, view, caption=None):
self._caption = caption
self._view = view
def _auto_caption(self):
func_name = self._view.split('.')[-1] # my_view
return func_name.replace("_", " ").title() # My View
@property
def url(self):
"""
Return the URL of this tab.
Warning: If this tab's view function cannot be reversed, Django
will silently ignore the exception, and return the value of the
TEMPLATE_STRING_IF_INVALID setting.
"""
return reverse(self._view)
@property
def caption(self):
return self._caption or self._auto_caption()
# adapted from ubernostrum's django-template-utils. it didn't seem
# substantial enough to add a dependency, so i've just pasted it.
class TabsNode(template.Node):
def __init__(self, tabs, varname):
self.tabs = tabs
self.varname = varname
def render(self, context):
request = Variable("request").resolve(context)
for tab in self.tabs:
tab.is_active = tab.url == request.get_full_path()
context[self.varname] = self.tabs
return ""
@register.tag
def get_tabs(parser, token):
"""
Retrive a list of the tabs for this project, and store them in a
named context variable. Returns nothing, via `ContextUpdatingNode`.
Syntax::
{% get_tabs as [varname] %}
Example::
{% get_tabs as tabs %}
"""
args = token.contents.split()
tag_name = args.pop(0)
if len(args) != 2:
raise template.TemplateSyntaxError(
"The {%% %s %%} tag requires two arguments" % (tag_name))
if args[0] != "as":
raise template.TemplateSyntaxError(
'The second argument to the {%% %s %%} tag must be "as"' %
(tag_name))
tabs = [Tab(view, caption) for view, caption in settings.RAPIDSMS_TABS]
return TabsNode(tabs, str(args[1]))
| bsd-3-clause | Python |
78dea51cb04d6c5bd20dacd1eacae6d9e270dfb6 | allow create registration for Event Manager | it-projects-llc/website-addons,it-projects-llc/website-addons,it-projects-llc/website-addons | website_event_attendee_signup/models/event_registration.py | website_event_attendee_signup/models/event_registration.py | # -*- coding: utf-8 -*-
from odoo import api, models
class EventRegistration(models.Model):
_inherit = "event.registration"
@api.model
def create(self, vals):
res = super(EventRegistration, self).create(vals)
if res.event_id.attendee_signup and res.attendee_partner_id:
login = res.attendee_partner_id.email
user = self.env['res.users']\
.search([('login', '=ilike', login)])
if not user:
user = self.env['res.users']\
.sudo()\
._signup_create_user({
'login': login,
'partner_id': res.attendee_partner_id.id,
})
user.partner_id.signup_prepare()
return res
| # -*- coding: utf-8 -*-
from odoo import api, models
class EventRegistration(models.Model):
_inherit = "event.registration"
@api.model
def create(self, vals):
res = super(EventRegistration, self).create(vals)
if res.event_id.attendee_signup and res.attendee_partner_id:
login = res.attendee_partner_id.email
user = self.env['res.users']\
.search([('login', '=ilike', login)])
if not user:
user = self.env['res.users']\
._signup_create_user({
'login': login,
'partner_id': res.attendee_partner_id.id,
})
user.partner_id.signup_prepare()
return res
| mit | Python |
9a7c51054f52ce845408b99680e8b169a38e5089 | handle struct columns with NA elements | cpcloud/ibis,ibis-project/ibis,cpcloud/ibis,ibis-project/ibis,ibis-project/ibis,cpcloud/ibis,ibis-project/ibis,cpcloud/ibis | ibis/backends/pandas/execution/structs.py | ibis/backends/pandas/execution/structs.py | """Pandas backend execution of struct fields and literals."""
import collections
import functools
import pandas as pd
from pandas.core.groupby import SeriesGroupBy
import ibis.expr.operations as ops
from ibis.backends.pandas.dispatch import execute_node
@execute_node.register(ops.StructField, collections.abc.Mapping)
def execute_node_struct_field_dict(op, data, **kwargs):
return data[op.field]
@execute_node.register(ops.StructField, type(None))
def execute_node_struct_field_none(op, data, **kwargs):
return None
@execute_node.register(ops.StructField, pd.Series)
def execute_node_struct_field_series(op, data, **kwargs):
field = op.field
return data.map(functools.partial(_safe_getter, field=field)).rename(field)
def _safe_getter(value, field: str):
try:
return value[field]
except TypeError:
return value
@execute_node.register(ops.StructField, SeriesGroupBy)
def execute_node_struct_field_series_group_by(op, data, **kwargs):
field = op.field
return (
data.obj.map(functools.partial(_safe_getter, field=field))
.rename(field)
.groupby(data.grouper.groupings)
)
| """Pandas backend execution of struct fields and literals."""
import collections
import operator
import pandas as pd
from pandas.core.groupby import SeriesGroupBy
import ibis.expr.operations as ops
from ibis.backends.pandas.dispatch import execute_node
@execute_node.register(ops.StructField, collections.abc.Mapping)
def execute_node_struct_field_dict(op, data, **kwargs):
return data[op.field]
@execute_node.register(ops.StructField, pd.Series)
def execute_node_struct_field_series(op, data, **kwargs):
field = op.field
return data.map(operator.itemgetter(field)).rename(field)
@execute_node.register(ops.StructField, SeriesGroupBy)
def execute_node_struct_field_series_group_by(op, data, **kwargs):
field = op.field
return (
data.obj.map(operator.itemgetter(field))
.rename(field)
.groupby(data.grouper.groupings)
)
| apache-2.0 | Python |
f7cc06046786d6345cbaa7712eab5038f8fbe9f6 | Remove debug prints | waltermoreira/dockeransible,waltermoreira/dockeransible | app_builder/app_builder_image/concat_roles.py | app_builder/app_builder_image/concat_roles.py | import glob
import os
import shutil
import subprocess
import yaml
def create_role(role):
ret = subprocess.check_output(
'ansible-galaxy init {}'.format(role).split())
if not ret.strip().endswith('created successfully'):
raise Exception('could not create role "{}"'.format(role))
def get_metadata(role):
main = open(os.path.join(role, 'meta/main.yml'))
return yaml.load(main)
def set_metadata(role, metadata):
new_main = os.path.join(role, 'meta/main.yml.new')
orig_main = os.path.join(role, 'meta/main.yml')
with open(new_main, 'w') as out:
yaml.dump(metadata, out, default_flow_style=False, explicit_start=True)
os.rename(new_main, orig_main)
def add_dependency(src_role, target_role):
"""Add metadata saying that 'target_role' depends on 'src_role'"""
md = get_metadata(target_role)
deps = md.setdefault('dependencies', [])
deps.append(os.path.join(target_role, 'roles', src_role))
set_metadata(target_role, md)
def sub_roles(role):
try:
return glob.glob(os.path.join(role, 'roles/*'))
except OSError:
return []
def fix_dependency(role, for_destination):
metadata = get_metadata(role)
deps = metadata.setdefault('dependencies', [])
def it():
for dep in deps:
yield os.path.join(for_destination, 'roles', dep)
metadata['dependencies'] = list(it())
set_metadata(role, metadata)
def fix_dependencies(src_role, for_destination):
for role in sub_roles(src_role):
fix_dependencies(role, for_destination)
fix_dependency(src_role, for_destination)
def move(src_role, target_role, copy=False):
op = shutil.copytree if copy else shutil.move
try:
os.makedirs(os.path.join(target_role, 'roles'))
except OSError:
pass
fix_dependencies(src_role, for_destination=target_role)
op(src_role, os.path.join(target_role, 'roles', src_role))
add_dependency(src_role, target_role)
def concat(role1, role2, into, copy=False):
create_role(into)
move(role1, target_role=into, copy=copy)
move(role2, target_role=into, copy=copy)
| import glob
import os
import shutil
import subprocess
import yaml
def create_role(role):
ret = subprocess.check_output(
'ansible-galaxy init {}'.format(role).split())
if not ret.strip().endswith('created successfully'):
raise Exception('could not create role "{}"'.format(role))
def get_metadata(role):
main = open(os.path.join(role, 'meta/main.yml'))
return yaml.load(main)
def set_metadata(role, metadata):
new_main = os.path.join(role, 'meta/main.yml.new')
orig_main = os.path.join(role, 'meta/main.yml')
with open(new_main, 'w') as out:
yaml.dump(metadata, out, default_flow_style=False, explicit_start=True)
os.rename(new_main, orig_main)
def add_dependency(src_role, target_role):
"""Add metadata saying that 'target_role' depends on 'src_role'"""
md = get_metadata(target_role)
deps = md.setdefault('dependencies', [])
deps.append(os.path.join(target_role, 'roles', src_role))
set_metadata(target_role, md)
def sub_roles(role):
try:
return glob.glob(os.path.join(role, 'roles/*'))
except OSError:
return []
def fix_dependency(role, for_destination):
metadata = get_metadata(role)
deps = metadata.setdefault('dependencies', [])
def it():
for dep in deps:
print('dep: {}'.format(dep))
print('role: {}'.format(role))
print(' dep.endswith(role)?: {}'.format(dep.endswith(role)))
yield os.path.join(for_destination, 'roles', dep)
metadata['dependencies'] = list(it())
set_metadata(role, metadata)
def fix_dependencies(src_role, for_destination):
for role in sub_roles(src_role):
fix_dependencies(role, for_destination)
fix_dependency(src_role, for_destination)
def move(src_role, target_role, copy=False):
op = shutil.copytree if copy else shutil.move
try:
os.makedirs(os.path.join(target_role, 'roles'))
except OSError:
pass
fix_dependencies(src_role, for_destination=target_role)
op(src_role, os.path.join(target_role, 'roles', src_role))
add_dependency(src_role, target_role)
def concat(role1, role2, into, copy=False):
create_role(into)
move(role1, target_role=into, copy=copy)
move(role2, target_role=into, copy=copy)
| mit | Python |
ec1698c9b9d4d6fe417d80b94ef2c5c88b036de2 | bump version to 0.2.1 for adding an RST readme | kratsg/optimization,kratsg/optimization,kratsg/optimization | root_optimize/__init__.py | root_optimize/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-,
from __future__ import absolute_import
from __future__ import print_function
__version__ = '0.2.1'
__all__ = ['json_encoder',
'utils']
| #!/usr/bin/env python
# -*- coding: utf-8 -*-,
from __future__ import absolute_import
from __future__ import print_function
__version__ = '0.2.0'
__all__ = ['json_encoder',
'utils']
| mit | Python |
e743f8003e5c876b0d9c71c201c1058b5bc02340 | update plotting example for new colorbar API | jdreaver/vispy,Eric89GXL/vispy,kkuunnddaannkk/vispy,inclement/vispy,kkuunnddaannkk/vispy,QuLogic/vispy,srinathv/vispy,RebeccaWPerry/vispy,michaelaye/vispy,bollu/vispy,bollu/vispy,Eric89GXL/vispy,kkuunnddaannkk/vispy,ghisvail/vispy,inclement/vispy,Eric89GXL/vispy,drufat/vispy,RebeccaWPerry/vispy,inclement/vispy,drufat/vispy,jdreaver/vispy,srinathv/vispy,jdreaver/vispy,drufat/vispy,ghisvail/vispy,dchilds7/Deysha-Star-Formation,julienr/vispy,ghisvail/vispy,michaelaye/vispy,srinathv/vispy,dchilds7/Deysha-Star-Formation,julienr/vispy,bollu/vispy,RebeccaWPerry/vispy,QuLogic/vispy,julienr/vispy,dchilds7/Deysha-Star-Formation,michaelaye/vispy,QuLogic/vispy | examples/basics/plotting/colorbar.py | examples/basics/plotting/colorbar.py | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# vispy: gallery 1
"""
Plot different styles of ColorBar using vispy.plot
"""
from vispy import plot as vp
fig = vp.Fig(size=(800, 400), show=False)
plot = fig[0, 0]
# note: "bottom" could also be used, but this would
# conflict with top.
positions = ["top", "bottom", "left", "right"]
for position in positions:
plot.colorbar(position=position,
label=position,
clim=(0, 100),
cmap="winter",
border_width=1,
border_color="#212121")
if __name__ == '__main__':
fig.show(run=True)
| # -*- coding: utf-8 -*-
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# vispy: gallery 1
"""
Plot different styles of ColorBar using vispy.plot
"""
from vispy import plot as vp
fig = vp.Fig(size=(800, 400), show=False)
plot = fig[0, 0]
# note: "bottom" could also be used, but this would
# conflict with top.
orientations = ["top", "left", "right"]
for orientation in orientations:
cbar = plot.colorbar(orientation=orientation,
label=orientation,
clim=(0, 100),
cmap="winter",
border_width=1,
border_color="#212121")
if __name__ == '__main__':
fig.show(run=True)
| bsd-3-clause | Python |
068c1a2111ec31acdc91e3ac85f5182fc738b4d6 | Add missing pandas import to plotting/server/elements.py | eteq/bokeh,ericdill/bokeh,ptitjano/bokeh,khkaminska/bokeh,maxalbert/bokeh,roxyboy/bokeh,mindriot101/bokeh,ericmjl/bokeh,rhiever/bokeh,birdsarah/bokeh,xguse/bokeh,justacec/bokeh,dennisobrien/bokeh,timsnyder/bokeh,htygithub/bokeh,deeplook/bokeh,muku42/bokeh,draperjames/bokeh,bokeh/bokeh,tacaswell/bokeh,draperjames/bokeh,maxalbert/bokeh,maxalbert/bokeh,saifrahmed/bokeh,aiguofer/bokeh,rothnic/bokeh,canavandl/bokeh,bsipocz/bokeh,muku42/bokeh,PythonCharmers/bokeh,rhiever/bokeh,saifrahmed/bokeh,timothydmorton/bokeh,timsnyder/bokeh,daodaoliang/bokeh,xguse/bokeh,khkaminska/bokeh,msarahan/bokeh,timsnyder/bokeh,srinathv/bokeh,philippjfr/bokeh,stuart-knock/bokeh,phobson/bokeh,birdsarah/bokeh,Karel-van-de-Plassche/bokeh,rothnic/bokeh,almarklein/bokeh,laurent-george/bokeh,stuart-knock/bokeh,paultcochrane/bokeh,justacec/bokeh,ptitjano/bokeh,aiguofer/bokeh,percyfal/bokeh,ChinaQuants/bokeh,ahmadia/bokeh,azjps/bokeh,aiguofer/bokeh,canavandl/bokeh,caseyclements/bokeh,ericmjl/bokeh,caseyclements/bokeh,tacaswell/bokeh,stonebig/bokeh,jakirkham/bokeh,paultcochrane/bokeh,aavanian/bokeh,aiguofer/bokeh,srinathv/bokeh,gpfreitas/bokeh,jakirkham/bokeh,awanke/bokeh,matbra/bokeh,carlvlewis/bokeh,stonebig/bokeh,percyfal/bokeh,quasiben/bokeh,lukebarnard1/bokeh,ericmjl/bokeh,awanke/bokeh,jakirkham/bokeh,canavandl/bokeh,msarahan/bokeh,Karel-van-de-Plassche/bokeh,aiguofer/bokeh,dennisobrien/bokeh,akloster/bokeh,saifrahmed/bokeh,philippjfr/bokeh,abele/bokeh,aavanian/bokeh,josherick/bokeh,DuCorey/bokeh,paultcochrane/bokeh,josherick/bokeh,phobson/bokeh,CrazyGuo/bokeh,paultcochrane/bokeh,rothnic/bokeh,tacaswell/bokeh,timsnyder/bokeh,msarahan/bokeh,rhiever/bokeh,gpfreitas/bokeh,xguse/bokeh,khkaminska/bokeh,bokeh/bokeh,deeplook/bokeh,schoolie/bokeh,ChristosChristofidis/bokeh,KasperPRasmussen/bokeh,timothydmorton/bokeh,eteq/bokeh,ptitjano/bokeh,eteq/bokeh,abele/bokeh,khkaminska/bokeh,alan-unravel/bokeh,justa
cec/bokeh,satishgoda/bokeh,akloster/bokeh,matbra/bokeh,schoolie/bokeh,gpfreitas/bokeh,KasperPRasmussen/bokeh,quasiben/bokeh,satishgoda/bokeh,srinathv/bokeh,azjps/bokeh,htygithub/bokeh,tacaswell/bokeh,sahat/bokeh,ChristosChristofidis/bokeh,timothydmorton/bokeh,percyfal/bokeh,bsipocz/bokeh,deeplook/bokeh,dennisobrien/bokeh,lukebarnard1/bokeh,lukebarnard1/bokeh,clairetang6/bokeh,stonebig/bokeh,laurent-george/bokeh,draperjames/bokeh,rs2/bokeh,stuart-knock/bokeh,ericdill/bokeh,aavanian/bokeh,roxyboy/bokeh,satishgoda/bokeh,bokeh/bokeh,matbra/bokeh,rs2/bokeh,stuart-knock/bokeh,DuCorey/bokeh,mindriot101/bokeh,PythonCharmers/bokeh,bokeh/bokeh,jplourenco/bokeh,ericdill/bokeh,roxyboy/bokeh,sahat/bokeh,alan-unravel/bokeh,PythonCharmers/bokeh,ericmjl/bokeh,jplourenco/bokeh,philippjfr/bokeh,evidation-health/bokeh,eteq/bokeh,phobson/bokeh,laurent-george/bokeh,dennisobrien/bokeh,abele/bokeh,quasiben/bokeh,canavandl/bokeh,bsipocz/bokeh,mindriot101/bokeh,Karel-van-de-Plassche/bokeh,ChristosChristofidis/bokeh,ptitjano/bokeh,caseyclements/bokeh,phobson/bokeh,rs2/bokeh,Karel-van-de-Plassche/bokeh,josherick/bokeh,abele/bokeh,daodaoliang/bokeh,timsnyder/bokeh,almarklein/bokeh,clairetang6/bokeh,ChinaQuants/bokeh,azjps/bokeh,aavanian/bokeh,ericmjl/bokeh,CrazyGuo/bokeh,clairetang6/bokeh,deeplook/bokeh,aavanian/bokeh,evidation-health/bokeh,josherick/bokeh,percyfal/bokeh,awanke/bokeh,jakirkham/bokeh,bokeh/bokeh,stonebig/bokeh,ChinaQuants/bokeh,ChinaQuants/bokeh,draperjames/bokeh,akloster/bokeh,akloster/bokeh,satishgoda/bokeh,caseyclements/bokeh,birdsarah/bokeh,clairetang6/bokeh,CrazyGuo/bokeh,ericdill/bokeh,carlvlewis/bokeh,rothnic/bokeh,KasperPRasmussen/bokeh,DuCorey/bokeh,ahmadia/bokeh,gpfreitas/bokeh,bsipocz/bokeh,jakirkham/bokeh,carlvlewis/bokeh,timothydmorton/bokeh,daodaoliang/bokeh,evidation-health/bokeh,KasperPRasmussen/bokeh,carlvlewis/bokeh,azjps/bokeh,schoolie/bokeh,ptitjano/bokeh,justacec/bokeh,laurent-george/bokeh,muku42/bokeh,awanke/bokeh,dennisobrien/bokeh,evidation-health/bokeh,
sahat/bokeh,htygithub/bokeh,Karel-van-de-Plassche/bokeh,lukebarnard1/bokeh,matbra/bokeh,phobson/bokeh,almarklein/bokeh,jplourenco/bokeh,xguse/bokeh,KasperPRasmussen/bokeh,azjps/bokeh,PythonCharmers/bokeh,ahmadia/bokeh,rs2/bokeh,ahmadia/bokeh,maxalbert/bokeh,htygithub/bokeh,mutirri/bokeh,birdsarah/bokeh,jplourenco/bokeh,alan-unravel/bokeh,saifrahmed/bokeh,ChristosChristofidis/bokeh,mindriot101/bokeh,msarahan/bokeh,roxyboy/bokeh,DuCorey/bokeh,muku42/bokeh,DuCorey/bokeh,daodaoliang/bokeh,draperjames/bokeh,mutirri/bokeh,alan-unravel/bokeh,mutirri/bokeh,srinathv/bokeh,philippjfr/bokeh,CrazyGuo/bokeh,rs2/bokeh,rhiever/bokeh,mutirri/bokeh,schoolie/bokeh,philippjfr/bokeh,schoolie/bokeh,percyfal/bokeh | examples/plotting/server/elements.py | examples/plotting/server/elements.py | import pandas as pd
from bokeh.plotting import *
from bokeh.sampledata import periodic_table
elements = periodic_table.elements
elements = elements[elements['atomic number'] <= 82]
elements = elements[~pd.isnull(elements['melting point'])]
mass = [float(x.strip('[]')) for x in elements['atomic mass']]
elements['atomic mass'] = mass
palette = list(reversed([
'#67001f','#b2182b','#d6604d','#f4a582','#fddbc7','#f7f7f7','#d1e5f0','#92c5de','#4393c3','#2166ac','#053061'
]))
melting_points = elements['melting point']
low = min(melting_points)
high= max(melting_points)
melting_point_inds = [int(10*(x-low)/(high-low)) for x in melting_points] #gives items in colors a value from 0-10
meltingpointcolors = [palette[i] for i in melting_point_inds]
output_server("elements")
hold()
circle(elements['atomic mass'], elements['density'] ,
color=meltingpointcolors, plot_width=1200, line_color='black',fill_alpha=0.8,
size=12, title='Density vs Atomic Weight of Elements (colored by melting point)',
background_fill= '#cccccc', tools='pan, wheel_zoom, box_zoom, reset')
text(elements['atomic mass'], elements['density'] +0.3,
text=elements['symbol'],angle=0, text_color='#333333',
text_align="center", text_font_size="10pt")
xaxis().axis_label='atomic weight (amu)'
yaxis().axis_label='density (g/cm^3)'
grid().grid_line_color='white'
show()
| from bokeh.plotting import *
from bokeh.sampledata import periodic_table
elements = periodic_table.elements
elements = elements[elements['atomic number'] <= 82]
elements = elements[~pd.isnull(elements['melting point'])]
mass = [float(x.strip('[]')) for x in elements['atomic mass']]
elements['atomic mass'] = mass
palette = list(reversed([
'#67001f','#b2182b','#d6604d','#f4a582','#fddbc7','#f7f7f7','#d1e5f0','#92c5de','#4393c3','#2166ac','#053061'
]))
melting_points = elements['melting point']
low = min(melting_points)
high= max(melting_points)
melting_point_inds = [int(10*(x-low)/(high-low)) for x in melting_points] #gives items in colors a value from 0-10
meltingpointcolors = [palette[i] for i in melting_point_inds]
output_server("elements")
hold()
circle(elements['atomic mass'], elements['density'] ,
color=meltingpointcolors, plot_width=1200, line_color='black',fill_alpha=0.8,
size=12, title='Density vs Atomic Weight of Elements (colored by melting point)',
background_fill= '#cccccc', tools='pan, wheel_zoom, box_zoom, reset')
text(elements['atomic mass'], elements['density'] +0.3,
text=elements['symbol'],angle=0, text_color='#333333',
text_align="center", text_font_size="10pt")
xaxis().axis_label='atomic weight (amu)'
yaxis().axis_label='density (g/cm^3)'
grid().grid_line_color='white'
show()
| bsd-3-clause | Python |
b71ee1ee1a4a2222bf3afcab8aa87f09dea7ef7c | Add data_src attr as URL source in image_extractor (#58) | googleinterns/stampify,googleinterns/stampify,googleinterns/stampify | extraction/content_extractors/image_extractor.py | extraction/content_extractors/image_extractor.py | """This script checks whether DOM has image tag or not and
creates and returns the Image object"""
import bs4
from data_models.image import Image
from extraction.content_extractors.interface_content_extractor import \
IContentExtractor
from extraction.utils import media_extraction_utils as utils
class ImageExtractor(IContentExtractor):
"""This class inherits IContentExtractor to extract Images"""
def validate_and_extract(self, node: bs4.element):
if node.name == 'img' and node.has_attr('src'):
return self.__create_image(node)
if node.name == 'figure':
img_tag = node.find('img')
if img_tag and img_tag.has_attr('src'):
return self.__create_image(img_tag, node.find('figcaption'))
return None
def __create_image(self, node, caption_tag=None):
"""This method extracts all attributes for the image
and creates Image object instance"""
'''
Prioritizing data-src if present for extracting URL over src
as src is used as lazy loading when data-src is present.
'''
if node.has_attr('data-src'):
image_url = node['data-src']
else:
image_url = node['src']
image_title, image_caption = None, None
if node.has_attr('title'):
image_title = node['title']
image_height, image_width = utils.get_media_size(node)
is_image_gif = image_url.endswith('.gif')
if caption_tag:
image_caption = caption_tag.get_text()
return Image(image_url,
image_height,
image_width,
is_image_gif,
img_caption=image_caption,
img_title=image_title)
| """This script checks whether DOM has image tag or not and
creates and returns the Image object"""
import bs4
from data_models.image import Image
from extraction.content_extractors.interface_content_extractor import \
IContentExtractor
from extraction.utils import media_extraction_utils as utils
class ImageExtractor(IContentExtractor):
"""This class inherits IContentExtractor to extract Images"""
def validate_and_extract(self, node: bs4.element):
if node.name == 'img' and node.has_attr('src'):
return self.__create_image(node)
if node.name == 'figure':
img_tag = node.find('img')
if img_tag and img_tag.has_attr('src'):
return self.__create_image(img_tag, node.find('figcaption'))
return None
def __create_image(self, node, caption_tag=None):
"""This method extracts all attributes for the image
and creates Image object instance"""
image_url = node['src']
image_title, image_caption = None, None
if node.has_attr('title'):
image_title = node['title']
image_height, image_width = utils.get_media_size(node)
is_image_gif = image_url.endswith('.gif')
if caption_tag:
image_caption = caption_tag.get_text()
return Image(image_url,
image_height,
image_width,
is_image_gif,
img_caption=image_caption,
img_title=image_title)
| apache-2.0 | Python |
9b5af6525c1ec2c187c43b10074709f7a93fcb5e | Save correctly transfer screen | alfredoavanzosc/odoomrp-wip-1,jobiols/odoomrp-wip,jorsea/odoomrp-wip,Daniel-CA/odoomrp-wip-public,windedge/odoomrp-wip,factorlibre/odoomrp-wip,alhashash/odoomrp-wip,xpansa/odoomrp-wip,sergiocorato/odoomrp-wip,odoomrp/odoomrp-wip,Eficent/odoomrp-wip,slevenhagen/odoomrp-wip-npg,odoocn/odoomrp-wip,michaeljohn32/odoomrp-wip,ddico/odoomrp-wip,Daniel-CA/odoomrp-wip-public,esthermm/odoomrp-wip,agaldona/odoomrp-wip-1,odoomrp/odoomrp-wip,jobiols/odoomrp-wip,Endika/odoomrp-wip,sergiocorato/odoomrp-wip,oihane/odoomrp-wip,Antiun/odoomrp-wip,invitu/odoomrp-wip,diagramsoftware/odoomrp-wip,raycarnes/odoomrp-wip,agaldona/odoomrp-wip-1,Eficent/odoomrp-wip,esthermm/odoomrp-wip,InakiZabala/odoomrp-wip,dvitme/odoomrp-wip,factorlibre/odoomrp-wip,diagramsoftware/odoomrp-wip,maljac/odoomrp-wip,oihane/odoomrp-wip | stock_picking_package_info/wizard/stock_transfer_details.py | stock_picking_package_info/wizard/stock_transfer_details.py | # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, api
from datetime import datetime
class StockTransferDetails(models.TransientModel):
    """Extend the transfer-details wizard so that package/operation
    information is propagated back to the picking after any change."""
    _inherit = 'stock.transfer_details'

    @api.one
    def do_detailed_transfer(self):
        """Run the standard transfer, then refresh the picking's package
        information (``_catch_operations`` is defined on ``stock.picking``
        elsewhere in this addon — confirm against that model)."""
        result = super(StockTransferDetails, self).do_detailed_transfer()
        self.picking_id._catch_operations()
        return result

    @api.one
    def do_save_for_later(self):
        """Persist the wizard lines as pack operations without processing
        the transfer.

        Lines backed by an existing operation are updated in place; lines
        without one become new operations.  ``no_recompute`` in the context
        avoids retriggering quantity recomputations while saving.
        """
        operation_obj = self.env['stock.pack.operation'].with_context(
            no_recompute=True)
        # Create new and update existing pack operations
        for lstits in [self.item_ids, self.packop_ids]:
            for prod in lstits:
                pack_datas = {
                    'product_id': prod.product_id.id,
                    'product_uom_id': prod.product_uom_id.id,
                    'product_qty': prod.quantity,
                    'package_id': prod.package_id.id,
                    'lot_id': prod.lot_id.id,
                    'location_id': prod.sourceloc_id.id,
                    'location_dest_id': prod.destinationloc_id.id,
                    'result_package_id': prod.result_package_id.id,
                    # Fall back to "now" when the wizard line has no date.
                    'date': prod.date if prod.date else datetime.now(),
                    'owner_id': prod.owner_id.id,
                }
                if prod.packop_id:
                    prod.packop_id.with_context(no_recompute=True).write(
                        pack_datas)
                else:
                    # Bug fix: link the new operation to the current
                    # picking.  Without ``picking_id`` the created
                    # operation is orphaned (the previous revision of this
                    # file set it before every create).
                    pack_datas['picking_id'] = self.picking_id.id
                    operation_obj.create(pack_datas)
        self.picking_id._catch_operations()
        return True
| # -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, api
from datetime import datetime
class StockTransferDetails(models.TransientModel):
    """Extend the transfer-details wizard so that package/operation
    information on the picking stays in sync with the operations edited
    here."""
    _inherit = 'stock.transfer_details'

    @api.one
    def do_detailed_transfer(self):
        """Process the transfer, then refresh the picking's package data
        (``_catch_operations`` is defined on ``stock.picking`` elsewhere
        in this addon — confirm against that model)."""
        result = super(StockTransferDetails, self).do_detailed_transfer()
        self.picking_id._catch_operations()
        return result

    @api.one
    def do_save_for_later(self):
        """Persist the wizard lines as pack operations without
        transferring.

        * line backed by an operation, quantity changed -> split: reduce
          the original operation and create a new one for this line;
        * line backed by an operation, quantity unchanged -> update it;
        * line with no backing operation -> create a new operation.
        """
        operation_obj = self.env['stock.pack.operation']
        # Create new and update existing pack operations
        for lstits in [self.item_ids, self.packop_ids]:
            for prod in lstits:
                pack_datas = {
                    'product_id': prod.product_id.id,
                    'product_uom_id': prod.product_uom_id.id,
                    'product_qty': prod.quantity,
                    'package_id': prod.package_id.id,
                    'lot_id': prod.lot_id.id,
                    'location_id': prod.sourceloc_id.id,
                    'location_dest_id': prod.destinationloc_id.id,
                    'result_package_id': prod.result_package_id.id,
                    # Fall back to "now" when the wizard line has no date.
                    'date': prod.date if prod.date else datetime.now(),
                    'owner_id': prod.owner_id.id,
                }
                if prod.packop_id:
                    if prod.packop_id.product_qty != prod.quantity:
                        # Split: keep the remainder on the original
                        # operation and store this line as a new one.
                        qty = prod.packop_id.product_qty - prod.quantity
                        prod.packop_id.write({'product_qty': qty})
                        pack_datas['picking_id'] = self.picking_id.id
                        operation_obj.create(pack_datas)
                    else:
                        # Bug fix: unchanged-quantity lines were
                        # previously never written back at all.
                        prod.packop_id.write(pack_datas)
                else:
                    # Bug fix: the original ``else`` called
                    # ``prod.packop_id.write`` on an *empty* recordset
                    # (packop_id is falsy here), so brand-new wizard
                    # lines were silently dropped instead of created.
                    pack_datas['picking_id'] = self.picking_id.id
                    operation_obj.create(pack_datas)
        self.picking_id._catch_operations()
        return True
| agpl-3.0 | Python |
e8ae5f2c876426547f10a3d0628c7e6b3602826c | Remove mixin Select2FieldMixin | Polyconseil/django-select2-rocks,Polyconseil/django-select2-rocks,Polyconseil/django-select2-rocks,Polyconseil/django-select2-rocks | select2rocks/fields.py | select2rocks/fields.py | from django import forms
from select2rocks.widgets import AjaxSelect2Widget
def label_from_instance_with_pk(obj, val):
    """Return ``"<pk>:<label>"`` so the client-side select2 widget can
    associate each label with its input (needed for multiple fields).

    :param obj: model instance exposing a ``pk`` attribute.
    :param val: the human-readable label for *obj*.

    Bug fix: the original signature declared a spurious ``self`` first
    parameter even though this is a module-level function; both call
    sites in this module pass only ``(obj, val)``, which raised
    ``TypeError`` at runtime.
    """
    return "{pk}:{val}".format(pk=obj.pk, val=val)
class Select2ModelChoiceField(forms.ModelChoiceField):
    """Single-select model choice field rendered with the AJAX select2
    widget; option labels are prefixed with the instance pk."""
    widget = AjaxSelect2Widget

    def __init__(self, queryset, empty_label="---------", cache_choices=False,
                 required=True, widget=None, label=None, initial=None,
                 help_text='', to_field_name=None, label_from_instance=None,
                 *args, **kwargs):
        """Same contract as ``forms.ModelChoiceField``; the extra
        ``label_from_instance`` callable, when given, overrides the
        default labelling of each model instance."""
        super(Select2ModelChoiceField, self).__init__(
            queryset, empty_label, cache_choices,
            required, widget, label, initial,
            help_text, to_field_name, *args, **kwargs)
        # Keep the parent's labeller around as the fallback when no
        # custom callable was supplied.
        fallback = super(Select2ModelChoiceField, self).label_from_instance
        self._label_from_instance = label_from_instance or fallback
        # Give the widget a back-reference to its field.
        self.widget.field = self

    def label_from_instance(self, obj):
        """Label *obj* and prefix the result with its pk."""
        plain_label = self._label_from_instance(obj)
        return label_from_instance_with_pk(obj, plain_label)
class Select2ModelMultipleChoiceField(forms.ModelMultipleChoiceField):
    """Multi-select model choice field rendered with the AJAX select2
    widget (``multiple`` option enabled); labels carry the pk prefix."""
    widget = AjaxSelect2Widget

    def __init__(self, queryset, empty_label="---------", cache_choices=False,
                 required=True, widget=None, label=None, initial=None,
                 help_text='', to_field_name=None, label_from_instance=None,
                 *args, **kwargs):
        """Same contract as ``forms.ModelMultipleChoiceField``; the extra
        ``label_from_instance`` callable, when given, overrides the
        default labelling of each model instance."""
        super(Select2ModelMultipleChoiceField, self).__init__(
            queryset, cache_choices, required, widget,
            label, initial, help_text, *args, **kwargs)
        # Make sure the widget advertises itself as a multi-select.
        if not self.widget.select2_options:
            self.widget.select2_options = {}
        self.widget.select2_options['multiple'] = True
        # Keep the parent's labeller around as the fallback when no
        # custom callable was supplied.
        fallback = super(Select2ModelMultipleChoiceField,
                         self).label_from_instance
        self._label_from_instance = label_from_instance or fallback
        # Give the widget a back-reference to its field.
        self.widget.field = self

    def label_from_instance(self, objects):
        """Join the pk-prefixed labels of every selected object."""
        labelled = (label_from_instance_with_pk(o, self._label_from_instance(o))
                    for o in objects)
        return ','.join(labelled)

    def to_python(self, values):
        # The widget posts one comma-separated string of pks.
        parent = super(Select2ModelMultipleChoiceField, self)
        return parent.to_python(values.split(','))
| from django import forms
from select2rocks.widgets import AjaxSelect2Widget
class Select2FieldMixin(object):
    """Mixin for model-choice fields rendered with the AJAX select2
    widget.

    Prefixes every option label with the instance primary key
    (``"<pk>:<label>"``) so the client-side widget can associate labels
    with their inputs.
    """
    widget = AjaxSelect2Widget
    def label_from_instance(self, obj):
        # Prefer the caller-supplied labeller stored by the concrete
        # field's __init__ (may be None); otherwise defer to the next
        # class in the MRO (the Django model-choice field base).
        if self._label_from_instance is not None:
            val = self._label_from_instance(obj)
        else:
            val = super(Select2FieldMixin, self).label_from_instance(obj)
        return "{key}:{val}".format(key=obj.pk, val=val)
class Select2ModelChoiceField(Select2FieldMixin, forms.ModelChoiceField):
    """Single-select ``ModelChoiceField`` using the AJAX select2 widget;
    labels get the pk prefix via :class:`Select2FieldMixin`."""
    def __init__(self, queryset, empty_label="---------", cache_choices=False,
                 required=True, widget=None, label=None, initial=None,
                 help_text='', to_field_name=None, label_from_instance=None,
                 *args, **kwargs):
        """Same contract as ``forms.ModelChoiceField``; the extra
        ``label_from_instance`` callable, when given, overrides the
        default labelling of each model instance."""
        super(Select2ModelChoiceField, self).__init__(
            queryset, empty_label, cache_choices,
            required, widget, label, initial,
            help_text, to_field_name, *args, **kwargs)
        # Stored for Select2FieldMixin.label_from_instance; may be None,
        # in which case the mixin falls back to the parent labeller.
        self._label_from_instance = label_from_instance
        # Give the widget a back-reference to its field.
        self.widget.field = self
class Select2ModelMultipleChoiceField(Select2FieldMixin, forms.ModelMultipleChoiceField):
    """Multi-select model choice field using the AJAX select2 widget
    (``multiple`` option enabled); labels get the pk prefix via
    :class:`Select2FieldMixin`.

    NOTE(review): ``empty_label`` and ``to_field_name`` are accepted for
    signature symmetry with ``Select2ModelChoiceField`` but are never
    forwarded to the parent constructor — confirm this is intentional.
    """
    def __init__(self, queryset, empty_label="---------", cache_choices=False,
                 required=True, widget=None, label=None, initial=None,
                 help_text='', to_field_name=None, label_from_instance=None,
                 *args, **kwargs):
        """Same contract as ``forms.ModelMultipleChoiceField``; the extra
        ``label_from_instance`` callable, when given, overrides the
        default labelling of each model instance."""
        super(Select2ModelMultipleChoiceField, self).__init__(
            queryset, cache_choices, required, widget,
            label, initial, help_text, *args, **kwargs)
        # Make sure the widget advertises itself as a multi-select.
        if not self.widget.select2_options:
            self.widget.select2_options = {}
        self.widget.select2_options.update({'multiple': True})
        # Stored for Select2FieldMixin.label_from_instance; may be None.
        self._label_from_instance = label_from_instance
        # Give the widget a back-reference to its field.
        self.widget.field = self
    def label_from_instance(self, objects):
        # One "pk:label" pair per selected object (produced by the mixin
        # via the super() chain), joined with commas.
        return ','.join([super(Select2ModelMultipleChoiceField, self).label_from_instance(obj) for obj in objects])
    def to_python(self, values):
        # The widget posts one comma-separated string of pks.
        return super(Select2ModelMultipleChoiceField, self).to_python(values.split(','))
| bsd-2-clause | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.