commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
1ba88cf7d087c2783306854ea3fbc16c5fe17df4
|
wagtail/utils/compat.py
|
wagtail/utils/compat.py
|
def get_related_model(rel):
# In Django 1.7 and under, the related model is accessed by doing: rel.model
# This was renamed in Django 1.8 to rel.related_model. rel.model now returns
# the base model.
return getattr(rel, 'related_model', rel.model)
|
import django
def get_related_model(rel):
# In Django 1.7 and under, the related model is accessed by doing: rel.model
# This was renamed in Django 1.8 to rel.related_model. rel.model now returns
# the base model.
if django.VERSION >= (1, 8):
return rel.related_model
else:
return rel.model
|
Check Django version instead of hasattr
|
Check Django version instead of hasattr
|
Python
|
bsd-3-clause
|
mayapurmedia/wagtail,chrxr/wagtail,darith27/wagtail,mjec/wagtail,rv816/wagtail,rsalmaso/wagtail,stevenewey/wagtail,KimGlazebrook/wagtail-experiment,kurtw/wagtail,serzans/wagtail,m-sanders/wagtail,KimGlazebrook/wagtail-experiment,JoshBarr/wagtail,JoshBarr/wagtail,inonit/wagtail,kaedroho/wagtail,zerolab/wagtail,FlipperPA/wagtail,m-sanders/wagtail,wagtail/wagtail,mikedingjan/wagtail,janusnic/wagtail,takeshineshiro/wagtail,rv816/wagtail,inonit/wagtail,WQuanfeng/wagtail,iansprice/wagtail,rsalmaso/wagtail,serzans/wagtail,inonit/wagtail,Toshakins/wagtail,nimasmi/wagtail,nutztherookie/wagtail,rjsproxy/wagtail,bjesus/wagtail,Tivix/wagtail,marctc/wagtail,Tivix/wagtail,mikedingjan/wagtail,gasman/wagtail,gasman/wagtail,kaedroho/wagtail,bjesus/wagtail,quru/wagtail,timorieber/wagtail,iho/wagtail,stevenewey/wagtail,mephizzle/wagtail,taedori81/wagtail,nimasmi/wagtail,Klaudit/wagtail,mikedingjan/wagtail,FlipperPA/wagtail,timorieber/wagtail,nrsimha/wagtail,thenewguy/wagtail,takeflight/wagtail,tangentlabs/wagtail,bjesus/wagtail,JoshBarr/wagtail,mayapurmedia/wagtail,FlipperPA/wagtail,Pennebaker/wagtail,mixxorz/wagtail,kurtrwall/wagtail,nilnvoid/wagtail,kurtw/wagtail,nrsimha/wagtail,takeflight/wagtail,takeflight/wagtail,darith27/wagtail,Klaudit/wagtail,kurtrwall/wagtail,rjsproxy/wagtail,marctc/wagtail,m-sanders/wagtail,nilnvoid/wagtail,bjesus/wagtail,thenewguy/wagtail,mephizzle/wagtail,jordij/wagtail,hanpama/wagtail,takeshineshiro/wagtail,hamsterbacke23/wagtail,quru/wagtail,FlipperPA/wagtail,hanpama/wagtail,nimasmi/wagtail,zerolab/wagtail,stevenewey/wagtail,taedori81/wagtail,darith27/wagtail,m-sanders/wagtail,torchbox/wagtail,kurtw/wagtail,hanpama/wagtail,nealtodd/wagtail,taedori81/wagtail,mikedingjan/wagtail,nrsimha/wagtail,Pennebaker/wagtail,mjec/wagtail,wagtail/wagtail,zerolab/wagtail,nilnvoid/wagtail,hanpama/wagtail,timorieber/wagtail,Klaudit/wagtail,quru/wagtail,mephizzle/wagtail,serzans/wagtail,Toshakins/wagtail,Pennebaker/wagtail,mixxorz/wagtail,jordij/wagtail,tangentlabs/wagtail
,mjec/wagtail,WQuanfeng/wagtail,janusnic/wagtail,rsalmaso/wagtail,nealtodd/wagtail,thenewguy/wagtail,chrxr/wagtail,KimGlazebrook/wagtail-experiment,taedori81/wagtail,davecranwell/wagtail,Tivix/wagtail,nilnvoid/wagtail,torchbox/wagtail,hamsterbacke23/wagtail,nealtodd/wagtail,nutztherookie/wagtail,marctc/wagtail,KimGlazebrook/wagtail-experiment,janusnic/wagtail,nrsimha/wagtail,torchbox/wagtail,gogobook/wagtail,hamsterbacke23/wagtail,davecranwell/wagtail,rjsproxy/wagtail,torchbox/wagtail,takeshineshiro/wagtail,kurtrwall/wagtail,davecranwell/wagtail,jordij/wagtail,iho/wagtail,taedori81/wagtail,nimasmi/wagtail,rsalmaso/wagtail,mayapurmedia/wagtail,mixxorz/wagtail,wagtail/wagtail,chrxr/wagtail,gogobook/wagtail,thenewguy/wagtail,quru/wagtail,darith27/wagtail,thenewguy/wagtail,iansprice/wagtail,iho/wagtail,kaedroho/wagtail,gasman/wagtail,jordij/wagtail,hamsterbacke23/wagtail,WQuanfeng/wagtail,marctc/wagtail,rsalmaso/wagtail,Toshakins/wagtail,tangentlabs/wagtail,nutztherookie/wagtail,gasman/wagtail,nutztherookie/wagtail,takeshineshiro/wagtail,janusnic/wagtail,rv816/wagtail,gogobook/wagtail,rjsproxy/wagtail,kurtrwall/wagtail,kaedroho/wagtail,mjec/wagtail,mephizzle/wagtail,nealtodd/wagtail,WQuanfeng/wagtail,iansprice/wagtail,Tivix/wagtail,wagtail/wagtail,JoshBarr/wagtail,stevenewey/wagtail,Pennebaker/wagtail,Klaudit/wagtail,wagtail/wagtail,iansprice/wagtail,tangentlabs/wagtail,chrxr/wagtail,timorieber/wagtail,rv816/wagtail,mayapurmedia/wagtail,jnns/wagtail,zerolab/wagtail,jnns/wagtail,zerolab/wagtail,serzans/wagtail,gasman/wagtail,mixxorz/wagtail,inonit/wagtail,davecranwell/wagtail,jnns/wagtail,Toshakins/wagtail,kaedroho/wagtail,jnns/wagtail,mixxorz/wagtail,iho/wagtail,gogobook/wagtail,takeflight/wagtail,kurtw/wagtail
|
c6f2ff563c08eb43ba3f33bc9aaa2647e78701d2
|
fenced_code_plus/__init__.py
|
fenced_code_plus/__init__.py
|
from fenced_code_plus import FencedCodePlusExtension
from fenced_code_plus import makeExtension
|
from __future__ import absolute_import
from fenced_code_plus.fenced_code_plus import FencedCodePlusExtension
from fenced_code_plus.fenced_code_plus import makeExtension
|
Make import compatable with python3.5
|
Make import compatable with python3.5
|
Python
|
bsd-3-clause
|
amfarrell/fenced-code-plus
|
8b127a3d934470aa20fbff83d06ded2e37d00579
|
deferrable/delay.py
|
deferrable/delay.py
|
"""This may seem like a silly module right now, but we had to
separate this out so that deferrable.py and its sub-modules
could all import it without circular imports."""
MAXIMUM_DELAY_SECONDS = 900
|
"""This may seem like a silly module right now, but we had to
separate this out so that deferrable.py and its sub-modules
could all import it without circular imports."""
# SQS has a hard limit of 900 seconds, and Dockets
# delay queues incur heavy performance penalties,
# so this seems like a reasonable limit for all
MAXIMUM_DELAY_SECONDS = 900
|
Add back some reasoning on the 900 number
|
Add back some reasoning on the 900 number
|
Python
|
mit
|
gamechanger/deferrable
|
e2909520e93e85286bd4393426377e48db243615
|
hastexo_social_auth/oauth2.py
|
hastexo_social_auth/oauth2.py
|
from social.backends.oauth import BaseOAuth2
class HastexoOAuth2(BaseOAuth2):
"""Hastexo OAuth2 authentication backend"""
name = 'hastexo'
AUTHORIZATION_URL = 'https://store.hastexo.com/o/authorize/'
ACCESS_TOKEN_URL = 'https://store.hastexo.com/o/token/'
ACCESS_TOKEN_METHOD = 'POST'
SCOPE_SEPARATOR = ' '
def get_user_details(self, response):
"""Return user details from hastexo account"""
return {
'username': response['username'],
'email': response.get('email', ''),
'first_name': '',
'last_name': '',
}
def user_data(self, access_token, *args, **kwargs):
"""Loads user data from service"""
return self.get_json('https://store.hastexo.com/api/users/', params={
'access_token': access_token
})
|
from social.backends.oauth import BaseOAuth2
class HastexoOAuth2(BaseOAuth2):
"""Hastexo OAuth2 authentication backend"""
name = 'hastexo'
AUTHORIZATION_URL = 'https://store.hastexo.com/o/authorize/'
ACCESS_TOKEN_URL = 'https://store.hastexo.com/o/token/'
ACCESS_TOKEN_METHOD = 'POST'
SCOPE_SEPARATOR = ' '
def get_user_details(self, response):
"""Return user details from hastexo account"""
return {
'username': response.get('username'),
'email': response.get('email', ''),
'first_name': response.get('first_name', ''),
'last_name': response.get('last_name', '')
}
def user_data(self, access_token, *args, **kwargs):
"""Loads user data from service"""
return self.get_json('https://store.hastexo.com/api/login/', params={
'access_token': access_token
})
|
Update user details API call
|
Update user details API call
|
Python
|
bsd-3-clause
|
hastexo/python-social-auth-hastexo,arbrandes/python-social-auth-hastexo
|
52cb80dd92ceabd7d2efe67c0a89f76cd701283b
|
statirator/main.py
|
statirator/main.py
|
import os
import sys
def main():
# init is a special case, cause we want to add statirator.core to
# INSTALLED_APPS, and have the command picked up. we'll handle it in here
if 'init' in sys.argv:
from django.conf import settings
settings.configure(INSTALLED_APPS=('statirator.core', ))
elif 'test' in sys.argv:
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE", "statirator.test_settings")
from django.core import management
management.execute_from_command_line()
if __name__ == '__main__':
main()
|
import os
import sys
def main():
if 'test' in sys.argv:
os.environ.setdefault(
"DJANGO_SETTINGS_MODULE", "statirator.test_settings")
else:
from django.conf import settings
settings.configure(INSTALLED_APPS=('statirator.core', ))
from django.core import management
management.execute_from_command_line()
if __name__ == '__main__':
main()
|
Add statirator.core for all commands except test
|
Add statirator.core for all commands except test
|
Python
|
mit
|
MeirKriheli/statirator,MeirKriheli/statirator,MeirKriheli/statirator
|
c7ec2805d1c3dde9ff3bf8caacf0bac474a1d468
|
cybox/utils.py
|
cybox/utils.py
|
#Common utility methods
#Test if a dictionary value is not None and has a length greater than 0
def test_value(value):
if value.get('value') is not None:
if value.get('value') is not None and len(str(value.get('value'))) > 0:
return True
else:
return False
else:
return False
|
"""Common utility methods"""
def test_value(value):
"""
Test if a dictionary contains a "value" key whose value is not None
and has a length greater than 0.
We explicitly want to return True even if the value is False or 0, since
some parts of the standards are boolean or allow a 0 value, and we want to
distinguish the case where the "value" key is omitted entirely.
"""
v = value.get('value', None)
return (v is not None) and (len(str(v)) > 0)
|
Clean up and document 'test_value' function.
|
Clean up and document 'test_value' function.
|
Python
|
bsd-3-clause
|
CybOXProject/python-cybox
|
3fc94b4cffcfd08b439386fb2b01aa1e12fec6d5
|
iati/core/tests/test_data.py
|
iati/core/tests/test_data.py
|
"""A module containing tests for the library representation of IATI data."""
import iati.core.data
class TestDatasets(object):
"""A container for tests relating to Datasets"""
pass
|
"""A module containing tests for the library representation of IATI data."""
import iati.core.data
class TestDatasets(object):
"""A container for tests relating to Datasets"""
def test_dataset_no_params(self):
"""Test Dataset creation with no parameters."""
pass
def test_dataset_valid_xml_string(self):
"""Test Dataset creation with a valid XML string that is not IATI data."""
pass
def test_dataset_valid_iati_string(self):
"""Test Dataset creation with a valid IATI XML string."""
pass
def test_dataset_invalid_xml_string(self):
"""Test Dataset creation with a string that is not valid XML."""
pass
def test_dataset_tree(self):
"""Test Dataset creation with an etree that is not valid IATI data."""
pass
def test_dataset_iati_tree(self):
"""Test Dataset creation with a valid IATI etree."""
pass
def test_dataset_no_params_strict(self):
"""Test Dataset creation with no parameters.
Strict IATI checks are enabled.
"""
pass
def test_dataset_valid_xml_string_strict(self):
"""Test Dataset creation with a valid XML string that is not IATI data.
Strict IATI checks are enabled.
"""
pass
def test_dataset_valid_iati_string_strict(self):
"""Test Dataset creation with a valid IATI XML string.
Strict IATI checks are enabled.
"""
pass
def test_dataset_invalid_xml_string_strict(self):
"""Test Dataset creation with a string that is not valid XML.
Strict IATI checks are enabled.
"""
pass
def test_dataset_tree_strict(self):
"""Test Dataset creation with an etree that is not valid IATI data.
Strict IATI checks are enabled.
"""
pass
def test_dataset_iati_tree_strict(self):
"""Test Dataset creation with a valid IATI etree.
Strict IATI checks are enabled.
"""
pass
|
Test stubs for dataset creation
|
Test stubs for dataset creation
|
Python
|
mit
|
IATI/iati.core,IATI/iati.core
|
da314ab34cb13c1de66b96da2eab1484639e124b
|
fiona/compat.py
|
fiona/compat.py
|
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
|
import collections
from six.moves import UserDict
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
# Users can pass in objects that subclass a few different objects
# More specifically, rasterio has a CRS() class that subclasses UserDict()
# In Python 2 UserDict() is in its own module and does not subclass Mapping()
DICT_TYPES = (dict, collections.Mapping, UserDict)
|
Add a DICT_TYPES variable so we can do isinstance() checks against all the builtin dict-like objects
|
Add a DICT_TYPES variable so we can do isinstance() checks against all the builtin dict-like objects
|
Python
|
bsd-3-clause
|
Toblerity/Fiona,rbuffat/Fiona,rbuffat/Fiona,Toblerity/Fiona
|
1c2a981e007b9a205db1302370dff6a6ea15bf8c
|
iati/versions.py
|
iati/versions.py
|
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of)."""
import re
import iati.constants
class Version(object):
"""Representation of an IATI Standard Version Number."""
def __init__(self, version_string):
"""Initialise a Version Number.
Args:
version_string (str): A string representation of an IATI version number.
Raises:
TypeError: If an attempt to pass something that is not a string is made.
ValueError: If a provided string is not a version number.
"""
if not isinstance(version_string, str):
raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string)))
iati_version_re = re.compile(r'^((1\.0[1-9])|(((1\d+)|([2-9](\d+)?))\.0[1-9](\d+)?))$')
if not iati_version_re.match(version_string):
raise ValueError('A valid version number must be specified.')
|
"""A module containing components that describe the IATI Standard itself (rather than the parts it is made up of)."""
import re
import iati.constants
class Version(object):
"""Representation of an IATI Standard Version Number."""
def __init__(self, version_string):
"""Initialise a Version Number.
Args:
version_string (str): A string representation of an IATI version number.
Raises:
TypeError: If an attempt to pass something that is not a string is made.
ValueError: If a provided string is not a version number.
"""
if not isinstance(version_string, str):
raise TypeError('A Version object must be created from a string, not a {0}'.format(type(version_string)))
# a regex for what makes a valid IATIver Version Number format string
iativer_re = re.compile(r'^((1\.0[1-9])|(((1\d+)|([2-9](\d+)?))\.0[1-9](\d+)?))$')
if not iativer_re.match(version_string):
raise ValueError('A valid version number must be specified.')
|
Add a comment to make some sense of a regex
|
Add a comment to make some sense of a regex
|
Python
|
mit
|
IATI/iati.core,IATI/iati.core
|
0ab048e8363a60d47ba780cb622a72343aaf65f2
|
tests/test_urls.py
|
tests/test_urls.py
|
# -*- coding: utf-8 -*-
from django.conf.urls import include, url
from django.contrib import admin
from django.http.response import HttpResponse
admin.autodiscover()
def empty_view(request):
return HttpResponse()
urlpatterns = [
url(r'^home/', empty_view, name="home"),
url(r'^admin/', admin.site.urls),
url(r'^djstripe/', include("djstripe.urls", namespace="djstripe")),
url(r'^testapp/', include('tests.apps.testapp.urls')),
url(r'^__debug__/', include('tests.apps.testapp.urls')),
url(
r'^testapp_namespaced/',
include('tests.apps.testapp_namespaced.urls', namespace="testapp_namespaced")
),
# Represents protected content
url(r'^testapp_content/', include('tests.apps.testapp_content.urls')),
# For testing fnmatches
url(r"test_fnmatch/extra_text/$", empty_view, name="test_fnmatch"),
# Default for DJSTRIPE_SUBSCRIPTION_REDIRECT
url(r"subscribe/$", empty_view, name="test_url_subscribe")
]
|
# -*- coding: utf-8 -*-
from django.conf.urls import include, url
from django.contrib import admin
from django.http.response import HttpResponse
admin.autodiscover()
def empty_view(request):
return HttpResponse()
urlpatterns = [
url(r'^home/', empty_view, name="home"),
url(r'^admin/', admin.site.urls),
url(r'^djstripe/', include("djstripe.urls", namespace="djstripe")),
url(r'^testapp/', include('tests.apps.testapp.urls')),
url(
r'^testapp_namespaced/',
include('tests.apps.testapp_namespaced.urls', namespace="testapp_namespaced")
),
# Represents protected content
url(r'^testapp_content/', include('tests.apps.testapp_content.urls')),
# For testing fnmatches
url(r"test_fnmatch/extra_text/$", empty_view, name="test_fnmatch"),
# Default for DJSTRIPE_SUBSCRIPTION_REDIRECT
url(r"subscribe/$", empty_view, name="test_url_subscribe")
]
|
Remove useless url from test urls
|
Remove useless url from test urls
|
Python
|
mit
|
pydanny/dj-stripe,kavdev/dj-stripe,dj-stripe/dj-stripe,kavdev/dj-stripe,pydanny/dj-stripe,dj-stripe/dj-stripe
|
305d04fc0841035bf744480261017c14ae3045b0
|
syntax_makefile.py
|
syntax_makefile.py
|
import wx.stc
ident = "makefile"
name = "Makefile"
extensions = ["Makefile", "*.mk"]
lexer = wx.stc.STC_LEX_MAKEFILE
indent = 8
use_tabs = True
stylespecs = (
(wx.stc.STC_STYLE_DEFAULT, ""),
)
keywords = ""
|
import wx.stc
ident = "makefile"
name = "Makefile"
extensions = ["*Makefile", "*makefile", "*.mk"]
lexer = wx.stc.STC_LEX_MAKEFILE
indent = 8
use_tabs = True
stylespecs = (
(wx.stc.STC_STYLE_DEFAULT, ""),
)
keywords = ""
|
Make files ending in makefile or Makefile.
|
Make files ending in makefile or Makefile.
|
Python
|
mit
|
shaurz/devo
|
19ac41a14875c6df2ed9ddf7b7b315ffb5c70819
|
tests/specs/test_yaml_file.py
|
tests/specs/test_yaml_file.py
|
import unittest
try:
from unittest import mock
except ImportError:
import mock
from conda_env import env
from conda_env.specs.yaml_file import YamlFileSpec
class TestYAMLFile(unittest.TestCase):
def test_no_environment_file(self):
spec = YamlFileSpec(name=None, filename='not-a-file')
self.assertEqual(spec.can_handle(), False)
def test_environment_file_exist(self):
with mock.patch.object(env, 'from_file', return_value={}):
spec = YamlFileSpec(name=None, filename='environment.yaml')
self.assertTrue(spec.can_handle())
def test_get_environment(self):
with mock.patch.object(env, 'from_file', return_value={}):
spec = YamlFileSpec(name=None, filename='environment.yaml')
self.assertIsInstance(spec.environment, dict)
|
import unittest
import random
try:
from unittest import mock
except ImportError:
import mock
from conda_env import env
from conda_env.specs.yaml_file import YamlFileSpec
class TestYAMLFile(unittest.TestCase):
def test_no_environment_file(self):
spec = YamlFileSpec(name=None, filename='not-a-file')
self.assertEqual(spec.can_handle(), False)
def test_environment_file_exist(self):
with mock.patch.object(env, 'from_file', return_value={}):
spec = YamlFileSpec(name=None, filename='environment.yaml')
self.assertTrue(spec.can_handle())
def test_get_environment(self):
r = random.randint(100, 200)
with mock.patch.object(env, 'from_file', return_value=r):
spec = YamlFileSpec(name=None, filename='environment.yaml')
self.assertEqual(spec.environment, r)
def test_filename(self):
filename = "filename_{}".format(random.randint(100, 200))
with mock.patch.object(env, 'from_file') as from_file:
spec = YamlFileSpec(filename=filename)
spec.environment
from_file.assert_called_with(filename)
|
Add more tests to YamlFile class
|
Add more tests to YamlFile class
|
Python
|
bsd-3-clause
|
ESSS/conda-env,phobson/conda-env,conda/conda-env,asmeurer/conda-env,conda/conda-env,mikecroucher/conda-env,isaac-kit/conda-env,ESSS/conda-env,isaac-kit/conda-env,dan-blanchard/conda-env,phobson/conda-env,nicoddemus/conda-env,dan-blanchard/conda-env,asmeurer/conda-env,nicoddemus/conda-env,mikecroucher/conda-env
|
ecc3a9c90d20699c6f0bf18600cf9bd755b56d65
|
rollbar/contrib/fastapi/utils.py
|
rollbar/contrib/fastapi/utils.py
|
import logging
log = logging.getLogger(__name__)
class FastAPIVersionError(Exception):
def __init__(self, version, reason=''):
err_msg = f'FastAPI {version}+ is required'
if reason:
err_msg += f' {reason}'
log.error(err_msg)
return super().__init__(err_msg)
|
import functools
import logging
import fastapi
log = logging.getLogger(__name__)
class FastAPIVersionError(Exception):
def __init__(self, version, reason=''):
err_msg = f'FastAPI {version}+ is required'
if reason:
err_msg += f' {reason}'
log.error(err_msg)
return super().__init__(err_msg)
class fastapi_min_version:
def __init__(self, min_version):
self.min_version = min_version
def __call__(self, func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
if fastapi.__version__ < self.min_version:
raise FastAPIVersionError(
'0.41.0', reason=f'to use {func.__name__}() function'
)
return func(*args, **kwargs)
return wrapper
|
Add decorator to check minimum required FastAPI version
|
Add decorator to check minimum required FastAPI version
|
Python
|
mit
|
rollbar/pyrollbar
|
8582126efa9907b06e9f9b183a0919feba9fb6b0
|
indra/literature/dart_client.py
|
indra/literature/dart_client.py
|
import logging
import requests
from indra.config import CONFIG_DICT
logger = logging.getLogger(__name__)
dart_uname = CONFIG_DICT['DART_WM_USERNAME']
dart_pwd = CONFIG_DICT['DART_WM_PASSWORD']
dart_url = 'https://indra-ingest-pipeline-rest-1.prod.dart.worldmodelers.com' \
'/dart/api/v1/readers/query'
def query_dart_notifications(readers=None, versions=None, document_ids=None,
timestamp=None):
"""
Parameters
----------
readers : list
versions : list
document_ids : list
timestamp : dict("on"|"before"|"after",str)
Returns
-------
dict
"""
if all(v is None for v in [readers, versions, document_ids, timestamp]):
return {}
pd = {}
if readers:
pd['readers'] = readers
if versions:
pd['versions'] = versions
if document_ids:
pd['document_ids'] = document_ids
if isinstance(timestamp, dict):
pass # Check
res = requests.post(
dart_url,
data={'metadata':
None
},
auth=(dart_uname, dart_pwd)
)
if res.status_code != 200:
logger.warning(f'Dart Notifications Endpoint returned with status'
f' {res.status_code}: {res.text}')
return {}
return res.json()
|
import logging
import requests
from indra.config import get_config
logger = logging.getLogger(__name__)
dart_uname = get_config('DART_WM_USERNAME')
dart_pwd = get_config('DART_WM_PASSWORD')
dart_url = 'https://indra-ingest-pipeline-rest-1.prod.dart.worldmodelers.com' \
'/dart/api/v1/readers/query'
def query_dart_notifications(readers=None, versions=None, document_ids=None,
timestamp=None):
"""
Parameters
----------
readers : list
versions : list
document_ids : list
timestamp : dict("on"|"before"|"after",str)
Returns
-------
dict
"""
if all(v is None for v in [readers, versions, document_ids, timestamp]):
return {}
pd = {}
if readers:
pd['readers'] = readers
if versions:
pd['versions'] = versions
if document_ids:
pd['document_ids'] = document_ids
if isinstance(timestamp, dict):
pass # Check
res = requests.post(
dart_url,
data={'metadata':
None
},
auth=(dart_uname, dart_pwd)
)
if res.status_code != 200:
logger.warning(f'Dart Notifications Endpoint returned with status'
f' {res.status_code}: {res.text}')
return {}
return res.json()
|
Use get_config instead of CONFIG_DICT
|
Use get_config instead of CONFIG_DICT
|
Python
|
bsd-2-clause
|
johnbachman/indra,johnbachman/belpy,johnbachman/belpy,sorgerlab/belpy,bgyori/indra,sorgerlab/belpy,johnbachman/indra,johnbachman/belpy,bgyori/indra,bgyori/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/indra,sorgerlab/belpy,johnbachman/indra
|
33c595574921a64bec79de5ca72a62c22d09eb94
|
django_todolist/todo/models.py
|
django_todolist/todo/models.py
|
from django.db import models
class Todo(models.Model):
"""
Todo Model: name, description, created
"""
name = models.CharField(max_length=100, unique=True)
description = models.TextField()
created = models.DateTimeField()
def __unicode__(self):
return self.name
|
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Todo(models.Model):
"""
Todo Model: name, description, created
"""
name = models.CharField(max_length=100, unique=True)
description = models.TextField()
created = models.DateTimeField()
def __str__(self):
return self.name
|
Add Python portability to model
|
Add Python portability to model
|
Python
|
bsd-3-clause
|
andresgz/django_todolist,andresgz/django_todolist,andresgz/django_todolist,andresgz/django_todolist
|
16cca2bc9aa8d5ecf6eb4d829de00905d3d15759
|
conveyor/store.py
|
conveyor/store.py
|
class BaseStore(object):
def set(self, key, value):
raise NotImplementedError
def get(self, key):
raise NotImplementedError
class InMemoryStore(BaseStore):
def __init__(self, *args, **kwargs):
super(InMemoryStore, self).__init__(*args, **kwargs)
self._data = {}
def set(self, key, value):
self._data[key] = value
def get(self, key):
return self._data[key]
class RedisStore(BaseStore):
def __init__(self, connection=None, prefix=None, *args, **kwargs):
super(RedisStore, self).__init__(*args, **kwargs)
import redis
self.redis = redis.StrictRedis(**connection)
self.prefix = prefix
def set(self, key, value):
if self.prefix is not None:
key = self.prefix + key
self.redis.set(key, value)
def get(self, key):
if self.prefix is not None:
key = self.prefix + key
return self.redis.get(key)
|
class BaseStore(object):
def set(self, key, value):
raise NotImplementedError
def get(self, key):
raise NotImplementedError
class InMemoryStore(BaseStore):
def __init__(self, *args, **kwargs):
super(InMemoryStore, self).__init__(*args, **kwargs)
self._data = {}
def set(self, key, value):
self._data[key] = value
def get(self, key):
return self._data[key]
class RedisStore(BaseStore):
def __init__(self, url=None, prefix=None, *args, **kwargs):
super(RedisStore, self).__init__(*args, **kwargs)
import redis
self.redis = redis.from_url(url)
self.prefix = prefix
def set(self, key, value):
if self.prefix is not None:
key = self.prefix + key
self.redis.set(key, value)
def get(self, key):
if self.prefix is not None:
key = self.prefix + key
return self.redis.get(key)
|
Switch redis to use the from_url method
|
Switch redis to use the from_url method
|
Python
|
bsd-2-clause
|
crateio/carrier
|
ef42117ec2bd2a275dcea5f5a2d57322bbd21faa
|
wafer/talks/tests/fixtures.py
|
wafer/talks/tests/fixtures.py
|
from wafer.talks.models import Talk, TalkType
from wafer.tests.utils import create_user
def create_talk_type(name):
"""Create a talk type"""
return TalkType.objects.create(name=name)
def create_talk(title, status, username=None, user=None, talk_type=None):
if username:
user = create_user(username)
talk = Talk.objects.create(
title=title, status=status, corresponding_author_id=user.id)
talk.authors.add(user)
talk.notes = "Some notes for talk %s" % title
talk.private_notes = "Some private notes for talk %s" % title
talk.save()
if talk_type:
talk.talk_type = talk_type
talk.save()
return talk
|
from wafer.talks.models import Talk, TalkType
from wafer.tests.utils import create_user
def create_talk_type(name):
"""Create a talk type"""
return TalkType.objects.create(name=name)
def create_talk(title, status, username=None, user=None, talk_type=None):
if sum((user is None, username is None)) != 1:
raise ValueError('One of user OR username must be specified')
if username:
user = create_user(username)
talk = Talk.objects.create(
title=title, status=status, corresponding_author_id=user.id)
talk.authors.add(user)
talk.notes = "Some notes for talk %s" % title
talk.private_notes = "Some private notes for talk %s" % title
talk.save()
if talk_type:
talk.talk_type = talk_type
talk.save()
return talk
|
Check that user OR username is specified
|
Check that user OR username is specified
|
Python
|
isc
|
CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
|
2c41a2b37df94339074fdc04ccb3ef560d2e6cac
|
falmer/events/filters.py
|
falmer/events/filters.py
|
from django_filters import FilterSet, CharFilter, IsoDateTimeFilter, BooleanFilter, ModelChoiceFilter
from falmer.events.models import Curator
from . import models
class EventFilterSet(FilterSet):
class Meta:
model = models.Event
fields = (
'title',
'venue',
'type',
'bundle',
'parent',
'brand',
'student_group',
'from_time',
'to_time',
'audience_just_for_pgs',
'audience_suitable_kids_families',
'audience_good_to_meet_people',
'is_over_18_only',
'cost',
'alcohol',
'type',
'ticket_level',
'curated_by'
)
title = CharFilter(lookup_expr='icontains')
brand = CharFilter(field_name='brand__slug')
bundle = CharFilter(field_name='bundle__slug')
to_time = IsoDateTimeFilter(field_name='start_time', lookup_expr='lte')
from_time = IsoDateTimeFilter(field_name='end_time', lookup_expr='gte')
uncurated = BooleanFilter(field_name='curated_by', lookup_expr='isnull')
curated_by = ModelChoiceFilter(queryset=Curator.objects.all(), field_name='curated_by')
#
# class BrandingPeriodFilerSet(FilterSet):
# class Meta:
# model = BrandingPeriod
|
from django_filters import FilterSet, CharFilter, IsoDateTimeFilter, BooleanFilter, ModelChoiceFilter
from falmer.events.models import Curator
from . import models
class EventFilterSet(FilterSet):
class Meta:
model = models.Event
fields = (
'title',
'venue',
'type',
'bundle',
'parent',
'brand',
'student_group',
'from_time',
'to_time',
'audience_just_for_pgs',
'audience_suitable_kids_families',
'audience_good_to_meet_people',
'is_over_18_only',
'cost',
'alcohol',
'type',
'ticket_level',
'curated_by'
)
title = CharFilter(lookup_expr='icontains')
brand = CharFilter(field_name='brand__slug')
bundle = CharFilter(field_name='bundle__slug')
student_group = CharFilter(field_name='student_group__slug')
to_time = IsoDateTimeFilter(field_name='start_time', lookup_expr='lte')
from_time = IsoDateTimeFilter(field_name='end_time', lookup_expr='gte')
uncurated = BooleanFilter(field_name='curated_by', lookup_expr='isnull')
curated_by = ModelChoiceFilter(queryset=Curator.objects.all(), field_name='curated_by')
#
# class BrandingPeriodFilerSet(FilterSet):
# class Meta:
# model = BrandingPeriod
|
Use SG slug for event filtering
|
Use SG slug for event filtering
|
Python
|
mit
|
sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer,sussexstudent/falmer
|
2e4e9ab2bab2e0a4bd00e10dcf115b1f96d1c714
|
modules/urlparser/__init__.py
|
modules/urlparser/__init__.py
|
from modules import *
import re
import urllib2
import traceback
try:
import simplejson as json
except ImportError:
import json
from unidecode import unidecode
from twitter import Twitter
from bitly import Bitly
from youtube import Youtube
class Urlparser(Module):
"""Checks incoming messages for possible urls. If a url is found then
route the url to a corresponding module to handle.
"""
def __init__(self, *args, **kwargs):
"""Constructor."""
Module.__init__(self, kwargs=kwargs)
self.url_patterns = [
Twitter,
Youtube,
Bitly,
]
self.url_pattern = re.compile("http://(.*?)")
def _register_events(self):
self.add_event('pubmsg', 'parse_message')
def parse_message(self, event):
nick = event['nick']
# make sure the message contains a url before checking
# the other handlers patterns
try:
for handler in self.url_patterns:
m = handler.pattern.search(event['message'])
if m:
handler_instance = handler()
msg = handler_instance.handle(event=event, match=m)
if msg:
self.server.privmsg(event['target'], msg.encode('ascii', 'ignore'))
break
except:
print "<<Error>> in Urlparser"
print traceback.print_exc()
|
from modules import *
import re
import urllib2
import traceback
try:
import simplejson as json
except ImportError:
import json
from unidecode import unidecode
from twitter import Twitter
from bitly import Bitly
from youtube import Youtube
class Urlparser(Module):
"""Checks incoming messages for possible urls. If a url is found then
route the url to a corresponding module to handle.
"""
def __init__(self, *args, **kwargs):
"""Constructor."""
Module.__init__(self, kwargs=kwargs)
self.url_patterns = [
Twitter,
Youtube,
Bitly,
]
self.url_pattern = re.compile("http://(.*?)")
def _register_events(self):
self.add_event('pubmsg', 'parse_message')
def parse_message(self, event):
nick = event['nick']
# make sure the message contains a url before checking
# the other handlers patterns
try:
for handler in self.url_patterns:
m = handler.pattern.search(event['message'])
if m:
handler_instance = handler()
msg = handler_instance.handle(event=event, match=m)
if msg:
self.server.privmsg(event['target'], msg.encode('ascii', 'ignore'))
break
except:
print "<<Error>> in Urlparser (%s)" % (event['message'])
print traceback.print_exc()
|
Include url message when fail to run urlparser
|
Include url message when fail to run urlparser
|
Python
|
mit
|
billyvg/piebot
|
a6d49059851450c7ea527941600564cb3f48cc72
|
flask_profiler/storage/base.py
|
flask_profiler/storage/base.py
|
class BaseStorage(object):
"""docstring for BaseStorage"""
def __init__(self):
super(BaseStorage, self).__init__()
def filter(self, criteria):
raise Exception("Not implemneted Error")
def getSummary(self, criteria):
raise Exception("Not implemneted Error")
def insert(self, measurement):
raise Exception("Not implemented Error")
def delete(self, measurementId):
raise Exception("Not imlemented Error")
|
class BaseStorage(object):
"""docstring for BaseStorage"""
def __init__(self):
super(BaseStorage, self).__init__()
def filter(self, criteria):
raise Exception("Not implemneted Error")
def getSummary(self, criteria):
raise Exception("Not implemneted Error")
def insert(self, measurement):
raise Exception("Not implemented Error")
def delete(self, measurementId):
raise Exception("Not imlemented Error")
def truncate(self):
raise Exception("Not imlemented Error")
|
Add tuncate method to BaseStorage class
|
Add tuncate method to BaseStorage class
This will provide an interface for supporting any new database, there by, making the code more robust.
|
Python
|
mit
|
muatik/flask-profiler
|
d7299fd931ae62cc661b48dbc84aa161a395f1fa
|
fermipy/__init__.py
|
fermipy/__init__.py
|
import os
__version__ = "unknown"
try:
from version import get_git_version
__version__ = get_git_version()
except Exception as message:
print(message)
__author__ = "Matthew Wood"
PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__))
PACKAGE_DATA = os.path.join(PACKAGE_ROOT,'data')
os.environ['FERMIPY_ROOT'] = PACKAGE_ROOT
os.environ['FERMIPY_DATA_DIR'] = PACKAGE_DATA
|
from __future__ import absolute_import, division, print_function
import os
__version__ = "unknown"
try:
from .version import get_git_version
__version__ = get_git_version()
except Exception as message:
print(message)
__author__ = "Matthew Wood"
PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__))
PACKAGE_DATA = os.path.join(PACKAGE_ROOT,'data')
os.environ['FERMIPY_ROOT'] = PACKAGE_ROOT
os.environ['FERMIPY_DATA_DIR'] = PACKAGE_DATA
|
Fix version module import for Python 3
|
Fix version module import for Python 3
|
Python
|
bsd-3-clause
|
jefemagril/fermipy,jefemagril/fermipy,jefemagril/fermipy,fermiPy/fermipy
|
67795baac1f7eb10fbfc90fda5a9f54949af6c24
|
ckanext/tayside/helpers.py
|
ckanext/tayside/helpers.py
|
from ckan import model
from ckan.plugins import toolkit
def _get_action(action, context_dict, data_dict):
return toolkit.get_action(action)(context_dict, data_dict)
def get_groups():
# Helper used on the homepage for showing groups
data_dict = {
'sort': 'package_count',
'limit': 7,
'all_fields': True
}
groups = _get_action('group_list', {}, data_dict)
return groups
|
from ckan import model
from ckan.plugins import toolkit
def _get_action(action, context_dict, data_dict):
return toolkit.get_action(action)(context_dict, data_dict)
def get_groups():
# Helper used on the homepage for showing groups
data_dict = {
'sort': 'package_count',
'all_fields': True
}
groups = _get_action('group_list', {}, data_dict)
return groups
|
Remove limit of 7 groups in homepage
|
Remove limit of 7 groups in homepage
|
Python
|
agpl-3.0
|
ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside,ViderumGlobal/ckanext-tayside
|
8fad8a4f1591fb4a7d7d1bdf932c5918197b475c
|
tests/client.py
|
tests/client.py
|
# -*- coding: utf-8 -*-
"""
Description: Client side of sanity check
Author: Mike Ellis
Copyright 2017 Owner
"""
from htmltree import *
def start():
console.log("Starting")
newcontent = H1("Sanity check PASS", _class='test', style=dict(color='green'))
console.log(newcontent.render(0))
document.body.innerHTML = newcontent.render()
console.log("Finished")
document.addEventListener('DOMContentLoaded', start)
|
# -*- coding: utf-8 -*-
"""
Description: Client side of sanity check
Uses JS functions insertAdjacentHTML, innerHTML and addEventListener.
See https://developer.mozilla.org/en-US/docs/Web/API/Element/insertAdjacentHTML
https://developer.mozilla.org/en-US/docs/Web/API/Element/innerHTML
https://developer.mozilla.org/en-US/docs/Web/API/EventTarget/addEventListener
Author: Mike Ellis
Copyright 2017 Owner
"""
from htmltree import *
def start():
console.log("Starting")
## insert a style element at the end of the <head?
cssrules = {'.test':{'color':'green', 'text-align':'center'}}
style = Style(**cssrules)
document.head.insertAdjacentHTML('beforeend', style.render())
## Replace the <body> content
newcontent = Div(H1("Sanity check PASS", _class='test'))
document.body.innerHTML = newcontent.render()
console.log("Finished")
## JS is event driven.
## Wait for DOM load to complete before firing
## our start() function.
document.addEventListener('DOMContentLoaded', start)
|
Fix <style> rendering under Transcrypt.
|
Fix <style> rendering under Transcrypt.
The hasattr test in renderCss() was failing when it shouldn't have.
Fixed by removal. Updated tests/client.py to create and append a style
element to detect problems related to Style() on the client side.
|
Python
|
mit
|
Michael-F-Ellis/htmltree
|
e91eac0c667c74062672a1a2cdb7da2a910f8cf7
|
InvenTree/users/serializers.py
|
InvenTree/users/serializers.py
|
from rest_framework import serializers
from django.contrib.auth.models import User
class UserSerializer(serializers.HyperlinkedModelSerializer):
""" Serializer for a User
"""
class Meta:
model = User
fields = ('username',
'first_name',
'last_name',
'email',)
|
from rest_framework import serializers
from django.contrib.auth.models import User
class UserSerializer(serializers.HyperlinkedModelSerializer):
""" Serializer for a User
"""
class Meta:
model = User
fields = ('pk',
'username',
'first_name',
'last_name',
'email',)
|
Include PK in user serializer
|
Include PK in user serializer
|
Python
|
mit
|
inventree/InvenTree,SchrodingersGat/InvenTree,inventree/InvenTree,inventree/InvenTree,inventree/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree,SchrodingersGat/InvenTree
|
bf9866e2c337f024fcc02de69456a235dc7ac07c
|
labs/lab-6/common.py
|
labs/lab-6/common.py
|
#!/usr/bin/env python
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import sys
import os
from tspapi import API
class Common(object):
def __init__(self, ):
self.api = API()
self.usage_args = ""
@staticmethod
def usage(self, args):
sys.stderr.write("usage: {0} {1}\n".format(os.path.basename(sys.argv[0]), args))
def send_measurements(self, measurements):
"""
Sends measurements using the Measurement API
:param measurements:
:return: None
"""
self.api.measurement_create_batch(measurements)
def run(self):
"""
Main loop
"""
while True:
print("Doing absolutely nothing")
time.sleep(self.interval)
|
#!/usr/bin/env python
# Copyright 2014-2015 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import sys
import os
from tspapi import API
class Common(object):
def __init__(self, ):
self.api = API()
self.usage_args = ""
# Set our application id from the environment variable
self.appl_id = os.environ['TSI_APPL_ID']
@staticmethod
def usage(args):
sys.stderr.write("usage: {0} {1}\n".format(os.path.basename(sys.argv[0]), args))
def send_measurements(self, measurements):
"""
Sends measurements using the Measurement API
:param measurements:
:return: None
"""
self.api.measurement_create_batch(measurements)
def run(self):
"""
Main loop
"""
while True:
print("Doing absolutely nothing")
time.sleep(self.interval)
|
Add application id and static method for usage
|
Add application id and static method for usage
|
Python
|
apache-2.0
|
jdgwartney/tsi-lab,jdgwartney/tsi-lab,jdgwartney/tsi-lab,jdgwartney/tsi-lab,boundary/tsi-lab,boundary/tsi-lab,boundary/tsi-lab,boundary/tsi-lab
|
eec24c2cff1b588b957215a867a85a148f4e71e9
|
tuneme/views.py
|
tuneme/views.py
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.shortcuts import render
from molo.core.models import ArticlePage
from molo.commenting.models import MoloComment
from wagtail.wagtailsearch.models import Query
def search(request, results_per_page=10):
search_query = request.GET.get('q', None)
page = request.GET.get('p', 1)
if search_query:
results = ArticlePage.objects.live().search(search_query)
Query.get(search_query).add_hit()
else:
results = ArticlePage.objects.none()
paginator = Paginator(results, results_per_page)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search_results.html', {
'search_query': search_query,
'search_results': search_results,
'results': results,
})
def report_response(request, comment_pk):
comment = MoloComment.objects.get(pk=comment_pk)
return render(request, 'comments/report_response.html', {
'article': comment.content_object,
})
|
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.shortcuts import render
from django.utils.translation import get_language_from_request
from molo.core.utils import get_locale_code
from molo.core.models import ArticlePage
from molo.commenting.models import MoloComment
from wagtail.wagtailsearch.models import Query
def search(request, results_per_page=10):
search_query = request.GET.get('q', None)
page = request.GET.get('p', 1)
locale = get_locale_code(get_language_from_request(request))
if search_query:
results = ArticlePage.objects.filter(
languages__language__locale=locale).live().search(search_query)
Query.get(search_query).add_hit()
else:
results = ArticlePage.objects.none()
paginator = Paginator(results, results_per_page)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search_results.html', {
'search_query': search_query,
'search_results': search_results,
'results': results,
})
def report_response(request, comment_pk):
comment = MoloComment.objects.get(pk=comment_pk)
return render(request, 'comments/report_response.html', {
'article': comment.content_object,
})
|
Add multi-languages support for search
|
Add multi-languages support for search
|
Python
|
bsd-2-clause
|
praekelt/molo-tuneme,praekelt/molo-tuneme,praekelt/molo-tuneme,praekelt/molo-tuneme
|
46fc6c7f8f63ce747a30a35bb5fb33ff2d53a2c0
|
mackerel/host.py
|
mackerel/host.py
|
# -*- coding: utf-8 -*-
"""
mackerel.host
~~~~~~~~~~~~~
Mackerel client implemented by Pyton.
Ported from `mackerel-client-ruby`.
<https://github.com/mackerelio/mackerel-client-ruby>
:copyright: (c) 2014 Hatena, All rights reserved.
:copyright: (c) 2015 Shinya Ohyanagi, All rights reserved.
:license: BSD, see LICENSE for more details.
"""
import re
class Host(object):
MACKEREL_INTERFACE_NAME_PATTERN = re.compile(r'^eth\d')
def __init__(self, **kwargs):
self.args = kwargs
self.name = kwargs.get('name')
self.meta = kwargs.get('meta')
self.type = kwargs.get('type')
self.status = kwargs.get('status')
self.memo = kwargs.get('memo')
self.is_retired = kwargs.get('isRetired')
self.id = kwargs.get('id')
self.created_at = kwargs.get('createdAt')
self.roles = kwargs.get('roles')
self.interfaces = kwargs.get('interfaces')
def ip_addr(self):
pass
def mac_addr(self):
pass
|
# -*- coding: utf-8 -*-
"""
mackerel.host
~~~~~~~~~~~~~
Mackerel client implemented by Pyton.
Ported from `mackerel-client-ruby`.
<https://github.com/mackerelio/mackerel-client-ruby>
:copyright: (c) 2014 Hatena, All rights reserved.
:copyright: (c) 2015 Shinya Ohyanagi, All rights reserved.
:license: BSD, see LICENSE for more details.
"""
import re
class Host(object):
MACKEREL_INTERFACE_NAME_PATTERN = re.compile(r'^eth\d')
def __init__(self, **kwargs):
self.args = kwargs
self.name = kwargs.get('name', None)
self.meta = kwargs.get('meta', None)
self.type = kwargs.get('type', None)
self.status = kwargs.get('status', None)
self.memo = kwargs.get('memo', None)
self.is_retired = kwargs.get('isRetired', None)
self.id = kwargs.get('id', None)
self.created_at = kwargs.get('createdAt', None)
self.roles = kwargs.get('roles', None)
self.interfaces = kwargs.get('interfaces', None)
def ip_addr(self):
pass
def mac_addr(self):
pass
|
Add None if kwargs can not get.
|
Add None if kwargs can not get.
|
Python
|
bsd-3-clause
|
heavenshell/py-mackerel-client
|
63a893add1170c1e90cdb8eaea6c1e1c6a3a8e0a
|
9.py
|
9.py
|
"""Python challenge solution #9:
http://www.pythonchallenge.com/pc/return/good.html"""
def main():
pass
if __name__ == "__main__":
main()
|
"""Python challenge solution #9:
http://www.pythonchallenge.com/pc/return/good.html"""
import urllib
import urllib2
from PIL import Image, ImageDraw
un = 'huge'
pw = 'file'
url = 'http://www.pythonchallenge.com/pc/return/good.jpg'
def setup_auth_handler():
password_mgr = urllib2.HTTPPasswordMgrWithDefaultRealm()
password_mgr.add_password(None, url, un, pw)
handler = urllib2.HTTPBasicAuthHandler(password_mgr)
opener = urllib2.build_opener(handler)
opener.open(url)
urllib2.install_opener(opener)
def main():
setup_auth_handler()
img = urllib2.urlopen('http://www.pythonchallenge.com/pc/return/good.jpg')
im = Image.open(img)
draw = ImageDraw.Draw(im)
draw.line([(0, 0), im.size], fill=128)
im.show()
if __name__ == "__main__":
main()
|
Add authentication handler for opening image.
|
Add authentication handler for opening image.
|
Python
|
mit
|
bm5w/pychal
|
f3bbfd5221a3d7b3e394a70853d0a7dc1b5eeeac
|
knights/base.py
|
knights/base.py
|
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.nodelist = parse.parse(raw)
code = ast.Expression(
body=ast.GeneratorExp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[ast.Name(id='context', ctx=ast.Load())],
keywords=[], starargs=None, kwargs=None
),
],
keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
global_ctx = {
'nodelist': self.nodelist,
'context': dict(context),
}
return ''.join(eval(self.code, global_ctx, {}))
|
import ast
from . import parse
class Template:
def __init__(self, raw):
self.raw = raw
self.parser = parse.Parser(raw)
self.nodelist = self.parser()
code = ast.Expression(
body=ast.GeneratorExp(
elt=ast.Call(
func=ast.Name(id='str', ctx=ast.Load()),
args=[
ast.Call(
func=ast.Attribute(
value=ast.Name(id='x', ctx=ast.Load()),
attr='render',
ctx=ast.Load()
),
args=[ast.Name(id='context', ctx=ast.Load())],
keywords=[], starargs=None, kwargs=None
),
],
keywords=[], starargs=None, kwargs=None
),
generators=[
ast.comprehension(
target=ast.Name(id='x', ctx=ast.Store()),
iter=ast.Name(id='nodelist', ctx=ast.Load()),
ifs=[]
),
]
)
)
ast.fix_missing_locations(code)
self.code = compile(code, filename='<template>', mode='eval')
def render(self, context):
global_ctx = {
'nodelist': self.nodelist,
'context': dict(context),
}
return ''.join(eval(self.code, global_ctx, {}))
|
Update Template for Parser class
|
Update Template for Parser class
|
Python
|
mit
|
funkybob/knights-templater,funkybob/knights-templater
|
52d32849f4cd38ca7a0fcfc0418e9e9580dd426a
|
kimochiconsumer/views.py
|
kimochiconsumer/views.py
|
from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPNotFound,
)
@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
if 'page_id' in request.matchdict:
data = request.kimochi.page(request.matchdict['page_id'])
else:
data = request.kimochi.page('1')
return data
@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
|
from pyramid.view import view_config
from pyramid.httpexceptions import (
HTTPNotFound,
)
@view_config(route_name='page', renderer='templates/page.mako')
@view_config(route_name='page_view', renderer='templates/page.mako')
def page_view(request):
if 'page_id' in request.matchdict:
data = request.kimochi.page(request.matchdict['page_id'])
else:
data = request.kimochi.page('1')
return data
@view_config(route_name='gallery_view', renderer='templates/gallery.mako')
def gallery_view(request):
data = request.kimochi.gallery(request.matchdict['gallery_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
@view_config(route_name='gallery_image_view', renderer='templates/gallery_image.mako')
def gallery_image_view(request):
data = request.kimochi.gallery_image(request.matchdict['gallery_id'], request.matchdict['image_id'])
if 'gallery' not in data or not data['gallery']:
raise HTTPNotFound
return data
|
Use the gallery_image method for required information
|
Use the gallery_image method for required information
|
Python
|
mit
|
matslindh/kimochi-consumer
|
cc6ce477550152135eed5a9e35bca8144be10111
|
groupmestats/plotly_helpers.py
|
groupmestats/plotly_helpers.py
|
import plotly
def try_saving_plotly_figure(figure, filename):
try:
plotly.plotly.image.save_as(figure, filename)
except plotly.exceptions.PlotlyError as e:
if 'The response from plotly could not be translated.'in str(e):
print("Failed to save plotly figure. <home>/.plotly/.credentials"
" might not be configured correctly? "
"Or you may have hit your plotly account's rate limit"
" (http://help.plot.ly/api-rate-limits/)")
else:
raise
# A green bar with slightly darker green line
marker = dict(
color='#4BB541',
line=dict(
color='#3A9931',
width=1.5,
)
)
|
import plotly
def try_saving_plotly_figure(figure, filename):
try:
print("Saving plot to '%s'" % filename)
plotly.plotly.image.save_as(figure, filename)
except plotly.exceptions.PlotlyError as e:
if 'The response from plotly could not be translated.'in str(e):
print("Failed to save plotly figure. <home>/.plotly/.credentials"
" might not be configured correctly? "
"Or you may have hit your plotly account's rate limit"
" (http://help.plot.ly/api-rate-limits/)")
else:
raise
# A green bar with slightly darker green line
marker = dict(
color='#4BB541',
line=dict(
color='#3A9931',
width=1.5,
)
)
|
Print when saving plot to file
|
Print when saving plot to file
|
Python
|
mit
|
kjteske/groupmestats,kjteske/groupmestats
|
e2ca99c9f3548fa0d4e46bdd3b309ebd0e658ffa
|
test/backend/wayland/conftest.py
|
test/backend/wayland/conftest.py
|
import contextlib
import os
from libqtile.backend.wayland.core import Core
from test.helpers import Backend
wlr_env = {
"WLR_BACKENDS": "headless",
"WLR_LIBINPUT_NO_DEVICES": "1",
"WLR_RENDERER_ALLOW_SOFTWARE": "1",
"WLR_RENDERER": "pixman",
}
@contextlib.contextmanager
def wayland_environment(outputs):
"""This backend just needs some environmental variables set"""
env = wlr_env.copy()
env["WLR_HEADLESS_OUTPUTS"] = str(outputs)
yield env
class WaylandBackend(Backend):
def __init__(self, env, args=()):
self.env = env
self.args = args
self.core = Core
self.manager = None
def create(self):
"""This is used to instantiate the Core"""
os.environ.update(self.env)
return self.core(*self.args)
def configure(self, manager):
"""This backend needs to get WAYLAND_DISPLAY variable."""
success, display = manager.c.eval("self.core.display_name")
assert success
self.env["WAYLAND_DISPLAY"] = display
def fake_click(self, x, y):
"""Click at the specified coordinates"""
raise NotImplementedError
def get_all_windows(self):
"""Get a list of all windows in ascending order of Z position"""
raise NotImplementedError
|
import contextlib
import os
import textwrap
from libqtile.backend.wayland.core import Core
from test.helpers import Backend
wlr_env = {
"WLR_BACKENDS": "headless",
"WLR_LIBINPUT_NO_DEVICES": "1",
"WLR_RENDERER_ALLOW_SOFTWARE": "1",
"WLR_RENDERER": "pixman",
}
@contextlib.contextmanager
def wayland_environment(outputs):
"""This backend just needs some environmental variables set"""
env = wlr_env.copy()
env["WLR_HEADLESS_OUTPUTS"] = str(outputs)
yield env
class WaylandBackend(Backend):
def __init__(self, env, args=()):
self.env = env
self.args = args
self.core = Core
self.manager = None
def create(self):
"""This is used to instantiate the Core"""
os.environ.update(self.env)
return self.core(*self.args)
def configure(self, manager):
"""This backend needs to get WAYLAND_DISPLAY variable."""
success, display = manager.c.eval("self.core.display_name")
assert success
self.env["WAYLAND_DISPLAY"] = display
def fake_click(self, x, y):
"""Click at the specified coordinates"""
self.manager.c.eval(textwrap.dedent("""
self.core._focus_by_click()
self.core._process_cursor_button(1, True)
"""))
def get_all_windows(self):
"""Get a list of all windows in ascending order of Z position"""
success, result = self.manager.c.eval(textwrap.dedent("""
[win.wid for win in self.core.mapped_windows]
"""))
assert success
return eval(result)
|
Add Wayland Backend.fake_click and Backend.get_all_windows methods
|
Add Wayland Backend.fake_click and Backend.get_all_windows methods
These work by eval-ing in the test Qtile instance. It might be nicer to
instead make these cmd_s on the `Core` if/when we expose cmd_ methods
from the Core.
|
Python
|
mit
|
ramnes/qtile,ramnes/qtile,qtile/qtile,qtile/qtile
|
33c26aab9ff4e391f9dde2bfe873f86db4ce126e
|
opal/tests/test_user_profile.py
|
opal/tests/test_user_profile.py
|
"""
Tests for opal.models.UserProfile
"""
from django.test import TestCase
from django.contrib.auth.models import User
from opal.models import UserProfile, Team
class UserProfileTest(TestCase):
def setUp(self):
self.user = User(username='testing')
self.user.save()
self.profile, _ = UserProfile.objects.get_or_create(user=self.user)
def test_get_roles(self):
self.assertEqual({'default': []}, self.profile.get_roles())
def test_get_teams(self):
teams = list(Team.objects.filter(active=True, restricted=False))
user_teams = self.profile.get_teams()
for t in teams:
self.assertIn(t, user_teams)
|
"""
Tests for opal.models.UserProfile
"""
from django.contrib.auth.models import User
from django.test import TestCase
from mock import patch
from opal.models import UserProfile, Team
class UserProfileTest(TestCase):
def setUp(self):
self.user = User(username='testing')
self.user.save()
self.profile, _ = UserProfile.objects.get_or_create(user=self.user)
def test_get_roles(self):
self.assertEqual({'default': []}, self.profile.get_roles())
def test_get_teams(self):
teams = list(Team.objects.filter(active=True, restricted=False))
user_teams = self.profile.get_teams()
for t in teams:
self.assertIn(t, user_teams)
def test_can_see_pid(self):
with patch.object(UserProfile, 'get_roles') as mock_roles:
mock_roles.return_value = dict(default=['scientist'])
self.assertEqual(False, self.profile.can_see_pid)
def test_explicit_access_only(self):
with patch.object(UserProfile, 'get_roles') as mock_roles:
mock_roles.return_value = dict(default=['scientist'])
self.assertEqual(True, self.profile.explicit_access_only)
|
Add tests for userprofile properties
|
Add tests for userprofile properties
|
Python
|
agpl-3.0
|
khchine5/opal,khchine5/opal,khchine5/opal
|
50fd28e67109f47893a6d38ede7f64758f3fe618
|
consts/auth_type.py
|
consts/auth_type.py
|
class AuthType(object):
"""
An auth type defines what write privileges an authenticated agent has.
"""
EVENT_DATA = 0
MATCH_VIDEO = 1
EVENT_TEAMS = 2
EVENT_MATCHES = 3
EVENT_RANKINGS = 4
EVENT_ALLIANCES = 5
EVENT_AWARDS = 6
type_names = {
EVENT_DATA: "event data",
MATCH_VIDEO: "match video",
EVENT_TEAMS: "event teams",
EVENT_MATCHES: "event matches",
EVENT_RANKINGS: "event rankings",
EVENT_ALLIANCES: "event alliances",
EVENT_AWARDS: "event awrads"
}
|
class AuthType(object):
"""
An auth type defines what write privileges an authenticated agent has.
"""
EVENT_DATA = 0 # DEPRECATED - USE FINER PERMISSIONS INSTEAD
MATCH_VIDEO = 1
EVENT_TEAMS = 2
EVENT_MATCHES = 3
EVENT_RANKINGS = 4
EVENT_ALLIANCES = 5
EVENT_AWARDS = 6
type_names = {
EVENT_DATA: "event data",
MATCH_VIDEO: "match video",
EVENT_TEAMS: "event teams",
EVENT_MATCHES: "event matches",
EVENT_RANKINGS: "event rankings",
EVENT_ALLIANCES: "event alliances",
EVENT_AWARDS: "event awrads"
}
|
Add deprecation notice for AuthType.EVENT_DATA
|
Add deprecation notice for AuthType.EVENT_DATA
|
Python
|
mit
|
tsteward/the-blue-alliance,tsteward/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,bdaroz/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,josephbisch/the-blue-alliance,nwalters512/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,bvisness/the-blue-alliance,verycumbersome/the-blue-alliance,bvisness/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,bvisness/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance
|
5c40cbfcb89649738945eda02c1bfb804e2ecdae
|
us_ignite/mailinglist/views.py
|
us_ignite/mailinglist/views.py
|
import hashlib
import mailchimp
from django.contrib import messages
from django.conf import settings
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from us_ignite.mailinglist.forms import EmailForm
def subscribe_email(email):
master = mailchimp.Mailchimp(settings.MAILCHIMP_API_KEY)
mailing_list = mailchimp.Lists(master)
uid = hashlib.md5(email).hexdigest()
email_data = {
'email': email,
'euid': uid,
'leid': uid,
}
return mailing_list.subscribe(
settings.MAILCHIMP_LIST, email_data)
def mailing_subscribe(request):
"""Handles MailChimp email registration."""
if request.method == 'POST':
form = EmailForm(request.POST)
if form.is_valid():
try:
subscribe_email(form.cleaned_data['email'])
messages.success(request, 'Successfully subscribed.')
redirect_to = 'home'
except mailchimp.ListAlreadySubscribedError:
messages.error(request, 'Already subscribed.')
redirect_to = 'mailing_subscribe'
return redirect(redirect_to)
else:
form = EmailForm()
context = {
'form': form,
}
return TemplateResponse(request, 'mailinglist/form.html', context)
|
import hashlib
import logging
import mailchimp
from django.contrib import messages
from django.conf import settings
from django.shortcuts import redirect
from django.template.response import TemplateResponse
from us_ignite.mailinglist.forms import EmailForm
logger = logging.getLogger('us_ignite.mailinglist.views')
def subscribe_email(email):
master = mailchimp.Mailchimp(settings.MAILCHIMP_API_KEY)
mailing_list = mailchimp.Lists(master)
uid = hashlib.md5(email).hexdigest()
email_data = {
'email': email,
'euid': uid,
'leid': uid,
}
return mailing_list.subscribe(
settings.MAILCHIMP_LIST, email_data)
def mailing_subscribe(request):
"""Handles MailChimp email registration."""
if request.method == 'POST':
form = EmailForm(request.POST)
if form.is_valid():
try:
subscribe_email(form.cleaned_data['email'])
messages.success(request, 'Successfully subscribed.')
redirect_to = 'home'
except mailchimp.ListAlreadySubscribedError:
messages.error(request, 'Already subscribed.')
redirect_to = 'mailing_subscribe'
except Exception, e:
logger.exception(e)
msg = (u'There is a problem with the maling list. '
'Please try again later.')
messages.error(request, msg)
redirect_to = 'mailing_subscribe'
return redirect(redirect_to)
else:
form = EmailForm()
context = {
'form': form,
}
return TemplateResponse(request, 'mailinglist/form.html', context)
|
Improve handling of errors during mailing list subscription.
|
Improve handling of errors during mailing list subscription.
https://github.com/madewithbytes/us_ignite/issues/209
Any exception thrown by the mailchimp component will
be handled gracefully and logged.
|
Python
|
bsd-3-clause
|
us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite
|
5a82f76e3e95268fb1bbb297faa43e7f7cb59058
|
tests/perf_concrete_execution.py
|
tests/perf_concrete_execution.py
|
# Performance tests on concrete code execution without invoking Unicorn engine
import os
import time
import logging
import angr
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests'))
def test_tight_loop(arch):
b = angr.Project(os.path.join(test_location, arch, "perf_tight_loops"), auto_load_libs=False)
simgr = b.factory.simgr()
# logging.getLogger('angr.sim_manager').setLevel(logging.INFO)
start = time.time()
simgr.explore()
elapsed = time.time() - start
print("Elapsed %f sec" % elapsed)
print(simgr)
if __name__ == "__main__":
test_tight_loop("x86_64")
|
# Performance tests on concrete code execution without invoking Unicorn engine
import os
import time
import logging
import angr
test_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', '..', 'binaries', 'tests'))
def test_tight_loop(arch):
b = angr.Project(os.path.join(test_location, arch, "perf_tight_loops"), auto_load_libs=False)
state = b.factory.full_init_state(plugins={'registers': angr.state_plugins.SimLightRegisters()},
remove_options={angr.sim_options.COPY_STATES})
simgr = b.factory.simgr(state)
# logging.getLogger('angr.sim_manager').setLevel(logging.INFO)
start = time.time()
simgr.explore()
elapsed = time.time() - start
print("Elapsed %f sec" % elapsed)
print(simgr)
if __name__ == "__main__":
test_tight_loop("x86_64")
|
Enable SimLightRegisters and remove COPY_STATES for the performance test case.
|
Enable SimLightRegisters and remove COPY_STATES for the performance test case.
|
Python
|
bsd-2-clause
|
angr/angr,schieb/angr,schieb/angr,iamahuman/angr,schieb/angr,iamahuman/angr,angr/angr,iamahuman/angr,angr/angr
|
db981f7616283992fd1d17a3b1bf7d300b8ee34f
|
proper_parens.py
|
proper_parens.py
|
#!/usr/bin/env python
from __future__ import unicode_literals
<<<<<<< HEAD
def check_statement1(value):
output = 0
while output >= 0:
for item in value:
if item == ")":
output -= 1
if output == -1:
return -1
elif item == "(":
output += 1
if output == 0:
return 0
elif output > 1:
return 1
=======
def check_statement(value):
''' Return 1, 0, or -1 if input is open, balanced, or broken. '''
output = 0
index = 0
while index < len(value) and output >= 0:
# If the count is ever < 0, statement must be -1 (broken), end loop
# If the index is out of range, end loop
if value[index] == ")":
# Subtract 1 for every close paren
output -= 1
elif value[index] == "(":
# Add 1 for every close paren
output += 1
index += 1
if output == -1:
# Check if output is -1 (broken)
return output
elif not output:
# Check if output is 0 (balanced)
return output
else:
# Must be 1 (open) if it makes it to here
return 1
>>>>>>> 74dee1d09fdc09f93af3d15286336d7face4ba08
|
#!/usr/bin/env python
from __future__ import unicode_literals
def check_statement(value):
''' Return 1, 0, or -1 if input is open, balanced, or broken. '''
output = 0
index = 0
while index < len(value) and output >= 0:
# If the count is ever < 0, statement must be -1 (broken), end loop
# If the index is out of range, end loop
if value[index] == ")":
# Subtract 1 for every close paren
output -= 1
elif value[index] == "(":
# Add 1 for every close paren
output += 1
index += 1
if output == -1:
# Check if output is -1 (broken)
return output
elif not output:
# Check if output is 0 (balanced)
return output
else:
# Must be 1 (open) if it makes it to here
return 1
|
Fix proper parens merge conflict
|
Fix proper parens merge conflict
|
Python
|
mit
|
constanthatz/data-structures
|
075b8ba1813360720fc8933dc5e167f92b4e3aaf
|
python/epidb/client/client.py
|
python/epidb/client/client.py
|
import urllib
__version__ = '0.0~20090901.1'
__user_agent__ = 'EpiDBClient v%s/python' % __version__
class EpiDBClientOpener(urllib.FancyURLopener):
version = __user_agent__
class EpiDBClient:
version = __version__
user_agent = __user_agent__
server = 'https://egg.science.uva.nl:7443'
path_survey = '/survey/'
def __init__(self, api_key=None):
self.api_key = api_key
def __epidb_call(self, url, param):
data = urllib.urlencode(param)
opener = EpiDBClientOpener()
sock = opener.open(url, data)
res = sock.read()
sock.close()
return res
def survey_submit(self, data):
param = {
'data': data
}
url = self.server + self.path_survey
res = self.__epidb_call(url, param)
return res
|
import urllib
import urllib2
__version__ = '0.0~20090901.1'
__user_agent__ = 'EpiDBClient v%s/python' % __version__
class EpiDBClient:
version = __version__
user_agent = __user_agent__
server = 'https://egg.science.uva.nl:7443'
path_survey = '/survey/'
def __init__(self, api_key=None):
self.api_key = api_key
def __epidb_call(self, url, param):
data = urllib.urlencode(param)
req = urllib2.Request(url)
req.add_header('User-Agent', self.user_agent)
if self.api_key:
req.add_header('Cookie', 'epidb-apikey=%s' % self.api_key)
sock = urllib2.urlopen(req, data)
res = sock.read()
sock.close()
return res
def survey_submit(self, data):
param = {
'data': data
}
url = self.server + self.path_survey
res = self.__epidb_call(url, param)
return res
|
Send api-key through HTTP cookie.
|
[python] Send api-key through HTTP cookie.
|
Python
|
agpl-3.0
|
ISIFoundation/influenzanet-epidb-client
|
d3933d58b2ebcb0fb0c6301344335ae018973774
|
n_pair_mc_loss.py
|
n_pair_mc_loss.py
|
from chainer import cuda
from chainer.functions import matmul
from chainer.functions import transpose
from chainer.functions import softmax_cross_entropy
from chainer.functions import batch_l2_norm_squared
def n_pair_mc_loss(f, f_p, l2_reg):
"""Multi-class N-pair loss (N-pair-mc loss) function.
Args:
f (~chainer.Variable): Feature vectors.
All examples must be different classes each other.
f_p (~chainer.Variable): Positive examples corresponding to f.
Each example must be the same class for each example in f.
l2_reg (~float): A weight of L2 regularization for feature vectors.
Returns:
~chainer.Variable: Loss value.
See: `Improved Deep Metric Learning with Multi-class N-pair Loss \
Objective <https://papers.nips.cc/paper/6200-improved-deep-metric-\
learning-with-multi-class-n-pair-loss-objective>`_
"""
logit = matmul(f, transpose(f_p))
N = len(logit.data)
xp = cuda.get_array_module(logit.data)
loss_sce = softmax_cross_entropy(logit, xp.arange(N))
l2_loss = sum(batch_l2_norm_squared(f) + batch_l2_norm_squared(f_p))
loss = loss_sce + l2_reg * l2_loss
return loss
|
from chainer import cuda
from chainer.functions import matmul
from chainer.functions import transpose
from chainer.functions import softmax_cross_entropy
from chainer.functions import batch_l2_norm_squared
def n_pair_mc_loss(f, f_p, l2_reg):
"""Multi-class N-pair loss (N-pair-mc loss) function.
Args:
f (~chainer.Variable): Feature vectors.
All examples must be different classes each other.
f_p (~chainer.Variable): Positive examples corresponding to f.
Each example must be the same class for each example in f.
l2_reg (~float): A weight of L2 regularization for feature vectors.
Returns:
~chainer.Variable: Loss value.
See: `Improved Deep Metric Learning with Multi-class N-pair Loss \
Objective <https://papers.nips.cc/paper/6200-improved-deep-metric-\
learning-with-multi-class-n-pair-loss-objective>`_
"""
logit = matmul(f, transpose(f_p))
N = len(logit.data)
xp = cuda.get_array_module(logit.data)
loss_sce = softmax_cross_entropy(logit, xp.arange(N))
l2_loss = sum(batch_l2_norm_squared(f) +
batch_l2_norm_squared(f_p)) / (2.0 * N)
loss = loss_sce + l2_reg * l2_loss
return loss
|
Modify to average the L2 norm loss of output vectors
|
Modify to average the L2 norm loss of output vectors
|
Python
|
mit
|
ronekko/deep_metric_learning
|
a094d29959243777fad47ea38b4497d891b9990e
|
data/data/models.py
|
data/data/models.py
|
from django.db import models
from uuid import uuid4
import hashlib
def _get_rand_hash():
uid = uuid4()
return hashlib.sha1(str(uid)).hexdigest()
def generate_token_secret():
return _get_rand_hash(), _get_rand_hash()
class User(models.Model):
username = models.CharField(max_length=200, unique=True)
password = models.CharField(max_length=200)
token = models.CharField(max_length=200, blank=True)
secret = models.CharField(max_length=200, blank=True)
def __unicode__(self):
return self.username
def save(self, *args, **kwargs):
if not self.token:
self.token, self.secret = generate_token_secret()
return super(User, self).save(*args, **kwargs)
|
from django.db import models
from uuid import uuid4
import hashlib
def get_rand_hash():
uid = uuid4()
return hashlib.sha1(str(uid)).hexdigest()
class User(models.Model):
username = models.CharField(max_length=200, unique=True)
password = models.CharField(max_length=200)
token = models.CharField(max_length=200, default=get_rand_hash)
secret = models.CharField(max_length=200, default=get_rand_hash)
def __unicode__(self):
return self.username
|
Set token and secret by default
|
Set token and secret by default
|
Python
|
bsd-2-clause
|
honza/oauth-service,honza/oauth-service
|
d96b07d529ea7ced5cbe5f5accaa84485e14395a
|
Lib/test/test_tk.py
|
Lib/test/test_tk.py
|
from test import support
# Skip test if _tkinter wasn't built.
support.import_module('_tkinter')
import tkinter
from tkinter.test import runtktests
import unittest
import tkinter
try:
tkinter.Button()
except tkinter.TclError as msg:
# assuming tk is not available
raise unittest.SkipTest("tk not available: %s" % msg)
def test_main(enable_gui=False):
if enable_gui:
if support.use_resources is None:
support.use_resources = ['gui']
elif 'gui' not in support.use_resources:
support.use_resources.append('gui')
support.run_unittest(
*runtktests.get_tests(text=False, packages=['test_tkinter']))
if __name__ == '__main__':
test_main(enable_gui=True)
|
from test import support
# Skip test if _tkinter wasn't built.
support.import_module('_tkinter')
import tkinter
from tkinter.test import runtktests
import unittest
try:
tkinter.Button()
except tkinter.TclError as msg:
# assuming tk is not available
raise unittest.SkipTest("tk not available: %s" % msg)
def test_main(enable_gui=False):
if enable_gui:
if support.use_resources is None:
support.use_resources = ['gui']
elif 'gui' not in support.use_resources:
support.use_resources.append('gui')
support.run_unittest(
*runtktests.get_tests(text=False, packages=['test_tkinter']))
if __name__ == '__main__':
test_main(enable_gui=True)
|
Remove redundant import of tkinter.
|
Remove redundant import of tkinter.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
4d4de16969439c71f0e9e15b32b26bd4b7310e8f
|
Simulated_import.py
|
Simulated_import.py
|
#!/usr/bin/env python
# We're going to simulate how evolution_master should import things
from genes import golang
from genes import web_cli
# etc...
|
#!/usr/bin/env python
# We're going to simulate how evolution_master should import things
from genes import docker
from genes import java
# etc...
|
Change simulated around for existing modules
|
Change simulated around for existing modules
|
Python
|
mit
|
hatchery/Genepool2,hatchery/genepool
|
f29377a4f7208d75490e550a732a24cb6f471f18
|
linked_list.py
|
linked_list.py
|
# -*- coding: utf-8 -*-
class Node(object):
""" """
def __init__(self, value, pointer=None):
self.value = value
self.pointer = pointer
class LinkedList(object):
""" """
def __init__(self):
self.length = 0
self.header = None
def push(self, value):
temp_node = self.header
new_node = Node(value, temp_node)
self.header = new_node
# self.set_init_list(*value)
# def set_init_list(self, *values):
# for value in values:
# self.length += 1
|
# -*- coding: utf-8 -*-
class Node(object):
""" """
def __init__(self, value, pointer=None):
self.value = value
self.pointer = pointer
class LinkedList(object):
""" """
def __init__(self):
self.length = 0
self.header = None
def __len__(self):
return self.length
def push(self, value):
temp_node = self.header
new_node = Node(value, temp_node)
self.header = new_node
self.length += 1
def size(self):
return len(self)
# self.set_init_list(*value)
# def set_init_list(self, *values):
# for value in values:
# self.length += 1
|
Add size and len finctions.
|
Add size and len finctions.
|
Python
|
mit
|
jefferyrayrussell/data-structures
|
3c1523f2fc3fec918f451a7dc361be9eb631524c
|
serrano/urls.py
|
serrano/urls.py
|
from django.conf.urls import patterns, url, include
urlpatterns = patterns('',
url(r'', include(patterns('',
url(r'^$', include('serrano.resources')),
url(r'^fields/', include('serrano.resources.field')),
url(r'^concepts/', include('serrano.resources.concept')),
url(r'^contexts/', include('serrano.resources.context', namespace='contexts')),
url(r'^queries/', include('serrano.resources.query', namespace='queries')),
url(r'^views/', include('serrano.resources.view', namespace='views')),
url(r'^data/', include(patterns('',
url(r'^export/', include('serrano.resources.exporter')),
url(r'^preview/', include('serrano.resources.preview')),
), namespace='data')),
), namespace='serrano')),
)
|
import time
from django.conf.urls import patterns, url, include
urlpatterns = patterns('',
url(r'', include(patterns('',
url(r'^$', include('serrano.resources')),
url(r'^fields/', include('serrano.resources.field')),
url(r'^concepts/', include('serrano.resources.concept')),
url(r'^contexts/', include('serrano.resources.context', namespace='contexts')),
url(r'^queries/', include('serrano.resources.query', namespace='queries')),
url(r'^views/', include('serrano.resources.view', namespace='views')),
url(r'^data/', include(patterns('',
url(r'^export/', include('serrano.resources.exporter')),
url(r'^preview/', include('serrano.resources.preview')),
), namespace='data')),
), namespace='serrano')),
)
|
Insert unused import to test pyflakes in travis
|
Insert unused import to test pyflakes in travis
|
Python
|
bsd-2-clause
|
chop-dbhi/serrano,rv816/serrano_night,rv816/serrano_night,chop-dbhi/serrano
|
06e858fc86f8f34ccae521cb269c959569f53f97
|
script/sample/submitpython.py
|
script/sample/submitpython.py
|
#!/usr/bin/env python
from __future__ import print_function
import multyvac
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url='http://docker:8000/v1')
def add(a, b):
return a + b
jid = multyvac.submit(add, 3, 4)
result = multyvac.get(jid).get_result()
print("result = {}".format(result))
|
#!/usr/bin/env python
# CLOUDPIPE_URL=http://`echo $DOCKER_HOST | cut -d ":" -f2 | tr -d "/"`:8000/v1 python2 script/sample/submitpython.py
from __future__ import print_function
import multyvac
import os
# Grab from the CLOUDPIPE_URL environment variable, otherwise assume they have
# /etc/hosts configured to point to their docker
api_url = os.environ.get('CLOUDPIPE_URL', 'http://docker:8000/v1')
multyvac.config.set_key(api_key='admin', api_secret_key='12345', api_url=api_url)
def add(a, b):
return a + b
jid = multyvac.submit(add, 3, 4)
result = multyvac.get(jid).get_result()
print("added {} and {} to get {}... in the cloud!".format(3,4,result))
|
Allow api_url in the script to be configurable
|
Allow api_url in the script to be configurable
|
Python
|
bsd-3-clause
|
cloudpipe/cloudpipe,cloudpipe/cloudpipe,cloudpipe/cloudpipe
|
1510d5657e6084cb94de514d1ba05e3b30f0ce5f
|
tools/python/frame_processor/frame_processor.py
|
tools/python/frame_processor/frame_processor.py
|
from frame_receiver.ipc_channel import IpcChannel, IpcChannelException
from frame_receiver.ipc_message import IpcMessage, IpcMessageException
from frame_processor_config import FrameProcessorConfig
import time
class FrameProcessor(object):
def __init__(self):
# Instantiate a configuration container object, which will be populated
# with sensible default values
self.config = FrameProcessorConfig("FrameProcessor - test harness to simulate operation of FrameProcessor application")
# Create the appropriate IPC channels
self.ctrl_channel = IpcChannel(IpcChannel.CHANNEL_TYPE_REQ)
def run(self):
self.ctrl_channel.connect(self.config.ctrl_endpoint)
for i in range(10):
#msg = "HELLO " + str(i)
msg = IpcMessage(msg_type='cmd', msg_val='status')
print "Sending message", msg
self.ctrl_channel.send(msg.encode())
reply = self.ctrl_channel.recv()
print "Got reply:", reply
time.sleep(0.01)
if __name__ == "__main__":
fp = FrameProcessor()
fp.run()
|
from frame_receiver.ipc_channel import IpcChannel, IpcChannelException
from frame_receiver.ipc_message import IpcMessage, IpcMessageException
from frame_processor_config import FrameProcessorConfig
import time
class FrameProcessor(object):
def __init__(self):
# Instantiate a configuration container object, which will be populated
# with sensible default values
self.config = FrameProcessorConfig("FrameProcessor - test harness to simulate operation of FrameProcessor application")
# Create the appropriate IPC channels
self.ctrl_channel = IpcChannel(IpcChannel.CHANNEL_TYPE_REQ)
def run(self):
self.ctrl_channel.connect(self.config.ctrl_endpoint)
for i in range(10):
msg = "HELLO " + str(i)
print "Sending message", msg
self.ctrl_channel.send(msg)
reply = self.ctrl_channel.recv()
print "Got reply:", reply
time.sleep(1)
if __name__ == "__main__":
fp = FrameProcessor()
fp.run()
|
Revert "Update python frame processor test harness to send IPC JSON messages to frame receiver for testing of control path and channel multiplexing"
|
Revert "Update python frame processor test harness to send IPC JSON messages to frame receiver for testing of control path and channel multiplexing"
This reverts commit 0e301d3ee54366187b2e12fa5c6927f27e907347.
|
Python
|
apache-2.0
|
odin-detector/odin-data,odin-detector/odin-data,odin-detector/odin-data,percival-detector/odin-data,odin-detector/odin-data,odin-detector/odin-data,odin-detector/odin-data,percival-detector/odin-data,percival-detector/odin-data,percival-detector/odin-data,odin-detector/odin-data,percival-detector/odin-data,percival-detector/odin-data,percival-detector/odin-data
|
6f995fbe0532fc4ea36f3f7cd24240d3525b115f
|
Ref.py
|
Ref.py
|
"""
MoinMoin - Ref Macro
Collect and emit footnotes. Note that currently footnote
text cannot contain wiki markup.
@copyright: 2011 Jason L. Wright <jason@thought.net>
@license: BSD
"""
from MoinMoin import config, wikiutil
from MoinMoin.parser.text_moin_wiki import Parser as WikiParser
from MoinMoin.support.python_compatibility import hash_new
import MoinMoin.macro.FootNote as FootNote
import MoinMoin.macro.RefText as RefText
Dependencies = ["time"] # footnote macro cannot be cached
def execute(macro, args):
request = macro.request
formatter = macro.formatter
txt = RefText.execute(macro, args)
return FootNote.execute(macro, txt)
|
"""
MoinMoin - Ref Macro
Collect and emit references (as footnotes)
@copyright: 2011 Jason L. Wright <jason@thought.net>
@license: BSD
"""
from MoinMoin import config, wikiutil
from MoinMoin.parser.text_moin_wiki import Parser as WikiParser
from MoinMoin.support.python_compatibility import hash_new
import MoinMoin.macro.FootNote as FootNote
import MoinMoin.macro.RefText as RefText
Dependencies = ["time"] # footnote macro cannot be cached
def execute(macro, args):
request = macro.request
formatter = macro.formatter
txt = RefText.execute(macro, args)
return FootNote.execute(macro, txt)
|
Update text to reflect current usage
|
Update text to reflect current usage
|
Python
|
bsd-2-clause
|
wrigjl/moin-ref-bibtex
|
7f44c6a114f95c25b533c9b69988798ba3919d68
|
wger/email/forms.py
|
wger/email/forms.py
|
# -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.utils.translation import (
pgettext,
ugettext_lazy as _
)
from django.forms import (
Form,
CharField,
Textarea
)
class EmailListForm(Form):
'''
Small form to send emails
'''
subject = CharField(label=pgettext('Subject', 'As in "email subject"'))
body = CharField(widget=Textarea, label=pgettext('Content', 'As in "content of an email"'))
|
# -*- coding: utf-8 -*-
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.utils.translation import (
pgettext,
ugettext_lazy as _
)
from django.forms import (
Form,
CharField,
Textarea
)
class EmailListForm(Form):
'''
Small form to send emails
'''
subject = CharField(label=pgettext('As in "email subject"', 'Subject'))
body = CharField(widget=Textarea, label=pgettext('As in "content of an email"', 'Content'))
|
Use correct order of arguments of pgettext
|
Use correct order of arguments of pgettext
|
Python
|
agpl-3.0
|
rolandgeider/wger,rolandgeider/wger,wger-project/wger,DeveloperMal/wger,DeveloperMal/wger,wger-project/wger,rolandgeider/wger,kjagoo/wger_stark,petervanderdoes/wger,rolandgeider/wger,petervanderdoes/wger,wger-project/wger,wger-project/wger,petervanderdoes/wger,DeveloperMal/wger,kjagoo/wger_stark,kjagoo/wger_stark,petervanderdoes/wger,kjagoo/wger_stark,DeveloperMal/wger
|
47bf5652c621da89a72597b8198fbfde84c2049c
|
healthfun/person/models.py
|
healthfun/person/models.py
|
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Person(models.Model):
first_name = models.CharField(verbose_name=_(u"First Name"), max_length=75, blank=True)
last_name = models.CharField(verbose_name=_(u"Last Name"), max_length=75, blank=True)
height = models.IntegerField(blank=True)
email = models.EmailField()
|
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.translation import ugettext_lazy as _
class Person(models.Model):
first_name = models.CharField(verbose_name=_(u"First Name"), max_length=75, blank=True)
last_name = models.CharField(verbose_name=_(u"Last Name"), max_length=75, blank=True)
height = models.IntegerField(blank=True)
email = models.EmailField()
def __unicode__ (self):
return self.email
|
Use email to 'print' a person
|
Use email to 'print' a person
|
Python
|
agpl-3.0
|
frlan/healthfun
|
c24a7287d0ac540d6ef6dcf353b06ee42aaa7a43
|
serrano/decorators.py
|
serrano/decorators.py
|
from functools import wraps
from django.conf import settings
from django.http import HttpResponse
from django.contrib.auth import authenticate, login
def get_token(request):
return request.REQUEST.get('token', '')
def check_auth(func):
@wraps(func)
def inner(self, request, *args, **kwargs):
auth_required = getattr(settings, 'SERRANO_AUTH_REQUIRED', False)
user = getattr(request, 'user', None)
# Attempt to authenticate if a token is present
if not user or not user.is_authenticated():
token = get_token(request)
user = authenticate(token=token)
if user:
login(request, user)
elif auth_required:
return HttpResponse(status=401)
return func(self, request, *args, **kwargs)
return inner
|
from functools import wraps
from django.conf import settings
from django.http import HttpResponse
from django.contrib.auth import authenticate, login
def get_token(request):
"Attempts to retrieve a token from the request."
if 'token' in request.REQUEST:
return request.REQUEST['token']
if 'HTTP_API_TOKEN' in request.META:
return request.META['HTTP_API_TOKEN']
return ''
def check_auth(func):
@wraps(func)
def inner(self, request, *args, **kwargs):
auth_required = getattr(settings, 'SERRANO_AUTH_REQUIRED', False)
user = getattr(request, 'user', None)
# Attempt to authenticate if a token is present
if not user or not user.is_authenticated():
token = get_token(request)
user = authenticate(token=token)
if user:
login(request, user)
elif auth_required:
return HttpResponse(status=401)
return func(self, request, *args, **kwargs)
return inner
|
Add support for extracting the token from request headers
|
Add support for extracting the token from request headers
Clients can now set the `Api-Token` header instead of supplying the
token as a GET or POST parameter.
|
Python
|
bsd-2-clause
|
chop-dbhi/serrano,rv816/serrano_night,rv816/serrano_night,chop-dbhi/serrano
|
cd5bfa0fb09835e4e33236ec4292a16ed5556088
|
tests/parser.py
|
tests/parser.py
|
from spec import Spec, skip, ok_, eq_, raises
from invoke.parser import Parser, Context, Argument
from invoke.collection import Collection
class Parser_(Spec):
def has_and_requires_initial_context(self):
c = Context()
p = Parser(initial=c)
eq_(p.initial, c)
def may_also_take_additional_contexts(self):
c1 = Context('foo')
c2 = Context('bar')
p = Parser(initial=Context(), contexts=[c1, c2])
eq_(p.contexts['foo'], c1)
eq_(p.contexts['bar'], c2)
@raises(ValueError)
def raises_ValueError_for_unnamed_Contexts_in_contexts(self):
Parser(initial=Context(), contexts=[Context()])
class parse_argv:
def parses_sys_argv_style_list_of_strings(self):
"parses sys.argv-style list of strings"
# Doesn't-blow-up tests FTL
mytask = Context(name='mytask')
mytask.add_arg('--arg')
p = Parser(contexts=[mytask])
p.parse_argv(['mytask', '--arg'])
def returns_ordered_list_of_tasks_and_their_args(self):
skip()
def returns_remainder(self):
"returns -- style remainder string chunk"
skip()
|
from spec import Spec, skip, ok_, eq_, raises
from invoke.parser import Parser, Context, Argument
from invoke.collection import Collection
class Parser_(Spec):
def can_take_initial_context(self):
c = Context()
p = Parser(initial=c)
eq_(p.initial, c)
def can_take_initial_and_other_contexts(self):
c1 = Context('foo')
c2 = Context('bar')
p = Parser(initial=Context(), contexts=[c1, c2])
eq_(p.contexts['foo'], c1)
eq_(p.contexts['bar'], c2)
def can_take_just_other_contexts(self):
c = Context('foo')
p = Parser(contexts=[c])
eq_(p.contexts['foo'], c)
@raises(ValueError)
def raises_ValueError_for_unnamed_Contexts_in_contexts(self):
Parser(initial=Context(), contexts=[Context()])
class parse_argv:
def parses_sys_argv_style_list_of_strings(self):
"parses sys.argv-style list of strings"
# Doesn't-blow-up tests FTL
mytask = Context(name='mytask')
mytask.add_arg('--arg')
p = Parser(contexts=[mytask])
p.parse_argv(['mytask', '--arg'])
def returns_ordered_list_of_tasks_and_their_args(self):
skip()
def returns_remainder(self):
"returns -- style remainder string chunk"
skip()
|
Update tests to explicitly account for previous
|
Update tests to explicitly account for previous
|
Python
|
bsd-2-clause
|
mattrobenolt/invoke,frol/invoke,sophacles/invoke,pyinvoke/invoke,tyewang/invoke,frol/invoke,mattrobenolt/invoke,pfmoore/invoke,singingwolfboy/invoke,kejbaly2/invoke,pfmoore/invoke,pyinvoke/invoke,mkusz/invoke,alex/invoke,mkusz/invoke,kejbaly2/invoke
|
d01b09256f8fda4b222f3e26366817f4ac5b4c5a
|
zinnia/tests/test_admin_forms.py
|
zinnia/tests/test_admin_forms.py
|
"""Test cases for Zinnia's admin forms"""
from django.test import TestCase
from django.contrib.admin.widgets import RelatedFieldWidgetWrapper
from zinnia.models import Category
from zinnia.admin.forms import EntryAdminForm
from zinnia.admin.forms import CategoryAdminForm
class EntryAdminFormTestCase(TestCase):
def test_categories_has_related_widget(self):
form = EntryAdminForm()
self.assertTrue(
isinstance(form.fields['categories'].widget,
RelatedFieldWidgetWrapper))
def test_initial_sites(self):
form = EntryAdminForm()
self.assertEqual(
len(form.fields['sites'].initial), 1)
class CategoryAdminFormTestCase(TestCase):
def test_parent_has_related_widget(self):
form = CategoryAdminForm()
self.assertTrue(
isinstance(form.fields['parent'].widget,
RelatedFieldWidgetWrapper))
def test_clean_parent(self):
category = Category.objects.create(
title='Category 1', slug='cat-1')
datas = {'parent': category.pk,
'title': category.title,
'slug': category.slug}
form = CategoryAdminForm(datas, instance=category)
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors['parent']), 1)
subcategory = Category.objects.create(
title='Category 2', slug='cat-2')
self.assertEqual(subcategory.parent, None)
datas = {'parent': category.pk,
'title': subcategory.title,
'slug': subcategory.slug}
form = CategoryAdminForm(datas, instance=subcategory)
self.assertTrue(form.is_valid())
|
"""Test cases for Zinnia's admin forms"""
from django.test import TestCase
from django.contrib.admin.widgets import RelatedFieldWidgetWrapper
from zinnia.models import Category
from zinnia.admin.forms import EntryAdminForm
from zinnia.admin.forms import CategoryAdminForm
class EntryAdminFormTestCase(TestCase):
def test_categories_has_related_widget(self):
form = EntryAdminForm()
self.assertTrue(
isinstance(form.fields['categories'].widget,
RelatedFieldWidgetWrapper))
class CategoryAdminFormTestCase(TestCase):
def test_parent_has_related_widget(self):
form = CategoryAdminForm()
self.assertTrue(
isinstance(form.fields['parent'].widget,
RelatedFieldWidgetWrapper))
def test_clean_parent(self):
category = Category.objects.create(
title='Category 1', slug='cat-1')
datas = {'parent': category.pk,
'title': category.title,
'slug': category.slug}
form = CategoryAdminForm(datas, instance=category)
self.assertFalse(form.is_valid())
self.assertEqual(len(form.errors['parent']), 1)
subcategory = Category.objects.create(
title='Category 2', slug='cat-2')
self.assertEqual(subcategory.parent, None)
datas = {'parent': category.pk,
'title': subcategory.title,
'slug': subcategory.slug}
form = CategoryAdminForm(datas, instance=subcategory)
self.assertTrue(form.is_valid())
|
Remove now useless test for initial sites value in form
|
Remove now useless test for initial sites value in form
|
Python
|
bsd-3-clause
|
extertioner/django-blog-zinnia,Maplecroft/django-blog-zinnia,Zopieux/django-blog-zinnia,ghachey/django-blog-zinnia,dapeng0802/django-blog-zinnia,bywbilly/django-blog-zinnia,dapeng0802/django-blog-zinnia,Zopieux/django-blog-zinnia,aorzh/django-blog-zinnia,Zopieux/django-blog-zinnia,bywbilly/django-blog-zinnia,aorzh/django-blog-zinnia,aorzh/django-blog-zinnia,extertioner/django-blog-zinnia,ZuluPro/django-blog-zinnia,petecummings/django-blog-zinnia,Fantomas42/django-blog-zinnia,marctc/django-blog-zinnia,petecummings/django-blog-zinnia,ZuluPro/django-blog-zinnia,ZuluPro/django-blog-zinnia,Fantomas42/django-blog-zinnia,ghachey/django-blog-zinnia,Maplecroft/django-blog-zinnia,petecummings/django-blog-zinnia,marctc/django-blog-zinnia,bywbilly/django-blog-zinnia,extertioner/django-blog-zinnia,Maplecroft/django-blog-zinnia,Fantomas42/django-blog-zinnia,ghachey/django-blog-zinnia,dapeng0802/django-blog-zinnia,marctc/django-blog-zinnia
|
f096225138afff2a722b1b019eb94e14f8d18fc3
|
sutro/dispatcher.py
|
sutro/dispatcher.py
|
import random
import gevent.queue
class MessageDispatcher(object):
def __init__(self, stats):
self.consumers = {}
self.stats = stats
def get_connection_count(self):
return sum(len(sockets) for sockets in self.consumers.itervalues())
def on_message_received(self, namespace, message):
consumers = self.consumers.get(namespace, [])
with self.stats.timer("sutro.dispatch"):
for consumer in consumers:
consumer.put(message)
def listen(self, namespace, max_timeout):
queue = gevent.queue.Queue()
self.consumers.setdefault(namespace, []).append(queue)
try:
while True:
# jitter the timeout a bit to ensure we don't herd
timeout = max_timeout - random.uniform(0, max_timeout / 2)
try:
yield queue.get(block=True, timeout=timeout)
except gevent.queue.Empty:
yield None
# ensure we're not starving others by spinning
gevent.sleep()
finally:
self.consumers[namespace].remove(queue)
if not self.consumers[namespace]:
del self.consumers[namespace]
|
import posixpath
import random
import gevent.queue
def _walk_namespace_hierarchy(namespace):
assert namespace.startswith("/")
yield namespace
while namespace != "/":
namespace = posixpath.dirname(namespace)
yield namespace
class MessageDispatcher(object):
    """Fan pub/sub messages out to gevent queues, one queue per socket.

    A listener is registered under its own namespace and every parent
    namespace, so a message sent to "/live" also reaches a "/live/test"
    listener.
    """

    def __init__(self, stats):
        # namespace -> list of gevent queues; `stats` only needs a .timer() API.
        self.consumers = {}
        self.stats = stats

    def get_connection_count(self):
        """Return the total queue registrations across all namespaces.

        NOTE(review): each socket is registered once per ancestor namespace,
        so this counts a socket multiple times -- confirm that is intended.
        """
        # NOTE: itervalues() is Python 2 only.
        return sum(len(sockets) for sockets in self.consumers.itervalues())

    def on_message_received(self, namespace, message):
        """Deliver `message` to every consumer registered for `namespace`."""
        consumers = self.consumers.get(namespace, [])
        with self.stats.timer("sutro.dispatch"):
            for consumer in consumers:
                consumer.put(message)

    def listen(self, namespace, max_timeout):
        """Generator yielding messages for `namespace` and its ancestors.

        Yields None when the (jittered) timeout elapses without a message.
        All registrations are removed in the `finally` clause when the
        generator is closed.
        """
        queue = gevent.queue.Queue()

        namespace = namespace.rstrip("/")
        for ns in _walk_namespace_hierarchy(namespace):
            self.consumers.setdefault(ns, []).append(queue)

        try:
            while True:
                # jitter the timeout a bit to ensure we don't herd
                timeout = max_timeout - random.uniform(0, max_timeout / 2)
                try:
                    yield queue.get(block=True, timeout=timeout)
                except gevent.queue.Empty:
                    yield None
                # ensure we're not starving others by spinning
                gevent.sleep()
        finally:
            for ns in _walk_namespace_hierarchy(namespace):
                self.consumers[ns].remove(queue)
                if not self.consumers[ns]:
                    del self.consumers[ns]
|
Make sockets listen to parent namespaces as well.
|
Make sockets listen to parent namespaces as well.
For example, /live/test will now receive messages destined for
/live/test, /live and /. This allows us to send messages to multiple
endpoints at once such as refreshing all liveupdate threads or the like.
|
Python
|
bsd-3-clause
|
spladug/sutro,spladug/sutro
|
f4ee715a5bd6ea979cf09fb847a861f621d42c7b
|
CFC_WebApp/utils/update_client.py
|
CFC_WebApp/utils/update_client.py
|
from dao.client import Client
import sys
def update_entry(clientName, createKeyOpt):
newKey = Client(clientName).update(createKey = createKeyOpt)
print "%s: %s" % (clientName, newKey)
if __name__ == '__main__':
# Deal with getopt here to support -a
update_entry(sys.argv[1], bool(sys.argv[2]))
|
from dao.client import Client
import sys
def update_entry(clientName, createKeyOpt):
print "createKeyOpt = %s" % createKeyOpt
newKey = Client(clientName).update(createKey = createKeyOpt)
print "%s: %s" % (clientName, newKey)
if __name__ == '__main__':
# Deal with getopt here to support -a
update_entry(sys.argv[1], sys.argv[2] == "true" or sys.argv[2] == "True")
|
Fix stupid bug in the client creation script
|
Fix stupid bug in the client creation script
bool(String) doesn't actually convert a string to a boolean.
So we were creating a new ID even if the user typed in "False".
Fixed with a simple boolean check instead.
|
Python
|
bsd-3-clause
|
sunil07t/e-mission-server,e-mission/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server,joshzarrabi/e-mission-server,joshzarrabi/e-mission-server,e-mission/e-mission-server,yw374cornell/e-mission-server,sdsingh/e-mission-server,shankari/e-mission-server,joshzarrabi/e-mission-server,joshzarrabi/e-mission-server,yw374cornell/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,sdsingh/e-mission-server,shankari/e-mission-server,yw374cornell/e-mission-server,shankari/e-mission-server,sunil07t/e-mission-server,sunil07t/e-mission-server,e-mission/e-mission-server
|
9697010c909e3a4777bdef7c2889813ae3decad7
|
telemetry/telemetry/internal/platform/profiler/android_screen_recorder_profiler.py
|
telemetry/telemetry/internal/platform/profiler/android_screen_recorder_profiler.py
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.internal.platform import profiler
from telemetry.internal import util
from telemetry.internal.backends.chrome import android_browser_finder
class AndroidScreenRecordingProfiler(profiler.Profiler):
  """Captures a screen recording on Android."""

  def __init__(self, browser_backend, platform_backend, output_path, state):
    super(AndroidScreenRecordingProfiler, self).__init__(
        browser_backend, platform_backend, output_path, state)
    self._output_path = output_path + '.mp4'
    # Launch build/android/screenshot.py in video mode; it records until
    # something is written to its stdin (see CollectProfile).
    self._recorder = subprocess.Popen(
        [os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
                      'screenshot.py'),
         '--video',
         '--file', self._output_path,
         '--device', browser_backend.device.adb.GetDeviceSerial()],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE)

  @classmethod
  def name(cls):
    # Identifier used to select this profiler from the command line.
    return 'android-screen-recorder'

  @classmethod
  def is_supported(cls, browser_type):
    if browser_type == 'any':
      return android_browser_finder.CanFindAvailableBrowsers()
    return browser_type.startswith('android')

  def CollectProfile(self):
    """Stop the recorder and return the path of the saved recording."""
    # A newline on stdin tells screenshot.py to stop recording.
    self._recorder.communicate(input='\n')
    print 'Screen recording saved as %s' % self._output_path
    print 'To view, open in Chrome or a video player'
    return [self._output_path]
|
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
from telemetry.core import util
from telemetry.internal.platform import profiler
from telemetry.internal.backends.chrome import android_browser_finder
class AndroidScreenRecordingProfiler(profiler.Profiler):
  """Captures a screen recording on Android."""

  def __init__(self, browser_backend, platform_backend, output_path, state):
    super(AndroidScreenRecordingProfiler, self).__init__(
        browser_backend, platform_backend, output_path, state)
    self._output_path = output_path + '.mp4'
    # Launch build/android/screenshot.py in video mode; it records until
    # something is written to its stdin (see CollectProfile).
    self._recorder = subprocess.Popen(
        [os.path.join(util.GetChromiumSrcDir(), 'build', 'android',
                      'screenshot.py'),
         '--video',
         '--file', self._output_path,
         '--device', browser_backend.device.adb.GetDeviceSerial()],
        stdin=subprocess.PIPE, stdout=subprocess.PIPE)

  @classmethod
  def name(cls):
    # Identifier used to select this profiler from the command line.
    return 'android-screen-recorder'

  @classmethod
  def is_supported(cls, browser_type):
    if browser_type == 'any':
      return android_browser_finder.CanFindAvailableBrowsers()
    return browser_type.startswith('android')

  def CollectProfile(self):
    """Stop the recorder and return the path of the saved recording."""
    # A newline on stdin tells screenshot.py to stop recording.
    self._recorder.communicate(input='\n')
    print 'Screen recording saved as %s' % self._output_path
    print 'To view, open in Chrome or a video player'
    return [self._output_path]
|
Fix an import path in the Android screen recorder
|
telemetry: Fix an import path in the Android screen recorder
Review URL: https://codereview.chromium.org/1301613004
Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#343960}
|
Python
|
bsd-3-clause
|
benschmaus/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,catapult-project/catapult-csm,benschmaus/catapult,catapult-project/catapult-csm,catapult-project/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,sahiljain/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult,catapult-project/catapult-csm,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult,sahiljain/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,sahiljain/catapult,catapult-project/catapult,benschmaus/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult-csm,catapult-project/catapult-csm
|
3031bcfda01a55c70f3af860bb5620a5530e654a
|
Motor/src/main/python/vehicles.py
|
Motor/src/main/python/vehicles.py
|
from Adafruit_MotorHAT import Adafruit_MotorHAT
class Vehicle:
    """Wrapper around an Adafruit motor HAT driving up to four motors."""

    def __init__(self, motor_hat=None):
        self.motor_hat = motor_hat
        # Cached state dicts ({"location", "command", "speed"}), indexed
        # contiguously by motor location.
        self.motors = []

    def release(self):
        """Put all four motors into free-running (unpowered) mode."""
        self.motor_hat.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
        self.motor_hat.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
        self.motor_hat.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
        self.motor_hat.getMotor(4).run(Adafruit_MotorHAT.RELEASE)

    def update_motor(self, index, command, speed):
        """Apply `command`/`speed` to motor `index` and record the new state.

        Raises IndexError when `index` would leave a gap in self.motors.
        NOTE(review): getMotor() is used as a context manager here -- confirm
        the returned motor object actually supports the `with` protocol.
        """
        with self.motor_hat.getMotor(index) as motor:
            motor.run(command)
            motor.setSpeed(speed)
        motor = {"location": index, "command": command, "speed": speed}
        n = len(self.motors)
        if index < n:
            self.motors[index] = motor
        elif index == n:
            self.motors.append(motor)
        else:
            raise IndexError()
|
from Adafruit_MotorHAT import Adafruit_MotorHAT
class Vehicle:
    """Wrapper around an Adafruit motor HAT driving up to four motors."""

    def __init__(self, motor_hat=None):
        # BUG FIX: `motor_hat=Adafruit_MotorHAT()` as a default argument is
        # evaluated exactly once, at class-definition (import) time -- it
        # touches the hardware on import and shares a single HAT instance
        # across every Vehicle built without an explicit argument. Build one
        # per instance, lazily, instead.
        self.motor_hat = motor_hat if motor_hat is not None else Adafruit_MotorHAT()
        # Cached state dicts ({"location", "command", "speed"}), indexed
        # contiguously by motor location.
        self.motors = []

    def release(self):
        """Put all four motors into free-running (unpowered) mode."""
        self.motor_hat.getMotor(1).run(Adafruit_MotorHAT.RELEASE)
        self.motor_hat.getMotor(2).run(Adafruit_MotorHAT.RELEASE)
        self.motor_hat.getMotor(3).run(Adafruit_MotorHAT.RELEASE)
        self.motor_hat.getMotor(4).run(Adafruit_MotorHAT.RELEASE)

    def update_motor(self, index, command, speed):
        """Apply `command`/`speed` to motor `index` and record the new state.

        Raises IndexError when `index` would leave a gap in self.motors.
        """
        with self.motor_hat.getMotor(index) as motor:
            motor.run(command)
            motor.setSpeed(speed)
        motor = {"location": index, "command": command, "speed": speed}
        n = len(self.motors)
        if index < n:
            self.motors[index] = motor
        elif index == n:
            self.motors.append(motor)
        else:
            raise IndexError()
|
Build vehicle with motor hat.
|
Build vehicle with motor hat.
|
Python
|
mit
|
misalcedo/RapBot,misalcedo/RapBot,misalcedo/RapBot,misalcedo/RapBot
|
0007ea4aa0f7ebadfadb0c6f605c51a1d11e483c
|
account/managers.py
|
account/managers.py
|
from __future__ import unicode_literals
from django.db import models, IntegrityError
class EmailAddressManager(models.Manager):

    def add_email(self, user, email, **kwargs):
        """Create an EmailAddress for `user`, optionally sending confirmation.

        Returns the new address, or None when the insert violates a database
        constraint (e.g. a duplicate email when uniqueness is enforced).
        """
        confirm = kwargs.pop("confirm", False)
        try:
            email_address = self.create(user=user, email=email, **kwargs)
        except IntegrityError:
            # Swallow the constraint violation and signal failure via None.
            return None
        else:
            if confirm and not email_address.verified:
                email_address.send_confirmation()
            return email_address

    def get_primary(self, user):
        """Return the user's primary address, or None if there isn't one."""
        try:
            return self.get(user=user, primary=True)
        except self.model.DoesNotExist:
            return None

    def get_users_for(self, email):
        """Return the users owning a verified address equal to `email`."""
        # this is a list rather than a generator because we probably want to
        # do a len() on it right away
        return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
    """Maintenance helpers for e-mail confirmation records."""

    def delete_expired_confirmations(self):
        """Delete every confirmation whose key has expired."""
        expired = [c for c in self.all() if c.key_expired()]
        for confirmation in expired:
            confirmation.delete()
|
from __future__ import unicode_literals
from django.db import models, IntegrityError
class EmailAddressManager(models.Manager):

    def add_email(self, user, email, **kwargs):
        """Create an EmailAddress for `user`, optionally sending confirmation.

        A constraint-violating insert (duplicate email) is allowed to raise
        IntegrityError so callers can handle the failure explicitly.
        """
        confirm = kwargs.pop("confirm", False)
        email_address = self.create(user=user, email=email, **kwargs)
        if confirm and not email_address.verified:
            email_address.send_confirmation()
        return email_address

    def get_primary(self, user):
        """Return the user's primary address, or None if there isn't one."""
        try:
            return self.get(user=user, primary=True)
        except self.model.DoesNotExist:
            return None

    def get_users_for(self, email):
        """Return the users owning a verified address equal to `email`."""
        # this is a list rather than a generator because we probably want to
        # do a len() on it right away
        return [address.user for address in self.filter(verified=True, email=email)]
class EmailConfirmationManager(models.Manager):
    """Maintenance helpers for e-mail confirmation records."""

    def delete_expired_confirmations(self):
        """Delete every confirmation whose key has expired."""
        expired = [c for c in self.all() if c.key_expired()]
        for confirmation in expired:
            confirmation.delete()
|
Allow IntegrityError to propagate with duplicate email
|
Allow IntegrityError to propagate with duplicate email
Fixes #62. When ACCOUNT_EMAIL_UNIQUE is True we should fail loudly when an
attempt to insert a duplicate occurs. Let the callers handle the failure.
|
Python
|
mit
|
mentholi/django-user-accounts,jawed123/django-user-accounts,ntucker/django-user-accounts,jpotterm/django-user-accounts,rizumu/django-user-accounts,jacobwegner/django-user-accounts,mentholi/django-user-accounts,pinax/django-user-accounts,GeoNode/geonode-user-accounts,nderituedwin/django-user-accounts,mysociety/django-user-accounts,jmburbach/django-user-accounts,jawed123/django-user-accounts,jpotterm/django-user-accounts,GeoNode/geonode-user-accounts,jacobwegner/django-user-accounts,gem/geonode-user-accounts,mgpyh/django-user-accounts,gem/geonode-user-accounts,pinax/django-user-accounts,osmfj/django-user-accounts,gem/geonode-user-accounts,nderituedwin/django-user-accounts,jmburbach/django-user-accounts,rizumu/django-user-accounts,osmfj/django-user-accounts,ntucker/django-user-accounts,mysociety/django-user-accounts
|
2ee1e8046323e2632c8cd8c8d88e3c313caabe1e
|
kobo/hub/forms.py
|
kobo/hub/forms.py
|
# -*- coding: utf-8 -*-
import django.forms as forms
from django.db.models import Q
class TaskSearchForm(forms.Form):
    # Free-text filter; optional.
    search = forms.CharField(required=False)
    # When set, restrict results to the requesting user's own tasks.
    my = forms.BooleanField(required=False)

    def get_query(self, request):
        """Build a Q filter for the task list from the submitted fields.

        NOTE(review): the is_valid() return value is ignored; if validation
        ever fails, cleaned_data may lack these keys -- confirm that cannot
        happen for these two optional fields.
        """
        self.is_valid()
        search = self.cleaned_data["search"]
        my = self.cleaned_data["my"]
        query = Q()
        if search:
            # Match either the task method name or the owner's username.
            query |= Q(method__icontains=search)
            query |= Q(owner__username__icontains=search)
        if my and request.user.is_authenticated():
            query &= Q(owner=request.user)
        return query
|
# -*- coding: utf-8 -*-
import django.forms as forms
from django.db.models import Q
class TaskSearchForm(forms.Form):
    # Free-text filter; optional.
    search = forms.CharField(required=False)
    # When set, restrict results to the requesting user's own tasks.
    my = forms.BooleanField(required=False)

    def get_query(self, request):
        """Build a Q filter for the task list from the submitted fields.

        NOTE(review): the is_valid() return value is ignored; if validation
        ever fails, cleaned_data may lack these keys -- confirm that cannot
        happen for these two optional fields.
        """
        self.is_valid()
        search = self.cleaned_data["search"]
        my = self.cleaned_data["my"]
        query = Q()
        if search:
            # Match the task method name, the owner's username, or the label.
            query |= Q(method__icontains=search)
            query |= Q(owner__username__icontains=search)
            query |= Q(label__icontains=search)
        if my and request.user.is_authenticated():
            query &= Q(owner=request.user)
        return query
|
Enable searching in task list by label.
|
Enable searching in task list by label.
|
Python
|
lgpl-2.1
|
pombredanne/https-git.fedorahosted.org-git-kobo,release-engineering/kobo,release-engineering/kobo,release-engineering/kobo,pombredanne/https-git.fedorahosted.org-git-kobo,release-engineering/kobo,pombredanne/https-git.fedorahosted.org-git-kobo,pombredanne/https-git.fedorahosted.org-git-kobo
|
56aa7fa21b218e047e9f3d7c2239aa6a22d9a5b1
|
kombu/__init__.py
|
kombu/__init__.py
|
"""AMQP Messaging Framework for Python"""
VERSION = (1, 0, 0, "rc4")
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = "Ask Solem"
__contact__ = "ask@celeryproject.org"
__homepage__ = "http://github.com/ask/kombu/"
__docformat__ = "restructuredtext"
import os
if not os.environ.get("KOMBU_NO_EVAL", False):
from kombu.connection import BrokerConnection
from kombu.entity import Exchange, Queue
from kombu.messaging import Consumer, Producer
|
"""AMQP Messaging Framework for Python"""
VERSION = (1, 0, 0, "rc4")
__version__ = ".".join(map(str, VERSION[0:3])) + "".join(VERSION[3:])
__author__ = "Ask Solem"
__contact__ = "ask@celeryproject.org"
__homepage__ = "http://github.com/ask/kombu/"
__docformat__ = "restructuredtext en"
import os
import sys
if not os.environ.get("KOMBU_NO_EVAL", False):
# Lazy loading.
# - See werkzeug/__init__.py for the rationale behind this.
from types import ModuleType
all_by_module = {
"kombu.connection": ["BrokerConnection"],
"kombu.entity": ["Exchange", "Queue"],
"kombu.messaging": ["Consumer", "Producer"],
}
object_origins = {}
for module, items in all_by_module.iteritems():
for item in items:
object_origins[item] = module
class module(ModuleType):
def __getattr__(self, name):
if name in object_origins:
module = __import__(object_origins[name], None, None, [name])
for extra_name in all_by_module[module.__name__]:
setattr(self, extra_name, getattr(module, extra_name))
return getattr(module, name)
return ModuleType.__getattribute__(self, name)
def __dir__(self):
result = list(new_module.__all__)
result.extend(("__file__", "__path__", "__doc__", "__all__",
"__docformat__", "__name__", "__path__", "VERSION",
"__package__", "__version__", "__author__",
"__contact__", "__homepage__", "__docformat__"))
return result
# keep a reference to this module so that it's not garbage collected
old_module = sys.modules[__name__]
new_module = sys.modules[__name__] = module(__name__)
new_module.__dict__.update({
"__file__": __file__,
"__path__": __path__,
"__doc__": __doc__,
"__all__": tuple(object_origins),
"__version__": __version__,
"__author__": __author__,
"__contact__": __contact__,
"__homepage__": __homepage__,
"__docformat__": __docformat__,
"VERSION": VERSION})
|
Load kombu root module lazily
|
Load kombu root module lazily
|
Python
|
bsd-3-clause
|
urbn/kombu,depop/kombu,bmbouter/kombu,WoLpH/kombu,ZoranPavlovic/kombu,depop/kombu,mathom/kombu,xujun10110/kombu,romank0/kombu,xujun10110/kombu,alex/kombu,numb3r3/kombu,alex/kombu,andresriancho/kombu,daevaorn/kombu,daevaorn/kombu,iris-edu-int/kombu,ZoranPavlovic/kombu,WoLpH/kombu,cce/kombu,mverrilli/kombu,disqus/kombu,cce/kombu,Elastica/kombu,numb3r3/kombu,Elastica/kombu,pantheon-systems/kombu,tkanemoto/kombu,romank0/kombu,bmbouter/kombu,iris-edu-int/kombu,disqus/kombu,andresriancho/kombu,jindongh/kombu,celery/kombu,tkanemoto/kombu,mathom/kombu,pantheon-systems/kombu,mverrilli/kombu,jindongh/kombu
|
f22a217e86602b138451801afd3cd3c1c6314655
|
bin/post_reports.py
|
bin/post_reports.py
|
#!/usr/bin/env python3
import os

import django

from fitbit.slack import post_message

# Comma-separated list of fitbit ids to post, read from the AUTOPOST env var.
IDS_TO_POST = os.environ['AUTOPOST'].split(',')

if __name__ == '__main__':
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fitbitslackbot.settings")
    django.setup()

    # Cannot import these until django is setup
    from fitbit.models import Token
    from fitbit.views import get_message

    for token in Token.objects.filter(fitbit_id__in=IDS_TO_POST):
        try:
            post_message(get_message(token.fitbit_id))
        except Exception:
            # Best-effort: keep posting the remaining users if one fails.
            print("Could not send message for {}".format(token.fitbit_id))
|
#!/usr/bin/env python3
import os

import django

from fitbit.slack import post_message

if __name__ == '__main__':
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "fitbitslackbot.settings")
    django.setup()

    # Cannot import these until django is setup
    from fitbit.models import Token
    from fitbit.views import get_message

    # Post a report for every known token.
    for token in Token.objects.all():
        try:
            post_message(get_message(token.fitbit_id))
        except Exception:
            # Best-effort: keep posting the remaining users if one fails.
            print("Could not send message for {}".format(token.fitbit_id))
|
Send all user data to the slack
|
Send all user data to the slack
|
Python
|
apache-2.0
|
Bachmann1234/fitbitSlackBot,Bachmann1234/fitbitSlackBot
|
83bb9f15ae8ceed3352232b26176b74607a08efb
|
tests/test_tools.py
|
tests/test_tools.py
|
"""Test the functions in the tools file."""
import bibpy.tools
def test_version_format():
    # version_format() returns an argparse-style template; %(prog)s and the
    # version number are substituted in separate steps.
    assert bibpy.tools.version_format().format('0.1.0') == '%(prog)s v0.1.0'
    program_name = dict(prog='tool_name')
    assert (bibpy.tools.version_format() % program_name).format('2.3') ==\
        'tool_name v2.3'


def test_key_grammar():
    # TODO: not implemented yet.
    pass


def test_entry_grammar():
    # TODO: not implemented yet.
    pass


def test_field_grammar():
    # TODO: not implemented yet.
    pass


def test_numeric_grammar():
    # TODO: not implemented yet.
    pass


def test_parse_query():
    # A query parses into a target ('entry') and an [operator, name] pair.
    assert bibpy.tools.parse_query('~Author') == ('entry', ['~', 'Author'])
    assert bibpy.tools.parse_query('!Author') == ('entry', ['!', 'Author'])


def test_predicate_composition():
    # TODO: not implemented yet.
    pass
|
"""Test the functions in the tools file."""
import bibpy.tools
def test_version_format():
    # version_format() returns an argparse-style template; %(prog)s and the
    # version number are substituted in separate steps.
    assert bibpy.tools.version_format().format('0.1.0') == '%(prog)s v0.1.0'
    program_name = dict(prog='tool_name')
    assert (bibpy.tools.version_format() % program_name).format('2.3') ==\
        'tool_name v2.3'


def test_key_grammar():
    # TODO: not implemented yet.
    pass


def test_entry_grammar():
    # TODO: not implemented yet.
    pass


def test_field_grammar():
    # TODO: not implemented yet.
    pass


def test_numeric_grammar():
    # TODO: not implemented yet.
    pass


def test_parse_query():
    # A query parses into a target ('entry') and an [operator, name] pair.
    assert bibpy.tools.parse_query('~Author') == ('entry', ['~', 'Author'])
    assert bibpy.tools.parse_query('!Author') == ('entry', ['!', 'Author'])


def always_true(value):
    """A function that always returns True."""
    return True


def always_false(value):
    """A function that always returns False."""
    return False


def test_predicate_composition():
    # compose_predicates(fns, any) behaves like OR; with all, like AND.
    pred1 = bibpy.tools.compose_predicates([always_false, always_true,
                                            always_false], any)
    pred2 = bibpy.tools.compose_predicates([always_false, always_false,
                                            always_false], any)
    pred3 = bibpy.tools.compose_predicates([always_false, always_true], all)
    pred4 = bibpy.tools.compose_predicates([always_true, always_true], all)
    assert pred1(1)
    assert not pred2(1)
    assert not pred3(1)
    assert pred4(1)
|
Add test for predicate composition
|
Add test for predicate composition
|
Python
|
mit
|
MisanthropicBit/bibpy,MisanthropicBit/bibpy
|
096800e08d29581e5a515dd01031c64eb2f01539
|
pyxform/tests_v1/test_audit.py
|
pyxform/tests_v1/test_audit.py
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
    """Checks that an `audit` row produces the meta/audit node and binding."""

    def test_audit(self):
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | | |
            | | type | name | label | parameters |
            | | audit | audit | | |
            """,
            xml__contains=[
                '<meta>',
                '<audit/>',
                '</meta>',
                '<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
        )
|
from pyxform.tests_v1.pyxform_test_case import PyxformTestCase
class AuditTest(PyxformTestCase):
    """Checks that an `audit` row produces the meta/audit node and binding."""

    def test_audit(self):
        self.assertPyxformXform(
            name="meta_audit",
            md="""
            | survey | | | |
            | | type | name | label |
            | | audit | audit | |
            """,
            xml__contains=[
                '<meta>',
                '<audit/>',
                '</meta>',
                '<bind nodeset="/meta_audit/meta/audit" type="binary"/>'],
        )
|
Remove non-required column from test.
|
Remove non-required column from test.
|
Python
|
bsd-2-clause
|
XLSForm/pyxform,XLSForm/pyxform
|
7b3276708417284242b4e0c9a13c6194dcc83aa7
|
quickstartup/contacts/views.py
|
quickstartup/contacts/views.py
|
# coding: utf-8
from django.core.urlresolvers import reverse
from django.views.generic import CreateView
from django.utils.translation import ugettext_lazy as _
from django.contrib import messages
from .forms import ContactForm
class ContactView(CreateView):
    """Displays and processes the public contact form."""

    template_name = 'contacts/contact.html'
    form_class = ContactForm

    def get_success_url(self):
        # Redirect back to the (now empty) contact form after success.
        return reverse("qs_contacts:contact")

    def form_valid(self, form):
        # BUG FIX: run the parent's form_valid() (which saves/sends the
        # message) *before* queueing the success flash; previously the
        # "sent successfully" message was queued even if saving raised.
        response = super(ContactView, self).form_valid(form)
        messages.success(self.request, _("Your message was sent successfully!"))
        return response
|
# coding: utf-8
from django.core.urlresolvers import reverse
from django.views.generic import CreateView
from django.utils.translation import ugettext_lazy as _
from django.contrib import messages
from .forms import ContactForm
class ContactView(CreateView):
    """Displays and processes the public contact form."""

    template_name = 'contacts/contact.html'
    form_class = ContactForm

    def get_success_url(self):
        # Redirect back to the (now empty) contact form after success.
        return reverse("qs_contacts:contact")

    def form_valid(self, form):
        # Save first; only then tell the user the message went through.
        response = super(ContactView, self).form_valid(form)
        messages.success(self.request, _("Your message was sent successfully!"))
        return response
|
Set flash message *after* message sending
|
Set flash message *after* message sending
|
Python
|
mit
|
georgeyk/quickstartup,georgeyk/quickstartup,osantana/quickstartup,osantana/quickstartup,osantana/quickstartup,georgeyk/quickstartup
|
77a5ecc7c406e4a6acf814a2f0381dc605e0d14c
|
leds/led_dance.py
|
leds/led_dance.py
|
# Light LEDs at 'random' and make them fade over time
#
# Usage:
#
# led_dance(speed)
#
# 'speed' is the time between each new LED being turned on. Note that the
# random number is actually based on time and so the speed will determine
# the pattern (and it is not really random).
#
# Hold button 'A' pressed to stop new LEDs being turned on.
import pyb
def led_dance(delay):
    """Light pseudo-random LEDs and fade them out over time.

    New dots stop appearing while switch 1 is pressed. The dot index is
    derived from the millisecond clock, so the pattern is determined by
    `delay` rather than being truly random.
    """
    dots = {}  # LED index -> brightness, halved each tick until 0
    control = pyb.Switch(1)
    while True:
        if not control.value():
            dots[pyb.millis() % 25] = 16
        # BUG FIX: iterate over a snapshot of the keys -- the loop body
        # deletes entries, and mutating a dict while iterating it raises.
        for d in list(dots):
            pyb.pixel(d, dots[d])
            if dots[d] == 0:
                del dots[d]
            else:
                dots[d] = int(dots[d] / 2)
        pyb.delay(delay)

led_dance(101)
|
# Light LEDs at random and make them fade over time
#
# Usage:
#
# led_dance(delay)
#
# 'delay' is the time between each new LED being turned on.
import microbit
def led_dance(delay):
    """Light a random pixel each tick and fade every pixel by half."""
    # 5x5 grid of brightness values.
    dots = [ [0]*5, [0]*5, [0]*5, [0]*5, [0]*5 ]
    # Display mode 1 -- presumably the greyscale mode; confirm against the
    # microbit API for this firmware version.
    microbit.display.set_display_mode(1)
    while True:
        dots[microbit.random(5)][microbit.random(5)] = 128
        for i in range(5):
            for j in range(5):
                microbit.display.image.set_pixel_value(i, j, dots[i][j])
                dots[i][j] = int(dots[i][j]/2)
        microbit.sleep(delay)

led_dance(100)
|
Update for new version of micropython for microbit
|
Update for new version of micropython for microbit
|
Python
|
mit
|
jrmhaig/microbit_playground
|
dccc51fcc51290648964c350cfff2254cfa99834
|
oauth_provider/consts.py
|
oauth_provider/consts.py
|
from django.utils.translation import ugettext_lazy as _
# Token/key sizes, in characters.
KEY_SIZE = 16
SECRET_SIZE = 16
VERIFIER_SIZE = 10
CONSUMER_KEY_SIZE = 256
MAX_URL_LENGTH = 2083 # http://www.boutell.com/newfaq/misc/urllength.html

# Consumer lifecycle states.
PENDING = 1
ACCEPTED = 2
CANCELED = 3
REJECTED = 4
CONSUMER_STATES = (
    (PENDING, _('Pending')),
    (ACCEPTED, _('Accepted')),
    (CANCELED, _('Canceled')),
    (REJECTED, _('Rejected')),
)

# OAuth 1.0 protocol parameter names, without and with the "oauth_" prefix.
PARAMETERS_NAMES = ('consumer_key', 'token', 'signature',
                    'signature_method', 'timestamp', 'nonce')
OAUTH_PARAMETERS_NAMES = ['oauth_'+s for s in PARAMETERS_NAMES]

# Callback value signalling an out-of-band flow.
OUT_OF_BAND = 'oob'
|
from django.utils.translation import ugettext_lazy as _
from django.conf import settings
# Token/key sizes, in characters; each overridable via Django settings.
KEY_SIZE = getattr(settings, 'OAUTH_PROVIDER_KEY_SIZE', 16)
SECRET_SIZE = getattr(settings, 'OAUTH_PROVIDER_SECRET_SIZE', 16)
VERIFIER_SIZE = getattr(settings, 'OAUTH_PROVIDER_VERIFIER_SIZE', 10)
CONSUMER_KEY_SIZE = getattr(settings, 'OAUTH_PROVIDER_CONSUMER_KEY_SIZE', 256)
MAX_URL_LENGTH = 2083 # http://www.boutell.com/newfaq/misc/urllength.html

# Consumer lifecycle states.
PENDING = 1
ACCEPTED = 2
CANCELED = 3
REJECTED = 4
CONSUMER_STATES = (
    (PENDING, _('Pending')),
    (ACCEPTED, _('Accepted')),
    (CANCELED, _('Canceled')),
    (REJECTED, _('Rejected')),
)

# OAuth 1.0 protocol parameter names, without and with the "oauth_" prefix.
PARAMETERS_NAMES = ('consumer_key', 'token', 'signature',
                    'signature_method', 'timestamp', 'nonce')
OAUTH_PARAMETERS_NAMES = ['oauth_'+s for s in PARAMETERS_NAMES]

# Callback value signalling an out-of-band flow.
OUT_OF_BAND = 'oob'
|
Allow settings to override default lengths.
|
Allow settings to override default lengths.
|
Python
|
bsd-3-clause
|
e-loue/django-oauth-plus
|
99a8147a31060442368d79ebeee231744183a6d1
|
tests/test_adam.py
|
tests/test_adam.py
|
import pytest
from adam.adam import *
def test_contains_asset():
    # Stored assets are retrievable by their key.
    storage = AssetStorage()
    a = Asset()
    storage['key'] = a
    assert storage['key'] == a


def test_contains_key():
    # `in` reflects whether a key has been stored.
    storage = AssetStorage()
    a = Asset()
    assert 'key' not in storage
    storage['key'] = a
    assert 'key' in storage


def test_asset_is_versioned():
    # Re-assigning a key keeps the old value as an earlier version.
    storage = AssetStorage()
    a = Asset()
    updated_a = Asset()
    storage['key'] = a
    storage['key'] = updated_a
    versions = storage.versions_of('key')
    assert len(versions) == 2
    assert versions[0] == a
    assert versions[1] == updated_a


def test_asset_is_deleted():
    storage = AssetStorage()
    a = Asset()
    storage['key'] = a
    del storage['key']
    assert 'key' not in storage


def test_deleting_unkown_key_raises_exception():
    # NOTE(review): "unkown" is a typo for "unknown" in the test name.
    storage = AssetStorage()
    with pytest.raises(KeyError):
        del storage['key']


def test_create_asset_from_wav():
    # Reads a fixture file and checks the decoded audio metadata.
    reader = WavReader()
    asset = reader.read('tests/16-bit-mono.wav')
    assert asset.mime_type == 'audio/wav'
    assert asset.framerate == 48000
    assert asset.channels == 1
|
import pytest
from adam.adam import *
def test_contains_asset():
    # Stored assets are retrievable by their key.
    storage = AssetStorage()
    a = Asset()
    storage['key'] = a
    assert storage['key'] == a


def test_contains_key():
    # `in` reflects whether a key has been stored.
    storage = AssetStorage()
    a = Asset()
    assert 'key' not in storage
    storage['key'] = a
    assert 'key' in storage


def test_asset_is_versioned():
    # Re-assigning a key keeps the old value as an earlier version.
    storage = AssetStorage()
    a = Asset()
    updated_a = Asset()
    storage['key'] = a
    storage['key'] = updated_a
    versions = storage.versions_of('key')
    assert len(versions) == 2
    assert versions[0] == a
    assert versions[1] == updated_a


def test_asset_is_deleted():
    storage = AssetStorage()
    a = Asset()
    storage['key'] = a
    del storage['key']
    assert 'key' not in storage


def test_deleting_unkown_key_raises_exception():
    # NOTE(review): "unkown" is a typo for "unknown" in the test name.
    storage = AssetStorage()
    with pytest.raises(KeyError):
        del storage['key']


def test_create_asset_from_wav():
    # Reads a fixture file and checks the decoded audio metadata.
    reader = WavReader()
    asset = reader.read('tests/16-bit-mono.wav')
    assert asset.mime_type == 'audio/wav'
    assert asset.framerate == 48000
    assert asset.channels == 1
    # NOTE(review): prefer `is not None` over `!= None`.
    assert asset.essence != None
|
Test for reading a wave file asserts that the essence is set.
|
Test for reading a wave file asserts that the essence is set.
|
Python
|
agpl-3.0
|
eseifert/madam
|
268718b9ad28c8bad26a7fede52a88d51ac5a8da
|
tests/test_opts.py
|
tests/test_opts.py
|
import sys
from skeletor import config
from skeletor.config import Config
from .base import BaseTestCase
from .helpers import nostdout
class OptsTests(BaseTestCase):
    # Placeholder suite: a single always-passing smoke test.
    def test_something(self):
        assert True
|
import optparse
from skeletor.opts import Option
from .base import BaseTestCase
class OptsTests(BaseTestCase):
    """Tests for the custom optparse Option subclass."""

    def should_raise_exception_when_require_used_incorrectly(self):
        """`required=True` on an option that takes no value must be rejected."""
        try:
            Option('-n', '--does_not_take_val', action="store_true",
                   default=None, required=True)
        except optparse.OptionError:
            pass  # expected
        else:
            # BUG FIX: the original did `assert True` in the except clause,
            # so the test passed silently when no exception was raised at
            # all. Fail explicitly in that case.
            raise AssertionError("OptionError was not raised")
|
Test for custom option class
|
Test for custom option class
|
Python
|
bsd-3-clause
|
krak3n/Facio,krak3n/Facio,krak3n/Facio,krak3n/Facio,krak3n/Facio
|
80c3d7693b17f38c80b2e1a06716969a8ef11adf
|
tests/test_simple_features.py
|
tests/test_simple_features.py
|
from wordgraph.points import Point
import wordgraph
# Fixed epoch so the generated timestamps are deterministic across runs.
EPOCH_START = 1407109280


def time_values(values, start=EPOCH_START, increment=1):
    """Pair each value with an evenly spaced timestamp.

    NOTE(review): Point is constructed with x=value and y=timestamp, which
    looks transposed for a time series -- confirm against Point's definition.
    """
    datapoints = []
    for index, value in enumerate(values):
        datapoints.append(Point(x=value, y=start + (increment * index)))
    return datapoints


def test_monotonic_up_per_second():
    # Values 0..9 rising one unit per second.
    datapoints = time_values(float(i) for i in range(10))
    features = wordgraph.describe(datapoints)
    # NOTE(review): `"" in features` is vacuously true for strings --
    # presumably a placeholder until real feature text is asserted.
    assert "" in features


def test_monotonic_down_per_second():
    # Values 10..1 falling one unit per second.
    datapoints = time_values(10.0 - i for i in range(10))
    features = wordgraph.describe(datapoints)
    assert "" in features
|
from wordgraph.points import Point
import wordgraph
# Fixed epoch so the generated timestamps are deterministic across runs.
EPOCH_START = 1407109280


def time_values(values, start=EPOCH_START, increment=1):
    """Pair each value with an evenly spaced timestamp.

    NOTE(review): Point is constructed with x=value and y=timestamp, which
    looks transposed for a time series -- confirm against Point's definition.
    """
    datapoints = []
    for index, value in enumerate(values):
        datapoints.append(Point(x=value, y=start + (increment * index)))
    return datapoints


def test_monotonic_up_per_second():
    # Values 0..9 rising one unit per second.
    datapoints = time_values(float(i) for i in range(10))
    features = wordgraph.describe(datapoints)
    # NOTE(review): `"" in features` is vacuously true for strings --
    # presumably a placeholder until real feature text is asserted.
    assert "" in features


def test_monotonic_down_per_second():
    # Values 10..1 falling one unit per second.
    datapoints = time_values(10.0 - i for i in range(10))
    features = wordgraph.describe(datapoints)
    assert "" in features


def test_tent_map():
    # A rising ramp, a peak, then a falling ramp.
    values = list(float(i) for i in range(10))
    values.append(11.0)
    values += list(10.0 - i for i in range(10))
    datapoints = time_values(values)
    features = wordgraph.describe(datapoints)
    assert "" in features
|
Test case for tent map time series
|
Test case for tent map time series
Generate a time series with both monotonically increasing and decreasing
sections.
|
Python
|
apache-2.0
|
tleeuwenburg/wordgraph,tleeuwenburg/wordgraph
|
6a940fbd0cc8c4e4a9f17423c593452d010b6883
|
app/lib/query/__init__.py
|
app/lib/query/__init__.py
|
# -*- coding: utf-8 -*-
"""
Initialisation file for query directory.
"""
|
# -*- coding: utf-8 -*-
"""
Initialisation file for query directory, relating to local database queries.
"""
|
Update query init file docstring.
|
Update query init file docstring.
|
Python
|
mit
|
MichaelCurrin/twitterverse,MichaelCurrin/twitterverse
|
3ebf82c7ef356de3c4d427cea3723737661522e8
|
pinax/waitinglist/management/commands/mail_out_survey_links.py
|
pinax/waitinglist/management/commands/mail_out_survey_links.py
|
from django.conf import settings
from django.core.mail import EmailMessage
from django.core.management.base import BaseCommand
from django.template.loader import render_to_string
from django.contrib.sites.models import Site
from ...models import WaitingListEntry, Survey
class Command(BaseCommand):
    help = "Email links to survey instances for those that never saw a survey"

    def handle(self, *args, **options):
        # Assumes exactly one active survey; .get() raises otherwise.
        survey = Survey.objects.get(active=True)
        # Entries that have never been given a survey instance.
        entries = WaitingListEntry.objects.filter(surveyinstance__isnull=True)
        for entry in entries:
            instance = survey.instances.create(entry=entry)
            site = Site.objects.get_current()
            protocol = getattr(settings, "DEFAULT_HTTP_PROTOCOL", "http")
            ctx = {
                "instance": instance,
                "site": site,
                "protocol": protocol,
            }
            subject = render_to_string("waitinglist/survey_invite_subject.txt", ctx)
            # Subjects must be a single line; strip the template's newline.
            subject = subject.strip()
            message = render_to_string("waitinglist/survey_invite_body.txt", ctx)
            EmailMessage(
                subject,
                message,
                to=[entry.email],
                from_email=settings.WAITINGLIST_SURVEY_INVITE_FROM_EMAIL
            ).send()
|
from django.conf import settings
from django.core.mail import EmailMessage
from django.core.management.base import BaseCommand
from django.template.loader import render_to_string
from django.contrib.sites.models import Site
from ...models import WaitingListEntry, Survey
class Command(BaseCommand):
    help = "Email links to survey instances for those that never saw a survey"

    def handle(self, *args, **options):
        # Assumes exactly one active survey; .get() raises otherwise.
        survey = Survey.objects.get(active=True)
        # Entries that have never been given a survey instance.
        entries = WaitingListEntry.objects.filter(surveyinstance__isnull=True)
        for entry in entries:
            instance = survey.instances.create(entry=entry)
            site = Site.objects.get_current()
            protocol = getattr(settings, "DEFAULT_HTTP_PROTOCOL", "http")
            ctx = {
                "instance": instance,
                "site": site,
                "protocol": protocol,
            }
            subject = render_to_string("pinax/waitinglist/survey_invite_subject.txt", ctx)
            # Subjects must be a single line; strip the template's newline.
            subject = subject.strip()
            message = render_to_string("pinax/waitinglist/survey_invite_body.txt", ctx)
            EmailMessage(
                subject,
                message,
                to=[entry.email],
                from_email=settings.WAITINGLIST_SURVEY_INVITE_FROM_EMAIL
            ).send()
|
Fix paths in mail out email management command
|
Fix paths in mail out email management command
|
Python
|
mit
|
pinax/pinax-waitinglist,pinax/pinax-waitinglist
|
73c7161d4414a9259ee6123ee3d3540153f30b9e
|
purchase_edi_file/models/purchase_order_line.py
|
purchase_edi_file/models/purchase_order_line.py
|
# Copyright (C) 2021 Akretion (http://www.akretion.com).
from odoo import _, exceptions, models
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
def _get_lines_by_profiles(self, partner):
profile_lines = {
key: self.env["purchase.order.line"]
for key in partner.edi_purchase_profile_ids
}
for line in self:
product = line.product_id
seller = product._select_seller(partner_id=partner)
purchase_edi = seller.purchase_edi_id
# Services should not appear in EDI file unless an EDI profile
# is specifically on the supplier info. This way, we avoid
# adding transport of potential discount or anything else
# in the EDI file.
if product.type == "service" and not purchase_edi:
continue
if purchase_edi:
profile_lines[purchase_edi] |= line
elif partner.default_purchase_profile_id:
profile_lines[partner.default_purchase_profile_id] |= line
else:
raise exceptions.UserError(
_("Some products don't have edi profile configured : %s")
% (product.default_code,)
)
return profile_lines
|
# Copyright (C) 2021 Akretion (http://www.akretion.com).
from odoo import _, exceptions, models
class PurchaseOrderLine(models.Model):
_inherit = "purchase.order.line"
def _get_lines_by_profiles(self, partner):
profile_lines = {
key: self.env["purchase.order.line"]
for key in partner.edi_purchase_profile_ids
}
for line in self:
product = line.product_id
seller = product._select_seller(
partner_id=partner, quantity=line.product_uom_qty
)
purchase_edi = seller.purchase_edi_id
# Services should not appear in EDI file unless an EDI profile
# is specifically on the supplier info. This way, we avoid
# adding transport of potential discount or anything else
# in the EDI file.
if product.type == "service" and not purchase_edi:
continue
if purchase_edi:
profile_lines[purchase_edi] |= line
elif partner.default_purchase_profile_id:
profile_lines[partner.default_purchase_profile_id] |= line
else:
raise exceptions.UserError(
_("Some products don't have edi profile configured : %s")
% (product.default_code,)
)
return profile_lines
|
Add qty when searching seller because even if not passed a verification is made by default in _select_seller
|
Add qty when searching seller because even if not passed a verification is made by default in _select_seller
|
Python
|
agpl-3.0
|
akretion/ak-odoo-incubator,akretion/ak-odoo-incubator,akretion/ak-odoo-incubator,akretion/ak-odoo-incubator
|
701b935564521d64cc35dc51753493f4dc2782f6
|
python/ql/test/library-tests/frameworks/django/SqlExecution.py
|
python/ql/test/library-tests/frameworks/django/SqlExecution.py
|
from django.db import connection, models
from django.db.models.expressions import RawSQL
def test_plain():
cursor = connection.cursor()
cursor.execute("some sql") # $getSql="some sql"
def test_context():
with connection.cursor() as cursor:
cursor.execute("some sql") # $getSql="some sql"
cursor.execute(sql="some sql") # $getSql="some sql"
class User(models.Model):
pass
def test_model():
User.objects.raw("some sql") # $getSql="some sql"
User.objects.annotate(RawSQL("some sql")) # $getSql="some sql"
User.objects.annotate(RawSQL("foo"), RawSQL("bar")) # $getSql="foo" getSql="bar"
User.objects.annotate(val=RawSQL("some sql")) # $getSql="some sql"
User.objects.extra("some sql") # $getSql="some sql"
User.objects.extra(select="select", where="where", tables="tables", order_by="order_by") # $getSql="select" getSql="where" getSql="tables" getSql="order_by"
raw = RawSQL("so raw")
User.objects.annotate(val=raw) # $getSql="so raw"
|
from django.db import connection, models
from django.db.models.expressions import RawSQL
def test_plain():
cursor = connection.cursor()
cursor.execute("some sql") # $getSql="some sql"
def test_context():
with connection.cursor() as cursor:
cursor.execute("some sql") # $getSql="some sql"
cursor.execute(sql="some sql") # $getSql="some sql"
class User(models.Model):
pass
def test_model():
User.objects.raw("some sql") # $getSql="some sql"
User.objects.annotate(RawSQL("some sql")) # $getSql="some sql"
User.objects.annotate(RawSQL("foo"), RawSQL("bar")) # $getSql="foo" getSql="bar"
User.objects.annotate(val=RawSQL("some sql")) # $getSql="some sql"
User.objects.extra("some sql") # $getSql="some sql"
User.objects.extra(select="select", where="where", tables="tables", order_by="order_by") # $getSql="select" getSql="where" getSql="tables" getSql="order_by"
raw = RawSQL("so raw")
User.objects.annotate(val=raw) # $getSql="so raw"
# chaining QuerySet calls
User.objects.using("db-name").exclude(username="admin").extra("some sql") # $ MISSING: getSql="some sql"
|
Add example of QuerySet chain (django)
|
Python: Add example of QuerySet chain (django)
|
Python
|
mit
|
github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql
|
3fe0313d67857ec302cc20e0cdc30d658e41dd97
|
troposphere/ecr.py
|
troposphere/ecr.py
|
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
class LifecyclePolicy(AWSProperty):
props = {
'LifecyclePolicyText': (basestring, False),
'RegistryId': (basestring, False),
}
class Repository(AWSObject):
resource_type = "AWS::ECR::Repository"
props = {
'ImageScanningConfiguration': (dict, False),
'ImageTagMutability': (basestring, False),
'LifecyclePolicy': (LifecyclePolicy, False),
'RepositoryName': (basestring, False),
'RepositoryPolicyText': (policytypes, False),
'Tags': (Tags, False),
}
|
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 31.0.0
from . import AWSObject, AWSProperty, Tags
from .compat import policytypes
class PublicRepository(AWSObject):
resource_type = "AWS::ECR::PublicRepository"
props = {
'RepositoryCatalogData': (dict, False),
'RepositoryName': (basestring, False),
'RepositoryPolicyText': (policytypes, False),
'Tags': (Tags, False),
}
class RegistryPolicy(AWSObject):
resource_type = "AWS::ECR::RegistryPolicy"
props = {
'PolicyText': (policytypes, True),
}
class ReplicationDestination(AWSProperty):
props = {
'Region': (basestring, True),
'RegistryId': (basestring, True),
}
class ReplicationRule(AWSProperty):
props = {
'Destinations': ([ReplicationDestination], True),
}
class ReplicationConfigurationProperty(AWSProperty):
props = {
'Rules': ([ReplicationRule], True),
}
class ReplicationConfiguration(AWSObject):
resource_type = "AWS::ECR::Repository"
props = {
'ReplicationConfigurationProperty':
(ReplicationConfigurationProperty, True),
}
class LifecyclePolicy(AWSProperty):
props = {
'LifecyclePolicyText': (basestring, False),
'RegistryId': (basestring, False),
}
class Repository(AWSObject):
resource_type = "AWS::ECR::Repository"
props = {
'ImageScanningConfiguration': (dict, False),
'ImageTagMutability': (basestring, False),
'LifecyclePolicy': (LifecyclePolicy, False),
'RepositoryName': (basestring, False),
'RepositoryPolicyText': (policytypes, False),
'Tags': (Tags, False),
}
|
Update ECR per 2020-12-18 and 2021-02-04 changes
|
Update ECR per 2020-12-18 and 2021-02-04 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
5b7e2c7c4ad28634db9641a2b8c96f4d047ae503
|
arim/fields.py
|
arim/fields.py
|
import re
from django import forms
mac_pattern = re.compile("^[0-9a-f]{12}$")
class MacAddrFormField(forms.CharField):
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 17
super(MacAddrFormField, self).__init__(*args, **kwargs)
def clean(self, value):
value = super(MacAddrFormField, self).clean(value)
value = filter(lambda x: x in "0123456789abcdef", value)
if mac_pattern.match(value) is None:
raise forms.ValidationError('Invalid MAC address')
value = reduce(lambda x,y: x + ':' + y,
(value[i:i+2] for i in xrange(0, 12, 2)))
return value
|
import re
from django import forms
mac_pattern = re.compile("^[0-9a-f]{12}$")
class MacAddrFormField(forms.CharField):
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 17
super(MacAddrFormField, self).__init__(*args, **kwargs)
def clean(self, value):
value = super(MacAddrFormField, self).clean(value)
value = value.lower().replace(':', '').replace('-', '')
if mac_pattern.match(value) is None:
raise forms.ValidationError('Invalid MAC address')
value = reduce(lambda x,y: x + ':' + y,
(value[i:i+2] for i in xrange(0, 12, 2)))
return value
|
Revert "Properly handle non-hex characters in MAC"
|
Revert "Properly handle non-hex characters in MAC"
This reverts commit 2734a3f0212c722fb9fe3698dfea0dbd8a14faa7.
|
Python
|
bsd-3-clause
|
OSU-Net/arim,drkitty/arim,OSU-Net/arim,drkitty/arim,drkitty/arim,OSU-Net/arim
|
bd3dad98976d5e02c4a941ae3c687174db78781d
|
src/WebCatch/catchLink.py
|
src/WebCatch/catchLink.py
|
import requests
import re
import os
url = "https://www.autohome.com.cn/shanghai/"
urlBox = []
def catchURL(url):
file = requests.get(url,timeout=2)
data = file.content
links = re.findall(r'(https?://[^\s)";]+\.(\w|/)*)',str(data))
for i in links:
try:
currentURL = i[0]
if currentURL not in urlBox:
urlBox.append(currentURL)
os.system("ssh pgadmin@10.211.55.8 psql test -c \
'insert into url values(nextval('url_seq'), '"+ currentURL +"')'")
print(currentURL)
catchURL(currentURL)
except Exception as e:
pass
continue
catchURL(url)
|
import requests
import re
import os
url = "https://www.autohome.com.cn/shanghai/"
urlBox = []
def catchURL(url):
file = requests.get(url,timeout=5)
data = file.content
links = re.findall(r'(https?://[^\s)";]+\.(\w|/)*)',str(data))
for i in links:
try:
currentURL = i[0]
if currentURL not in urlBox:
urlBox.append(currentURL)
sql = """
ssh pgadmin@10.211.55.8 psql test -U pgadmin << EOF
insert into url values(nextval(\'url_seq\'), \'"""+currentURL+"""\');
EOF
"""
print(sql)
os.popen(sql)
print(currentURL)
catchURL(currentURL)
except Exception as e:
pass
continue
catchURL(url)
|
Put the crawled link into the database
|
Put the crawled link into the database
|
Python
|
mit
|
zhaodjie/py_learning
|
545f688f0dd59df009e2392cbf27ef06865a4b89
|
src/azure/cli/__main__.py
|
src/azure/cli/__main__.py
|
import sys
import azure.cli.main
from azure.cli._telemetry import init_telemetry, user_agrees_to_telemetry, telemetry_flush
try:
try:
if user_agrees_to_telemetry():
init_telemetry()
except Exception: #pylint: disable=broad-except
pass
sys.exit(azure.cli.main.main(sys.argv[1:]))
finally:
telemetry_flush()
|
import sys
import os
import azure.cli.main
from azure.cli._telemetry import init_telemetry, user_agrees_to_telemetry, telemetry_flush
try:
try:
if user_agrees_to_telemetry():
init_telemetry()
except Exception: #pylint: disable=broad-except
pass
args = sys.argv[1:]
# Check if we are in argcomplete mode - if so, we
# need to pick up our args from environment variables
if os.environ.get('_ARGCOMPLETE'):
comp_line = os.environ.get('COMP_LINE')
if comp_line:
args = comp_line.split()[1:]
sys.exit(azure.cli.main.main(args))
finally:
telemetry_flush()
|
Speed up argument completions by not loading all command packages unless we have to...
|
Speed up argument completions by not loading all command packages unless we have to...
|
Python
|
mit
|
yugangw-msft/azure-cli,BurtBiel/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,QingChenmsft/azure-cli,samedder/azure-cli,BurtBiel/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,samedder/azure-cli,QingChenmsft/azure-cli,QingChenmsft/azure-cli,yugangw-msft/azure-cli,yugangw-msft/azure-cli,samedder/azure-cli
|
e6ca7ef801115d16d809c563b657c3a63e828fb1
|
corehq/apps/locations/management/commands/location_last_modified.py
|
corehq/apps/locations/management/commands/location_last_modified.py
|
from django.core.management.base import BaseCommand
from corehq.apps.locations.models import Location
from dimagi.utils.couch.database import iter_docs
from datetime import datetime
class Command(BaseCommand):
help = 'Populate last_modified field for locations'
def handle(self, *args, **options):
self.stdout.write("Processing locations...\n")
relevant_ids = set([r['id'] for r in Location.get_db().view(
'commtrack/locations_by_code',
reduce=False,
).all()])
to_save = []
for location in iter_docs(Location.get_db(), relevant_ids):
if 'last_modified' not in location or not location['last_modified']:
location['last_modified'] = datetime.now().isoformat()
to_save.append(location)
if len(to_save) > 500:
Location.get_db().bulk_save(to_save)
to_save = []
if to_save:
Location.get_db().bulk_save(to_save)
|
from django.core.management.base import BaseCommand
from corehq.apps.locations.models import Location
from dimagi.utils.couch.database import iter_docs
from datetime import datetime
class Command(BaseCommand):
help = 'Populate last_modified field for locations'
def handle(self, *args, **options):
self.stdout.write("Processing locations...\n")
relevant_ids = set([r['id'] for r in Location.get_db().view(
'commtrack/locations_by_code',
reduce=False,
).all()])
to_save = []
for location in iter_docs(Location.get_db(), relevant_ids):
# exclude any psi domain to make this take a realistic
# amount fo time
if (
not location.get('last_modified', False) and
'psi' not in location.get('domain', '')
):
location['last_modified'] = datetime.now().isoformat()
to_save.append(location)
if len(to_save) > 500:
Location.get_db().bulk_save(to_save)
to_save = []
if to_save:
Location.get_db().bulk_save(to_save)
|
Exclude psi domains or this takes forever
|
Exclude psi domains or this takes forever
|
Python
|
bsd-3-clause
|
qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,SEL-Columbia/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,SEL-Columbia/commcare-hq
|
e2813d7c27079a259f542ff36383ec0aa2233a9e
|
spyder_terminal/__init__.py
|
spyder_terminal/__init__.py
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 2, 4)
__version__ = '.'.join(map(str, VERSION_INFO))
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 3, 0, 'dev0')
__version__ = '.'.join(map(str, VERSION_INFO))
|
Set development version to v0.3.0.dev0
|
Set development version to v0.3.0.dev0
|
Python
|
mit
|
spyder-ide/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal,spyder-ide/spyder-terminal
|
352cb871a86abd926842a0624475db1f2ee2c0ce
|
TorGTK/list_elements.py
|
TorGTK/list_elements.py
|
from var import *
from ui_elements import *
from gi.repository import Gtk
from torctl import *
# ORGANIZATION OF THESE LISTS:
# 1. Main list for all the elements
# 2. A sublist for each element, with the first being a label, and the second
# being the element itself
# List for main listbox
lb_main_elements = [
["", init_menubutton("btnMainMenu", objs["menuMain"])],
["Enable Tor", init_switch("swEnable", enableTor)],
]
# List for settings listbox
lb_settings_elements = [
["SOCKS Port", init_spinbutton("spinSocks", default_socks_port, 1024, 65535, 1)],
["Control Port", init_spinbutton("spinCtl", default_control_port, 1024, 65535, 1)],
]
|
from var import *
from ui_elements import *
from gi.repository import Gtk
from torctl import *
# ORGANIZATION OF THESE LISTS:
# 1. Main list for all the elements
# 2. A sublist for each element, with the first being a label, and the second
# being the element itself
# List for main listbox
lb_main_elements = [
["", init_menubutton("btnMainMenu", objs["menuMain"])],
["Enable Tor", init_switch("swEnable", enableTor)],
]
# List for settings listbox
lb_settings_elements = [
["SOCKS Port", init_spinbutton("spinSocks", default_socks_port, 1024, 65535, 1)],
["Control Port", init_spinbutton("spinCtl", default_control_port, 1024, 65535, 1)],
["Exit Nodes", init_textfield("txtExit")],
]
|
Add field (not working yet) for Tor exit node selection
|
Add field (not working yet) for Tor exit node selection
|
Python
|
bsd-2-clause
|
neelchauhan/TorNova,neelchauhan/TorGTK
|
3f136f153cdc60c1dcc757a8a35ef116bb892a1c
|
python/prep_policekml.py
|
python/prep_policekml.py
|
"""
A collection of classes used to manipulate Police KML data, used with prepgml4ogr.py.
"""
import os
from lxml import etree
class prep_kml():
def __init__ (self, inputfile):
self.inputfile = inputfile
self.infile = os.path.basename(inputfile)
self.feat_types = ['Placemark']
def get_feat_types(self):
return self.feat_types
def prepare_feature(self, feat_str):
# Parse the xml string into something useful
feat_elm = etree.fromstring(feat_str)
feat_elm = self._prepare_feat_elm(feat_elm)
return etree.tostring(feat_elm, encoding='UTF-8', pretty_print=True).decode('utf_8');
def _prepare_feat_elm(self, feat_elm):
feat_elm = self._add_filename_elm(feat_elm)
return feat_elm
def _add_filename_elm(self, feat_elm):
# Create an element with the fid
elm = etree.SubElement(feat_elm, "name")
elm.text = self.infile[:-4]
elm = etree.SubElement(feat_elm, "description")
elm.text = os.path.dirname(self.inputfile).split('/')[-1]
return feat_elm
|
"""
prep_kml class used to manipulate police.uk KML data, used with prepgml4ogr.py
"""
import os
from lxml import etree
class prep_kml():
def __init__(self, inputfile):
self.inputfile = inputfile
self.infile = os.path.basename(inputfile)
self.feat_types = ['Placemark']
def get_feat_types(self):
return self.feat_types
def prepare_feature(self, feat_str):
# Parse the xml string into something useful
feat_elm = etree.fromstring(feat_str)
feat_elm = self._prepare_feat_elm(feat_elm)
return etree.tostring(feat_elm, encoding='UTF-8', pretty_print=True).decode('utf_8');
def _prepare_feat_elm(self, feat_elm):
feat_elm = self._add_filename_elm(feat_elm)
return feat_elm
def _add_filename_elm(self, feat_elm):
elm = etree.SubElement(feat_elm, "name")
elm.text = self.infile[:-4]
elm = etree.SubElement(feat_elm, "description")
elm.text = os.path.dirname(self.inputfile).split('/')[-1]
return feat_elm
|
Remove stray comment, update docstring and minor PEP8 changes
|
Remove stray comment, update docstring and minor PEP8 changes
|
Python
|
mit
|
AstunTechnology/Loader
|
fe65e85e0a29341a6eebbb1bafb28b8d1225abfc
|
harvester/rq_worker_sns_msgs.py
|
harvester/rq_worker_sns_msgs.py
|
'''A custom rq worker class to add start & stop SNS messages to all jobs'''
import logging
from rq.worker import Worker
from harvester.sns_message import publish_to_harvesting
logger = logging.getLogger(__name__)
class SNSWorker(Worker):
def execute_job(self, job, queue):
"""Spawns a work horse to perform the actual work and passes it a job.
The worker will wait for the work horse and make sure it executes
within the given timeout bounds, or will end the work horse with
SIGALRM.
"""
worker_name = (self.key.rsplit(':', 1)[1]).rsplit('.', 1)[0]
subject = 'Worker {} starting job {}'.format(
worker_name,
job.description)
publish_to_harvesting(subject, subject)
self.set_state('busy')
self.fork_work_horse(job, queue)
self.monitor_work_horse(job)
subject = 'Worker {} finished job {}'.format(
worker_name,
job.description)
publish_to_harvesting(subject, subject)
self.set_state('idle')
|
'''A custom rq worker class to add start & stop SNS messages to all jobs'''
import logging
from rq.worker import Worker
from harvester.sns_message import publish_to_harvesting
logger = logging.getLogger(__name__)
def exception_to_sns(job, *exc_info):
'''Make an exception handler to report exceptions to SNS msg queue'''
subject = 'FAILED: job {}'.format(job.description)
message = 'ERROR: job {} failed\n{}'.format(
job.description,
exc_info[1])
logging.error(message)
publish_to_harvesting(subject, message)
class SNSWorker(Worker):
def execute_job(self, job, queue):
"""Spawns a work horse to perform the actual work and passes it a job.
The worker will wait for the work horse and make sure it executes
within the given timeout bounds, or will end the work horse with
SIGALRM.
"""
worker_name = (self.key.rsplit(':', 1)[1]).rsplit('.', 1)[0]
subject = 'Worker {} starting job {}'.format(
worker_name,
job.description)
#publish_to_harvesting(subject, subject)
self.set_state('busy')
self.fork_work_horse(job, queue)
self.monitor_work_horse(job)
subject = 'Worker {} finished job {}'.format(
worker_name,
job.description)
#publish_to_harvesting(subject, subject)
self.set_state('idle')
|
Add RQ exception handler to report to SNS topic
|
Add RQ exception handler to report to SNS topic
|
Python
|
bsd-3-clause
|
mredar/harvester,barbarahui/harvester,barbarahui/harvester,mredar/harvester,ucldc/harvester,ucldc/harvester
|
e27088976467dd95ad2672123cb39dd54b78f413
|
blog/models.py
|
blog/models.py
|
from django.db import models
from django.template.defaultfilters import slugify
from django.core.urlresolvers import reverse_lazy
class Category(models.Model):
title = models.CharField(max_length=80)
class Meta:
verbose_name_plural = 'categories'
def __unicode__(self):
return self.title
class Post(models.Model):
title = models.CharField(max_length=100)
slug = models.SlugField(editable=False, unique=True)
image = models.ImageField(upload_to='posts', blank=True, null=False)
created_on = models.DateTimeField(auto_now_add=True)
content = models.TextField()
categories = models.ManyToManyField(Category)
class Meta:
ordering = ('created_on',)
def __unicode__(self):
return self.title
def save(self, *args, **kwargs):
self.slug = slugify(self.title)
super(Post, self).save(*args, **kwargs)
def get_absolute_url(self):
return reverse_lazy('blog:show_post', kwargs={'slug': self.slug})
|
from django.db import models
from django.core.exceptions import ValidationError
from django.template.defaultfilters import slugify
from django.core.urlresolvers import reverse_lazy
def validate_no_commas(value):
if ',' in value:
raise ValidationError('%s contains commas' % value)
class Category(models.Model):
title = models.CharField(max_length=80, validators=[validate_no_commas])
class Meta:
verbose_name_plural = 'categories'
def __unicode__(self):
return self.title
class Post(models.Model):
title = models.CharField(max_length=100)
slug = models.SlugField(editable=False, unique=True)
image = models.ImageField(upload_to='posts', blank=True, null=False)
created_on = models.DateTimeField(auto_now_add=True)
content = models.TextField()
categories = models.ManyToManyField(Category)
class Meta:
ordering = ('created_on',)
def __unicode__(self):
return self.title
def save(self, *args, **kwargs):
self.slug = self.get_slug()
super(Post, self).save(*args, **kwargs)
def get_slug(self):
return self.slug or slugify(self.title)
def get_absolute_url(self):
return reverse_lazy('blog:show_post', kwargs={'slug': self.slug})
|
Add validation in category and get_slug in post
|
Add validation in category and get_slug in post
|
Python
|
mit
|
jmcomets/jmcomets.github.io
|
eff3195097e9599b87f5cec9bbae744b91ae16cf
|
buses/utils.py
|
buses/utils.py
|
import re
def minify(template_source):
template_source = re.sub(r'(\n *)+', '\n', template_source)
template_source = re.sub(r'({%.+%})\n+', r'\1', template_source)
return template_source
|
import re
from haystack.utils import default_get_identifier
def minify(template_source):
template_source = re.sub(r'(\n *)+', '\n', template_source)
template_source = re.sub(r'({%.+%})\n+', r'\1', template_source)
return template_source
def get_identifier(obj_or_string):
if isinstance(obj_or_string, basestring):
return obj_or_string
return default_get_identifier(obj_or_string)
|
Add custom Hastack get_identifier function
|
Add custom Hastack get_identifier function
|
Python
|
mpl-2.0
|
jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk
|
b9a7289c1f3466bb0caee1488a16dafbae327c6f
|
tartpy/eventloop.py
|
tartpy/eventloop.py
|
"""
Very basic implementation of an event loop
==========================================
The eventloop is a singleton to schedule and run events.
Exports
-------
- ``EventLoop``: the basic eventloop
"""
import queue
import sched
import threading
import time
from .singleton import Singleton
class EventLoop(object, metaclass=Singleton):
"""A generic event loop object."""
def __init__(self):
self.scheduler = sched.scheduler()
def schedule(self, event):
"""Schedule an event.
An `event` is a thunk.
"""
self.scheduler.enter(0, 1, event)
def stop(self):
"""Stop the loop."""
pass
def run(self, block=False):
self.scheduler.run(blocking=block)
def run_in_thread(self):
self.thread = threading.Thread(target=self.run, args=(True,),
name='event_loop')
self.thread.daemon = True
self.thread.start()
|
"""
Very basic implementation of an event loop
==========================================
The eventloop is a singleton to schedule and run events.
Exports
-------
- ``EventLoop``: the basic eventloop
"""
import queue
import sched
import threading
import time
from .singleton import Singleton
class EventLoop(object, metaclass=Singleton):
"""A generic event loop object."""
def __init__(self):
self.scheduler = sched.scheduler()
def schedule(self, event):
"""Schedule an event.
An `event` is a thunk.
"""
self.scheduler.enter(0, 1, event)
def stop(self):
"""Stop the loop."""
pass
def run(self, block=False):
self.scheduler.run(blocking=block)
def run_forever(self, wait=0.05):
while True:
self.run()
time.sleep(wait)
def run_in_thread(self):
self.thread = threading.Thread(target=self.run_forever,
name='event_loop')
self.thread.daemon = True
self.thread.start()
|
Fix threaded run of the new event loop
|
Fix threaded run of the new event loop
|
Python
|
mit
|
waltermoreira/tartpy
|
3409aa543b4f0a4c574afd7ff4fdd59d1bd8a4b0
|
tests/date_tests.py
|
tests/date_tests.py
|
# -*- coding: utf-8 -*-
#
# (C) Pywikibot team, 2014
#
# Distributed under the terms of the MIT license.
#
__version__ = '$Id$'
from tests.utils import unittest
from pywikibot import date
class TestDate(unittest.TestCase):
"""Test cases for date library"""
def __init__(self, formatname):
super(TestDate, self).__init__()
self.formatname = formatname
def testMapEntry(self, formatname):
"""The test ported from date.py"""
step = 1
if formatname in date.decadeFormats:
step = 10
predicate, start, stop = date.formatLimits[formatname]
for code, convFunc in date.formats[formatname].items():
for value in range(start, stop, step):
self.assertTrue(
predicate(value),
"date.formats['%(formatname)s']['%(code)s']:\n"
"invalid value %(value)d" % locals())
newValue = convFunc(convFunc(value))
self.assertEqual(
newValue, value,
"date.formats['%(formatname)s']['%(code)s']:\n"
"value %(newValue)d does not match %(value)s"
% locals())
def runTest(self):
"""method called by unittest"""
self.testMapEntry(self.formatname)
def suite():
"""Setup the test suite and register all test to different instances"""
suite = unittest.TestSuite()
suite.addTests(TestDate(formatname) for formatname in date.formats)
return suite
if __name__ == '__main__':
try:
unittest.TextTestRunner().run(suite())
except SystemExit:
pass
|
# -*- coding: utf-8 -*-
#
# (C) Pywikibot team, 2014
#
# Distributed under the terms of the MIT license.
#
__version__ = '$Id$'
from tests.utils import unittest
from pywikibot import date
class TestDate(unittest.TestCase):
"""Test cases for date library"""
def testMapEntry(self):
"""Test the validity of the pywikibot.date format maps."""
for formatName in date.formats:
step = 1
if formatName in date.decadeFormats:
step = 10
predicate, start, stop = date.formatLimits[formatName]
for code, convFunc in date.formats[formatName].items():
for value in range(start, stop, step):
self.assertTrue(
predicate(value),
"date.formats['%(formatName)s']['%(code)s']:\n"
"invalid value %(value)d" % locals())
newValue = convFunc(convFunc(value))
self.assertEqual(
newValue, value,
"date.formats['%(formatName)s']['%(code)s']:\n"
"value %(newValue)d does not match %(value)s"
% locals())
if __name__ == '__main__':
try:
unittest.main()
except SystemExit:
pass
|
Revert "Progressing dots to show test is running"
|
Revert "Progressing dots to show test is running"
Breaks tests; https://travis-ci.org/wikimedia/pywikibot-core/builds/26752150
This reverts commit 93379dbf499c58438917728b74862f282c15dba4.
Change-Id: Iacb4cc9e6999d265b46c558ed3999c1198f87de0
|
Python
|
mit
|
hasteur/g13bot_tools_new,smalyshev/pywikibot-core,h4ck3rm1k3/pywikibot-core,TridevGuha/pywikibot-core,npdoty/pywikibot,icyflame/batman,valhallasw/pywikibot-core,darthbhyrava/pywikibot-local,hasteur/g13bot_tools_new,xZise/pywikibot-core,npdoty/pywikibot,magul/pywikibot-core,happy5214/pywikibot-core,VcamX/pywikibot-core,h4ck3rm1k3/pywikibot-core,happy5214/pywikibot-core,jayvdb/pywikibot-core,Darkdadaah/pywikibot-core,Darkdadaah/pywikibot-core,hasteur/g13bot_tools_new,emijrp/pywikibot-core,wikimedia/pywikibot-core,jayvdb/pywikibot-core,trishnaguha/pywikibot-core,PersianWikipedia/pywikibot-core,magul/pywikibot-core,wikimedia/pywikibot-core
|
9fbef73081b0cb608e32c91a57502aaefa0599cc
|
tests/test_basic.py
|
tests/test_basic.py
|
import unittest
import os, sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.append(os.path.join(PROJECT_ROOT, ".."))
from CodeConverter import CodeConverter
class TestBasic(unittest.TestCase):
def setUp(self):
pass
def test_initialize(self):
self.assertEqual(CodeConverter('foo').s, 'foo')
if __name__ == '__main__':
unittest.main()
|
import unittest
import os, sys
PROJECT_ROOT = os.path.dirname(__file__)
sys.path.append(os.path.join(PROJECT_ROOT, ".."))
from CodeConverter import CodeConverter
class TestBasic(unittest.TestCase):
def setUp(self):
pass
def test_initialize(self):
self.assertEqual(CodeConverter('foo').s, 'foo')
# def test_python_version(self):
# # Python for Sublime Text 2 is 2.6.7 (r267:88850, Oct 11 2012, 20:15:00)
# if sys.version_info[:3] != (2, 6, 7):
# print 'Sublime Text 2 uses python 2.6.7'
# print 'Your version is ' + '.'.join(str(x) for x in sys.version_info[:3])
# self.assertTrue(True)
if __name__ == '__main__':
unittest.main()
|
Add test to check python version
|
Add test to check python version
|
Python
|
mit
|
kyamaguchi/SublimeObjC2RubyMotion,kyamaguchi/SublimeObjC2RubyMotion
|
5d6206f42323c9fd5e4185f36e75a2466adf79e8
|
thinc/neural/_classes/feed_forward.py
|
thinc/neural/_classes/feed_forward.py
|
from .model import Model
class FeedForward(Model):
'''A feed-forward network, that chains multiple Model instances together.'''
def __init__(self, layers, **kwargs):
Model.__init__(self, **kwargs)
self.layers.extend(layers)
if self.layers:
nO = self.layers[0].output_shape[1]
for layer in self.layers[1:]:
if nO is not None and layer.nI is None:
layer.nI = nO
nO = layer.nO
@property
def input_shape(self):
return self.layers[0].input_shape
@property
def output_shape(self):
return self.layers[-1].output_shape
def begin_update(self, X, drop=0.):
callbacks = []
for layer in self.layers:
assert layer.W is not None
assert layer.b is not None
X = self.ops.xp.ascontiguousarray(X, dtype='float32')
X, inc_layer_grad = layer.begin_update(X, drop=drop)
callbacks.append(inc_layer_grad)
def continue_update(gradient, sgd=None):
for callback in reversed(callbacks):
gradient = self.ops.xp.ascontiguousarray(gradient, dtype='float32')
gradient = callback(gradient, sgd)
return gradient
return X, continue_update
|
from .model import Model
from ... import describe
def _run_child_hooks(model, X, y):
for layer in model._layers:
for hook in layer.on_data_hooks:
hook(layer, X, y)
@describe.on_data(_run_child_hooks)
class FeedForward(Model):
'''A feed-forward network, that chains multiple Model instances together.'''
def __init__(self, layers, **kwargs):
Model.__init__(self, **kwargs)
self._layers.extend(layers)
@property
def input_shape(self):
return self._layers[0].input_shape
@property
def output_shape(self):
return self._layers[-1].output_shape
def begin_update(self, X, drop=0.):
callbacks = []
for layer in self.layers:
X = self.ops.xp.ascontiguousarray(X, dtype='float32')
X, inc_layer_grad = layer.begin_update(X, drop=drop)
callbacks.append(inc_layer_grad)
def continue_update(gradient, sgd=None):
for callback in reversed(callbacks):
gradient = self.ops.xp.ascontiguousarray(gradient, dtype='float32')
gradient = callback(gradient, sgd)
return gradient
return X, continue_update
|
Improve how child hooks are run in FeedForward
|
Improve how child hooks are run in FeedForward
|
Python
|
mit
|
explosion/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,explosion/thinc
|
741db5b16922ceca0c23a95caa143f9ff7baeee2
|
Api/app/types.py
|
Api/app/types.py
|
import graphene
from graphene_django import DjangoObjectType
from app import models
class TagType(DjangoObjectType):
class Meta:
model = models.Tag
interfaces = (graphene.relay.Node,)
@classmethod
def get_node(cls, id, context, info):
return models.Tag.objects.get(pk=id)
class TagConnection(graphene.relay.Connection):
class Meta:
node = TagType
class ArticleType(DjangoObjectType):
class Meta:
model = models.Article
interfaces = (graphene.relay.Node,)
tags = graphene.relay.ConnectionField(TagConnection)
@classmethod
def get_node(cls, id, context, info):
return models.Article.objects.get(pk=id)
@graphene.resolve_only_args
def resolve_tags(self):
return self.tags.all()
|
import graphene
from graphene_django import DjangoObjectType
from graphene_django.filter import DjangoFilterConnectionField
from app import models
class TagType(DjangoObjectType):
class Meta:
model = models.Tag
interfaces = (graphene.relay.Node,)
articles = DjangoFilterConnectionField(lambda: ArticleType)
@classmethod
def get_node(cls, id, context, info):
return models.Tag.objects.get(pk=id)
class ArticleType(DjangoObjectType):
class Meta:
model = models.Article
interfaces = (graphene.relay.Node,)
tags = DjangoFilterConnectionField(lambda: TagType)
@classmethod
def get_node(cls, id, context, info):
return models.Article.objects.get(pk=id)
|
Fix tag and article connections
|
Fix tag and article connections
|
Python
|
mit
|
rcatlin/ryancatlin-info,rcatlin/ryancatlin-info,rcatlin/ryancatlin-info,rcatlin/ryancatlin-info
|
d7d2361bb27c8649e38b61b65ba193e5ea716ed5
|
blog/posts/helpers.py
|
blog/posts/helpers.py
|
from models import Post
def get_post_url(post):
post_year = str(post.publication_date.year)
post_month = '%02d' % post.publication_date.month
post_title = post.title
url = u'/blog/' + post_year + '/' + post_month + '/' + post_title + '/'
return url
def post_as_components(post_text):
''' This function returns the components of a blog post for use with other
functions. Given a Markdown formatted post, it returns a three-tuple. The
first element is the blog title (not markdowned), the second is the first
paragraph (in Markdown format) and the third is the entire post body (in
Markdown format).
'''
post_content = post_text.split('\n\n')
title = post_content[0].strip('# ')
first_para = post_content[1]
body = u'\n\n'.join(post_content[1:])
return (title, first_para, body)
|
from models import Post
from django.core.urlresolvers import reverse
def get_post_url(post):
post_year = str(post.publication_date.year)
post_month = '%02d' % post.publication_date.month
post_title = post.title
#url = u'/blog/' + post_year + '/' + post_month + '/' + post_title + '/'
url = reverse('blog_post', kwargs={'post_year': post_year,
'post_month': post_month,
'post_title': post_title})
return url
def post_as_components(post_text):
''' This function returns the components of a blog post for use with other
functions. Given a Markdown formatted post, it returns a three-tuple. The
first element is the blog title (not markdowned), the second is the first
paragraph (in Markdown format) and the third is the entire post body (in
Markdown format).
'''
post_content = post_text.split('\n\n')
title = post_content[0].strip('# ')
first_para = post_content[1]
body = u'\n\n'.join(post_content[1:])
return (title, first_para, body)
|
Use named urls for get_post_url().
|
Use named urls for get_post_url().
The helper should not assume knowledge of the post url structure.
|
Python
|
mit
|
Lukasa/minimalog
|
77a6bb72318e9b02cbb1179cbbbacd3dd0bad55f
|
bookstore/__init__.py
|
bookstore/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Bookstore
Stores IPython notebooks automagically onto OpenStack clouds through Swift.
'''
__title__ = 'bookstore'
__version__ = '1.0.0'
__build__ = 0x010000
__author__ = 'Kyle Kelley'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2013 Kyle Kelley'
from . import swift
from . import cloudfiles
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''Bookstore
Stores IPython notebooks automagically onto OpenStack clouds through Swift.
'''
__title__ = 'bookstore'
__version__ = '1.0.0'
__build__ = 0x010000
__author__ = 'Kyle Kelley'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2013 Kyle Kelley'
#from . import swift
#from . import cloudfiles
from . import filenotebookmanager
|
Add unit test for bookstore
|
Add unit test for bookstore
|
Python
|
apache-2.0
|
wusung/ipython-notebook-store
|
14b9ed3052054cf983fe6b7b1903faca3f1a0a13
|
couchdb/tests/testutil.py
|
couchdb/tests/testutil.py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import uuid
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
name = 'couchdb-python/' + uuid.uuid4().hex
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2007-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
import random
import sys
from couchdb import client
class TempDatabaseMixin(object):
temp_dbs = None
_db = None
def setUp(self):
self.server = client.Server(full_commit=False)
def tearDown(self):
if self.temp_dbs:
for name in self.temp_dbs:
self.server.delete(name)
def temp_db(self):
if self.temp_dbs is None:
self.temp_dbs = {}
# Find an unused database name
while True:
name = 'couchdb-python/%d' % random.randint(0, sys.maxint)
if name not in self.temp_dbs:
break
print '%s already used' % name
db = self.server.create(name)
self.temp_dbs[name] = db
return name, db
def del_db(self, name):
del self.temp_dbs[name]
self.server.delete(name)
@property
def db(self):
if self._db is None:
name, self._db = self.temp_db()
return self._db
|
Use a random number instead of uuid for temp database name.
|
Use a random number instead of uuid for temp database name.
|
Python
|
bsd-3-clause
|
djc/couchdb-python,djc/couchdb-python,infinit/couchdb-python,Roger/couchdb-python
|
ee5cf0b47d50904061daf62c33741d50b848f02b
|
feature_extraction.py
|
feature_extraction.py
|
from PIL import Image
import glob
def _get_rectangle_masks():
TRAIN_MASKS = './data/train/*_mask.tif'
rectangle_masks = []
for file_name in glob.glob(TRAIN_MASKS):
image = Image.open(file_name)
rectangle_mask = ((0,0), (0,0))
mask_coord = [(i-image.width*(i/image.width), i/image.width) for i, pixel in enumerate(image.getdata()) if pixel != 0]
if mask_coord:
mask_xs, mask_ys = zip(*mask_coord)
rectangle_mask = ((min(mask_xs), mask_ys[0]), (max(mask_xs), mask_ys[len(mask_ys)-1]))
rectangle_masks.append(rectangle_mask)
return rectangle_masks
def run():
print _get_rectangle_masks()
if __name__ == '__main__':
run()
|
from PIL import Image
import glob
def _get_masks():
TRAIN_MASKS = './data/train/*_mask.tif'
return [Image.open(file_name) for file_name in glob.glob(TRAIN_MASKS)]
def _get_rectangle_masks():
rectangle_masks = []
for image in _get_masks():
rectangle_mask = ((0,0), (0,0))
mask_coord = [(i-image.width*(i/image.width), i/image.width) for i, pixel in enumerate(image.getdata()) if pixel != 0]
if mask_coord:
mask_xs, mask_ys = zip(*mask_coord)
rectangle_mask = ((min(mask_xs), mask_ys[0]), (max(mask_xs), mask_ys[len(mask_ys)-1]))
rectangle_masks.append(rectangle_mask)
return rectangle_masks
def run():
print _get_rectangle_masks()
if __name__ == '__main__':
run()
|
Move mask gathering to it's own function
|
Move mask gathering to it's own function
|
Python
|
mit
|
Brok-Bucholtz/Ultrasound-Nerve-Segmentation
|
a1b4afc062b246dc347526202ef00a43992afa28
|
code/kmeans.py
|
code/kmeans.py
|
#returns the distance between two data points
def distance(X, Y):
d = 0
for row in range(len(X)):
for col in range(len(X[row]):
if X[row][col] != Y[row][col]:
d += 1
return d
#partitions the data into the sets closest to each centroid
def fit(data, centroids):
pass
#returns k centroids which partition the data optimally into k clusters
def cluster(data, k):
pass
#allows the user to assign character names to each centroid given
def label(centroids):
pass
|
from random import randint
from copy import deepcopy
from parse import parse
#In this file, I am assuming that the 6 metadata entries at the front of each
# raw data point hae been stripped off during initial parsing.
#returns the distance between two data points
def distance(X, Y):
assert(len(X) == len(Y))
d = 0
for pixel in range(len(X)):
if X[pixel] != Y[pixel]:
d += 1
return d
#Intelligently find some starting centroids, instead of choosing k random points.
# Choose one random point to start with, then find the point with largest
# sum of distances from all other centroids selected so far and make it a centroid
# until k have been chosen.
def find_initial_centroids(data, k):
assert(len(data) >= k)
data = deepcopy(data)
centroids = []
i = randint(0, len(data - 1))
if k > 0:
centroids.append(data[i])
while (len(centroids) < k):
new_i = None
max_distance = None
for i in range(len(data)):
total_distance = 0
for c in centroids:
total_distance += distance(data[i], c)
if (new_i == None) or (total_distance > max_distance):
new_i = i
max_distance = total_distance
centroids.append(data.pop(i))
return centroids
#Finds the representative centroid of a subset of data, based on the most
# common pixel in each position
def find_centroid(data):
assert(len(data) > 0)
centroid = [0]*len(data[0])
for i in range(len(centroid)):
sum = 0
for point in data:
sum += point[i] #Assuming pixel values are either 1 or 0
if (sum / len(data)) >= .5: #If a majority of pixels have value 1
centroid[i] = 1
return centroid
#partitions the data into the sets closest to each centroid
def fit(data, centroids):
pass
#returns k centroids which partition the data optimally into k clusters
def cluster(data, k):
centroids = find_initial_centroids(data, k)
|
Add helper to find representative centroid of a subset of data, add helper to generate initial k centroid intelligently
|
Add helper to find representative centroid of a subset of data, add helper to generate initial k centroid intelligently
|
Python
|
mit
|
mkaplan218/clusterverify
|
cd6429cd177e550d047408cc212b64648e0cbe6c
|
calc_cov.py
|
calc_cov.py
|
import mne
import sys
from mne import compute_covariance
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from my_settings import *
reject = dict(grad=4000e-13, # T / m (gradiometers)
mag=4e-12, # T (magnetometers)
eeg=180e-6 #
)
subject = sys.argv[1]
epochs = mne.read_epochs(epochs_folder + "%s_trial_start-epo.fif" % subject)
epochs.drop_bad_epochs(reject)
fig = epochs.plot_drop_log(subject=subject, show=False)
fig.savefig(epochs_folder + "pics/%s_drop_log.png" % subject)
# Make noise cov
cov = compute_covariance(epochs, tmin=None, tmax=0,
method="shrunk")
mne.write_cov(mne_folder + "%s-cov.fif" % subject, cov)
|
import mne
import sys
from mne import compute_covariance
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from my_settings import *
subject = sys.argv[1]
epochs = mne.read_epochs(epochs_folder + "%s_trial_start-epo.fif" % subject)
epochs.drop_bad_epochs(reject=reject_params)
fig = epochs.plot_drop_log(subject=subject, show=False)
fig.savefig(epochs_folder + "pics/%s_drop_log.png" % subject)
# Make noise cov
cov = compute_covariance(epochs, tmin=None, tmax=-0.2,
method="shrunk")
mne.write_cov(mne_folder + "%s-cov.fif" % subject, cov)
|
Clean up and change cov time
|
Clean up and change cov time
|
Python
|
bsd-3-clause
|
MadsJensen/CAA,MadsJensen/CAA
|
bfd34a7aaf903c823d41068173c09bc5b1a251bc
|
test/sasdataloader/test/utest_sesans.py
|
test/sasdataloader/test/utest_sesans.py
|
"""
Unit tests for the SESANS .ses reader
"""
import unittest
from sas.sascalc.dataloader.loader import Loader
import os.path
class sesans_reader(unittest.TestCase):
def setUp(self):
self.loader = Loader()
def test_sesans_load(self):
"""
Test .SES file loading
"""
f =self.loader.load("sphere3micron.ses")
# self.assertEqual(f, 5)
self.assertEqual(len(f.x), 40)
self.assertEqual(f.x[0], 391.56)
self.assertEqual(f.x[-1], 46099)
self.assertEqual(f.y[-1], -0.19956)
self.assertEqual(f.x_unit, "A")
self.assertEqual(f.y_unit, "A-2 cm-1")
self.assertEqual(f.sample.name, "Polystyrene 2 um in 53% H2O, 47% D2O")
self.assertEqual(f.sample.thickness, 0.2)
self.assertEqual(f.sample.zacceptance, (0.0168, "radians"))
if __name__ == "__main__":
unittest.main()
|
"""
Unit tests for the SESANS .ses reader
"""
import unittest
from sas.sascalc.dataloader.loader import Loader
import os.path
class sesans_reader(unittest.TestCase):
def setUp(self):
self.loader = Loader()
def test_sesans_load(self):
"""
Test .SES file loading
"""
f =self.loader.load("sphere3micron.ses")
# self.assertEqual(f, 5)
self.assertEqual(len(f.x), 40)
self.assertEqual(f.x[0], 391.56)
self.assertEqual(f.x[-1], 46099)
self.assertEqual(f.y[-1], -0.19956)
self.assertEqual(f.x_unit, "A")
self.assertEqual(f.y_unit, "A-2 cm-1")
self.assertEqual(f.sample.name, "Polystyrene 2 um in 53% H2O, 47% D2O")
self.assertEqual(f.sample.thickness, 0.2)
self.assertEqual(f.sample.zacceptance, (0.0168, "radians"))
self.assertEqual(f.isSesans, True)
if __name__ == "__main__":
unittest.main()
|
Test that .SES files are tagged as Sesans
|
Test that .SES files are tagged as Sesans
|
Python
|
bsd-3-clause
|
lewisodriscoll/sasview,lewisodriscoll/sasview,SasView/sasview,lewisodriscoll/sasview,SasView/sasview,SasView/sasview,SasView/sasview,lewisodriscoll/sasview,SasView/sasview,SasView/sasview,lewisodriscoll/sasview
|
d0aba6489a96003c9a746bd38818cffa717d1469
|
akatsuki/bib2html.py
|
akatsuki/bib2html.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from akatsuki.exporter import export_html
from akatsuki.parser import load_bibtex_file
from akatsuki.utils import sort_by_date
def main(bibtex_file, html_file):
"""Load BibTeX file and export to HTML file"""
entries = load_bibtex_file(bibtex_file)
entries = sort_by_date(entries, reverse=True)
export_html(html_file, entries)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from akatsuki.exporter import export_html
from akatsuki.parser import load_bibtex_file
from akatsuki.utils import pmid_to_url, sort_by_date
def main(bibtex_file, html_file):
"""Load BibTeX file and export to HTML file"""
entries = load_bibtex_file(bibtex_file)
entries = pmid_to_url(entries)
entries = sort_by_date(entries, reverse=True)
export_html(html_file, entries)
|
Add pmid to url convertion
|
Add pmid to url convertion
|
Python
|
mit
|
403JFW/akatsuki
|
ea48d59c4e4073de940b394d2bc99e411cfbd3fb
|
example_of_usage.py
|
example_of_usage.py
|
# -----------------------------------------------------------------------------
# Created: 04.03.2014
# Copyright: (c) Josua Schmid 2014
# Licence: AGPLv3
#
# Sample script for parsing HTML tables
# -----------------------------------------------------------------------------
import urllib.request
from pprint import pprint
from html_table_parser import HTMLTableParser
def url_get_contents(url):
""" Opens a website and read its binary contents (HTTP Response Body) """
req = urllib.request.Request(url=url)
f = urllib.request.urlopen(req)
return f.read()
def main():
url = 'http://www.twitter.com'
xhtml = url_get_contents(url).decode('utf-8')
p = HTMLTableParser()
p.feed(xhtml)
pprint(p.tables)
if __name__ == '__main__':
main()
|
# -----------------------------------------------------------------------------
# Created: 04.03.2014
# Copyright: (c) Josua Schmid 2014
# Licence: AGPLv3
#
# Sample script for parsing HTML tables
# -----------------------------------------------------------------------------
import urllib.request
from pprint import pprint
from html_table_parser import HTMLTableParser
def url_get_contents(url):
""" Opens a website and read its binary contents (HTTP Response Body) """
req = urllib.request.Request(url=url)
f = urllib.request.urlopen(req)
return f.read()
def main():
url = 'https://w3schools.com/html/html_tables.asp'
xhtml = url_get_contents(url).decode('utf-8')
p = HTMLTableParser()
p.feed(xhtml)
# Get all tables
pprint(p.tables)
# Get tables with id attribute
pprint(p.named_tables)
if __name__ == '__main__':
main()
|
Add named tables to the examples
|
Add named tables to the examples
|
Python
|
agpl-3.0
|
schmijos/html-table-parser-python3,schmijos/html-table-parser-python3
|
246d2f47791f26ffe55bc9d09c59875b6045a847
|
data/models.py
|
data/models.py
|
import numpy
import ast
from django.db import models
class DataPoint(models.Model):
name = models.CharField(max_length=600)
exact_name = models.CharField(max_length=1000, null=True, blank=True)
decay_feature = models.CharField(max_length=1000, null=True, blank=True)
options = models.CharField(max_length=100)
homo = models.FloatField()
lumo = models.FloatField()
homo_orbital = models.IntegerField()
energy = models.FloatField()
dipole = models.FloatField()
band_gap = models.FloatField(null=True, blank=True)
def __unicode__(self):
return self.exact_name
@classmethod
def get_all_data(cls):
data = DataPoint.objects.filter(band_gap__isnull=False,
exact_name__isnull=False,
decay_feature__isnull=False)
M = len(data)
HOMO = numpy.zeros((M, 1))
LUMO = numpy.zeros((M, 1))
GAP = numpy.zeros((M, 1))
vectors = []
for i, x in enumerate(data):
HOMO[i] = x.homo
LUMO[i] = x.lumo
GAP[i] = x.band_gap
vectors.append(ast.literal_eval(x.decay_feature))
FEATURE = numpy.matrix(vectors)
return FEATURE, HOMO, LUMO, GAP
|
import numpy
import ast
from django.db import models
class DataPoint(models.Model):
name = models.CharField(max_length=600)
exact_name = models.CharField(max_length=1000, null=True, blank=True)
decay_feature = models.CharField(max_length=1000, null=True, blank=True)
created = models.DateTimeField(auto_now_add=True)
options = models.CharField(max_length=100)
homo = models.FloatField()
lumo = models.FloatField()
homo_orbital = models.IntegerField()
energy = models.FloatField()
dipole = models.FloatField()
band_gap = models.FloatField(null=True, blank=True)
def __unicode__(self):
return self.exact_name
@classmethod
def get_all_data(cls):
data = DataPoint.objects.filter(band_gap__isnull=False,
exact_name__isnull=False,
decay_feature__isnull=False)
M = len(data)
HOMO = numpy.zeros((M, 1))
LUMO = numpy.zeros((M, 1))
GAP = numpy.zeros((M, 1))
vectors = []
for i, x in enumerate(data):
HOMO[i] = x.homo
LUMO[i] = x.lumo
GAP[i] = x.band_gap
vectors.append(ast.literal_eval(x.decay_feature))
FEATURE = numpy.matrix(vectors)
return FEATURE, HOMO, LUMO, GAP
|
Add created field to DataPoint model
|
Add created field to DataPoint model
|
Python
|
mit
|
crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp,crcollins/chemtools-webapp
|
1082db1f71ed3e84fd4068d3834ce72e744cdcca
|
build/fbcode_builder/specs/fbthrift.py
|
build/fbcode_builder/specs/fbthrift.py
|
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
builder.add_option('krb5/krb5:git_hash', 'krb5-1.16.1-final')
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separete spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.github_project_workdir('krb5/krb5', 'src'),
builder.autoconf_install('krb5/krb5'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separete spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
Cut fbcode_builder dep for thrift on krb5
|
Cut fbcode_builder dep for thrift on krb5
Summary: [Thrift] Cut `fbcode_builder` dep for `thrift` on `krb5`. In the past, Thrift depended on Kerberos and the `krb5` implementation for its transport-layer security. However, Thrift has since migrated fully to Transport Layer Security for its transport-layer security and no longer has any build-time dependency on `krb5`. Clean this up.
Reviewed By: stevegury, vitaut
Differential Revision: D14814205
fbshipit-source-id: dca469d22098e34573674194facaaac6c4c6aa32
|
Python
|
apache-2.0
|
facebook/wangle,facebook/wangle,facebook/wangle
|
020e48affc34162676193ab97dad7f8ffbdaaaa6
|
jupyter_kernel/magics/shell_magic.py
|
jupyter_kernel/magics/shell_magic.py
|
# Copyright (c) Calico Development Team.
# Distributed under the terms of the Modified BSD License.
# http://calicoproject.org/
from jupyter_kernel import Magic
import subprocess
class ShellMagic(Magic):
def line_shell(self, *args):
"""%shell COMMAND - run the line as a shell command"""
command = " ".join(args)
try:
process = subprocess.Popen(command, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
retval, error = process.communicate()
if error:
self.kernel.Error(error)
except Exception as e:
self.kernel.Error(e.message)
retval = None
if retval:
self.kernel.Print(retval)
def cell_shell(self):
"""%%shell - run the contents of the cell as shell commands"""
self.line_shell(self.code)
self.evaluate = False
def register_magics(kernel):
kernel.register_magics(ShellMagic)
|
# Copyright (c) Calico Development Team.
# Distributed under the terms of the Modified BSD License.
# http://calicoproject.org/
from jupyter_kernel import Magic
import subprocess
class ShellMagic(Magic):
def line_shell(self, *args):
"""%shell COMMAND - run the line as a shell command"""
command = " ".join(args)
try:
process = subprocess.Popen(command, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
retval, error = process.communicate()
if error:
self.kernel.Error(error)
except Exception as e:
self.kernel.Error(e.message)
retval = None
if retval:
retval = retval.decode('utf-8')
self.kernel.Print(retval)
def cell_shell(self):
"""%%shell - run the contents of the cell as shell commands"""
self.line_shell(self.code)
self.evaluate = False
def register_magics(kernel):
kernel.register_magics(ShellMagic)
|
Fix bytes problem on python 3.
|
Fix bytes problem on python 3.
|
Python
|
bsd-3-clause
|
Calysto/metakernel
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.