commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
3ec71d3925a3551f6f25fc25e827c88caaff1fdd
|
tests/integration/test_redirection_external.py
|
tests/integration/test_redirection_external.py
|
"""Check external REDIRECTIONS"""
import pytest
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
append_config(
target_dir,
"""
REDIRECTIONS = [ ("external.html", "http://www.example.com/"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
"""Check external REDIRECTIONS"""
import os
import pytest
from nikola import __main__
from .helper import append_config, cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
def test_external_redirection(build, output_dir):
ext_link = os.path.join(output_dir, 'external.html')
assert os.path.exists(ext_link)
with open(ext_link) as ext_link_fd:
ext_link_content = ext_link_fd.read()
redirect_tag = '<meta http-equiv="refresh" content="0; url=http://www.example.com/">'
assert redirect_tag in ext_link_content
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
append_config(
target_dir,
"""
REDIRECTIONS = [ ("external.html", "http://www.example.com/"), ]
""",
)
with cd(target_dir):
__main__.main(["build"])
|
Add test for external redirection.
|
Add test for external redirection.
|
Python
|
mit
|
okin/nikola,okin/nikola,okin/nikola,getnikola/nikola,getnikola/nikola,getnikola/nikola,okin/nikola,getnikola/nikola
|
ad4effbdf95b51f151d613f02f70b4501bbe453d
|
tests/unit/extensions/flask_babel_unit_test.py
|
tests/unit/extensions/flask_babel_unit_test.py
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.extensions.babel
"""
import unittest
import orchard
import orchard.extensions.flask_babel
class BabelUnitTest(unittest.TestCase):
def setUp(self):
self.app = orchard.create_app('Testing')
self.app.config['LANGUAGES'] = {
'de': 'Deutsch',
'en': 'English'
}
def test_get_locale(self):
# The preferred language is available.
headers = {
'Accept-Language': 'de,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'de')
# The preferred language is not available.
headers = {
'Accept-Language': 'fr,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
# None of the accepted languages is available.
headers = {
'Accept-Language': 'fr,es;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
|
# -*- coding: utf-8 -*-
"""
Unit Test: orchard.extensions.babel
"""
import unittest
import orchard
import orchard.extensions.flask_babel
class BabelUnitTest(unittest.TestCase):
def setUp(self):
self.app = orchard.create_app('Testing')
self.app.config['BABEL_DEFAULT_LOCALE'] = 'en'
self.app.config['LANGUAGES'] = {
'de': 'Deutsch',
'en': 'English'
}
def test_get_locale(self):
# The preferred language is available.
headers = {
'Accept-Language': 'de,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'de')
# The preferred language is not available.
headers = {
'Accept-Language': 'fr,en;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
# None of the accepted languages is available.
headers = {
'Accept-Language': 'fr,es;q=0.3'
}
with self.app.test_request_context('/', headers = headers):
locale = orchard.extensions.flask_babel._get_locale()
self.assertEqual(locale, 'en')
|
Set default locale in test to avoid test failures when different default is used than expected.
|
Set default locale in test to avoid test failures when different default is used than expected.
|
Python
|
mit
|
BMeu/Orchard,BMeu/Orchard
|
09851ff2903db29703616da0fbc9ec003955712a
|
zerver/lib/markdown/preprocessor_priorities.py
|
zerver/lib/markdown/preprocessor_priorities.py
|
# Note that in the Markdown preprocessor registry, the highest
# numeric value is considered the highest priority, so the dict
# below is ordered from highest-to-lowest priority.
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
"help_relative_links": 475,
"setting": 450,
"fenced_code_block": 25,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
|
# Note that in the Markdown preprocessor registry, the highest
# numeric value is considered the highest priority, so the dict
# below is ordered from highest-to-lowest priority.
# Priorities for the built-in preprocessors are commented out.
PREPROCESSOR_PRIORITES = {
"generate_parameter_description": 535,
"generate_response_description": 531,
"generate_api_title": 531,
"generate_api_description": 530,
"generate_code_example": 525,
"generate_return_values": 510,
"generate_api_arguments": 505,
"include": 500,
# "include_wrapper": 500,
"help_relative_links": 475,
"setting": 450,
# "normalize_whitespace": 30,
"fenced_code_block": 25,
# "html_block": 20,
"tabbed_sections": -500,
"nested_code_blocks": -500,
"emoticon_translations": -505,
}
|
Document built-in preprocessor priorities for convenience.
|
markdown: Document built-in preprocessor priorities for convenience.
Fixes #19810
|
Python
|
apache-2.0
|
eeshangarg/zulip,rht/zulip,rht/zulip,kou/zulip,eeshangarg/zulip,rht/zulip,eeshangarg/zulip,zulip/zulip,rht/zulip,andersk/zulip,kou/zulip,eeshangarg/zulip,kou/zulip,andersk/zulip,andersk/zulip,andersk/zulip,kou/zulip,andersk/zulip,rht/zulip,rht/zulip,zulip/zulip,kou/zulip,zulip/zulip,zulip/zulip,eeshangarg/zulip,andersk/zulip,kou/zulip,kou/zulip,andersk/zulip,eeshangarg/zulip,rht/zulip,zulip/zulip,zulip/zulip,eeshangarg/zulip,zulip/zulip
|
6835fa9e8978a081186008785bd2e11522372aa9
|
tests/utils.py
|
tests/utils.py
|
import os
import re
from lxml import etree
def validate_xml(xmlout):
with open(os.path.join(os.path.dirname(__file__), 'pain.008.001.02.xsd'), 'rb') as schema_file:
schema_xml = schema_file.read()
schema_root = etree.XML(schema_xml)
schema = etree.XMLSchema(schema_root)
parser = etree.XMLParser(schema=schema)
xml_root = etree.fromstring(xmlout, parser)
return etree.tostring(xml_root, pretty_print=True)
def clean_ids(xmlout):
pat1 = re.compile(b'-[0-9a-f]{12}')
pat2 = re.compile(b'<MsgId>[^<]*</MsgId>')
pat3 = re.compile(b'\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d')
return pat3.sub(b'0000-00-00T00:00:00', pat2.sub(b'<MsgId></MsgId>', pat1.sub(b'-000000000000', xmlout)))
|
import os
import re
from lxml import etree
def validate_xml(xmlout):
with open(os.path.join(os.path.dirname(__file__), 'pain.008.001.02.xsd'), 'rb') as schema_file:
schema_xml = schema_file.read()
schema_root = etree.XML(schema_xml)
schema = etree.XMLSchema(schema_root)
parser = etree.XMLParser(schema=schema)
xml_root = etree.fromstring(xmlout, parser)
return etree.tostring(xml_root, pretty_print=True)
def clean_ids(xmlout):
pat1 = re.compile(b'-[0-9a-f]{12}')
pat2 = re.compile(b'<MsgId>[^<]*</MsgId>')
pat3 = re.compile(b'\d\d\d\d-\d\d-\d\dT\d\d:\d\d:\d\d')
pat4 = re.compile(b'\d\d\d\d-\d\d-\d\d')
return pat4.sub(b'0000-00-00', pat3.sub(b'0000-00-00T00:00:00', pat2.sub(b'<MsgId></MsgId>', pat1.sub(b'-000000000000', xmlout))))
|
Fix dates in test output
|
Fix dates in test output
|
Python
|
mit
|
raphaelm/python-sepadd,lutoma/python-sepadd
|
d6b2dc137111e0a077625feefb0a2c70fc8e789b
|
Lib/__init__.py
|
Lib/__init__.py
|
"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy import *
del fft, ifft, info
import numpy
__all__.extend(filter(lambda x: x not in ['fft','ifft','info'], numpy.__all__))
del numpy
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
|
"""\
SciPy --- A scientific computing package for Python
===================================================
You can support the development of SciPy by purchasing documentation
at
http://www.trelgol.com
It is being distributed for a fee for a limited time to try and raise
money for development.
Documentation is also available in the docstrings.
Available subpackages
---------------------
"""
import os, sys
SCIPY_IMPORT_VERBOSE = int(os.environ.get('SCIPY_IMPORT_VERBOSE','0'))
try:
import pkg_resources # activate namespace packages (manipulates __path__)
except ImportError:
pass
import numpy._import_tools as _ni
pkgload = _ni.PackageLoader()
del _ni
from numpy.testing import ScipyTest
test = ScipyTest('scipy').test
__all__.append('test')
from version import version as __version__
from numpy import __version__ as __numpy_version__
__all__.append('__version__')
__all__.append('__numpy_version__')
from __config__ import show as show_config
pkgload(verbose=SCIPY_IMPORT_VERBOSE,postpone=True)
|
Remove auto include of numpy namespace.
|
Remove auto include of numpy namespace.
|
Python
|
bsd-3-clause
|
mgaitan/scipy,rgommers/scipy,Srisai85/scipy,tylerjereddy/scipy,juliantaylor/scipy,sonnyhu/scipy,apbard/scipy,juliantaylor/scipy,zxsted/scipy,behzadnouri/scipy,mikebenfield/scipy,richardotis/scipy,nmayorov/scipy,pnedunuri/scipy,befelix/scipy,anntzer/scipy,mortada/scipy,chatcannon/scipy,WarrenWeckesser/scipy,mingwpy/scipy,aarchiba/scipy,vhaasteren/scipy,Newman101/scipy,josephcslater/scipy,maniteja123/scipy,richardotis/scipy,Srisai85/scipy,lhilt/scipy,ilayn/scipy,hainm/scipy,behzadnouri/scipy,njwilson23/scipy,scipy/scipy,fredrikw/scipy,grlee77/scipy,andyfaff/scipy,Eric89GXL/scipy,piyush0609/scipy,lhilt/scipy,lhilt/scipy,pnedunuri/scipy,jonycgn/scipy,WillieMaddox/scipy,Stefan-Endres/scipy,larsmans/scipy,petebachant/scipy,grlee77/scipy,scipy/scipy,nonhermitian/scipy,zaxliu/scipy,mortada/scipy,fernand/scipy,njwilson23/scipy,vberaudi/scipy,gdooper/scipy,niknow/scipy,tylerjereddy/scipy,ilayn/scipy,matthewalbani/scipy,anntzer/scipy,fernand/scipy,minhlongdo/scipy,FRidh/scipy,haudren/scipy,befelix/scipy,befelix/scipy,Shaswat27/scipy,jonycgn/scipy,ortylp/scipy,haudren/scipy,anielsen001/scipy,nonhermitian/scipy,haudren/scipy,gfyoung/scipy,minhlongdo/scipy,pizzathief/scipy,raoulbq/scipy,jseabold/scipy,zxsted/scipy,jamestwebber/scipy,befelix/scipy,vberaudi/scipy,matthewalbani/scipy,richardotis/scipy,ChanderG/scipy,jsilter/scipy,aarchiba/scipy,aeklant/scipy,haudren/scipy,WarrenWeckesser/scipy,aman-iitj/scipy,giorgiop/scipy,e-q/scipy,andim/scipy,sargas/scipy,mortonjt/scipy,futurulus/scipy,perimosocordiae/scipy,mortada/scipy,person142/scipy,gef756/scipy,matthew-brett/scipy,zerothi/scipy,zxsted/scipy,newemailjdm/scipy,gef756/scipy,Stefan-Endres/scipy,anielsen001/scipy,minhlongdo/scipy,mortonjt/scipy,jsilter/scipy,perimosocordiae/scipy,grlee77/scipy,argriffing/scipy,larsmans/scipy,futurulus/scipy,ales-erjavec/scipy,perimosocordiae/scipy,sargas/scipy,mingwpy/scipy,jsilter/scipy,Kamp9/scipy,jor-/scipy,vhaasteren/scipy,mingwpy/scipy,bkendzior/scipy,kalvdans/scipy,hainm/scipy,petebachant/s
cipy,Stefan-Endres/scipy,jonycgn/scipy,jakevdp/scipy,sriki18/scipy,ChanderG/scipy,mdhaber/scipy,zxsted/scipy,jjhelmus/scipy,minhlongdo/scipy,endolith/scipy,aman-iitj/scipy,sriki18/scipy,witcxc/scipy,minhlongdo/scipy,endolith/scipy,FRidh/scipy,nonhermitian/scipy,rgommers/scipy,Srisai85/scipy,mingwpy/scipy,surhudm/scipy,pschella/scipy,vigna/scipy,zxsted/scipy,larsmans/scipy,teoliphant/scipy,aeklant/scipy,bkendzior/scipy,pschella/scipy,sauliusl/scipy,gertingold/scipy,mortonjt/scipy,matthewalbani/scipy,mortonjt/scipy,endolith/scipy,piyush0609/scipy,pbrod/scipy,sriki18/scipy,niknow/scipy,FRidh/scipy,jor-/scipy,mtrbean/scipy,fernand/scipy,larsmans/scipy,surhudm/scipy,gef756/scipy,andyfaff/scipy,andyfaff/scipy,ogrisel/scipy,teoliphant/scipy,ogrisel/scipy,behzadnouri/scipy,giorgiop/scipy,Eric89GXL/scipy,zerothi/scipy,gertingold/scipy,jjhelmus/scipy,ilayn/scipy,zerothi/scipy,pbrod/scipy,zxsted/scipy,jor-/scipy,efiring/scipy,dominicelse/scipy,matthew-brett/scipy,anntzer/scipy,aeklant/scipy,felipebetancur/scipy,larsmans/scipy,Shaswat27/scipy,chatcannon/scipy,felipebetancur/scipy,gfyoung/scipy,mikebenfield/scipy,matthew-brett/scipy,andim/scipy,matthewalbani/scipy,jor-/scipy,Stefan-Endres/scipy,jsilter/scipy,haudren/scipy,tylerjereddy/scipy,pnedunuri/scipy,aman-iitj/scipy,gdooper/scipy,perimosocordiae/scipy,pbrod/scipy,dominicelse/scipy,pnedunuri/scipy,mdhaber/scipy,sauliusl/scipy,Eric89GXL/scipy,felipebetancur/scipy,maciejkula/scipy,kleskjr/scipy,mgaitan/scipy,zaxliu/scipy,kleskjr/scipy,Gillu13/scipy,WarrenWeckesser/scipy,fernand/scipy,vigna/scipy,FRidh/scipy,Srisai85/scipy,mgaitan/scipy,raoulbq/scipy,grlee77/scipy,jjhelmus/scipy,nvoron23/scipy,e-q/scipy,efiring/scipy,giorgiop/scipy,cpaulik/scipy,argriffing/scipy,andim/scipy,petebachant/scipy,vberaudi/scipy,ortylp/scipy,pyramania/scipy,behzadnouri/scipy,cpaulik/scipy,hainm/scipy,giorgiop/scipy,ales-erjavec/scipy,kleskjr/scipy,jamestwebber/scipy,vberaudi/scipy,mdhaber/scipy,ales-erjavec/scipy,nmayorov/scipy,anntzer/scipy,jseabol
d/scipy,jonycgn/scipy,ortylp/scipy,andim/scipy,pizzathief/scipy,ChanderG/scipy,pnedunuri/scipy,ndchorley/scipy,vanpact/scipy,Newman101/scipy,pyramania/scipy,newemailjdm/scipy,mhogg/scipy,jseabold/scipy,richardotis/scipy,niknow/scipy,futurulus/scipy,gef756/scipy,Dapid/scipy,trankmichael/scipy,behzadnouri/scipy,niknow/scipy,aeklant/scipy,aarchiba/scipy,niknow/scipy,tylerjereddy/scipy,giorgiop/scipy,njwilson23/scipy,dominicelse/scipy,mortonjt/scipy,mdhaber/scipy,sonnyhu/scipy,vanpact/scipy,rmcgibbo/scipy,jakevdp/scipy,maniteja123/scipy,kalvdans/scipy,zerothi/scipy,andim/scipy,mtrbean/scipy,lukauskas/scipy,maniteja123/scipy,Gillu13/scipy,lukauskas/scipy,WillieMaddox/scipy,larsmans/scipy,kleskjr/scipy,mhogg/scipy,sargas/scipy,person142/scipy,sriki18/scipy,arokem/scipy,zaxliu/scipy,rmcgibbo/scipy,mortonjt/scipy,zaxliu/scipy,Eric89GXL/scipy,mgaitan/scipy,petebachant/scipy,cpaulik/scipy,fredrikw/scipy,matthewalbani/scipy,endolith/scipy,mikebenfield/scipy,ndchorley/scipy,sonnyhu/scipy,pbrod/scipy,ogrisel/scipy,jor-/scipy,witcxc/scipy,newemailjdm/scipy,trankmichael/scipy,arokem/scipy,jakevdp/scipy,sauliusl/scipy,petebachant/scipy,nmayorov/scipy,juliantaylor/scipy,minhlongdo/scipy,bkendzior/scipy,andyfaff/scipy,raoulbq/scipy,rgommers/scipy,raoulbq/scipy,mtrbean/scipy,efiring/scipy,Dapid/scipy,maniteja123/scipy,behzadnouri/scipy,andyfaff/scipy,anntzer/scipy,dch312/scipy,jonycgn/scipy,gdooper/scipy,witcxc/scipy,Gillu13/scipy,richardotis/scipy,vhaasteren/scipy,Gillu13/scipy,chatcannon/scipy,endolith/scipy,rmcgibbo/scipy,efiring/scipy,Gillu13/scipy,person142/scipy,chatcannon/scipy,ndchorley/scipy,felipebetancur/scipy,anielsen001/scipy,pizzathief/scipy,gertingold/scipy,WillieMaddox/scipy,mortada/scipy,aarchiba/scipy,anielsen001/scipy,dominicelse/scipy,kalvdans/scipy,josephcslater/scipy,jakevdp/scipy,newemailjdm/scipy,richardotis/scipy,mtrbean/scipy,petebachant/scipy,fredrikw/scipy,trankmichael/scipy,ogrisel/scipy,njwilson23/scipy,sonnyhu/scipy,zerothi/scipy,tylerjereddy/scipy,surhu
dm/scipy,ndchorley/scipy,Dapid/scipy,piyush0609/scipy,argriffing/scipy,mhogg/scipy,pbrod/scipy,sriki18/scipy,person142/scipy,ales-erjavec/scipy,jamestwebber/scipy,lukauskas/scipy,lukauskas/scipy,arokem/scipy,Shaswat27/scipy,WillieMaddox/scipy,ilayn/scipy,rmcgibbo/scipy,lukauskas/scipy,nvoron23/scipy,sauliusl/scipy,ndchorley/scipy,Shaswat27/scipy,ilayn/scipy,arokem/scipy,piyush0609/scipy,felipebetancur/scipy,ilayn/scipy,gertingold/scipy,scipy/scipy,rmcgibbo/scipy,bkendzior/scipy,sargas/scipy,woodscn/scipy,sauliusl/scipy,Dapid/scipy,gdooper/scipy,ales-erjavec/scipy,pyramania/scipy,gfyoung/scipy,Kamp9/scipy,Eric89GXL/scipy,fredrikw/scipy,dominicelse/scipy,endolith/scipy,woodscn/scipy,pizzathief/scipy,mdhaber/scipy,mhogg/scipy,surhudm/scipy,WarrenWeckesser/scipy,dch312/scipy,maniteja123/scipy,dch312/scipy,sargas/scipy,fredrikw/scipy,nvoron23/scipy,jamestwebber/scipy,sriki18/scipy,gdooper/scipy,kleskjr/scipy,argriffing/scipy,njwilson23/scipy,apbard/scipy,lhilt/scipy,futurulus/scipy,mgaitan/scipy,cpaulik/scipy,e-q/scipy,Newman101/scipy,anielsen001/scipy,jjhelmus/scipy,zaxliu/scipy,apbard/scipy,WarrenWeckesser/scipy,scipy/scipy,pbrod/scipy,jakevdp/scipy,trankmichael/scipy,mtrbean/scipy,person142/scipy,maciejkula/scipy,Dapid/scipy,rgommers/scipy,niknow/scipy,Kamp9/scipy,josephcslater/scipy,hainm/scipy,woodscn/scipy,WarrenWeckesser/scipy,raoulbq/scipy,mikebenfield/scipy,perimosocordiae/scipy,trankmichael/scipy,nvoron23/scipy,cpaulik/scipy,pschella/scipy,apbard/scipy,hainm/scipy,giorgiop/scipy,bkendzior/scipy,sonnyhu/scipy,jsilter/scipy,kleskjr/scipy,vigna/scipy,aman-iitj/scipy,efiring/scipy,mortada/scipy,vberaudi/scipy,vigna/scipy,jjhelmus/scipy,matthew-brett/scipy,Srisai85/scipy,gertingold/scipy,trankmichael/scipy,Kamp9/scipy,zerothi/scipy,pschella/scipy,Newman101/scipy,gfyoung/scipy,kalvdans/scipy,surhudm/scipy,Eric89GXL/scipy,kalvdans/scipy,maciejkula/scipy,teoliphant/scipy,ortylp/scipy,e-q/scipy,ChanderG/scipy,aarchiba/scipy,grlee77/scipy,perimosocordiae/scipy,haudren/sc
ipy,ortylp/scipy,vhaasteren/scipy,nmayorov/scipy,vanpact/scipy,efiring/scipy,Stefan-Endres/scipy,njwilson23/scipy,pizzathief/scipy,mortada/scipy,teoliphant/scipy,maniteja123/scipy,ChanderG/scipy,josephcslater/scipy,nvoron23/scipy,teoliphant/scipy,nvoron23/scipy,Kamp9/scipy,ChanderG/scipy,Gillu13/scipy,lukauskas/scipy,WillieMaddox/scipy,fernand/scipy,rgommers/scipy,lhilt/scipy,jseabold/scipy,woodscn/scipy,piyush0609/scipy,rmcgibbo/scipy,FRidh/scipy,chatcannon/scipy,gef756/scipy,maciejkula/scipy,zaxliu/scipy,Dapid/scipy,pyramania/scipy,vanpact/scipy,Shaswat27/scipy,vanpact/scipy,vanpact/scipy,josephcslater/scipy,jseabold/scipy,witcxc/scipy,jonycgn/scipy,argriffing/scipy,juliantaylor/scipy,ortylp/scipy,mingwpy/scipy,fernand/scipy,pschella/scipy,pyramania/scipy,futurulus/scipy,nmayorov/scipy,vhaasteren/scipy,Srisai85/scipy,ogrisel/scipy,juliantaylor/scipy,newemailjdm/scipy,pnedunuri/scipy,argriffing/scipy,mgaitan/scipy,vhaasteren/scipy,dch312/scipy,nonhermitian/scipy,woodscn/scipy,raoulbq/scipy,Shaswat27/scipy,sauliusl/scipy,apbard/scipy,aman-iitj/scipy,jseabold/scipy,ales-erjavec/scipy,ndchorley/scipy,aeklant/scipy,aman-iitj/scipy,witcxc/scipy,scipy/scipy,vberaudi/scipy,piyush0609/scipy,chatcannon/scipy,surhudm/scipy,matthew-brett/scipy,newemailjdm/scipy,e-q/scipy,mingwpy/scipy,maciejkula/scipy,arokem/scipy,gef756/scipy,mhogg/scipy,FRidh/scipy,jamestwebber/scipy,nonhermitian/scipy,Kamp9/scipy,Stefan-Endres/scipy,fredrikw/scipy,gfyoung/scipy,befelix/scipy,Newman101/scipy,cpaulik/scipy,mtrbean/scipy,Newman101/scipy,andyfaff/scipy,vigna/scipy,futurulus/scipy,felipebetancur/scipy,scipy/scipy,anntzer/scipy,andim/scipy,mikebenfield/scipy,woodscn/scipy,anielsen001/scipy,WillieMaddox/scipy,mdhaber/scipy,mhogg/scipy,sonnyhu/scipy,hainm/scipy,dch312/scipy
|
56c25218cb3c987201839917930fc1ae791b5601
|
reg/__init__.py
|
reg/__init__.py
|
# flake8: noqa
from .dispatch import dispatch, Dispatch
from .context import (dispatch_method, DispatchMethod,
methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
|
# flake8: noqa
from .dispatch import dispatch, Dispatch, LookupEntry
from .context import (dispatch_method, DispatchMethod,
methodify, clean_dispatch_methods)
from .arginfo import arginfo
from .error import RegistrationError
from .predicate import (Predicate, KeyIndex, ClassIndex,
match_key, match_instance, match_class)
from .cache import DictCachingKeyLookup, LruCachingKeyLookup
|
Add LookupEntry to the API.
|
Add LookupEntry to the API.
|
Python
|
bsd-3-clause
|
morepath/reg,taschini/reg
|
00a5d82c99ce6fb7096d432f12959ab4d8218f4f
|
booster_bdd/features/src/importBooster.py
|
booster_bdd/features/src/importBooster.py
|
import pytest
import time
import requests
import support.helpers as helpers
import sys
import re
import os
class ImportBooster(object):
def importGithubRepo(self, gitRepo):
###############################################
# Environment variables
#
# Note: Pipelines = https://forge.api.openshift.io/api/services/jenkins/pipelines
# Tokens are stored in a form of "<access_token>;<refresh_token>(;<username>)"
theToken = helpers.get_user_tokens().split(";")[0]
projectName = os.environ.get('PROJECT_NAME')
pipeline = os.environ.get('PIPELINE')
spaceId = helpers.getSpaceID()
authHeader = 'Bearer {}'.format(theToken)
print 'Starting test.....'
###############################################
# Import the booster
headers = {'Accept': 'application/json',
'Authorization': authHeader,
'X-App': 'osio',
'X-Git-Provider': 'GitHub',
'Content-Type': 'application/x-www-form-urlencoded'}
data = {'gitRepository': gitRepo,
'projectName': projectName,
'pipeline': pipeline,
'space': spaceId}
print 'Making request to import...'
r = requests.post('https://forge.api.openshift.io/api/osio/import',
headers=headers, data=data)
# print 'request results = {}'.format(r.text)
result = r.text
if re.search('uuid', result):
return 'Success'
else:
return 'Fail'
|
import pytest
import time
import requests
import support.helpers as helpers
import sys
import re
import os
class ImportBooster(object):
def importGithubRepo(self, gitRepo):
###############################################
# Environment variables
#
# Note: Pipelines = https://forge.api.openshift.io/api/services/jenkins/pipelines
# Tokens are stored in a form of "<access_token>;<refresh_token>(;<username>)"
theToken = helpers.get_user_tokens().split(";")[0]
projectName = os.getenv('PROJECT_NAME')
pipeline = os.getenv('PIPELINE')
spaceId = helpers.getSpaceID()
authHeader = 'Bearer {}'.format(theToken)
print('Starting test.....')
###############################################
# Import the booster
headers = {'Accept': 'application/json',
'Authorization': authHeader,
'X-App': 'osio',
'X-Git-Provider': 'GitHub',
'Content-Type': 'application/x-www-form-urlencoded'}
data = {'gitRepository': gitRepo,
'projectName': projectName,
'pipeline': pipeline,
'space': spaceId}
forgeApi = os.getenv("FORGE_API")
print('Making request to import...')
r = requests.post(
'{}/api/osio/import'.format(forgeApi),
headers=headers,
data=data
)
# print 'request results = {}'.format(r.text)
result = r.text
if re.search('uuid', result):
return 'Success'
else:
return 'Fail'
|
Replace hardcoded Forge API URL by variable.
|
booster-bdd: Replace hardcoded Forge API URL by variable.
|
Python
|
apache-2.0
|
ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test,ldimaggi/fabric8-test
|
c9e4a05ed2677fd569642e0ef77dd9f63bf3e15f
|
vumi/persist/tests/test_redis_base.py
|
vumi/persist/tests/test_redis_base.py
|
"""Tests for vumi.persist.redis_base."""
from twisted.trial.unittest import TestCase
from vumi.persist.redis_base import Manager
class ManagerTestCase(TestCase):
def mk_manager(self, client, key_prefix='test'):
return Manager(client, key_prefix)
def test_sub_manager(self):
dummy_client = object()
manager = self.mk_manager(dummy_client)
sub_manager = manager.sub_manager("foo")
self.assertEqual(sub_manager._key_prefix, "test#foo")
self.assertEqual(sub_manager._client, dummy_client)
|
"""Tests for vumi.persist.redis_base."""
from twisted.trial.unittest import TestCase
from vumi.persist.redis_base import Manager
class ManagerTestCase(TestCase):
def mk_manager(self, client=None, key_prefix='test'):
if client is None:
client = object()
return Manager(client, key_prefix)
def test_sub_manager(self):
manager = self.mk_manager()
sub_manager = manager.sub_manager("foo")
self.assertEqual(sub_manager._key_prefix, "test#foo")
self.assertEqual(sub_manager._client, manager._client)
self.assertEqual(sub_manager._key_separator, manager._key_separator)
|
Make sub_manager test neater and also check key_separator.
|
Make sub_manager test neater and also check key_separator.
|
Python
|
bsd-3-clause
|
TouK/vumi,vishwaprakashmishra/xmatrix,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi,harrissoerja/vumi,TouK/vumi,vishwaprakashmishra/xmatrix,harrissoerja/vumi
|
ce28d39244b75ee0dd865017b4cf1a0125bf4887
|
ynr/apps/parties/serializers.py
|
ynr/apps/parties/serializers.py
|
from rest_framework import serializers
from parties.models import Party, PartyDescription, PartyEmblem
class PartyEmblemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyEmblem
fields = (
"image",
"description",
"date_approved",
"ec_emblem_id",
"default",
)
class PartyDescriptionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyDescription
fields = ("description", "date_description_approved")
class PartySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Party
fields = (
"ec_id",
"url",
"name",
"register",
"status",
"date_registered",
"date_deregistered",
"default_emblem",
"emblems",
"descriptions",
"legacy_slug",
)
extra_kwargs = {"url": {"lookup_field": "ec_id"}}
default_emblem = PartyEmblemSerializer()
emblems = PartyEmblemSerializer(many=True)
descriptions = PartyDescriptionSerializer(many=True)
class MinimalPartySerializer(PartySerializer):
class Meta:
model = Party
fields = ("ec_id", "name")
|
from rest_framework import serializers
from parties.models import Party, PartyDescription, PartyEmblem
class PartyEmblemSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyEmblem
fields = (
"image",
"description",
"date_approved",
"ec_emblem_id",
"default",
)
class PartyDescriptionSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = PartyDescription
fields = ("description", "date_description_approved")
class PartySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Party
fields = (
"ec_id",
"url",
"name",
"register",
"status",
"date_registered",
"date_deregistered",
"default_emblem",
"emblems",
"descriptions",
"legacy_slug",
)
extra_kwargs = {"url": {"lookup_field": "ec_id"}}
default_emblem = PartyEmblemSerializer()
emblems = PartyEmblemSerializer(many=True)
descriptions = PartyDescriptionSerializer(many=True)
class MinimalPartySerializer(PartySerializer):
class Meta:
model = Party
fields = ("ec_id", "name", "legacy_slug")
|
Add legacy slug to embedded Party on memberships
|
Add legacy slug to embedded Party on memberships
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
c37abb2849dc3c4b885673220f9f9965109f0be6
|
sieve/sieve.py
|
sieve/sieve.py
|
def sieve(n):
if n < 2:
return []
not_prime = set()
prime = [2]
for i in range(3, n+1, 2):
if i not in not_prime:
prime.append(i)
not_prime.update(range(i*i, n+1, i))
return prime
|
def sieve(n):
return list(primes(n))
def primes(n):
if n < 2:
raise StopIteration
yield 2
not_prime = set()
for i in range(3, n+1, 2):
if i not in not_prime:
not_prime.update(range(i*i, n+1, i))
yield i
|
Revert back to a generator - it's actually slight faster
|
Revert back to a generator - it's actually slight faster
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
2127e3adf190736e14f8500753ffc58126cb39f4
|
ovp_search/tests/test_execution.py
|
ovp_search/tests/test_execution.py
|
import ovp_search.apps
|
import ovp_search.apps
from django.test import TestCase
from django.core.management import call_command
class RebuildIndexTestCase(TestCase):
def test_rebuild_index_execution(self):
call_command('rebuild_index', '--noinput', verbosity=0)
|
Add test case for index rebuilding
|
Add test case for index rebuilding
|
Python
|
agpl-3.0
|
OpenVolunteeringPlatform/django-ovp-search
|
e23d5a64cfd5604f74cce583db3366f2cabb5e1f
|
tests/basics/builtin_minmax.py
|
tests/basics/builtin_minmax.py
|
# test builtin min and max functions
print(min(0,1))
print(min(1,0))
print(min(0,-1))
print(min(-1,0))
print(max(0,1))
print(max(1,0))
print(max(0,-1))
print(max(-1,0))
print(min([1,2,4,0,-1,2]))
print(max([1,2,4,0,-1,2]))
# test with key function
lst = [2, 1, 3, 4]
print(min(lst, key=lambda x:x))
print(min(lst, key=lambda x:-x))
print(min(1, 2, 3, 4, key=lambda x:-x))
print(min(4, 3, 2, 1, key=lambda x:-x))
print(max(lst, key=lambda x:x))
print(max(lst, key=lambda x:-x))
print(max(1, 2, 3, 4, key=lambda x:-x))
print(max(4, 3, 2, 1, key=lambda x:-x))
# need at least 1 item in the iterable
try:
min([])
except ValueError:
print("ValueError")
|
# test builtin min and max functions
print(min(0,1))
print(min(1,0))
print(min(0,-1))
print(min(-1,0))
print(max(0,1))
print(max(1,0))
print(max(0,-1))
print(max(-1,0))
print(min([1,2,4,0,-1,2]))
print(max([1,2,4,0,-1,2]))
# test with key function
lst = [2, 1, 3, 4]
print(min(lst, key=lambda x:x))
print(min(lst, key=lambda x:-x))
print(min(1, 2, 3, 4, key=lambda x:-x))
print(min(4, 3, 2, 1, key=lambda x:-x))
print(max(lst, key=lambda x:x))
print(max(lst, key=lambda x:-x))
print(max(1, 2, 3, 4, key=lambda x:-x))
print(max(4, 3, 2, 1, key=lambda x:-x))
# need at least 1 item in the iterable
try:
min([])
except ValueError:
print("ValueError")
# 'default' tests
print(min([1, 2, 3, 4, 5], default=-1))
print(min([], default=-1))
print(max([1, 2, 3, 4, 5], default=-1))
print(max([], default=-1))
|
Add min/max "default" agrument test
|
tests: Add min/max "default" agrument test
|
Python
|
mit
|
mpalomer/micropython,dinau/micropython,henriknelson/micropython,deshipu/micropython,blazewicz/micropython,supergis/micropython,MrSurly/micropython-esp32,henriknelson/micropython,torwag/micropython,matthewelse/micropython,swegener/micropython,dmazzella/micropython,turbinenreiter/micropython,Timmenem/micropython,tralamazza/micropython,ryannathans/micropython,oopy/micropython,infinnovation/micropython,AriZuu/micropython,martinribelotta/micropython,micropython/micropython-esp32,ernesto-g/micropython,bvernoux/micropython,misterdanb/micropython,galenhz/micropython,toolmacher/micropython,micropython/micropython-esp32,mpalomer/micropython,micropython/micropython-esp32,misterdanb/micropython,hiway/micropython,ganshun666/micropython,alex-march/micropython,pramasoul/micropython,cwyark/micropython,pozetroninc/micropython,Timmenem/micropython,adamkh/micropython,pozetroninc/micropython,adafruit/micropython,pozetroninc/micropython,infinnovation/micropython,adafruit/circuitpython,dxxb/micropython,dxxb/micropython,dinau/micropython,tobbad/micropython,EcmaXp/micropython,martinribelotta/micropython,misterdanb/micropython,tralamazza/micropython,TDAbboud/micropython,selste/micropython,hosaka/micropython,martinribelotta/micropython,adafruit/micropython,torwag/micropython,MrSurly/micropython,emfcamp/micropython,Peetz0r/micropython-esp32,emfcamp/micropython,xhat/micropython,selste/micropython,pramasoul/micropython,henriknelson/micropython,kerneltask/micropython,adafruit/circuitpython,tuc-osg/micropython,alex-robbins/micropython,SHA2017-badge/micropython-esp32,selste/micropython,trezor/micropython,Timmenem/micropython,adafruit/micropython,dinau/micropython,selste/micropython,alex-march/micropython,pfalcon/micropython,alex-robbins/micropython,HenrikSolver/micropython,ryannathans/micropython,puuu/micropython,alex-robbins/micropython,supergis/micropython,adafruit/circuitpython,praemdonck/micropython,pfalcon/micropython,tobbad/micropython,infinnovation/micropython,ernesto-g/micropython,matthewe
lse/micropython,PappaPeppar/micropython,MrSurly/micropython,mhoffma/micropython,ganshun666/micropython,mianos/micropython,praemdonck/micropython,galenhz/micropython,dmazzella/micropython,misterdanb/micropython,neilh10/micropython,henriknelson/micropython,oopy/micropython,galenhz/micropython,micropython/micropython-esp32,pfalcon/micropython,danicampora/micropython,matthewelse/micropython,TDAbboud/micropython,toolmacher/micropython,mpalomer/micropython,swegener/micropython,hosaka/micropython,tuc-osg/micropython,turbinenreiter/micropython,galenhz/micropython,blazewicz/micropython,lowRISC/micropython,jmarcelino/pycom-micropython,matthewelse/micropython,drrk/micropython,turbinenreiter/micropython,jmarcelino/pycom-micropython,oopy/micropython,chrisdearman/micropython,adafruit/circuitpython,mhoffma/micropython,lowRISC/micropython,redbear/micropython,ganshun666/micropython,mpalomer/micropython,xhat/micropython,alex-march/micropython,praemdonck/micropython,pfalcon/micropython,EcmaXp/micropython,matthewelse/micropython,xhat/micropython,redbear/micropython,mianos/micropython,TDAbboud/micropython,TDAbboud/micropython,redbear/micropython,torwag/micropython,chrisdearman/micropython,MrSurly/micropython-esp32,neilh10/micropython,EcmaXp/micropython,xhat/micropython,blazewicz/micropython,alex-robbins/micropython,dxxb/micropython,oopy/micropython,trezor/micropython,martinribelotta/micropython,ganshun666/micropython,Peetz0r/micropython-esp32,SHA2017-badge/micropython-esp32,puuu/micropython,bvernoux/micropython,tobbad/micropython,mhoffma/micropython,emfcamp/micropython,infinnovation/micropython,danicampora/micropython,pramasoul/micropython,Peetz0r/micropython-esp32,kerneltask/micropython,mhoffma/micropython,redbear/micropython,pozetroninc/micropython,dxxb/micropython,ernesto-g/micropython,lowRISC/micropython,swegener/micropython,EcmaXp/micropython,hosaka/micropython,alex-march/micropython,emfcamp/micropython,dinau/micropython,danicampora/micropython,chrisdearman/micropython,ryannathans/
micropython,cwyark/micropython,trezor/micropython,MrSurly/micropython,adamkh/micropython,drrk/micropython,SHA2017-badge/micropython-esp32,AriZuu/micropython,neilh10/micropython,trezor/micropython,HenrikSolver/micropython,mhoffma/micropython,toolmacher/micropython,deshipu/micropython,drrk/micropython,dmazzella/micropython,hosaka/micropython,tralamazza/micropython,alex-robbins/micropython,SHA2017-badge/micropython-esp32,tuc-osg/micropython,tobbad/micropython,kerneltask/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,lowRISC/micropython,praemdonck/micropython,pozetroninc/micropython,MrSurly/micropython-esp32,trezor/micropython,MrSurly/micropython,cwyark/micropython,selste/micropython,adafruit/circuitpython,kerneltask/micropython,turbinenreiter/micropython,swegener/micropython,HenrikSolver/micropython,praemdonck/micropython,alex-march/micropython,drrk/micropython,adafruit/micropython,HenrikSolver/micropython,pfalcon/micropython,cwyark/micropython,adamkh/micropython,torwag/micropython,galenhz/micropython,bvernoux/micropython,tobbad/micropython,ernesto-g/micropython,hosaka/micropython,kerneltask/micropython,tralamazza/micropython,TDAbboud/micropython,jmarcelino/pycom-micropython,neilh10/micropython,danicampora/micropython,HenrikSolver/micropython,EcmaXp/micropython,mpalomer/micropython,xhat/micropython,deshipu/micropython,pramasoul/micropython,puuu/micropython,adafruit/circuitpython,blazewicz/micropython,neilh10/micropython,AriZuu/micropython,adamkh/micropython,hiway/micropython,hiway/micropython,supergis/micropython,pramasoul/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,toolmacher/micropython,danicampora/micropython,mianos/micropython,Timmenem/micropython,drrk/micropython,puuu/micropython,torwag/micropython,dinau/micropython,adamkh/micropython,adafruit/micropython,hiway/micropython,blazewicz/micropython,turbinenreiter/micropython,martinribelotta/micropython,puuu/micropython,AriZuu/micropython,emfcamp/micropython,oopy/micropython,redbe
ar/micropython,PappaPeppar/micropython,infinnovation/micropython,chrisdearman/micropython,henriknelson/micropython,ganshun666/micropython,jmarcelino/pycom-micropython,Timmenem/micropython,bvernoux/micropython,SHA2017-badge/micropython-esp32,swegener/micropython,mianos/micropython,supergis/micropython,mianos/micropython,dxxb/micropython,cwyark/micropython,lowRISC/micropython,MrSurly/micropython-esp32,misterdanb/micropython,hiway/micropython,PappaPeppar/micropython,chrisdearman/micropython,ryannathans/micropython,MrSurly/micropython,AriZuu/micropython,deshipu/micropython,tuc-osg/micropython,dmazzella/micropython,jmarcelino/pycom-micropython,ernesto-g/micropython,toolmacher/micropython,matthewelse/micropython,PappaPeppar/micropython,deshipu/micropython,tuc-osg/micropython,Peetz0r/micropython-esp32,bvernoux/micropython,ryannathans/micropython,supergis/micropython
|
d90d91906981a4393810069b494d68230f17439e
|
frameworks/Scala/spray/setup.py
|
frameworks/Scala/spray/setup.py
|
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
  """Build the spray benchmark with sbt, then launch the assembled jar.

  Returns 0 once the server process has been spawned.
  """
  # Pick the platform-appropriate sbt invocation.
  if os.name == 'nt':
    build_cmd = '"..\\sbt\\sbt.bat" assembly'
  else:
    build_cmd = "../sbt/sbt assembly"
  subprocess.check_call(build_cmd, shell=True, cwd="spray", stderr=errfile, stdout=logfile)
  # Launch the server in the background; its output goes to the provided logs.
  subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile)
  # Give the JVM a moment to come up before the harness probes it.
  time.sleep(5)
  return 0
def stop(logfile, errfile):
  """Terminate any running spray-benchmark server process.

  Always returns 0; a process that has already exited is ignored.
  """
  if os.name == 'nt':
    # Windows: have WMI locate and terminate the java process by command line.
    subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile)
    return 0
  # Unix: scan the process table for the benchmark jar and send SIGTERM (15).
  ps = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
  listing, _ = ps.communicate()
  for entry in listing.splitlines():
    if 'spray-benchmark' not in entry:
      continue
    try:
      # PID is the second whitespace-separated column of ps aux output.
      os.kill(int(entry.split(None, 2)[1]), 15)
    except OSError:
      # Best-effort: the process may already be gone.
      pass
  return 0
|
import subprocess
import sys
import time
import os
def start(args, logfile, errfile):
  """Build the spray benchmark with sbt, then launch the assembled jar.

  Returns 0 once the server process has been spawned.
  """
  # Choose the sbt launcher appropriate for the host platform.
  if os.name == 'nt':
    assemble = '"..\\sbt\\sbt.bat" assembly'
  else:
    assemble = "$FWROOT/sbt/sbt assembly"
  subprocess.check_call(assemble, shell=True, cwd="spray", stderr=errfile, stdout=logfile)
  # Start the server in the background, logging to the harness-provided files.
  subprocess.Popen("java -jar target/scala-2.10/spray-benchmark-assembly-1.0.jar", cwd="spray", shell=True, stderr=errfile, stdout=logfile)
  return 0
def stop(logfile, errfile):
  """Terminate any running spray-benchmark server process.

  Always returns 0; a process that has already exited is ignored.
  """
  if os.name != 'nt':
    # Unix: walk the process table looking for the benchmark jar.
    proc = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
    output, _ = proc.communicate()
    matches = [row for row in output.splitlines() if 'spray-benchmark' in row]
    for row in matches:
      try:
        # Second column of ps aux is the PID; signal 15 is SIGTERM.
        pid = int(row.split(None, 2)[1])
        os.kill(pid, 15)
      except OSError:
        # Best-effort: the process may already have exited.
        pass
  else:
    # Windows: let WMI find and terminate the process by its command line.
    subprocess.check_call("wmic process where \"CommandLine LIKE '%spray-benchmark%'\" call terminate", stderr=errfile, stdout=logfile)
  return 0
|
Enable spray to find sbt
|
Enable spray to find sbt
|
Python
|
bsd-3-clause
|
zane-techempower/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Verber/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zapov/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sgml/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,joshk/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,valyala/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sgml/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,joshk/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,denkab/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,greg-hellings/FrameworkB
enchmarks,testn/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,khellang/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,valyala/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,herloct/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Verber/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,testn/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,khellang/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,testn/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,grob/FrameworkBenchmarks,torhve/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,testn/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,donova
nmuller/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,methane/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,denkab/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,doom369/FrameworkBenchmarks,sgml/FrameworkBenchmarks,testn/FrameworkBenchmarks,actframework/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,doom369/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,sgml/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,grob/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,zloster/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,Verber/Frame
workBenchmarks,raziel057/FrameworkBenchmarks,sgml/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,valyala/FrameworkBenchmarks,testn/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,torhve/FrameworkBenchmarks,actframework/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,valyala/FrameworkBenchmarks,valyala/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zapov/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jamming/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,joshk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sxend/FrameworkBenchmarks,doom369/FrameworkBenchmarks,actframework/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Syn
chro/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,denkab/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,valyala/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sxend/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,sgml/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,torhve/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zloster/FrameworkBenchmarks,zapov/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zloster/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,grob/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zloster/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zapov/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,denkab/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Verber/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,markkolich/FrameworkBenchma
rks,greg-hellings/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jamming/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,actframework/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,valyala/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,methane/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,valyala/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,grob/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,doom369/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,testn/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,doom369/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zapov/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,methane/FrameworkBenchmarks,jamming/Fram
eworkBenchmarks,marko-asplund/FrameworkBenchmarks,doom369/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,sxend/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,actframework/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,khellang/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,grob/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,denkab/FrameworkBenchmarks,doom369/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,denkab/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,doom369/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zloster/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,denkab/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jamming/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,valyala/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,grob/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,diablonhn
/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zapov/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zapov/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sxend/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,joshk/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Verber/FrameworkBenchmarks,doom369/FrameworkBenchmarks,zapov/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jamming/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,zapov/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zapov/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,kellabyte/Frame
workBenchmarks,PermeAgility/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,Verber/FrameworkBenchmarks,valyala/FrameworkBenchmarks,zloster/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,denkab/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,torhve/FrameworkBenchmarks,khellang/FrameworkBenchmarks,methane/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,herloct/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,testn/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,sxend/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,torhve/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,doom369/FrameworkBenchmarks,khellang/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Verber/FrameworkBenchmarks,khellang/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,Verber/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,valyala/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,herloct/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,joshk/FrameworkBenchmarks,denkab/FrameworkBenchmarks,khellang/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,torhve/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,ashawnban
dy-te-tfb/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,zloster/FrameworkBenchmarks,methane/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Verber/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,testn/FrameworkBenchmarks,testn/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,testn/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,methane/FrameworkBenchmarks,Verber/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,khellang/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,denkab/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,joshk/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,doom369/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,actframework/FrameworkBenchmarks,torhve/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,actframework/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,khellang/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,valyala/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,grob/FrameworkBenchmarks,herloct/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,testn/FrameworkBenchmarks,torhve/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,circlespainter/Framewor
kBenchmarks,jeevatkm/FrameworkBenchmarks,khellang/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,methane/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,zloster/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,zapov/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,grob/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,methane/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Verber/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,khellang/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,methane/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,zapov/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zloster/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,herloct/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,khellang/FrameworkBenchma
rks,sgml/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,zloster/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,sxend/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,actframework/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,herloct/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,Verber/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,jamming/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,grob/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,torhve/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Verber/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,sxend/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,joshk/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,torhve/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,joshk/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,Rayne/FrameworkBenchm
arks,actframework/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,grob/FrameworkBenchmarks,methane/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,methane/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,zapov/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,sxend/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,doom369/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,testn/FrameworkBenchmarks,sgml/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,khellang/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,grob/FrameworkBenchmarks,methane/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,herloct/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,zloster/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,herloct/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,knewmanTE/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,doom369/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,joshk/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,sgml/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,nbrady
-techempower/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,valyala/FrameworkBenchmarks,grob/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,methane/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,zloster/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,jebbstewart/FrameworkBenchmarks,khellang/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,hperadin/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,jamming/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,zloster/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,actframework/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,alubbe/FrameworkBenchmarks,grob/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,saturday06/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,doom369/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,sxend/FrameworkBenchmarks,kostya-sh/FrameworkBenchmarks,sgml/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,kostya-sh
/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,denkab/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,jaguililla/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,denkab/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,kbrock/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,Eyepea/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,jamming/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,greg-hellings/FrameworkBenchmarks,sgml/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,joshk/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,kellabyte/FrameworkBenchmarks,Synchro/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,markkolich/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,zloster/FrameworkBenchmarks,torhve/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,ratpack/FrameworkBenchmarks,lcp0578/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,donovanmuller/FrameworkBenchmarks,k-r-g/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,jamming/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,sanjoydesk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,zloster/Fra
meworkBenchmarks,k-r-g/FrameworkBenchmarks,methane/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,nbrady-techempower/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,raziel057/FrameworkBenchmarks,circlespainter/FrameworkBenchmarks,nkasvosve/FrameworkBenchmarks,Rydgel/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,torhve/FrameworkBenchmarks,nathana1/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,jeevatkm/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,actframework/FrameworkBenchmarks,zhuochenKIDD/FrameworkBenchmarks,sxend/FrameworkBenchmarks,Rayne/FrameworkBenchmarks,testn/FrameworkBenchmarks,jetty-project/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,sxend/FrameworkBenchmarks,joshk/FrameworkBenchmarks,grob/FrameworkBenchmarks,victorbriz/FrameworkBenchmarks,joshk/FrameworkBenchmarks,jamming/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,hamiltont/FrameworkBenchmarks,sagenschneider/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zane-techempower/FrameworkBenchmarks,sgml/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,youprofit/FrameworkBenchmarks,fabianmurariu/FrameworkBenchmarks,s-ludwig/FrameworkBenchmarks,herloct/FrameworkBenchmarks,sxend/FrameworkBenchmarks,mfirry/FrameworkBenchmarks,MTDdk/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,jamming/FrameworkBenchmarks,thousandsofthem/FrameworkBenchmarks,yunspace/FrameworkBenchmarks,zapov/FrameworkBenchmarks,waiteb3/FrameworkBenchmarks,greenlaw110/FrameworkBenchmarks,psfblair/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks,stefanocasazza/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,herloct/FrameworkBenchmarks,ashawnbandy-te-tfb/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,zdanek/FrameworkBenchmarks,RockinRoel/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,PermeAgility/FrameworkBenchmarks,F3Community/FrameworkBenchmarks,martin-g/FrameworkBenchmarks,zdanek/Fr
ameworkBenchmarks,jaguililla/FrameworkBenchmarks,steveklabnik/FrameworkBenchmarks,Jesterovskiy/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,Dith3r/FrameworkBenchmarks,xitrum-framework/FrameworkBenchmarks,joshk/FrameworkBenchmarks,marko-asplund/FrameworkBenchmarks,diablonhn/FrameworkBenchmarks
|
6551c882745b13d5b9be183e83f379e34b067921
|
tests/test_emailharvesterws.py
|
tests/test_emailharvesterws.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_botanick
----------------------------------
Tests for `botanick` module.
"""
from botanick import Botanick
def test_botanick():
emails_found = Botanick.search("squad.pro")
assert emails_found != ""
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_botanick
----------------------------------
Tests for `botanick` module.
"""
import pytest
from botanick import Botanick
def test_botanick():
emails_found = Botanick.search("squad.pro")
assert emails_found != ""
print(emails_found)
|
Revert "Fix a codacy issue"
|
Revert "Fix a codacy issue"
This reverts commit 0fe83f1bfa54eda16c42fb5d81b33215dc3ba562.
|
Python
|
mit
|
avidot/Botanick
|
2666eee0a59581c504b36acd618e256cf313c377
|
start_server.py
|
start_server.py
|
import os
def start_server():
os.system('ssh pi@192.168.2.4 python python-libs/RaspberryDrive/driving_server.py &')
return
|
import os
def start_server():
count = 0
while count < 2
send_ssh_server_start(count)
count +=1
exit()
def send_ssh_server_start(count):
try:
os.system('ssh pi@192.168.2.4 python python-libs/RaspberryDrive/driving_server.py &')
return
except:
sleep(count + 1)
|
Add logic to try server 3 times, pausing a little more each time.
|
Add logic to try server 3 times, pausing a little more each time.
|
Python
|
mit
|
jwarshaw/RaspberryDrive
|
72205981af062258c4cf75c4323aa3e4d2859bb8
|
pelicanconf.py
|
pelicanconf.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Vitaly Potyarkin'
BIO = 'Unsorted ramblings, sometimes related to programming'
SITENAME = 'Randomize'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Moscow'
DEFAULT_LANG = 'EN'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = (('Pelican', 'http://getpelican.com/'),
('Python.org', 'http://python.org/'),
('Jinja2', 'http://jinja.pocoo.org/'),
('You can modify those links in your config file', '#'),)
# Social widget
SOCIAL = (('You can add links in your config file', '#'),
('Another social link', '#'),)
DEFAULT_PAGINATION = 10
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
#!/usr/bin/env python
# -*- coding: utf-8 -*- #
from __future__ import unicode_literals
AUTHOR = 'Vitaly Potyarkin'
BIO = 'Unsorted ramblings, sometimes related to programming'
SITENAME = 'Randomize'
SITEURL = ''
PATH = 'content'
TIMEZONE = 'Europe/Moscow'
DEFAULT_LANG = 'EN'
# Feed generation is usually not desired when developing
FEED_ALL_ATOM = None
CATEGORY_FEED_ATOM = None
TRANSLATION_FEED_ATOM = None
AUTHOR_FEED_ATOM = None
AUTHOR_FEED_RSS = None
# Blogroll
LINKS = ()
# Social widget
SOCIAL = [
('calendar', '/archives.html'),
('tags', '/tags.html'),
('email', 'sio.wtf@gmail.com'),
('github', 'https://github.com/sio'),
]
DEFAULT_PAGINATION = 6
# Uncomment following line if you want document-relative URLs when developing
#RELATIVE_URLS = True
|
Replace default links and values
|
Replace default links and values
|
Python
|
apache-2.0
|
sio/potyarkin.ml,sio/potyarkin.ml
|
857124a12f10e3954c114c2b6b688857b80a77a5
|
Spectrum.py
|
Spectrum.py
|
#!/usr/bin/python
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
x = [1,2,3,4,5,6]
y = [1,1,0.9,0.95,1,1]
test = Spectrum()
print(test)
print(test.pixel, test.flux)
test.pixel = x
test.flux = y
print(test)
print(test.pixel, test.flux, test.wavelength)
test2 = Spectrum(x, flux=y)
print(test2)
print(test2.pixel, test.flux)
print(test2.wavelength)
|
#!/usr/bin/python
from __future__ import print_function, division
# Spectrum Class
# Begun August 2016
# Jason Neal
class Spectrum:
""" Spectrum class represents and manipulates astronomical spectra. """
def __init__(self, pixel=[], flux=[], wavelength=[]):
""" Create a empty spectra """
self.pixel = pixel
self.flux = flux
self.wavelength = wavelength
# Try using Spectrum
#if __name__ == __main__:
|
Remove simple testing from inside class module
|
Remove simple testing from inside class module
|
Python
|
mit
|
jason-neal/spectrum_overload,jason-neal/spectrum_overload,jason-neal/spectrum_overload
|
530f67493ba0d044a0896aff39bdab2ea5f1cf15
|
__init__.py
|
__init__.py
|
from openerp.osv import orm
from openerp.tools.translate import _
__all__ = ['OEMetaSL']
def get_overrides():
overrides = {}
def add_override(func):
overrides[func.func_name] = func
@add_override
def copy(cls, cr, uid, rec_id, default=None, context=None):
# Raise by default. This method should be implemented to work.
raise osv.except_osv(
_(u"Warning"),
_(u"Copy is not supported for this item.")
)
for func_name, func in overrides.iteritems():
yield func_name, func
class OEMetaSL(orm.MetaModel):
def __init__(cls, name, bases, nmspc):
super(OEMetaSL, cls).__init__(name, bases, nmspc)
for func_name, func in get_overrides():
if not func_name in nmspc:
setattr(cls, func_name, func)
|
from openerp.osv import orm
from openerp.osv import osv
from openerp.tools.translate import _
__all__ = ['OEMetaSL']
def get_overrides():
overrides = {}
def add_override(func):
overrides[func.func_name] = func
@add_override
def copy(cls, cr, uid, rec_id, default=None, context=None):
# Raise by default. This method should be implemented to work.
raise osv.except_osv(
_(u"Warning"),
_(u"Copy is not supported for this item.")
)
for func_name, func in overrides.iteritems():
yield func_name, func
class OEMetaSL(orm.MetaModel):
def __init__(cls, name, bases, nmspc):
super(OEMetaSL, cls).__init__(name, bases, nmspc)
for func_name, func in get_overrides():
if not func_name in nmspc:
setattr(cls, func_name, func)
|
Add osv method from openerp
|
Add osv method from openerp
|
Python
|
agpl-3.0
|
xcgd/oemetasl
|
709017ea46cd3784983ef0ee64cfe608aa44cf0c
|
tests/integration/aiohttp_utils.py
|
tests/integration/aiohttp_utils.py
|
import asyncio
import aiohttp
@asyncio.coroutine
def aiohttp_request(loop, method, url, output='text', **kwargs):
session = aiohttp.ClientSession(loop=loop)
response_ctx = session.request(method, url, **kwargs) # NOQA: E999
response = yield from response_ctx.__aenter__() # NOQA: E999
if output == 'text':
content = yield from response.text() # NOQA: E999
elif output == 'json':
content = yield from response.json() # NOQA: E999
elif output == 'raw':
content = yield from response.read() # NOQA: E999
response_ctx._resp.close()
yield from session.close()
return response, content
|
import asyncio
import aiohttp
@asyncio.coroutine
def aiohttp_request(loop, method, url, output='text', encoding='utf-8', **kwargs):
session = aiohttp.ClientSession(loop=loop)
response_ctx = session.request(method, url, **kwargs) # NOQA: E999
response = yield from response_ctx.__aenter__() # NOQA: E999
if output == 'text':
content = yield from response.text() # NOQA: E999
elif output == 'json':
content = yield from response.json(encoding=encoding) # NOQA: E999
elif output == 'raw':
content = yield from response.read() # NOQA: E999
response_ctx._resp.close()
yield from session.close()
return response, content
|
Fix aiohttp utils to pass encondig to response.json
|
Fix aiohttp utils to pass encondig to response.json
|
Python
|
mit
|
graingert/vcrpy,graingert/vcrpy,kevin1024/vcrpy,kevin1024/vcrpy
|
2a3fe3b5e08c91ab8d77569b02b36da63909f619
|
pysnmp/hlapi/v1arch/asyncore/sync/__init__.py
|
pysnmp/hlapi/v1arch/asyncore/sync/__init__.py
|
#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
try:
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
except SyntaxError:
from pysnmp.hlapi.v1arch.asyncore.sync.compat.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.compat.ntforg import *
|
#
# This file is part of pysnmp software.
#
# Copyright (c) 2005-2019, Ilya Etingof <etingof@gmail.com>
# License: http://snmplabs.com/pysnmp/license.html
#
from pysnmp.proto.rfc1902 import *
from pysnmp.smi.rfc1902 import *
from pysnmp.hlapi.v1arch.auth import *
from pysnmp.hlapi.v1arch.asyncore.transport import *
from pysnmp.hlapi.v1arch.asyncore.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.dispatch import *
from pysnmp.hlapi.v1arch.asyncore.ntforg import *
from pysnmp.hlapi.v1arch.asyncore.sync.cmdgen import *
from pysnmp.hlapi.v1arch.asyncore.sync.ntforg import *
|
Remove the remnants of hlapi.v1arch.asyncore.sync.compat
|
Remove the remnants of hlapi.v1arch.asyncore.sync.compat
|
Python
|
bsd-2-clause
|
etingof/pysnmp,etingof/pysnmp
|
674721b9b094fe7e63d3356cf76e7eec0cb9bb62
|
employees/serializers.py
|
employees/serializers.py
|
from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
class EmployeeAuthenticationResponse(serializers.Serializer):
token = serializers.CharField(max_length=40)
user_id = serializers.IntegerField()
|
from .models import Employee
from rest_framework import serializers
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
depth = 1
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'role',
'skype_id',
'last_month_score',
'current_month_score',
'level',
'score',
'is_active',
'last_login')
class EmployeeListSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk',
'username',
'email',
'first_name',
'last_name',
'level',
'avatar',
'score',
'last_month_score',
'current_month_score')
class EmployeeAvatarSerializer(serializers.ModelSerializer):
class Meta:
model = Employee
fields = ('pk', 'avatar')
class EmployeeAuthenticationResponse(serializers.Serializer):
token = serializers.CharField(max_length=40)
user_id = serializers.IntegerField()
|
Add current_month_score and last_month_score to EmployeeListSerializer
|
Add current_month_score and last_month_score to EmployeeListSerializer
|
Python
|
apache-2.0
|
belatrix/BackendAllStars
|
25e7b4a2e297e9944b5065851c6e65eb40b11bcd
|
scripts/examples/OpenMV/99-Tests/unittests.py
|
scripts/examples/OpenMV/99-Tests/unittests.py
|
# OpenMV Unit Tests.
#
import os, sensor, gc
TEST_DIR = "unittest"
TEMP_DIR = "unittest/temp"
DATA_DIR = "unittest/data"
SCRIPT_DIR = "unittest/script"
if not (TEST_DIR in os.listdir("")):
raise Exception('Unittest dir not found!')
print("")
test_failed = False
def print_result(test, passed):
s = "Unittest (%s)"%(test)
padding = "."*(60-len(s))
print(s + padding + ("PASSED" if passed == True else "FAILED"))
for test in sorted(os.listdir(SCRIPT_DIR)):
if test.endswith(".py"):
test_passed = True
test_path = "/".join((SCRIPT_DIR, test))
try:
exec(open(test_path).read())
gc.collect()
if unittest(DATA_DIR, TEMP_DIR) == False:
raise Exception()
except Exception as e:
test_failed = True
test_passed = False
print_result(test, test_passed)
if test_failed:
print("\nSome tests have FAILED!!!\n\n")
else:
print("\nAll tests PASSED.\n\n")
|
# OpenMV Unit Tests.
#
import os, sensor, gc
TEST_DIR = "unittest"
TEMP_DIR = "unittest/temp"
DATA_DIR = "unittest/data"
SCRIPT_DIR = "unittest/script"
if not (TEST_DIR in os.listdir("")):
raise Exception('Unittest dir not found!')
print("")
test_failed = False
def print_result(test, result):
s = "Unittest (%s)"%(test)
padding = "."*(60-len(s))
print(s + padding + result)
for test in sorted(os.listdir(SCRIPT_DIR)):
if test.endswith(".py"):
test_result = "PASSED"
test_path = "/".join((SCRIPT_DIR, test))
try:
exec(open(test_path).read())
gc.collect()
if unittest(DATA_DIR, TEMP_DIR) == False:
raise Exception()
except Exception as e:
test_failed = True
test_result = "DISABLED" if "unavailable" in str(e) else "FAILED"
print_result(test, test_result)
if test_failed:
print("\nSome tests have FAILED!!!\n\n")
else:
print("\nAll tests PASSED.\n\n")
|
Update unittest to ignore disabled functions.
|
Update unittest to ignore disabled functions.
|
Python
|
mit
|
kwagyeman/openmv,kwagyeman/openmv,iabdalkader/openmv,kwagyeman/openmv,iabdalkader/openmv,openmv/openmv,kwagyeman/openmv,iabdalkader/openmv,openmv/openmv,openmv/openmv,iabdalkader/openmv,openmv/openmv
|
de228621deb5637ab0698ca23cf63ece46c5ddee
|
task/views.py
|
task/views.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import viewsets
from rest_framework.permissions import IsAuthenticated
from models import *
from serializers import *
# Create your views here.
class TaskListViewSet(viewsets.ModelViewSet):
permission_classes = (IsAuthenticated,)
queryset = Task.objects.all()
serializer_class = TaskSerializer
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from rest_framework import viewsets
from django.db.models import Q
from rest_framework.permissions import IsAuthenticated
from models import *
from serializers import *
# Create your views here.
class TaskListViewSet(viewsets.ModelViewSet):
permission_classes = (IsAuthenticated,)
serializer_class = TaskSerializer
def get_queryset(self):
"""
This view should return a list of all the tasks
for the currently logged user.
"""
user = self.request.user
task_object = Task.objects.filter(Q(created_by=user) | Q(assigned_to = user))
return task_object
|
Adjust the APIView query_set to return tasks created or assigned to the currently logged user
|
Adjust the APIView query_set to return tasks created or assigned to the currently logged user
|
Python
|
apache-2.0
|
toladata/TolaProfile,toladata/TolaProfile,toladata/TolaProfile,toladata/TolaProfile
|
da91f170c106c46a0d858e887220bc691066cdaa
|
tests/dtypes_test.py
|
tests/dtypes_test.py
|
from common import *
def test_dtype(ds_local):
ds = ds_local
for name in ds.column_names:
assert ds[name].values.dtype == ds.dtype(ds[name])
def test_dtypes(ds_local):
ds = ds_local
all_dtypes = [np.float64, np.float64, np.float64, np.float64, np.int64, np.int64, 'S25', np.object]
np.testing.assert_array_equal(ds.dtypes(columns=None), all_dtypes)
some_dtypes = [np.float64, np.int64, 'S25', np.object]
np.testing.assert_array_equal(ds.dtypes(columns=['x', 'mi', 'name', 'obj']), some_dtypes)
|
from common import *
def test_dtype(ds_local):
ds = ds_local
for name in ds.column_names:
assert ds[name].values.dtype == ds.dtype(ds[name])
def test_dtypes(ds_local):
ds = ds_local
assert (ds.dtypes.values == [ds[name].dtype for name in ds.column_names]).all()
|
Update of the dtypes unit-test.
|
Update of the dtypes unit-test.
|
Python
|
mit
|
maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex,maartenbreddels/vaex
|
db2d8da9109ab4a8aa51acbd80abb2088a7fd299
|
campus02/urls.py
|
campus02/urls.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^admin/', include(admin.site.urls)),
url(r'^', include('django.contrib.auth.urls')),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns(
'',
url(r'^', include('django.contrib.auth.urls')),
url(r'^admin/', include(admin.site.urls)),
url(r'^web/', include('campus02.web.urls', namespace='web')),
url(r'^', include('campus02.base.urls', namespace='base')),
)
|
Rearrange admin URL mount point.
|
Rearrange admin URL mount point.
|
Python
|
mit
|
fladi/django-campus02,fladi/django-campus02
|
8226571dc97230a486a3b59c8752411e038f04ee
|
openprescribing/matrixstore/tests/matrixstore_factory.py
|
openprescribing/matrixstore/tests/matrixstore_factory.py
|
import mock
import sqlite3
from matrixstore.connection import MatrixStore
from matrixstore import db
from matrixstore.tests.import_test_data_fast import import_test_data_fast
def matrixstore_from_data_factory(data_factory, end_date=None, months=None):
"""
Returns a new in-memory MatrixStore instance using the data from the
supplied DataFactory
"""
connection = sqlite3.connect(":memory:")
end_date = max(data_factory.months)[:7] if end_date is None else end_date
months = len(data_factory.months) if months is None else months
import_test_data_fast(connection, data_factory, end_date, months=months)
return MatrixStore(connection)
def patch_global_matrixstore(matrixstore):
"""
Temporarily replace the global MatrixStore instance (as accessed via
`matrixstore.db.get_db`) with the supplied matrixstore
Returns a function which undoes the monkeypatching
"""
patcher = mock.patch("matrixstore.connection.MatrixStore.from_file")
mocked = patcher.start()
mocked.return_value = matrixstore
# There are memoized functions so we clear any previously memoized value
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
def stop_patching():
patcher.stop()
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
matrixstore.close()
return stop_patching
|
import mock
import sqlite3
from matrixstore.connection import MatrixStore
from matrixstore import db
from matrixstore.tests.import_test_data_fast import import_test_data_fast
def matrixstore_from_data_factory(data_factory, end_date=None, months=None):
"""
Returns a new in-memory MatrixStore instance using the data from the
supplied DataFactory
"""
# We need this connection to be sharable across threads because
# LiveServerTestCase runs in a separate thread from the main test code
connection = sqlite3.connect(":memory:", check_same_thread=False)
end_date = max(data_factory.months)[:7] if end_date is None else end_date
months = len(data_factory.months) if months is None else months
import_test_data_fast(connection, data_factory, end_date, months=months)
return MatrixStore(connection)
def patch_global_matrixstore(matrixstore):
"""
Temporarily replace the global MatrixStore instance (as accessed via
`matrixstore.db.get_db`) with the supplied matrixstore
Returns a function which undoes the monkeypatching
"""
patcher = mock.patch("matrixstore.connection.MatrixStore.from_file")
mocked = patcher.start()
mocked.return_value = matrixstore
# There are memoized functions so we clear any previously memoized value
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
def stop_patching():
patcher.stop()
db.get_db.cache_clear()
db.get_row_grouper.cache_clear()
matrixstore.close()
return stop_patching
|
Fix MatrixStore test patching to work with LiveServerTestCase
|
Fix MatrixStore test patching to work with LiveServerTestCase
|
Python
|
mit
|
annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,ebmdatalab/openprescribing,annapowellsmith/openpresc,annapowellsmith/openpresc,ebmdatalab/openprescribing,ebmdatalab/openprescribing
|
31ce7c5c264e7648427f73b51cd966165e63ec23
|
beaver/redis_transport.py
|
beaver/redis_transport.py
|
import datetime
import redis
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, file_config, beaver_config):
super(RedisTransport, self).__init__(file_config, beaver_config)
redis_url = beaver_config.get('redis_url')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self.redis = redis.StrictRedis(host=_url.hostname, port=_url.port, db=int(_db), socket_timeout=10)
self.redis_namespace = beaver_config.get('redis_namespace')
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self.redis.rpush(
self.redis_namespace,
self.format(filename, timestamp, line)
)
|
import datetime
import redis
import time
import urlparse
import beaver.transport
class RedisTransport(beaver.transport.Transport):
def __init__(self, file_config, beaver_config):
super(RedisTransport, self).__init__(file_config, beaver_config)
redis_url = beaver_config.get('redis_url')
_url = urlparse.urlparse(redis_url, scheme="redis")
_, _, _db = _url.path.rpartition("/")
self.redis = redis.StrictRedis(host=_url.hostname, port=_url.port, db=int(_db), socket_timeout=10)
self.redis_namespace = beaver_config.get('redis_namespace')
wait = 0
while 1:
if wait == 20:
break
time.sleep(0.1)
wait += 1
try:
self.redis.ping()
break
except redis.exceptions.ConnectionError:
pass
def callback(self, filename, lines):
timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S.%fZ")
for line in lines:
self.redis.rpush(
self.redis_namespace,
self.format(filename, timestamp, line)
)
|
Allow for initial connection lag. Helpful when waiting for an SSH proxy to connect
|
Allow for initial connection lag. Helpful when waiting for an SSH proxy to connect
|
Python
|
mit
|
doghrim/python-beaver,Appdynamics/beaver,josegonzalez/python-beaver,doghrim/python-beaver,jlambert121/beaver,davidmoravek/python-beaver,josegonzalez/python-beaver,imacube/python-beaver,PierreF/beaver,zuazo-forks/beaver,zuazo-forks/beaver,thomasalrin/beaver,python-beaver/python-beaver,PierreF/beaver,rajmarndi/python-beaver,Open-Party/python-beaver,Appdynamics/beaver,Open-Party/python-beaver,davidmoravek/python-beaver,thomasalrin/beaver,imacube/python-beaver,timstoop/python-beaver,jlambert121/beaver,timstoop/python-beaver,rajmarndi/python-beaver,python-beaver/python-beaver
|
597a2ec7a6ff0bae0b43a67e8be675017fd1d7f1
|
falafel/mappers/tests/test_current_clocksource.py
|
falafel/mappers/tests/test_current_clocksource.py
|
from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
|
from falafel.mappers.current_clocksource import CurrentClockSource
from falafel.tests import context_wrap
CLKSRC = """
tsc
"""
def test_get_current_clksr():
clksrc = CurrentClockSource(context_wrap(CLKSRC))
assert clksrc.data == "tsc"
assert clksrc.is_kvm is False
assert clksrc.is_vmi_timer != clksrc.is_tsc
|
Enhance coverage of current_closcksource to 100%
|
Enhance coverage of current_closcksource to 100%
|
Python
|
apache-2.0
|
RedHatInsights/insights-core,RedHatInsights/insights-core
|
196fe935afd6adfec5d205e88472d7ef607b4743
|
checkout.py
|
checkout.py
|
__author__ = 'RMGiroux'
import asyncio
from asyncio import subprocess
import sys
class OutputCollector:
def __init__(self, name):
self.name = name
@asyncio.coroutine
def process_line(self, stream):
while not stream.at_eof():
line = yield from stream.readline()
print("%s: %s"%(name, line))
@asyncio.coroutine
def read_stdout(stream, callback):
while True:
line = yield from stream.readline()
print('received', repr(line))
if not line:
break
else:
callback(line)
@asyncio.coroutine
def async_exec(repo, stdoutCallback):
fork = yield from asyncio.create_subprocess_shell(
("git clone %s"%repo),stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
tasks = []
if fork.stdout is not None:
tasks.append(read_stdout(fork.stdout, stdoutCallback))
else:
print('No stdout')
yield from asyncio.wait(tasks)
retCode = yield from fork.wait()
return retCode
def test_callback(line):
print("Received: %s"%line)
loop = asyncio.get_event_loop()
task = async_exec(sys.argv[1], test_callback)
loop.run_until_complete(task)
|
__author__ = 'RMGiroux'
import asyncio
from asyncio import subprocess
import sys
class OutputCollector:
def __init__(self, name):
self.name = name
@asyncio.coroutine
def process_line(self, stream):
while not stream.at_eof():
line = yield from stream.readline()
print("%s: %s"%(name, line))
@asyncio.coroutine
def read_stdout(stream, callback):
while True:
line = yield from stream.readline()
print('received', repr(line))
if not line:
break
else:
callback(line)
@asyncio.coroutine
def async_exec(command, stdoutCallback):
fork = yield from asyncio.create_subprocess_shell(
(command),stdout=subprocess.PIPE,stderr=subprocess.STDOUT)
tasks = []
if fork.stdout is not None:
tasks.append(read_stdout(fork.stdout, stdoutCallback))
else:
print('No stdout')
yield from asyncio.wait(tasks)
retCode = yield from fork.wait()
return retCode
def test_callback(line):
print("Received: '%s'"%line)
loop = asyncio.get_event_loop()
tasks = []
for command in sys.argv[1:]:
task = async_exec(command, test_callback)
tasks.append(task)
loop.run_until_complete(asyncio.wait(tasks))
# Test with
# PATH=$PWD/../bde-tools/bin:$PATH python3 ~/PycharmProjects/python_experiments/checkout.py \
# "export WAFLOCK=.waf-lock-dbg_exc_mt BDE_WAF_UFID=dbg_exc_mt; waf configure build --target=bsl" \
# "export WAFLOCK=.waf-lock-opt_exc_mt BDE_WAF_UFID=opt_exc_mt; waf configure build --target=bsl"
|
Add comment showing parallel waf invocation
|
Add comment showing parallel waf invocation
|
Python
|
apache-2.0
|
RMGiroux/python_experiments
|
603ad671c1f6976f75065a4365589a75e1e384ee
|
service_and_process/serializers.py
|
service_and_process/serializers.py
|
from .models import *
from rest_framework import serializers
class MasterWorkableSerializer(serializers.ModelSerializer):
class Meta:
model = MasterWorkable
|
from .models import *
from rest_framework import serializers
class MasterWorkableSerializer(serializers.ModelSerializer):
class Meta:
model = MasterWorkable
fields = '__all__'
|
Add explicit fields in serializer
|
Add explicit fields in serializer
|
Python
|
apache-2.0
|
rameshgopalakrishnan/v_excel_inventory,rameshgopalakrishnan/v_excel_inventory,rameshgopalakrishnan/v_excel_inventory
|
f243d309e5168b5855045227c9c0a6b082bedc69
|
luigi/tasks/gtrnadb/__init__.py
|
luigi/tasks/gtrnadb/__init__.py
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from glob import iglob
import luigi
from tasks.config import gtrnadb
from .json_to_csv import GtRNAdbJsonToCsv
class GtRNAdb(luigi.WrapperTask): # pylint: disable=R0904
"""
Imports all GtRNAdb data. This will generate a task for each separate file to
create the CSV files, but does not run the secondary structure importing.
That has to be trigger manually after this is complete.
"""
def requires(self):
config = gtrnadb()
for filename in iglob(config.pattern):
yield GtRNAdbJsonToCsv(input_file=filename)
|
# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from glob import glob
import luigi
from tasks.config import gtrnadb
from .json_to_csv import GtRNAdbJsonToCsv
class GtRNAdb(luigi.WrapperTask): # pylint: disable=R0904
"""
Imports all GtRNAdb data. This will generate a task for each separate file
to create the CSV files, but does not run the secondary structure
importing. That has to be trigger manually after this is complete.
"""
def requires(self):
config = gtrnadb()
files = glob(config.pattern)
if not files:
raise ValueError("No GtRNAdb data files file")
for filename in files:
yield GtRNAdbJsonToCsv(input_file=filename)
|
Check that there are data files to import
|
Check that there are data files to import
It is possible for the pattern to match nothing leading to no files
being imported. This is an error case so we raise an exception if it
happens.
|
Python
|
apache-2.0
|
RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline
|
9c7090215ecda3fd4d173c8c5f2d3e1462fbbeee
|
takePicture.py
|
takePicture.py
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
#os.unlink('greg.jpg')
img = cam.capture('gregTest.jpg')
time.sleep(.25)
#oc.rename('gregTemp.jpg', 'greg.jpg')
x +=1
exit()
|
import picamera as p
import os
import time
os.chdir('/home/pi/Desktop')
cam = p.PiCamera()
cam.resolution = (320,240)
cam.hflip = True
cam.vflip = True
x = 0
while x < 50:
os.unlink('gregTest.jpg')
img = cam.capture('tempGregTest.jpg')
oc.rename('gregTempTest.jpg', 'gregTest.jpg')
time.sleep(.25)
x +=1
exit()
|
Add temp file sequence to take picture file
|
Add temp file sequence to take picture file
|
Python
|
mit
|
jwarshaw/RaspberryDrive
|
9f05a8917ee6fd01a334ef2e1e57062be8ef13af
|
byceps/config_defaults.py
|
byceps/config_defaults.py
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
"""
byceps.config_defaults
~~~~~~~~~~~~~~~~~~~~~~
Default configuration values
:Copyright: 2006-2020 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import timedelta
from pathlib import Path
# database connection
SQLALCHEMY_ECHO = False
# Avoid connection errors after database becomes temporarily
# unreachable, then becomes reachable again.
SQLALCHEMY_ENGINE_OPTIONS = {'pool_pre_ping': True}
# Disable Flask-SQLAlchemy's tracking of object modifications.
SQLALCHEMY_TRACK_MODIFICATIONS = False
# job queue
JOBS_ASYNC = True
# metrics
METRICS_ENABLED = False
# RQ dashboard (for job queue)
RQ_DASHBOARD_ENABLED = False
RQ_POLL_INTERVAL = 2500
WEB_BACKGROUND = 'white'
# login sessions
PERMANENT_SESSION_LIFETIME = timedelta(14)
# localization
LOCALE = 'de_DE.UTF-8'
LOCALES_FORMS = ['de']
TIMEZONE = 'Europe/Berlin'
# static content files path
PATH_DATA = Path('./data')
# home page
ROOT_REDIRECT_TARGET = None
ROOT_REDIRECT_STATUS_CODE = 307
# shop
SHOP_ORDER_EXPORT_TIMEZONE = 'Europe/Berlin'
|
Enable DBMS pool pre-pinging to avoid connection errors
|
Enable DBMS pool pre-pinging to avoid connection errors
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
|
968b862f6e437b627776b9b8ccf6204434493101
|
tests/test_rover_instance.py
|
tests/test_rover_instance.py
|
from unittest import TestCase
from rover import Rover
class TestRover(TestCase):
def setUp(self):
self.rover = Rover()
def test_rover_compass(self):
assert self.rover.compass == ['N', 'E', 'S', 'W']
|
from unittest import TestCase
from rover import Rover
class TestRover(TestCase):
def setUp(self):
self.rover = Rover()
def test_rover_compass(self):
assert self.rover.compass == ['N', 'E', 'S', 'W']
def test_rover_position(self):
assert self.rover.position == (self.rover.x, self.rover.y, self.rover.direction)
|
Add failing rover position reporting test
|
Add failing rover position reporting test
|
Python
|
mit
|
authentik8/rover
|
21835415f0224e08c7328151d4319ec73d67cbe1
|
station.py
|
station.py
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = eval(input("Enter the max capacity of the station: "))
#testfuntion()
self.escalators = eval(input("Enter the number of escalators in the station: "))
#testfuntion()
self.train_wait = eval(input("Enter the wait time between trains: "))
#testfuntion()
self.travelors_arriving = eval(input("How many people just exited the train? "))
#testfuntion()
self.travelors_departing = eval(input("How many people are waiting for the train? "))
#testfuntion()
|
"""Creates the station class"""
class Station:
"""
Each train station is an instance of the Station class.
Methods:
__init__: creates a new stations
total_station_pop: calculates total station population
"""
def __init__(self):
self.capacity = int(eval(input("Enter the max capacity of the station: ")))
#testfuntion()
self.escalators = int(eval(input("Enter the number of escalators in the station: ")))
#testfuntion()
self.train_wait = int(eval(input("Enter the wait time between trains in seconds: ")))
#testfuntion()
self.travelors_arriving = int(eval(input("How many people just exited the train? ")))
#testfuntion()
self.travelors_departing = int(eval(input("How many people are waiting for the train? ")))
#testfuntion()
|
Add int to input statements
|
Add int to input statements
Ref #23 #10
|
Python
|
mit
|
ForestPride/rail-problem
|
42d64b71db7a21355132d1c1573e12798e377b4c
|
incomplete/pythagoras.py
|
incomplete/pythagoras.py
|
import sys
def gather_squares_triangles(p1,p2,depth)
""" Draw Square and Right Triangle given 2 points,
Recurse on new points
args:
p1,p2 (float,float) : absolute position on base vertices
depth (int) : decrementing counter that terminates recursion
return:
squares [(float,float,float,float)...] : absolute positions of
vertices of squares
triangles [(float,float,float)...] : absolute positions of
vertices of right triangles
"""
pass
def task(argv):
""" Draw a Depth-7 Pytagoras Tree without the use of Trig Functions """
# Init Canvas
# Collect Vertices for squares and right triangles
# Draw Points
# Commit Canvas
return 0
if __name__ == "__main__":
sys.exit(task(sys.argv))
|
import sys
def gather_squares_triangles(p1,p2,depth)
""" Draw Square and Right Triangle given 2 points,
Recurse on new points
args:
p1,p2 (float,float) : absolute position on base vertices
depth (int) : decrementing counter that terminates recursion
return:
squares [(float,float,float,float)...] : absolute positions of
vertices of squares
triangles [(float,float,float)...] : absolute positions of
vertices of right triangles
"""
if depth == 0:
return [],[]
pd = (p2[0] - p1[0]),(p1[1] - p2[1])
p3 = (p2[0] - pd[1]),(p2[1] - pd[0])
p4 = (p1[0] - pd[1]),(p1[1] - pd[0])
p5 = (p4[0] + (pd[0] - pd[1])/2),(p4[1] - (pd[0] + pd[1])/2)
squares_left,triangles_left = gather_squares_triangles(p4,p5,depth-1)
squares_right,triangles_right = gather_squares_triangles(p5,p3,depth-1)
squares = [[p1,p2,p3,p4]]+squares_left+squares_right
triangles = [[p3,p4,p5]]+triangles_left+triangles_right
return squares,triangles
def task(argv):
""" Draw a Depth-7 Pytagoras Tree without the use of Trig Functions """
# Init Canvas
# Collect Vertices for squares and right triangles
# Draw Points
# Commit Canvas
return 0
if __name__ == "__main__":
sys.exit(task(sys.argv))
|
Gather Squares & Triangles Implemented
|
PythagTree: Gather Squares & Triangles Implemented
|
Python
|
mit
|
kpatel20538/Rosetta-Code-Python-Tasks
|
3609df9044fd72008234bae9145487f315096fcd
|
hcalendar/__init__.py
|
hcalendar/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
"""
python-hcalendar is a basic hCalendar parser
"""
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
try:
from .hcalendar import hCalendar
except ImportError:
pass
__all__ = ['hCalendar']
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
python-hcalendar is a basic hCalendar parser
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
__version_info__ = {
'major': 0,
'minor': 2,
'micro': 0,
'releaselevel': 'final',
}
def get_version():
"""
Return the formatted version information
"""
vers = ["%(major)i.%(minor)i" % __version_info__, ]
if __version_info__['micro']:
vers.append(".%(micro)i" % __version_info__)
if __version_info__['releaselevel'] != 'final':
vers.append('%(releaselevel)s' % __version_info__)
return ''.join(vers)
__version__ = get_version()
try:
from .hcalendar import hCalendar
except ImportError:
pass
__all__ = ['hCalendar']
|
Fix hcalendar module __doc__ missing
|
Fix hcalendar module __doc__ missing
|
Python
|
mit
|
mback2k/python-hcalendar
|
5688ca60985db606a3d42078a017bd851c1f01f6
|
build/fbcode_builder/specs/fbthrift.py
|
build/fbcode_builder/specs/fbthrift.py
|
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
builder.add_option('krb5/krb5:git_hash', 'krb5-1.16.1-final')
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separete spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.github_project_workdir('krb5/krb5', 'src'),
builder.autoconf_install('krb5/krb5'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
#!/usr/bin/env python
# Copyright (c) Facebook, Inc. and its affiliates.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import specs.folly as folly
import specs.fizz as fizz
import specs.rsocket as rsocket
import specs.sodium as sodium
import specs.wangle as wangle
import specs.zstd as zstd
from shell_quoting import ShellQuoted
def fbcode_builder_spec(builder):
# This API should change rarely, so build the latest tag instead of master.
builder.add_option(
'no1msd/mstch:git_hash',
ShellQuoted('$(git describe --abbrev=0 --tags)')
)
return {
'depends_on': [folly, fizz, sodium, rsocket, wangle, zstd],
'steps': [
# This isn't a separete spec, since only fbthrift uses mstch.
builder.github_project_workdir('no1msd/mstch', 'build'),
builder.cmake_install('no1msd/mstch'),
builder.fb_github_cmake_install('fbthrift/thrift'),
],
}
|
Cut fbcode_builder dep for thrift on krb5
|
Cut fbcode_builder dep for thrift on krb5
Summary: [Thrift] Cut `fbcode_builder` dep for `thrift` on `krb5`. In the past, Thrift depended on Kerberos and the `krb5` implementation for its transport-layer security. However, Thrift has since migrated fully to Transport Layer Security for its transport-layer security and no longer has any build-time dependency on `krb5`. Clean this up.
Reviewed By: stevegury, vitaut
Differential Revision: D14814205
fbshipit-source-id: dca469d22098e34573674194facaaac6c4c6aa32
|
Python
|
apache-2.0
|
facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift,facebook/fbthrift
|
65e041bd03863563b52496c1cec81a0c9425f4ee
|
geonamescache/mappers.py
|
geonamescache/mappers.py
|
# -*- coding: utf-8 -*-
from geonamescache import GeonamesCache
from . import mappings
def country(from_key='name', to_key='iso'):
gc = GeonamesCache()
dataset = gc.get_dataset_by_key(gc.get_countries(), from_key)
def mapper(key):
if 'name' == from_key and key in mappings.country_names:
key = mappings.country_names[key]
item = dataset.get(key)
if item:
return item[to_key]
return mapper
|
# -*- coding: utf-8 -*-
from geonamescache import GeonamesCache
from . import mappings
def country(from_key='name', to_key='iso'):
"""Creates and returns a mapper function to access country data.
The mapper function that is returned must be called with one argument. In
the default case you call it with a name and it returns a 3-letter
ISO_3166-1 code, e. g. called with ``Spain`` it would return ``ESP``.
:param from_key: (optional) the country attribute you give as input.
Defaults to ``name``.
:param to_key: (optional) the country attribute you want as output.
Defaults to ``iso``.
:return: mapper
:rtype: function
"""
gc = GeonamesCache()
dataset = gc.get_dataset_by_key(gc.get_countries(), from_key)
def mapper(input):
# For country name inputs take the names mapping into account.
if 'name' == from_key:
input = mappings.country_names.get(input, input)
# If there is a record return the demanded attribute.
item = dataset.get(input)
if item:
return item[to_key]
return mapper
|
Add documentation for country mapper
|
Add documentation for country mapper
|
Python
|
mit
|
yaph/geonamescache,yaph/geonamescache
|
2814f5b2bbd2c53c165f13009eb85cb2c5030b57
|
chicago/search_indexes.py
|
chicago/search_indexes.py
|
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if obj.last_action_date:
now = app_timezone.localize(datetime.now())
# obj.last_action_date can be in the future
weeks_passed = (now - obj.last_action_date).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
|
from datetime import datetime
from councilmatic_core.haystack_indexes import BillIndex
from django.conf import settings
from haystack import indexes
import pytz
from chicago.models import ChicagoBill
app_timezone = pytz.timezone(settings.TIME_ZONE)
class ChicagoBillIndex(BillIndex, indexes.Indexable):
topics = indexes.MultiValueField(faceted=True)
def get_model(self):
return ChicagoBill
def prepare(self, obj):
data = super(ChicagoBillIndex, self).prepare(obj)
boost = 0
if data['last_action_date']:
today = app_timezone.localize(datetime.now()).date()
# data['last_action_date'] can be in the future
weeks_passed = (today - data['last_action_date']).days / 7 + 1
boost = 1 + 1.0 / max(weeks_passed, 1)
data['boost'] = boost
return data
def prepare_topics(self, obj):
return obj.topics
def prepare_last_action_date(self, obj):
if not obj.last_action_date:
action_dates = [a.date for a in obj.actions.all()]
if action_dates:
last_action_date = max(action_dates)
return datetime.strptime(last_action_date, '%Y-%m-%d').date()
return obj.last_action_date.date()
|
Use prepared data, rather than the object last action date, to determine boost
|
Use prepared data, rather than the object last action date, to determine boost
|
Python
|
mit
|
datamade/chi-councilmatic,datamade/chi-councilmatic,datamade/chi-councilmatic,datamade/chi-councilmatic,datamade/chi-councilmatic
|
91a551c0bc29d09cd2f034741c1291bfad7346db
|
tensorflow/tools/docker/jupyter_notebook_config.py
|
tensorflow/tools/docker/jupyter_notebook_config.py
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
import os
from IPython.lib import passwd
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import os
from IPython.lib import passwd
c.NotebookApp.ip = '*'
c.NotebookApp.port = 8888
c.NotebookApp.open_browser = False
c.MultiKernelManager.default_kernel_name = 'python2'
# sets a password if PASSWORD is set in the environment
if 'PASSWORD' in os.environ:
c.NotebookApp.password = passwd(os.environ['PASSWORD'])
del os.environ['PASSWORD']
|
Move imports to beginning of code
|
Move imports to beginning of code
|
Python
|
apache-2.0
|
AnishShah/tensorflow,dhalleine/tensorflow,LUTAN/tensorflow,chemelnucfin/tensorflow,brchiu/tensorflow,jalexvig/tensorflow,Intel-tensorflow/tensorflow,benoitsteiner/tensorflow-xsmm,alistairlow/tensorflow,ageron/tensorflow,sandeepdsouza93/TensorFlow-15712,RapidApplicationDevelopment/tensorflow,ravindrapanda/tensorflow,rdipietro/tensorflow,rdipietro/tensorflow,guschmue/tensorflow,xzturn/tensorflow,Carmezim/tensorflow,tomasreimers/tensorflow-emscripten,jhaux/tensorflow,paolodedios/tensorflow,alheinecke/tensorflow-xsmm,aselle/tensorflow,handroissuazo/tensorflow,DavidNorman/tensorflow,markslwong/tensorflow,chris-chris/tensorflow,pcm17/tensorflow,HaebinShin/tensorflow,odejesush/tensorflow,chenjun0210/tensorflow,lukeiwanski/tensorflow,zycdragonball/tensorflow,davidzchen/tensorflow,pavelchristof/gomoku-ai,drpngx/tensorflow,rdipietro/tensorflow,LUTAN/tensorflow,code-sauce/tensorflow,adit-chandra/tensorflow,markslwong/tensorflow,HKUST-SING/tensorflow,renyi533/tensorflow,rabipanda/tensorflow,horance-liu/tensorflow,gojira/tensorflow,ghchinoy/tensorflow,bowang/tensorflow,nikste/tensorflow,DCSaunders/tensorflow,yongtang/tensorflow,Mazecreator/tensorflow,davidzchen/tensorflow,johndpope/tensorflow,seanli9jan/tensorflow,kobejean/tensorflow,benoitsteiner/tensorflow-opencl,chemelnucfin/tensorflow,TakayukiSakai/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jart/tensorflow,zycdragonball/tensorflow,apark263/tensorflow,Carmezim/tensorflow,TakayukiSakai/tensorflow,tomasreimers/tensorflow-emscripten,paolodedios/tensorflow,pcm17/tensorflow,code-sauce/tensorflow,manazhao/tf_recsys,theflofly/tensorflow,gunan/tensorflow,manipopopo/tensorflow,Mazecreator/tensorflow,with-git/tensorflow,admcrae/tensorflow,admcrae/tensorflow,JVillella/tensorflow,gunan/tensorflow,markslwong/tensorflow,tiagofrepereira2012/tensorflow,maciekcc/tensorflow,ivano666/tensorflow,yongtang/tensorflow,tornadozou/tensorflow,davidzchen/tensorflow,nikste/tensorflow,alsrgv/tensorflow,mortada/tensorflow,Int
el-Corporation/tensorflow,JingJunYin/tensorflow,ishay2b/tensorflow,karllessard/tensorflow,alisidd/tensorflow,drpngx/tensorflow,snnn/tensorflow,tntnatbry/tensorflow,JVillella/tensorflow,sandeepgupta2k4/tensorflow,ibmsoe/tensorflow,alisidd/tensorflow,cancan101/tensorflow,mengxn/tensorflow,ppries/tensorflow,dendisuhubdy/tensorflow,Kongsea/tensorflow,mdrumond/tensorflow,RapidApplicationDevelopment/tensorflow,dongjoon-hyun/tensorflow,Mazecreator/tensorflow,alsrgv/tensorflow,tornadozou/tensorflow,gautam1858/tensorflow,scenarios/tensorflow,mortada/tensorflow,meteorcloudy/tensorflow,EvenStrangest/tensorflow,frreiss/tensorflow-fred,vrv/tensorflow,benoitsteiner/tensorflow-xsmm,ivano666/tensorflow,girving/tensorflow,RapidApplicationDevelopment/tensorflow,unsiloai/syntaxnet-ops-hack,strint/tensorflow,tillahoffmann/tensorflow,sandeepgupta2k4/tensorflow,raymondxyang/tensorflow,abhitopia/tensorflow,Moriadry/tensorflow,hsaputra/tensorflow,Kongsea/tensorflow,brchiu/tensorflow,anand-c-goog/tensorflow,markslwong/tensorflow,caisq/tensorflow,manazhao/tf_recsys,maciekcc/tensorflow,caisq/tensorflow,arborh/tensorflow,krikru/tensorflow-opencl,laszlocsomor/tensorflow,handroissuazo/tensorflow,strint/tensorflow,sandeepgupta2k4/tensorflow,manipopopo/tensorflow,laosiaudi/tensorflow,vrv/tensorflow,ppwwyyxx/tensorflow,asadziach/tensorflow,Mazecreator/tensorflow,AnishShah/tensorflow,rabipanda/tensorflow,zasdfgbnm/tensorflow,memo/tensorflow,sandeepgupta2k4/tensorflow,asimshankar/tensorflow,kchodorow/tensorflow,neilhan/tensorflow,Intel-tensorflow/tensorflow,raymondxyang/tensorflow,neilhan/tensorflow,hfp/tensorflow-xsmm,dhalleine/tensorflow,ibmsoe/tensorflow,nolanliou/tensorflow,asadziach/tensorflow,suiyuan2009/tensorflow,karllessard/tensorflow,gunan/tensorflow,strint/tensorflow,eerwitt/tensorflow,naturali/tensorflow,yufengg/tensorflow,gojira/tensorflow,llhe/tensorflow,Xeralux/tensorflow,SnakeJenny/TensorFlow,neilhan/tensorflow,kevin-coder/tensorflow-fork,thesuperzapper/tensorflow,HaebinShin/tensorflo
w,MoamerEncsConcordiaCa/tensorflow,aldian/tensorflow,jart/tensorflow,caisq/tensorflow,naturali/tensorflow,naturali/tensorflow,dongjoon-hyun/tensorflow,memo/tensorflow,Mazecreator/tensorflow,ageron/tensorflow,ran5515/DeepDecision,Bulochkin/tensorflow_pack,tongwang01/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,admcrae/tensorflow,ravindrapanda/tensorflow,ppwwyyxx/tensorflow,EvenStrangest/tensorflow,snnn/tensorflow,strint/tensorflow,ravindrapanda/tensorflow,Xeralux/tensorflow,calebfoss/tensorflow,mixturemodel-flow/tensorflow,jalexvig/tensorflow,sandeepgupta2k4/tensorflow,tornadozou/tensorflow,allenlavoie/tensorflow,benoitsteiner/tensorflow-xsmm,nikste/tensorflow,manjunaths/tensorflow,chenjun0210/tensorflow,JVillella/tensorflow,tornadozou/tensorflow,ibab/tensorflow,dendisuhubdy/tensorflow,annarev/tensorflow,alsrgv/tensorflow,eaplatanios/tensorflow,HKUST-SING/tensorflow,jhaux/tensorflow,AndreasMadsen/tensorflow,jart/tensorflow,MoamerEncsConcordiaCa/tensorflow,aam-at/tensorflow,zasdfgbnm/tensorflow,anilmuthineni/tensorflow,odejesush/tensorflow,aam-at/tensorflow,MycChiu/tensorflow,tornadozou/tensorflow,gibiansky/tensorflow,raymondxyang/tensorflow,guschmue/tensorflow,benoitsteiner/tensorflow,thjashin/tensorflow,gautam1858/tensorflow,mengxn/tensorflow,DCSaunders/tensorflow,mrry/tensorflow,laszlocsomor/tensorflow,meteorcloudy/tensorflow,hsaputra/tensorflow,Bismarrck/tensorflow,girving/tensorflow,llhe/tensorflow,Moriadry/tensorflow,yongtang/tensorflow,lukeiwanski/tensorflow-opencl,abhitopia/tensorflow,rabipanda/tensorflow,paolodedios/tensorflow,seaotterman/tensorflow,davidzchen/tensorflow,petewarden/tensorflow_makefile,dancingdan/tensorflow,Moriadry/tensorflow,with-git/tensorflow,cg31/tensorflow,cg31/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,jwlawson/tensorflow,eaplatanios/tensorflow,thjashin/tensorflow,xzturn/tensorflow,Intel-tensorflow/tensorflow,ArtsiomCh/tensorflow,XueqingLin/tensorflow,jeffzheng1/tensorflow,annarev/tensorflow,elingg/
tensorflow,alivecor/tensorflow,nightjean/Deep-Learning,freedomtan/tensorflow,davidzchen/tensorflow,DCSaunders/tensorflow,HKUST-SING/tensorflow,johndpope/tensorflow,annarev/tensorflow,RapidApplicationDevelopment/tensorflow,anilmuthineni/tensorflow,Mistobaan/tensorflow,eerwitt/tensorflow,thesuperzapper/tensorflow,MycChiu/tensorflow,wangyum/tensorflow,alshedivat/tensorflow,ageron/tensorflow,jhaux/tensorflow,xzturn/tensorflow,scenarios/tensorflow,scenarios/tensorflow,xzturn/tensorflow,av8ramit/tensorflow,hehongliang/tensorflow,ninotoshi/tensorflow,guschmue/tensorflow,benoitsteiner/tensorflow,LUTAN/tensorflow,pcm17/tensorflow,krikru/tensorflow-opencl,tongwang01/tensorflow,cxxgtxy/tensorflow,manipopopo/tensorflow,gibiansky/tensorflow,jhseu/tensorflow,jwlawson/tensorflow,snnn/tensorflow,Xeralux/tensorflow,tongwang01/tensorflow,code-sauce/tensorflow,zasdfgbnm/tensorflow,nburn42/tensorflow,Mistobaan/tensorflow,TakayukiSakai/tensorflow,JingJunYin/tensorflow,dyoung418/tensorflow,alsrgv/tensorflow,xodus7/tensorflow,zasdfgbnm/tensorflow,nanditav/15712-TensorFlow,pierreg/tensorflow,dancingdan/tensorflow,zasdfgbnm/tensorflow,wangyum/tensorflow,juharris/tensorflow,ArtsiomCh/tensorflow,bowang/tensorflow,asimshankar/tensorflow,alsrgv/tensorflow,taknevski/tensorflow-xsmm,memo/tensorflow,freedomtan/tensorflow,eadgarchen/tensorflow,nolanliou/tensorflow,eaplatanios/tensorflow,dendisuhubdy/tensorflow,mortada/tensorflow,Mazecreator/tensorflow,asimshankar/tensorflow,markslwong/tensorflow,gautam1858/tensorflow,laosiaudi/tensorflow,kamcpp/tensorflow,XueqingLin/tensorflow,jostep/tensorflow,mrry/tensorflow,tntnatbry/tensorflow,ppries/tensorflow,xzturn/tensorflow,ville-k/tensorflow,yongtang/tensorflow,RapidApplicationDevelopment/tensorflow,davidzchen/tensorflow,chenjun0210/tensorflow,aselle/tensorflow,neilhan/tensorflow,jhaux/tensorflow,allenlavoie/tensorflow,dhalleine/tensorflow,XueqingLin/tensorflow,ivano666/tensorflow,johndpope/tensorflow,whn09/tensorflow,tillahoffmann/tensorflow,alsrgv/tenso
rflow,chris-chris/tensorflow,alshedivat/tensorflow,freedomtan/tensorflow,eerwitt/tensorflow,lakshayg/tensorflow,mixturemodel-flow/tensorflow,renyi533/tensorflow,nolanliou/tensorflow,snnn/tensorflow,annarev/tensorflow,ArtsiomCh/tensorflow,a-doumoulakis/tensorflow,Xeralux/tensorflow,drpngx/tensorflow,thjashin/tensorflow,MostafaGazar/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,maciekcc/tensorflow,thjashin/tensorflow,mavenlin/tensorflow,caisq/tensorflow,ArtsiomCh/tensorflow,meteorcloudy/tensorflow,Intel-Corporation/tensorflow,gojira/tensorflow,odejesush/tensorflow,lukeiwanski/tensorflow-opencl,kobejean/tensorflow,pavelchristof/gomoku-ai,alshedivat/tensorflow,gnieboer/tensorflow,yanchen036/tensorflow,vrv/tensorflow,ibab/tensorflow,ibmsoe/tensorflow,andrewcmyers/tensorflow,juharris/tensorflow,maciekcc/tensorflow,yaroslavvb/tensorflow,SnakeJenny/TensorFlow,ghchinoy/tensorflow,dhalleine/tensorflow,tongwang01/tensorflow,chenjun0210/tensorflow,aam-at/tensorflow,benoitsteiner/tensorflow,sjperkins/tensorflow,Moriadry/tensorflow,martinwicke/tensorflow,yanchen036/tensorflow,xodus7/tensorflow,SnakeJenny/TensorFlow,bowang/tensorflow,jostep/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,kamcpp/tensorflow,Kongsea/tensorflow,girving/tensorflow,tiagofrepereira2012/tensorflow,gibiansky/tensorflow,manjunaths/tensorflow,jhseu/tensorflow,benoitsteiner/tensorflow-opencl,MostafaGazar/tensorflow,rabipanda/tensorflow,lakshayg/tensorflow,meteorcloudy/tensorflow,LUTAN/tensorflow,cancan101/tensorflow,bowang/tensorflow,mdrumond/tensorflow,tornadozou/tensorflow,thjashin/tensorflow,lukeiwanski/tensorflow,mrry/tensorflow,handroissuazo/tensorflow,zasdfgbnm/tensorflow,jalexvig/tensorflow,pavelchristof/gomoku-ai,Mistobaan/tensorflow,Bismarrck/tensorflow,ibmsoe/tensorflow,tntnatbry/tensorflow,petewarden/tensorflow,manipopopo/tensorflow,johndpope/tensorflow,arborh/tensorflow,ville-k/tensorflow,ville-k/tensorflow,rdipietro/tensorflow,ppwwyyxx/tensorflow,ychfan/tensorflow,hfp/tensorflow-xsmm,
pcm17/tensorflow,DavidNorman/tensorflow,seaotterman/tensorflow,code-sauce/tensorflow,jbedorf/tensorflow,lukeiwanski/tensorflow-opencl,tomasreimers/tensorflow-emscripten,lakshayg/tensorflow,aldian/tensorflow,frreiss/tensorflow-fred,Bulochkin/tensorflow_pack,karllessard/tensorflow,horance-liu/tensorflow,frreiss/tensorflow-fred,RapidApplicationDevelopment/tensorflow,SnakeJenny/TensorFlow,markslwong/tensorflow,sarvex/tensorflow,aldian/tensorflow,adamtiger/tensorflow,manazhao/tf_recsys,yaroslavvb/tensorflow,eaplatanios/tensorflow,naturali/tensorflow,Kongsea/tensorflow,ppwwyyxx/tensorflow,ishay2b/tensorflow,yongtang/tensorflow,chenjun0210/tensorflow,gautam1858/tensorflow,pierreg/tensorflow,asimshankar/tensorflow,hsaputra/tensorflow,cxxgtxy/tensorflow,dancingdan/tensorflow,DCSaunders/tensorflow,jwlawson/tensorflow,frreiss/tensorflow-fred,gunan/tensorflow,gnieboer/tensorflow,suiyuan2009/tensorflow,andrewcmyers/tensorflow,abhitopia/tensorflow,alistairlow/tensorflow,manipopopo/tensorflow,raymondxyang/tensorflow,AnishShah/tensorflow,anilmuthineni/tensorflow,mengxn/tensorflow,snnn/tensorflow,av8ramit/tensorflow,Intel-tensorflow/tensorflow,mortada/tensorflow,arborh/tensorflow,jhseu/tensorflow,ninotoshi/tensorflow,HaebinShin/tensorflow,DavidNorman/tensorflow,laszlocsomor/tensorflow,nburn42/tensorflow,alistairlow/tensorflow,anilmuthineni/tensorflow,Intel-tensorflow/tensorflow,LUTAN/tensorflow,dongjoon-hyun/tensorflow,yaroslavvb/tensorflow,aselle/tensorflow,jart/tensorflow,mixturemodel-flow/tensorflow,mdrumond/tensorflow,renyi533/tensorflow,chris-chris/tensorflow,apark263/tensorflow,johndpope/tensorflow,cancan101/tensorflow,Bulochkin/tensorflow_pack,tensorflow/tensorflow,benoitsteiner/tensorflow,asadziach/tensorflow,Mazecreator/tensorflow,dendisuhubdy/tensorflow,rabipanda/tensorflow,Bulochkin/tensorflow_pack,ghchinoy/tensorflow,laosiaudi/tensorflow,eerwitt/tensorflow,alivecor/tensorflow,jendap/tensorflow,manjunaths/tensorflow,meteorcloudy/tensorflow,lukeiwanski/tensorflow-opencl,ra
ymondxyang/tensorflow,allenlavoie/tensorflow,tongwang01/tensorflow,nolanliou/tensorflow,mavenlin/tensorflow,kchodorow/tensorflow,Intel-tensorflow/tensorflow,pierreg/tensorflow,wangyum/tensorflow,anand-c-goog/tensorflow,Mazecreator/tensorflow,mixturemodel-flow/tensorflow,ZhangXinNan/tensorflow,ZhangXinNan/tensorflow,sarvex/tensorflow,Carmezim/tensorflow,JingJunYin/tensorflow,ibmsoe/tensorflow,Bulochkin/tensorflow_pack,jalexvig/tensorflow,Mistobaan/tensorflow,anilmuthineni/tensorflow,EvenStrangest/tensorflow,sjperkins/tensorflow,paolodedios/tensorflow,admcrae/tensorflow,manipopopo/tensorflow,jart/tensorflow,sarvex/tensorflow,gunan/tensorflow,eerwitt/tensorflow,anand-c-goog/tensorflow,tntnatbry/tensorflow,nburn42/tensorflow,lukeiwanski/tensorflow,tomasreimers/tensorflow-emscripten,petewarden/tensorflow,whn09/tensorflow,gautam1858/tensorflow,Intel-tensorflow/tensorflow,chris-chris/tensorflow,bowang/tensorflow,nolanliou/tensorflow,theflofly/tensorflow,jendap/tensorflow,jalexvig/tensorflow,renyi533/tensorflow,calebfoss/tensorflow,alheinecke/tensorflow-xsmm,renyi533/tensorflow,martinwicke/tensorflow,ibab/tensorflow,yaroslavvb/tensorflow,manazhao/tf_recsys,mdrumond/tensorflow,nanditav/15712-TensorFlow,ibab/tensorflow,handroissuazo/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,mortada/tensorflow,dyoung418/tensorflow,freedomtan/tensorflow,chemelnucfin/tensorflow,mdrumond/tensorflow,drpngx/tensorflow,MycChiu/tensorflow,benoitsteiner/tensorflow-opencl,annarev/tensorflow,LUTAN/tensorflow,rdipietro/tensorflow,codrut3/tensorflow,jeffzheng1/tensorflow,XueqingLin/tensorflow,DavidNorman/tensorflow,Kongsea/tensorflow,av8ramit/tensorflow,tomasreimers/tensorflow-emscripten,memo/tensorflow,tensorflow/tensorflow,gojira/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,ninotoshi/tensorflow,taknevski/tensorflow-xsmm,Bulochkin/tensorflow_pack,dongjoon-hyun/tensorflow,Intel-Corporation/tensorflow,kamcpp/tensorflow,ageron/tensorflow,xodus7/tensorflow,andrewcmyers/
tensorflow,odejesush/tensorflow,tillahoffmann/tensorflow,xodus7/tensorflow,petewarden/tensorflow_makefile,cg31/tensorflow,mengxn/tensorflow,hsaputra/tensorflow,MostafaGazar/tensorflow,odejesush/tensorflow,Bismarrck/tensorflow,codrut3/tensorflow,tornadozou/tensorflow,alheinecke/tensorflow-xsmm,zasdfgbnm/tensorflow,jwlawson/tensorflow,kobejean/tensorflow,snnn/tensorflow,anand-c-goog/tensorflow,whn09/tensorflow,aselle/tensorflow,with-git/tensorflow,mortada/tensorflow,girving/tensorflow,Carmezim/tensorflow,aldian/tensorflow,jart/tensorflow,raymondxyang/tensorflow,lukeiwanski/tensorflow,a-doumoulakis/tensorflow,alshedivat/tensorflow,mavenlin/tensorflow,neilhan/tensorflow,Bismarrck/tensorflow,ageron/tensorflow,handroissuazo/tensorflow,meteorcloudy/tensorflow,apark263/tensorflow,girving/tensorflow,paolodedios/tensorflow,sandeepdsouza93/TensorFlow-15712,JingJunYin/tensorflow,petewarden/tensorflow,tiagofrepereira2012/tensorflow,drpngx/tensorflow,Carmezim/tensorflow,jeffzheng1/tensorflow,TakayukiSakai/tensorflow,tensorflow/tensorflow,nburn42/tensorflow,theflofly/tensorflow,alisidd/tensorflow,sjperkins/tensorflow,gnieboer/tensorflow,av8ramit/tensorflow,mdrumond/tensorflow,karllessard/tensorflow,kchodorow/tensorflow,scenarios/tensorflow,aldian/tensorflow,dyoung418/tensorflow,strint/tensorflow,unsiloai/syntaxnet-ops-hack,AnishShah/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,ZhangXinNan/tensorflow,anand-c-goog/tensorflow,jbedorf/tensorflow,jostep/tensorflow,ppwwyyxx/tensorflow,adamtiger/tensorflow,chris-chris/tensorflow,petewarden/tensorflow,jendap/tensorflow,handroissuazo/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,unsiloai/syntaxnet-ops-hack,caisq/tensorflow,asimshankar/tensorflow,xodus7/tensorflow,juharris/tensorflow,Kongsea/tensorflow,bowang/tensorflow,alisidd/tensorflow,asimshankar/tensorflow,ishay2b/tensorflow,gunan/tensorflow,DCSaunders/tensorflow,ran5515/DeepDecision,abhitopia/tensorflow,jeffzheng1/tensorfl
ow,handroissuazo/tensorflow,yufengg/tensorflow,krikru/tensorflow-opencl,ZhangXinNan/tensorflow,snnn/tensorflow,jendap/tensorflow,whn09/tensorflow,tntnatbry/tensorflow,ninotoshi/tensorflow,cg31/tensorflow,EvenStrangest/tensorflow,jhseu/tensorflow,XueqingLin/tensorflow,allenlavoie/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,ishay2b/tensorflow,laszlocsomor/tensorflow,nburn42/tensorflow,jart/tensorflow,theflofly/tensorflow,dancingdan/tensorflow,cxxgtxy/tensorflow,haeusser/tensorflow,tensorflow/tensorflow,mixturemodel-flow/tensorflow,meteorcloudy/tensorflow,jart/tensorflow,meteorcloudy/tensorflow,tillahoffmann/tensorflow,jhaux/tensorflow,Carmezim/tensorflow,tiagofrepereira2012/tensorflow,frreiss/tensorflow-fred,kobejean/tensorflow,lukeiwanski/tensorflow,petewarden/tensorflow,alshedivat/tensorflow,tiagofrepereira2012/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jendap/tensorflow,tillahoffmann/tensorflow,a-doumoulakis/tensorflow,xodus7/tensorflow,gautam1858/tensorflow,alsrgv/tensorflow,dancingdan/tensorflow,apark263/tensorflow,sandeepdsouza93/TensorFlow-15712,zycdragonball/tensorflow,Bulochkin/tensorflow_pack,unsiloai/syntaxnet-ops-hack,chris-chris/tensorflow,renyi533/tensorflow,mortada/tensorflow,jhseu/tensorflow,ville-k/tensorflow,taknevski/tensorflow-xsmm,tensorflow/tensorflow-experimental_link_static_libraries_once,asimshankar/tensorflow,krikru/tensorflow-opencl,chenjun0210/tensorflow,Carmezim/tensorflow,yaroslavvb/tensorflow,benoitsteiner/tensorflow-xsmm,alivecor/tensorflow,alisidd/tensorflow,theflofly/tensorflow,tiagofrepereira2012/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,adamtiger/tensorflow,dancingdan/tensorflow,MoamerEncsConcordiaCa/tensorflow,kamcpp/tensorflow,pavelchristof/gomoku-ai,anand-c-goog/tensorflow,jostep/tensorflow,tensorflow/tensorflow-pywrap_saved_model,kevin-coder/tensorflow-fork,yanchen036/tensorflow,chemelnucfin/tensorflow,nanditav/15712-TensorFlow,Xeralux/tensorflow,aldian/tensorflow,seanli9jan/tensorflow,sjperkins/tens
orflow,XueqingLin/tensorflow,adit-chandra/tensorflow,renyi533/tensorflow,TakayukiSakai/tensorflow,MostafaGazar/tensorflow,freedomtan/tensorflow,xzturn/tensorflow,mavenlin/tensorflow,nightjean/Deep-Learning,hehongliang/tensorflow,haeusser/tensorflow,alshedivat/tensorflow,SnakeJenny/TensorFlow,Bismarrck/tensorflow,sandeepdsouza93/TensorFlow-15712,adit-chandra/tensorflow,petewarden/tensorflow_makefile,codrut3/tensorflow,llhe/tensorflow,MoamerEncsConcordiaCa/tensorflow,seaotterman/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,eadgarchen/tensorflow,allenlavoie/tensorflow,a-doumoulakis/tensorflow,cxxgtxy/tensorflow,ville-k/tensorflow,annarev/tensorflow,anilmuthineni/tensorflow,pcm17/tensorflow,raymondxyang/tensorflow,haeusser/tensorflow,Bulochkin/tensorflow_pack,jalexvig/tensorflow,alheinecke/tensorflow-xsmm,Intel-Corporation/tensorflow,kamcpp/tensorflow,aselle/tensorflow,benoitsteiner/tensorflow-xsmm,AndreasMadsen/tensorflow,haeusser/tensorflow,kobejean/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,AndreasMadsen/tensorflow,chemelnucfin/tensorflow,wangyum/tensorflow,nburn42/tensorflow,nburn42/tensorflow,tensorflow/tensorflow-pywrap_saved_model,code-sauce/tensorflow,alshedivat/tensorflow,jbedorf/tensorflow,taknevski/tensorflow-xsmm,brchiu/tensorflow,memo/tensorflow,asadziach/tensorflow,manazhao/tf_recsys,Xeralux/tensorflow,taknevski/tensorflow-xsmm,laosiaudi/tensorflow,AndreasMadsen/tensorflow,codrut3/tensorflow,kobejean/tensorflow,rabipanda/tensorflow,mrry/tensorflow,tntnatbry/tensorflow,sandeepgupta2k4/tensorflow,gautam1858/tensorflow,eaplatanios/tensorflow,tomasreimers/tensorflow-emscripten,AndreasMadsen/tensorflow,petewarden/tensorflow,brchiu/tensorflow,MycChiu/tensorflow,aldian/tensorflow,gunan/tensorflow,horance-liu/tensorflow,kamcpp/tensorflow,jhseu/tensorflow,asimshankar/tensorflow,vrv/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,johndpope/tensorflow,dancingdan/tensorflow,gnieboer/tensorflow,chenjun0210/tensorflow,kchodorow
/tensorflow,taknevski/tensorflow-xsmm,guschmue/tensorflow,raymondxyang/tensorflow,Mistobaan/tensorflow,aam-at/tensorflow,ppwwyyxx/tensorflow,benoitsteiner/tensorflow,scenarios/tensorflow,tntnatbry/tensorflow,hfp/tensorflow-xsmm,manipopopo/tensorflow,anand-c-goog/tensorflow,adit-chandra/tensorflow,alistairlow/tensorflow,sandeepgupta2k4/tensorflow,asadziach/tensorflow,MostafaGazar/tensorflow,petewarden/tensorflow,frreiss/tensorflow-fred,freedomtan/tensorflow,krikru/tensorflow-opencl,alshedivat/tensorflow,MostafaGazar/tensorflow,hehongliang/tensorflow,llhe/tensorflow,pierreg/tensorflow,eaplatanios/tensorflow,dyoung418/tensorflow,alshedivat/tensorflow,hsaputra/tensorflow,thesuperzapper/tensorflow,aam-at/tensorflow,Bismarrck/tensorflow,tillahoffmann/tensorflow,rdipietro/tensorflow,nanditav/15712-TensorFlow,aam-at/tensorflow,ychfan/tensorflow,benoitsteiner/tensorflow-opencl,ppries/tensorflow,annarev/tensorflow,nanditav/15712-TensorFlow,brchiu/tensorflow,paolodedios/tensorflow,hsaputra/tensorflow,ravindrapanda/tensorflow,ZhangXinNan/tensorflow,dongjoon-hyun/tensorflow,pierreg/tensorflow,calebfoss/tensorflow,girving/tensorflow,AnishShah/tensorflow,davidzchen/tensorflow,jbedorf/tensorflow,dhalleine/tensorflow,seanli9jan/tensorflow,theflofly/tensorflow,ZhangXinNan/tensorflow,pcm17/tensorflow,JingJunYin/tensorflow,ArtsiomCh/tensorflow,Mistobaan/tensorflow,jalexvig/tensorflow,laszlocsomor/tensorflow,mrry/tensorflow,llhe/tensorflow,guschmue/tensorflow,yaroslavvb/tensorflow,with-git/tensorflow,ychfan/tensorflow,nightjean/Deep-Learning,thjashin/tensorflow,adit-chandra/tensorflow,drpngx/tensorflow,andrewcmyers/tensorflow,aam-at/tensorflow,thesuperzapper/tensorflow,nburn42/tensorflow,odejesush/tensorflow,mengxn/tensorflow,ppries/tensorflow,HKUST-SING/tensorflow,lakshayg/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,hfp/tensorflow-xsmm,zasdfgbnm/tensorflow,allenlavoie/tensorflow,sarvex/tensorflow,codrut3/tensorflow,RapidApplicationDevelopment/tensorflow,lao
siaudi/tensorflow,hsaputra/tensorflow,adit-chandra/tensorflow,cancan101/tensorflow,freedomtan/tensorflow,jart/tensorflow,alheinecke/tensorflow-xsmm,snnn/tensorflow,ageron/tensorflow,jwlawson/tensorflow,nikste/tensorflow,cancan101/tensorflow,benoitsteiner/tensorflow-xsmm,abhitopia/tensorflow,martinwicke/tensorflow,tntnatbry/tensorflow,abhitopia/tensorflow,eerwitt/tensorflow,ychfan/tensorflow,mengxn/tensorflow,MoamerEncsConcordiaCa/tensorflow,rabipanda/tensorflow,benoitsteiner/tensorflow-xsmm,naturali/tensorflow,davidzchen/tensorflow,laszlocsomor/tensorflow,benoitsteiner/tensorflow-xsmm,ychfan/tensorflow,manjunaths/tensorflow,allenlavoie/tensorflow,petewarden/tensorflow_makefile,lukeiwanski/tensorflow,admcrae/tensorflow,alheinecke/tensorflow-xsmm,RapidApplicationDevelopment/tensorflow,hehongliang/tensorflow,TakayukiSakai/tensorflow,dyoung418/tensorflow,hfp/tensorflow-xsmm,ZhangXinNan/tensorflow,JingJunYin/tensorflow,renyi533/tensorflow,asadziach/tensorflow,HaebinShin/tensorflow,code-sauce/tensorflow,guschmue/tensorflow,horance-liu/tensorflow,zycdragonball/tensorflow,Xeralux/tensorflow,AnishShah/tensorflow,suiyuan2009/tensorflow,eadgarchen/tensorflow,yanchen036/tensorflow,ghchinoy/tensorflow,krikru/tensorflow-opencl,petewarden/tensorflow,mdrumond/tensorflow,mortada/tensorflow,EvenStrangest/tensorflow,alsrgv/tensorflow,kchodorow/tensorflow,XueqingLin/tensorflow,jendap/tensorflow,lukeiwanski/tensorflow,seaotterman/tensorflow,annarev/tensorflow,DCSaunders/tensorflow,dongjoon-hyun/tensorflow,eadgarchen/tensorflow,thesuperzapper/tensorflow,tensorflow/tensorflow-pywrap_saved_model,jendap/tensorflow,DavidNorman/tensorflow,whn09/tensorflow,EvenStrangest/tensorflow,elingg/tensorflow,markslwong/tensorflow,tensorflow/tensorflow,martinwicke/tensorflow,llhe/tensorflow,laosiaudi/tensorflow,martinwicke/tensorflow,freedomtan/tensorflow,elingg/tensorflow,ishay2b/tensorflow,naturali/tensorflow,cancan101/tensorflow,johndpope/tensorflow,MoamerEncsConcordiaCa/tensorflow,Bismarrck/tensorflo
w,chris-chris/tensorflow,sarvex/tensorflow,ppries/tensorflow,chemelnucfin/tensorflow,Intel-Corporation/tensorflow,ageron/tensorflow,benoitsteiner/tensorflow-opencl,asadziach/tensorflow,jwlawson/tensorflow,cg31/tensorflow,adit-chandra/tensorflow,jeffzheng1/tensorflow,dyoung418/tensorflow,alheinecke/tensorflow-xsmm,whn09/tensorflow,AndreasMadsen/tensorflow,aselle/tensorflow,whn09/tensorflow,juharris/tensorflow,nikste/tensorflow,Carmezim/tensorflow,nburn42/tensorflow,odejesush/tensorflow,mrry/tensorflow,girving/tensorflow,JingJunYin/tensorflow,codrut3/tensorflow,EvenStrangest/tensorflow,Bismarrck/tensorflow,apark263/tensorflow,caisq/tensorflow,Bismarrck/tensorflow,paolodedios/tensorflow,caisq/tensorflow,with-git/tensorflow,eerwitt/tensorflow,Xeralux/tensorflow,jbedorf/tensorflow,karllessard/tensorflow,theflofly/tensorflow,gojira/tensorflow,ivano666/tensorflow,mdrumond/tensorflow,adit-chandra/tensorflow,juharris/tensorflow,jbedorf/tensorflow,taknevski/tensorflow-xsmm,alisidd/tensorflow,pierreg/tensorflow,kobejean/tensorflow,gibiansky/tensorflow,HKUST-SING/tensorflow,sarvex/tensorflow,nolanliou/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,alisidd/tensorflow,neilhan/tensorflow,nightjean/Deep-Learning,manazhao/tf_recsys,tensorflow/tensorflow-pywrap_saved_model,tiagofrepereira2012/tensorflow,pcm17/tensorflow,asadziach/tensorflow,benoitsteiner/tensorflow,arborh/tensorflow,sarvex/tensorflow,benoitsteiner/tensorflow,memo/tensorflow,AnishShah/tensorflow,jwlawson/tensorflow,jalexvig/tensorflow,martinwicke/tensorflow,tensorflow/tensorflow-pywrap_saved_model,pavelchristof/gomoku-ai,code-sauce/tensorflow,jostep/tensorflow,davidzchen/tensorflow,krikru/tensorflow-opencl,yongtang/tensorflow,manjunaths/tensorflow,av8ramit/tensorflow,gibiansky/tensorflow,ravindrapanda/tensorflow,codrut3/tensorflow,maciekcc/tensorflow,pavelchristof/gomoku-ai,anilmuthineni/tensorflow,hsaputra/tensorflow,ghchinoy/tensorflow,gojira/tensorflow,sandeepgupta2k4/tensorflow,Intel-tens
orflow/tensorflow,tensorflow/tensorflow,apark263/tensorflow,laszlocsomor/tensorflow,ppries/tensorflow,abhitopia/tensorflow,zasdfgbnm/tensorflow,tensorflow/tensorflow-pywrap_saved_model,manipopopo/tensorflow,benoitsteiner/tensorflow-xsmm,alistairlow/tensorflow,Carmezim/tensorflow,rabipanda/tensorflow,sjperkins/tensorflow,eaplatanios/tensorflow,ran5515/DeepDecision,arborh/tensorflow,petewarden/tensorflow,guschmue/tensorflow,codrut3/tensorflow,ageron/tensorflow,alheinecke/tensorflow-xsmm,kevin-coder/tensorflow-fork,lukeiwanski/tensorflow-opencl,MycChiu/tensorflow,strint/tensorflow,nightjean/Deep-Learning,apark263/tensorflow,karllessard/tensorflow,jwlawson/tensorflow,cxxgtxy/tensorflow,vrv/tensorflow,sandeepdsouza93/TensorFlow-15712,seaotterman/tensorflow,tensorflow/tensorflow,ravindrapanda/tensorflow,tensorflow/tensorflow,unsiloai/syntaxnet-ops-hack,zasdfgbnm/tensorflow,mengxn/tensorflow,alistairlow/tensorflow,gojira/tensorflow,jwlawson/tensorflow,XueqingLin/tensorflow,rdipietro/tensorflow,hfp/tensorflow-xsmm,frreiss/tensorflow-fred,karllessard/tensorflow,code-sauce/tensorflow,Kongsea/tensorflow,juharris/tensorflow,SnakeJenny/TensorFlow,mavenlin/tensorflow,Intel-Corporation/tensorflow,a-doumoulakis/tensorflow,chemelnucfin/tensorflow,davidzchen/tensorflow,gojira/tensorflow,arborh/tensorflow,aselle/tensorflow,rdipietro/tensorflow,Intel-Corporation/tensorflow,kevin-coder/tensorflow-fork,nolanliou/tensorflow,dendisuhubdy/tensorflow,codrut3/tensorflow,ravindrapanda/tensorflow,gibiansky/tensorflow,Intel-Corporation/tensorflow,dhalleine/tensorflow,frreiss/tensorflow-fred,mixturemodel-flow/tensorflow,jalexvig/tensorflow,adit-chandra/tensorflow,haeusser/tensorflow,bowang/tensorflow,RapidApplicationDevelopment/tensorflow,HKUST-SING/tensorflow,alivecor/tensorflow,ppwwyyxx/tensorflow,MostafaGazar/tensorflow,cxxgtxy/tensorflow,vrv/tensorflow,theflofly/tensorflow,ninotoshi/tensorflow,manipopopo/tensorflow,tensorflow/tensorflow,haeusser/tensorflow,benoitsteiner/tensorflow-opencl,pete
warden/tensorflow_makefile,vrv/tensorflow,xzturn/tensorflow,ppwwyyxx/tensorflow,jeffzheng1/tensorflow,dancingdan/tensorflow,pcm17/tensorflow,ville-k/tensorflow,dendisuhubdy/tensorflow,AnishShah/tensorflow,markslwong/tensorflow,thjashin/tensorflow,apark263/tensorflow,ZhangXinNan/tensorflow,dhalleine/tensorflow,yaroslavvb/tensorflow,DavidNorman/tensorflow,alheinecke/tensorflow-xsmm,alshedivat/tensorflow,AndreasMadsen/tensorflow,ppries/tensorflow,sjperkins/tensorflow,brchiu/tensorflow,nanditav/15712-TensorFlow,yufengg/tensorflow,laosiaudi/tensorflow,vrv/tensorflow,Mistobaan/tensorflow,ychfan/tensorflow,ville-k/tensorflow,Mazecreator/tensorflow,hfp/tensorflow-xsmm,eaplatanios/tensorflow,guschmue/tensorflow,chemelnucfin/tensorflow,martinwicke/tensorflow,ran5515/DeepDecision,ville-k/tensorflow,adamtiger/tensorflow,dhalleine/tensorflow,ghchinoy/tensorflow,rabipanda/tensorflow,wangyum/tensorflow,Intel-tensorflow/tensorflow,with-git/tensorflow,nanditav/15712-TensorFlow,MostafaGazar/tensorflow,cg31/tensorflow,freedomtan/tensorflow,AnishShah/tensorflow,tomasreimers/tensorflow-emscripten,jbedorf/tensorflow,ppwwyyxx/tensorflow,seaotterman/tensorflow,alsrgv/tensorflow,xodus7/tensorflow,jendap/tensorflow,ivano666/tensorflow,kchodorow/tensorflow,andrewcmyers/tensorflow,suiyuan2009/tensorflow,SnakeJenny/TensorFlow,pavelchristof/gomoku-ai,gautam1858/tensorflow,ghchinoy/tensorflow,jart/tensorflow,lakshayg/tensorflow,benoitsteiner/tensorflow-xsmm,johndpope/tensorflow,sandeepdsouza93/TensorFlow-15712,xzturn/tensorflow,MoamerEncsConcordiaCa/tensorflow,snnn/tensorflow,xzturn/tensorflow,jwlawson/tensorflow,jostep/tensorflow,memo/tensorflow,mrry/tensorflow,caisq/tensorflow,kamcpp/tensorflow,thesuperzapper/tensorflow,strint/tensorflow,llhe/tensorflow,LUTAN/tensorflow,ibab/tensorflow,HaebinShin/tensorflow,frreiss/tensorflow-fred,mengxn/tensorflow,mengxn/tensorflow,sandeepgupta2k4/tensorflow,tensorflow/tensorflow-pywrap_saved_model,arborh/tensorflow,allenlavoie/tensorflow,johndpope/tensorflow,
annarev/tensorflow,jbedorf/tensorflow,DavidNorman/tensorflow,nikste/tensorflow,ishay2b/tensorflow,JingJunYin/tensorflow,HaebinShin/tensorflow,rabipanda/tensorflow,MostafaGazar/tensorflow,alivecor/tensorflow,krikru/tensorflow-opencl,sandeepdsouza93/TensorFlow-15712,chemelnucfin/tensorflow,kamcpp/tensorflow,ageron/tensorflow,chenjun0210/tensorflow,a-doumoulakis/tensorflow,llhe/tensorflow,TakayukiSakai/tensorflow,kobejean/tensorflow,hfp/tensorflow-xsmm,cg31/tensorflow,caisq/tensorflow,hehongliang/tensorflow,lukeiwanski/tensorflow-opencl,calebfoss/tensorflow,rabipanda/tensorflow,renyi533/tensorflow,nburn42/tensorflow,tillahoffmann/tensorflow,Bismarrck/tensorflow,JVillella/tensorflow,mrry/tensorflow,dancingdan/tensorflow,alistairlow/tensorflow,aam-at/tensorflow,asimshankar/tensorflow,jhseu/tensorflow,ppwwyyxx/tensorflow,cg31/tensorflow,jhseu/tensorflow,Moriadry/tensorflow,drpngx/tensorflow,caisq/tensorflow,apark263/tensorflow,eaplatanios/tensorflow,guschmue/tensorflow,yufengg/tensorflow,ville-k/tensorflow,av8ramit/tensorflow,petewarden/tensorflow,arborh/tensorflow,gnieboer/tensorflow,arborh/tensorflow,eadgarchen/tensorflow,tntnatbry/tensorflow,dongjoon-hyun/tensorflow,jbedorf/tensorflow,nanditav/15712-TensorFlow,xzturn/tensorflow,gojira/tensorflow,anilmuthineni/tensorflow,andrewcmyers/tensorflow,asadziach/tensorflow,theflofly/tensorflow,drpngx/tensorflow,alistairlow/tensorflow,ZhangXinNan/tensorflow,adit-chandra/tensorflow,maciekcc/tensorflow,yanchen036/tensorflow,jeffzheng1/tensorflow,ychfan/tensorflow,HaebinShin/tensorflow,xodus7/tensorflow,Mistobaan/tensorflow,memo/tensorflow,martinwicke/tensorflow,tensorflow/tensorflow,ArtsiomCh/tensorflow,codrut3/tensorflow,thjashin/tensorflow,jhaux/tensorflow,eerwitt/tensorflow,seanli9jan/tensorflow,jbedorf/tensorflow,frreiss/tensorflow-fred,alisidd/tensorflow,nightjean/Deep-Learning,gibiansky/tensorflow,AnishShah/tensorflow,renyi533/tensorflow,manipopopo/tensorflow,krikru/tensorflow-opencl,mavenlin/tensorflow,calebfoss/tensorflow,
andrewcmyers/tensorflow,mrry/tensorflow,seanli9jan/tensorflow,eadgarchen/tensorflow,karllessard/tensorflow,tomasreimers/tensorflow-emscripten,paolodedios/tensorflow,unsiloai/syntaxnet-ops-hack,ishay2b/tensorflow,girving/tensorflow,ageron/tensorflow,manjunaths/tensorflow,xodus7/tensorflow,snnn/tensorflow,admcrae/tensorflow,gunan/tensorflow,MoamerEncsConcordiaCa/tensorflow,wangyum/tensorflow,whn09/tensorflow,ibab/tensorflow,benoitsteiner/tensorflow,strint/tensorflow,rdipietro/tensorflow,aam-at/tensorflow,elingg/tensorflow,JVillella/tensorflow,asimshankar/tensorflow,jhaux/tensorflow,allenlavoie/tensorflow,Bismarrck/tensorflow,ibmsoe/tensorflow,karllessard/tensorflow,alistairlow/tensorflow,alivecor/tensorflow,seanli9jan/tensorflow,gautam1858/tensorflow,with-git/tensorflow,ran5515/DeepDecision,Xeralux/tensorflow,suiyuan2009/tensorflow,jhaux/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,manipopopo/tensorflow,LUTAN/tensorflow,DavidNorman/tensorflow,seaotterman/tensorflow,jeffzheng1/tensorflow,freedomtan/tensorflow,lukeiwanski/tensorflow-opencl,kchodorow/tensorflow,renyi533/tensorflow,jwlawson/tensorflow,alivecor/tensorflow,mortada/tensorflow,hehongliang/tensorflow,ppries/tensorflow,kevin-coder/tensorflow-fork,jhseu/tensorflow,xzturn/tensorflow,horance-liu/tensorflow,lukeiwanski/tensorflow,eadgarchen/tensorflow,dyoung418/tensorflow,jalexvig/tensorflow,admcrae/tensorflow,davidzchen/tensorflow,dancingdan/tensorflow,kevin-coder/tensorflow-fork,tillahoffmann/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,arborh/tensorflow,ibab/tensorflow,apark263/tensorflow,gunan/tensorflow,manjunaths/tensorflow,xodus7/tensorflow,hfp/tensorflow-xsmm,cg31/tensorflow,girving/tensorflow,calebfoss/tensorflow,jhseu/tensorflow,kchodorow/tensorflow,johndpope/tensorflow,gojira/tensorflow,ibmsoe/tensorflow,sjperkins/tensorflow,gibiansky/tensorflow,wangyum/tensorflow,drpngx/tensorflow,annarev/tensorflow,av8ramit/tensorflow,yongtang/tensorflow,Bulochkin/tensorflow_pack,hor
ance-liu/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,frreiss/tensorflow-fred,laszlocsomor/tensorflow,brchiu/tensorflow,seaotterman/tensorflow,tongwang01/tensorflow,cancan101/tensorflow,alistairlow/tensorflow,benoitsteiner/tensorflow-opencl,elingg/tensorflow,martinwicke/tensorflow,jhseu/tensorflow,cancan101/tensorflow,MycChiu/tensorflow,petewarden/tensorflow,dongjoon-hyun/tensorflow,EvenStrangest/tensorflow,AnishShah/tensorflow,seanli9jan/tensorflow,kchodorow/tensorflow,karllessard/tensorflow,av8ramit/tensorflow,lukeiwanski/tensorflow,ninotoshi/tensorflow,ghchinoy/tensorflow,benoitsteiner/tensorflow,dendisuhubdy/tensorflow,jhseu/tensorflow,dongjoon-hyun/tensorflow,apark263/tensorflow,chemelnucfin/tensorflow,ychfan/tensorflow,strint/tensorflow,XueqingLin/tensorflow,renyi533/tensorflow,ivano666/tensorflow,DavidNorman/tensorflow,AndreasMadsen/tensorflow,sjperkins/tensorflow,chemelnucfin/tensorflow,kevin-coder/tensorflow-fork,seanli9jan/tensorflow,thesuperzapper/tensorflow,scenarios/tensorflow,ageron/tensorflow,ppwwyyxx/tensorflow,ghchinoy/tensorflow,adit-chandra/tensorflow,tornadozou/tensorflow,aam-at/tensorflow,ArtsiomCh/tensorflow,allenlavoie/tensorflow,thesuperzapper/tensorflow,DCSaunders/tensorflow,wangyum/tensorflow,yufengg/tensorflow,ArtsiomCh/tensorflow,horance-liu/tensorflow,DCSaunders/tensorflow,gnieboer/tensorflow,juharris/tensorflow,nikste/tensorflow,a-doumoulakis/tensorflow,JVillella/tensorflow,yaroslavvb/tensorflow,HKUST-SING/tensorflow,HKUST-SING/tensorflow,karllessard/tensorflow,aldian/tensorflow,adit-chandra/tensorflow,allenlavoie/tensorflow,brchiu/tensorflow,petewarden/tensorflow_makefile,eaplatanios/tensorflow,laosiaudi/tensorflow,ibab/tensorflow,handroissuazo/tensorflow,laszlocsomor/tensorflow,maciekcc/tensorflow,elingg/tensorflow,sandeepdsouza93/TensorFlow-15712,andrewcmyers/tensorflow,admcrae/tensorflow,ghchinoy/tensorflow,elingg/tensorflow,aam-at/tensorflow,code-sauce/tensorflow,chemelnucfin/tensorflow,dancingdan/tensor
flow,MycChiu/tensorflow,Kongsea/tensorflow,calebfoss/tensorflow,neilhan/tensorflow,dyoung418/tensorflow,abhitopia/tensorflow,Moriadry/tensorflow,sarvex/tensorflow,yaroslavvb/tensorflow,gautam1858/tensorflow,ppwwyyxx/tensorflow,llhe/tensorflow,arborh/tensorflow,adamtiger/tensorflow,brchiu/tensorflow,yongtang/tensorflow,nightjean/Deep-Learning,MoamerEncsConcordiaCa/tensorflow,tensorflow/tensorflow-pywrap_saved_model,girving/tensorflow,sjperkins/tensorflow,unsiloai/syntaxnet-ops-hack,JingJunYin/tensorflow,theflofly/tensorflow,yongtang/tensorflow,xzturn/tensorflow,bowang/tensorflow,HKUST-SING/tensorflow,JVillella/tensorflow,DCSaunders/tensorflow,mixturemodel-flow/tensorflow,ivano666/tensorflow,ran5515/DeepDecision,yanchen036/tensorflow,ibmsoe/tensorflow,anilmuthineni/tensorflow,alshedivat/tensorflow,nolanliou/tensorflow,paolodedios/tensorflow,dendisuhubdy/tensorflow,aselle/tensorflow,jostep/tensorflow,av8ramit/tensorflow,odejesush/tensorflow,kamcpp/tensorflow,odejesush/tensorflow,seanli9jan/tensorflow,meteorcloudy/tensorflow,drpngx/tensorflow,jendap/tensorflow,eaplatanios/tensorflow,calebfoss/tensorflow,jbedorf/tensorflow,JingJunYin/tensorflow,ppries/tensorflow,maciekcc/tensorflow,dendisuhubdy/tensorflow,ran5515/DeepDecision,tongwang01/tensorflow,scenarios/tensorflow,petewarden/tensorflow,mavenlin/tensorflow,chenjun0210/tensorflow,petewarden/tensorflow_makefile,DavidNorman/tensorflow,lukeiwanski/tensorflow-opencl,hsaputra/tensorflow,haeusser/tensorflow,eerwitt/tensorflow,ghchinoy/tensorflow,laosiaudi/tensorflow,Mistobaan/tensorflow,aselle/tensorflow,admcrae/tensorflow,paolodedios/tensorflow,kevin-coder/tensorflow-fork,naturali/tensorflow,gunan/tensorflow,llhe/tensorflow,Xeralux/tensorflow,DavidNorman/tensorflow,yanchen036/tensorflow,LUTAN/tensorflow,seanli9jan/tensorflow,jostep/tensorflow,brchiu/tensorflow,kevin-coder/tensorflow-fork,scenarios/tensorflow,davidzchen/tensorflow,eadgarchen/tensorflow,SnakeJenny/TensorFlow,gunan/tensorflow,pierreg/tensorflow,tensorflow/tens
orflow-experimental_link_static_libraries_once,benoitsteiner/tensorflow,unsiloai/syntaxnet-ops-hack,ghchinoy/tensorflow,DavidNorman/tensorflow,alisidd/tensorflow,pierreg/tensorflow,freedomtan/tensorflow,mixturemodel-flow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,gibiansky/tensorflow,gnieboer/tensorflow,theflofly/tensorflow,aselle/tensorflow,hehongliang/tensorflow,jendap/tensorflow,sandeepdsouza93/TensorFlow-15712,frreiss/tensorflow-fred,lakshayg/tensorflow,jhaux/tensorflow,handroissuazo/tensorflow,wangyum/tensorflow,ninotoshi/tensorflow,freedomtan/tensorflow,ville-k/tensorflow,nolanliou/tensorflow,jbedorf/tensorflow,alsrgv/tensorflow,lukeiwanski/tensorflow,yufengg/tensorflow,tensorflow/tensorflow-pywrap_saved_model,elingg/tensorflow,jalexvig/tensorflow,ravindrapanda/tensorflow,snnn/tensorflow,nightjean/Deep-Learning,tomasreimers/tensorflow-emscripten,manjunaths/tensorflow,alsrgv/tensorflow,horance-liu/tensorflow,asimshankar/tensorflow,cancan101/tensorflow,ninotoshi/tensorflow,Moriadry/tensorflow,thesuperzapper/tensorflow,chris-chris/tensorflow,dongjoon-hyun/tensorflow,manjunaths/tensorflow,cxxgtxy/tensorflow,elingg/tensorflow,tongwang01/tensorflow,Bulochkin/tensorflow_pack,taknevski/tensorflow-xsmm,hfp/tensorflow-xsmm,lukeiwanski/tensorflow-opencl,zycdragonball/tensorflow,Intel-tensorflow/tensorflow,aam-at/tensorflow,horance-liu/tensorflow,hfp/tensorflow-xsmm,TakayukiSakai/tensorflow,ravindrapanda/tensorflow,gautam1858/tensorflow,pcm17/tensorflow,DCSaunders/tensorflow,MycChiu/tensorflow,zycdragonball/tensorflow,HaebinShin/tensorflow,kevin-coder/tensorflow-fork,xodus7/tensorflow,sjperkins/tensorflow,arborh/tensorflow,kobejean/tensorflow,tiagofrepereira2012/tensorflow,av8ramit/tensorflow,whn09/tensorflow,tensorflow/tensorflow,calebfoss/tensorflow,alsrgv/tensorflow,Bulochkin/tensorflow_pack,laszlocsomor/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,yongtang/tensorflow,neilhan/tensorflow,memo/tensorflow,nikste/tensorflow,chris-chris/tensorflow,annarev/t
ensorflow,av8ramit/tensorflow,naturali/tensorflow,a-doumoulakis/tensorflow,ivano666/tensorflow,girving/tensorflow,dendisuhubdy/tensorflow,guschmue/tensorflow,dongjoon-hyun/tensorflow,Xeralux/tensorflow,petewarden/tensorflow_makefile,suiyuan2009/tensorflow,ageron/tensorflow,zasdfgbnm/tensorflow,gnieboer/tensorflow,nburn42/tensorflow,AndreasMadsen/tensorflow,eadgarchen/tensorflow,kevin-coder/tensorflow-fork,alivecor/tensorflow,eadgarchen/tensorflow,vrv/tensorflow,Mistobaan/tensorflow,nanditav/15712-TensorFlow,anand-c-goog/tensorflow,benoitsteiner/tensorflow-opencl,zycdragonball/tensorflow,benoitsteiner/tensorflow-opencl,Bulochkin/tensorflow_pack,anand-c-goog/tensorflow,lakshayg/tensorflow,adamtiger/tensorflow,Moriadry/tensorflow,yanchen036/tensorflow,ibmsoe/tensorflow,gautam1858/tensorflow,jendap/tensorflow,with-git/tensorflow,ZhangXinNan/tensorflow,gnieboer/tensorflow,cxxgtxy/tensorflow,thjashin/tensorflow,MycChiu/tensorflow,neilhan/tensorflow,nolanliou/tensorflow,horance-liu/tensorflow,benoitsteiner/tensorflow-xsmm,seaotterman/tensorflow,pavelchristof/gomoku-ai,ZhangXinNan/tensorflow,aselle/tensorflow,jhaux/tensorflow,Mistobaan/tensorflow,kobejean/tensorflow,ychfan/tensorflow,brchiu/tensorflow,taknevski/tensorflow-xsmm,abhitopia/tensorflow,juharris/tensorflow,seanli9jan/tensorflow,paolodedios/tensorflow,yongtang/tensorflow,nikste/tensorflow,theflofly/tensorflow,gojira/tensorflow,hsaputra/tensorflow,mdrumond/tensorflow,lakshayg/tensorflow,suiyuan2009/tensorflow,jeffzheng1/tensorflow,adamtiger/tensorflow,haeusser/tensorflow,markslwong/tensorflow,meteorcloudy/tensorflow,scenarios/tensorflow,haeusser/tensorflow,manazhao/tf_recsys,mavenlin/tensorflow,kobejean/tensorflow,yufengg/tensorflow,gunan/tensorflow
|
dd51e13a2a7b4e4005127ca0e409d0882179b39f
|
bluebottle/mail/__init__.py
|
bluebottle/mail/__init__.py
|
from django.contrib.sites.models import Site
from django.template.loader import get_template
from django.utils import translation
from bluebottle.clients.context import ClientContext
from bluebottle.clients.mail import EmailMultiAlternatives
def send_mail(template_name, subject, to, **kwargs):
    """Render and send a multipart (text + HTML) email to *to*.

    template_name -- base template path; '<name>.txt' and '<name>.html' are rendered
    subject -- (possibly lazy) translatable subject string
    to -- recipient object; must expose .email, may expose .primary_language
    kwargs -- extra template context variables (merged with receiver/site)
    """
    # Render the mail in the receiver's preferred language, if one is set.
    if hasattr(to, 'primary_language') and to.primary_language:
        translation.activate(to.primary_language)
    kwargs.update({
        'receiver': to,
        'site': 'https://{0}'.format(Site.objects.get_current().domain)
    })
    context = ClientContext(kwargs)
    subject = unicode(subject)  # Unlazy the translatable string subject within activated language.
    text_content = get_template('{0}.txt'.format(template_name)).render(context)
    html_content = get_template('{0}.html'.format(template_name)).render(context)
    # Restore the previous active language once rendering is done.
    if hasattr(to, 'primary_language') and to.primary_language:
        translation.deactivate()
    msg = EmailMultiAlternatives(subject=subject, body=text_content, to=[to.email])
    msg.attach_alternative(html_content, "text/html")
    return msg.send()
|
from django.contrib.sites.models import Site
from django.template.loader import get_template
from django.utils import translation
from bluebottle.clients.context import ClientContext
from bluebottle.clients.mail import EmailMultiAlternatives
from bluebottle.clients import properties
def send_mail(template_name, subject, to, **kwargs):
    """Render and send a multipart (text + HTML) email to *to*.

    Uses the tenant's CONTACT_EMAIL property as the From address.

    template_name -- base template path; '<name>.txt' and '<name>.html' are rendered
    subject -- (possibly lazy) translatable subject string
    to -- recipient object; must expose .email, may expose .primary_language
    kwargs -- extra template context variables (merged with receiver/site)
    """
    # Render the mail in the receiver's preferred language, if one is set.
    if hasattr(to, 'primary_language') and to.primary_language:
        translation.activate(to.primary_language)
    kwargs.update({
        'receiver': to,
        'site': 'https://{0}'.format(Site.objects.get_current().domain)
    })
    context = ClientContext(kwargs)
    subject = unicode(subject)  # Unlazy the translatable string subject within activated language.
    text_content = get_template('{0}.txt'.format(template_name)).render(context)
    html_content = get_template('{0}.html'.format(template_name)).render(context)
    # Restore the previous active language once rendering is done.
    if hasattr(to, 'primary_language') and to.primary_language:
        translation.deactivate()
    # Default From address comes from client properties.
    from_email = properties.CONTACT_EMAIL
    msg = EmailMultiAlternatives(subject=subject, from_email=from_email, body=text_content, to=[to.email])
    msg.attach_alternative(html_content, "text/html")
    return msg.send()
|
Use CONTACT_EMAIL as default from address
|
Use CONTACT_EMAIL as default from address
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle
|
ab0f6115c50bea63856c1e880249ad4bdca3ce42
|
src/web/urls.py
|
src/web/urls.py
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
# Root URL configuration: auth login/logout, the Django admin, and the
# ansible playbook app mounted under /playbooks/.
urlpatterns = [
    # redirect_authenticated_user: skip the login form for signed-in users.
    url(r'^login/', auth_views.login, name='login',
        kwargs={'redirect_authenticated_user': True}),
    url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
    url(r'^admin/', admin.site.urls),
    url(r'^playbooks/', include('ansible.urls')),
]
|
from django.conf.urls import include, url
from django.contrib import admin
from django.contrib.auth import views as auth_views
# Root URL configuration: auth login/logout, the Django admin, and the
# ansible playbook app mounted under /playbooks/.
urlpatterns = [
    # redirect_authenticated_user: skip the login form for signed-in users.
    url(r'^login/', auth_views.login, name='login',
        kwargs={'redirect_authenticated_user': True}),
    url(r'^logout/', auth_views.logout, {'next_page': '/login'}, name='logout'),
    url(r'^admin/', admin.site.urls),
    # Namespaced so templates can reverse e.g. 'ansible:<url-name>'.
    url(r'^playbooks/', include('ansible.urls', namespace='ansible')),
]
|
Add ansible namespace in root URLconf
|
Add ansible namespace in root URLconf
|
Python
|
bsd-3-clause
|
lozadaOmr/ansible-admin,lozadaOmr/ansible-admin,lozadaOmr/ansible-admin
|
5bf24464b00257a9fa5f66047a2f7815c1e4f8fb
|
tweepy/utils.py
|
tweepy/utils.py
|
# Tweepy
# Copyright 2010-2021 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
    """Join *item_list* into a comma-separated string.

    Returns None (implicitly, as in the original contract) for an empty
    or otherwise falsy input.
    """
    if not item_list:
        return None
    return ','.join(str(item) for item in item_list)
def parse_datetime(datetime_string):
    """Parse an ISO-8601 timestamp with fractional seconds and a UTC offset."""
    fmt = "%Y-%m-%dT%H:%M:%S.%f%z"
    return datetime.datetime.strptime(datetime_string, fmt)
|
# Tweepy
# Copyright 2010-2021 Joshua Roesslein
# See LICENSE for details.
import datetime
def list_to_csv(item_list):
    """Serialize *item_list* to a comma-separated string; falsy input yields None."""
    if item_list:
        stringified = [str(element) for element in item_list]
        return ','.join(stringified)
def parse_datetime(datetime_string):
    """Parse an API timestamp like '2021-01-01T12:30:00.123Z' into an aware UTC datetime."""
    naive = datetime.datetime.strptime(datetime_string, "%Y-%m-%dT%H:%M:%S.%fZ")
    # The trailing 'Z' is matched literally because %z does not accept 'Z'
    # until Python 3.7; attach UTC explicitly instead.
    return naive.replace(tzinfo=datetime.timezone.utc)
    # Use %z when support for Python 3.6 is dropped
|
Fix parse_datetime to parse API datetime string format with Python 3.6
|
Fix parse_datetime to parse API datetime string format with Python 3.6
The '%z' directive didn't accept 'Z' until Python 3.7
|
Python
|
mit
|
svven/tweepy,tweepy/tweepy
|
a0740ec8373a3a178e3e83b4ec2768621c697181
|
versions/rattoolsversions.py
|
versions/rattoolsversions.py
|
#!/usr/bin/env python
#
# RatToolsDev
#
# The development versions of rattools
#
# Author P G Jones - 15/10/2012 <p.g.jones@qmul.ac.uk> : First revision
####################################################################################################
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
    """Development (trunk) build of rat-tools, built against ROOT 5.34.02."""
    def __init__(self, system):
        """ Initialise dev version."""
        super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools1(rattools.RatToolsRelease):
    """rat-tools snapshot pinned to a specific commit, paired with rat-4 / ROOT 5.32.04."""
    def __init__(self, system):
        """ Initialise an arbitrary snapshot version."""
        super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
                                        "ebd71f14121dee64f6d0f01b72730b29b075e6d6")
|
#!/usr/bin/env python
#
# RatToolsDev
#
# The development versions of rattools
#
# Author P G Jones - 15/10/2012 <p.g.jones@qmul.ac.uk> : First revision
####################################################################################################
import rattools
class RatToolsDev(rattools.RatToolsDevelopment):
    """Development (trunk) build of rat-tools, built against ROOT 5.34.02."""
    def __init__(self, system):
        """ Initialise dev version."""
        super(RatToolsDev, self).__init__("rattools-dev", system, "root-5.34.02")
class RatTools42(rattools.RatToolsRelease):
    """Fixed release rat-tools 4.2 (tag release-4.20), paired with rat-4.2 / ROOT 5.34.02."""
    def __init__(self, system):
        """ Initialise the release-4.20 snapshot version."""
        super(RatTools42, self).__init__("rattools-4.2", system, "root-5.34.02", "rat-4.2",
                                         "release-4.20")
class RatTools41(rattools.RatToolsRelease):
    """Fixed release rat-tools 4.1 (tag release-4.10), paired with rat-4.1 / ROOT 5.34.02."""
    def __init__(self, system):
        """ Initialise the release-4.10 snapshot version."""
        super(RatTools41, self).__init__("rattools-4.1", system, "root-5.34.02", "rat-4.1",
                                         "release-4.10")
class RatTools4(rattools.RatToolsRelease):
    """Fixed release rat-tools 4 (tag release-4.00), paired with rat-4 / ROOT 5.32.04."""
    def __init__(self, system):
        """ Initialise the release-4.00 snapshot version."""
        super(RatTools4, self).__init__("rattools-4", system, "root-5.32.04", "rat-4",
                                        "release-4.00")
class RatTools1(rattools.RatToolsRelease):
    """rat-tools snapshot pinned to a specific commit, paired with rat-4 / ROOT 5.32.04."""
    def __init__(self, system):
        """ Initialise an arbitrary snapshot version."""
        super(RatTools1, self).__init__("rattools-1", system, "root-5.32.04", "rat-4",
                                        "ebd71f14121dee64f6d0f01b72730b29b075e6d6")
|
Add fixed release rat-tools versions 4, 4.1, 4.2
|
Add fixed release rat-tools versions 4, 4.1, 4.2
|
Python
|
mit
|
mjmottram/snoing,mjmottram/snoing
|
988f4aec1588f409f296e89acb47040cb2606cf8
|
ocradmin/plugins/numpy_nodes.py
|
ocradmin/plugins/numpy_nodes.py
|
import node
import manager
import stages
import numpy
class Rotate90Node(node.Node):
    """
    Rotate a Numpy image by num*90 degrees.
    """
    arity = 1
    stage = stages.FILTER_BINARY
    name = "Numpy::Rotate90"
    _parameters = [{
        "name": "num",
        "value": 1,
    }]

    def validate(self):
        """Check the 'num' parameter is set and convertible to an integer."""
        super(Rotate90Node, self).validate()
        if not self._params.get("num"):
            raise node.UnsetParameterError("num")
        try:
            num = int(self._params.get("num"))
        except (TypeError, ValueError):
            # Fix: int() raises ValueError for non-numeric strings (and
            # TypeError for None/other types); catching only TypeError let
            # a raw ValueError escape validation.
            raise node.InvalidParameterError("'num' must be an integer")

    def _eval(self):
        """Rotate the first input image by 'num' quarter-turns."""
        image = self.get_input_data(0)
        return numpy.rot90(image, int(self._params.get("num", 1)))
class Manager(manager.StandardManager):
    """
    Handle Tesseract nodes.
    """
    # NOTE(review): docstring says "Tesseract" but this module defines Numpy
    # nodes -- looks copy-pasted; confirm the intended wording.
    @classmethod
    def get_node(self, name, **kwargs):
        """Instantiate the named node; any 'Namespace::' prefix is stripped first."""
        if name.find("::") != -1:
            name = name.split("::")[-1]
        if name == "Rotate90":
            return Rotate90Node(**kwargs)
    @classmethod
    def get_nodes(cls, *oftypes):
        """Return the node classes defined in this module, optionally filtered by type."""
        return super(Manager, cls).get_nodes(
            *oftypes, globals=globals())
if __name__ == "__main__":
    # Smoke test: list every node class this module's Manager exposes.
    for n in Manager.get_nodes():
        print n
|
import node
import manager
import stages
import numpy
class Rotate90Node(node.Node):
    """
    Rotate a Numpy image by num*90 degrees.
    """
    arity = 1
    stage = stages.FILTER_BINARY
    name = "Numpy::Rotate90"
    _parameters = [{
        "name": "num",
        "value": 1,
    }]

    def validate(self):
        """Check the 'num' parameter is set and convertible to an integer."""
        super(Rotate90Node, self).validate()
        if not self._params.get("num"):
            raise node.UnsetParameterError("num")
        try:
            num = int(self._params.get("num"))
        except (TypeError, ValueError):
            # Fix: int() raises ValueError for non-numeric strings (and
            # TypeError for None/other types); catching only TypeError let
            # a raw ValueError escape validation.
            raise node.InvalidParameterError("'num' must be an integer")

    def _eval(self):
        """Rotate the first input image by 'num' quarter-turns."""
        image = self.get_input_data(0)
        return numpy.rot90(image, int(self._params.get("num", 1)))
class Rotate90GrayNode(Rotate90Node):
    """
    Grayscale version of above.
    """
    # Same rotation behaviour as Rotate90Node; only the pipeline stage and
    # registered name differ.
    stage = stages.FILTER_GRAY
    name = "Numpy::Rotate90Gray"
class Manager(manager.StandardManager):
    """
    Handle Tesseract nodes.
    """
    # NOTE(review): docstring says "Tesseract" but this module defines Numpy
    # nodes -- looks copy-pasted; confirm the intended wording.
    @classmethod
    def get_node(self, name, **kwargs):
        """Instantiate the named node; any 'Namespace::' prefix is stripped first."""
        if name.find("::") != -1:
            name = name.split("::")[-1]
        if name == "Rotate90":
            return Rotate90Node(**kwargs)
        elif name == "Rotate90Gray":
            # Fix: previously instantiated Rotate90Node here, which silently
            # dropped the grayscale stage/name of the dedicated subclass.
            return Rotate90GrayNode(**kwargs)
    @classmethod
    def get_nodes(cls, *oftypes):
        """Return the node classes defined in this module, optionally filtered by type."""
        return super(Manager, cls).get_nodes(
            *oftypes, globals=globals())
if __name__ == "__main__":
    # Smoke test: list every node class this module's Manager exposes.
    for n in Manager.get_nodes():
        print n
|
Add a grayscale rotation node (for testing)
|
Add a grayscale rotation node (for testing)
|
Python
|
apache-2.0
|
vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium,vitorio/ocropodium
|
dd237d82426ebbc3d2854641e8e73e2001857b67
|
damn/templatetags/damn.py
|
damn/templatetags/damn.py
|
from django import template
from django.utils.safestring import mark_safe
from ..processors import AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
    """Template node that collects asset declarations from its children and
    prepends the rendered asset tags to their output."""
    def __init__(self, nodelist):
        self.nodelist = nodelist
    def render(self, context):
        # Registry that nested {% asset %} tags register themselves with.
        context.render_context['AMN'] = AssetRegistry()
        content = self.nodelist.render(context)
        # Now output out tags
        extra_tags = '\n'.join(context.render_context['AMN'].render(context))
        return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
    """{% assets %} -- wrap the rest of the template so nested {% asset %}
    tags can register their requirements."""
    nodelist = parser.parse()
    return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, name=None, alias=None, mode=None, *args):
    '''
    {% asset alias mode=? ... %}
    {% asset file.js ... %}
    {% asset name depends depends... %}

    alias = short name for asset
    file = static relative filename
    mode = asset mode [inferred from filename extension]
    args == dependencies [aliases or files]
    '''
    # At least one way to identify the asset is required.
    if alias is None and name is None:
        raise template.TemplateSyntaxError(
            'asset tag requires at least one of name or alias'
        )
    # With only an alias there is no filename extension to infer the mode from.
    if name is None and mode is None:
        raise template.TemplateSyntaxError(
            'asset tag reqires mode when using an alias'
        )
    context.render_context['AMN'].add_asset(name=name, alias=alias, mode=mode, deps=args)
    return ''
|
from django import template
from django.utils.safestring import mark_safe
from ..processors import AssetRegistry
register = template.Library()
class AssetsNode(template.Node):
    """Template node that collects asset declarations from its children and
    prepends the rendered asset tags to their output."""
    def __init__(self, nodelist):
        self.nodelist = nodelist
    def render(self, context):
        # Registry that nested {% asset %} tags register themselves with.
        context.render_context['AMN'] = AssetRegistry()
        content = self.nodelist.render(context)
        # Now output out tags
        extra_tags = '\n'.join(context.render_context['AMN'].render(context))
        return mark_safe(extra_tags) + content
@register.tag
def assets(parser, token):
    """{% assets %} -- wrap the rest of the template so nested {% asset %}
    tags can register their requirements."""
    nodelist = parser.parse()
    return AssetsNode(nodelist)
@register.simple_tag(takes_context=True)
def asset(context, filename=None, alias=None, mode=None, *args):
    '''
    {% asset alias mode=? ... %}
    {% asset file.js ... %}
    {% asset name depends depends... %}

    alias = short name for asset
    file = static relative filename
    mode = asset mode [inferred from filename extension]
    args == dependencies [aliases or files]
    '''
    # At least one way to identify the asset is required.
    if alias is None and filename is None:
        raise template.TemplateSyntaxError(
            'asset tag requires at least one of name or alias'
        )
    # With only an alias there is no filename extension to infer the mode from.
    if filename is None and mode is None:
        # Fix: error message previously misspelled 'requires' as 'reqires'.
        raise template.TemplateSyntaxError(
            'asset tag requires mode when using an alias'
        )
    context.render_context['AMN'].add_asset(filename=filename, alias=alias, mode=mode, deps=args)
    return ''
|
Rename 'name' argument to 'filename'
|
Rename 'name' argument to 'filename'
|
Python
|
bsd-2-clause
|
funkybob/django-amn
|
7769e5ddd5784b7e56b75fc33f25b0f40ecaa99e
|
cryptex/exchange/__init__.py
|
cryptex/exchange/__init__.py
|
from cryptex.exchange.exchange import Exchange
from cryptex.exchange.cryptsy import Cryptsy
from cryptex.exchange.btce import BTCE
|
from cryptex.exchange.exchange import Exchange
from cryptex.exchange.cryptsy import Cryptsy, CryptsyPublic
from cryptex.exchange.btce import BTCE, BTCEPublic
|
Add public imports to exchange module
|
Add public imports to exchange module
|
Python
|
mit
|
coink/cryptex
|
88f341c6a9d079c89537feb1fb0aa8908732421a
|
evennia/server/migrations/0002_auto_20190128_1820.py
|
evennia/server/migrations/0002_auto_20190128_1820.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-28 18:20
import pickle
from django.db import migrations, models
import evennia.utils.picklefield
from evennia.utils.utils import to_bytes
def migrate_serverconf(apps, schema_editor):
    """
    Move server conf from a custom binary field into a PickleObjectField
    """
    # Use the historical model state so the migration is independent of the
    # current model definition.
    ServerConfig = apps.get_model("server", "ServerConfig")
    for conf in ServerConfig.objects.all():
        # db_value holds a pickled payload; unpickle and store the raw value
        # in the new picklefield column.
        value = pickle.loads(to_bytes(conf.db_value))
        conf.db_value2 = value
        conf.save()
class Migration(migrations.Migration):
    """Add the new pickled db_value2 column and copy existing conf values into it."""

    dependencies = [
        ('server', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='serverconfig',
            name='db_value2',
            field=evennia.utils.picklefield.PickledObjectField(help_text='The data returned when the config value is accessed. Must be written as a Python literal if editing through the admin interface. Attribute values which are not Python literals cannot be edited through the admin interface.', null=True, verbose_name='value'),
        ),
        # migrate data (no-op on reverse so the migration can be unapplied)
        migrations.RunPython(migrate_serverconf, migrations.RunPython.noop),
    ]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-01-28 18:20
import pickle
from django.db import migrations, models
import evennia.utils.picklefield
from evennia.utils.utils import to_bytes, to_str
def migrate_serverconf(apps, schema_editor):
    """
    Move server conf from a custom binary field into a PickleObjectField
    """
    # Use the historical model state so the migration is independent of the
    # current model definition.
    ServerConfig = apps.get_model("server", "ServerConfig")
    for conf in ServerConfig.objects.all():
        # Unpickle the legacy payload, coerce to str, and write only the new
        # column to avoid touching other fields on these historical rows.
        value = pickle.loads(to_bytes(conf.db_value))
        conf.db_value2 = to_str(value)
        conf.save(update_fields=["db_value2"])
class Migration(migrations.Migration):
    """Add the new pickled db_value2 column and copy existing conf values into it."""

    dependencies = [
        ('server', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='serverconfig',
            name='db_value2',
            field=evennia.utils.picklefield.PickledObjectField(help_text='The data returned when the config value is accessed. Must be written as a Python literal if editing through the admin interface. Attribute values which are not Python literals cannot be edited through the admin interface.', null=True, verbose_name='value'),
        ),
        # migrate data (no-op on reverse so the migration can be unapplied)
        migrations.RunPython(migrate_serverconf, migrations.RunPython.noop),
    ]
|
Fix migration for various situations
|
Fix migration for various situations
|
Python
|
bsd-3-clause
|
jamesbeebop/evennia,jamesbeebop/evennia,jamesbeebop/evennia
|
f3dd0c94c0c7be2a5ebc2c0df59dd9fb15969eb9
|
ghpythonremote/_configure_ironpython_installation.py
|
ghpythonremote/_configure_ironpython_installation.py
|
import sys
import pip
from .helpers import get_rhino_ironpython_path
if __name__ == '__main__':
    # Optional CLI argument: explicit Rhino IronPython lib directory.
    location = None
    if len(sys.argv) > 1:
        location = sys.argv[1]
    rhino_ironpython_path = get_rhino_ironpython_path(location=location)
    package_name = __package__.split('.')[0]
    # Install this package into Rhino's IronPython: source-only
    # (--no-binary), no bytecode (--no-compile), and ignore the CPython
    # version requirement since the target is IronPython.
    pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
               '--upgrade', '--no-binary :all:', '--no-compile', '--ignore-requires-python']
    print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
    print('pip ' + ' '.join(pip_cmd))
    pip.main(pip_cmd)
|
import sys
import pip
import logging
from .helpers import get_rhino_ironpython_path
# Module-level logging setup so helper functions log at INFO by default.
logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)

if __name__ == '__main__':
    # Optional CLI argument: explicit Rhino IronPython lib directory.
    location = None
    if len(sys.argv) > 1:
        location = sys.argv[1]
    rhino_ironpython_path = get_rhino_ironpython_path(location=location)
    package_name = __package__.split('.')[0]
    # Install this package into Rhino's IronPython: source-only
    # (--no-binary all), no bytecode (--no-compile), and ignore the CPython
    # version requirement since the target is IronPython.
    pip_cmd = ['install', package_name, '--target="' + rhino_ironpython_path + '"',
               '--upgrade', '--no-binary all', '--no-compile', '--ignore-requires-python']
    print('\n\nThis will install ghpythonremote in Rhino IronPython with the command:')
    print('pip ' + ' '.join(pip_cmd))
    pip.main(pip_cmd)
|
Correct --no-binary option, incorrect formatting in pypi doc
|
Correct --no-binary option, incorrect formatting in pypi doc
|
Python
|
mit
|
Digital-Structures/ghpythonremote,pilcru/ghpythonremote
|
b9dd3a5d2f52f6cebb55b322cf4ddb2b9e1d8ccc
|
arches/db/install/truncate_db.py
|
arches/db/install/truncate_db.py
|
import os
import inspect
import subprocess
from django.template import Template
from django.template import Context
from django.conf import settings
from arches.management.commands import utils
def create_sqlfile(database_settings, path_to_file):
    """Render a SQL script to *path_to_file* that kills connections, then
    drops and recreates the configured database from the PostGIS template."""
    context = Context(database_settings)
    # NOTE(review): this parses the minor component of `psql --version`,
    # which breaks for PostgreSQL 10+ ("psql (PostgreSQL) 10.x") -- confirm.
    postgres_version = subprocess.check_output(["psql", "--version"])
    if int(postgres_version.split('.')[1]) >= 2:
        # pg_stat_activity renamed its procpid column to pid in PostgreSQL 9.2.
        context['PID'] = "pid"
    else:
        context['PID'] = "procpid"
    t = Template(
        "SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ NAME }}';\n"
        "SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ POSTGIS_TEMPLATE }}';\n"
        "\n"
        "DROP DATABASE IF EXISTS {{ NAME }};\n"
        "\n"
        "CREATE DATABASE {{ NAME }}\n"
        "  WITH ENCODING='UTF8'\n"
        "       OWNER={{ USER }}\n"
        "       TEMPLATE={{POSTGIS_TEMPLATE}}\n"
        "       CONNECTION LIMIT=-1;\n"
        "\n"
    )
    utils.write_to_file(path_to_file, t.render(context));
|
import os
import inspect
import subprocess
from django.template import Template
from django.template import Context
from django.conf import settings
from arches.management.commands import utils
def create_sqlfile(database_settings, path_to_file):
    """Render a SQL script to *path_to_file* that kills connections, then
    drops and recreates the configured database."""
    context = Context(database_settings)
    # NOTE(review): this parses the minor component of `psql --version`,
    # which breaks for PostgreSQL 10+ ("psql (PostgreSQL) 10.x") -- confirm.
    postgres_version = subprocess.check_output(["psql", "--version"])
    if int(postgres_version.split('.')[1]) >= 2:
        # pg_stat_activity renamed its procpid column to pid in PostgreSQL 9.2.
        context['PID'] = "pid"
    else:
        context['PID'] = "procpid"
    t = Template(
        "SELECT pg_terminate_backend({{ PID }}) from pg_stat_activity where datname='{{ NAME }}';\n"
        "\n"
        "DROP DATABASE IF EXISTS {{ NAME }};\n"
        "\n"
        "CREATE DATABASE {{ NAME }}\n"
        "  WITH ENCODING='UTF8'\n"
        "       OWNER={{ USER }}\n"
        "       CONNECTION LIMIT=-1;\n"
        "\n"
    )
    utils.write_to_file(path_to_file, t.render(context));
|
Remove reference to postgis template. Django now installs postgis when database is created.
|
Remove reference to postgis template. Django now installs postgis when database is created.
|
Python
|
agpl-3.0
|
archesproject/arches,cvast/arches,cvast/arches,archesproject/arches,cvast/arches,cvast/arches,archesproject/arches,archesproject/arches
|
7a308233707e7e024311a3767367875921c6217b
|
graphiter/models.py
|
graphiter/models.py
|
from django.db import models
class Chart(models.Model):
    """A single named graph, identified by the URL it is fetched from."""
    title = models.CharField(max_length=50)
    url = models.CharField(max_length=1024)
    def __unicode__(self):
        return self.title
class Page(models.Model):
    """A dashboard page grouping charts with a shared time range and image size."""
    title = models.CharField(max_length=50)
    slug = models.SlugField()
    charts = models.ManyToManyField(Chart)
    # Graphite-style relative/absolute time bounds.
    time_from = models.CharField(max_length=50, default=u"-24h")
    time_until = models.CharField(max_length=50, default=u"")
    image_width = models.PositiveIntegerField(default=1200)
    image_height = models.PositiveIntegerField(default=400)
    def __unicode__(self):
        return self.title
|
from django.db import models
class Chart(models.Model):
    """A single named graph, identified by the URL it is fetched from."""
    title = models.CharField(max_length=50)
    url = models.CharField(max_length=1024)
    def __unicode__(self):
        return self.title
class Page(models.Model):
    """A dashboard page grouping charts with a shared time range and image size."""
    title = models.CharField(max_length=50)
    slug = models.SlugField()
    charts = models.ManyToManyField(Chart)
    # Graphite-style relative/absolute time bounds; time_until may be left
    # empty in forms (blank=True) meaning "now".
    time_from = models.CharField(max_length=50, default=u"-24h")
    time_until = models.CharField(max_length=50, default=u"", blank=True)
    image_width = models.PositiveIntegerField(default=1200)
    image_height = models.PositiveIntegerField(default=400)
    def __unicode__(self):
        return self.title
|
Add blank=True for Page.time_until field
|
Add blank=True for Page.time_until field
|
Python
|
bsd-2-clause
|
jwineinger/django-graphiter
|
fd697a0a4a4aeb3455ec7b7e8b3ed38ce0eb4502
|
test/sockettest.py
|
test/sockettest.py
|
import kaa
@kaa.coroutine()
def new_client(client):
    """Handle one incoming connection: greet it, fetch a remote page over a
    TLS socket, and proxy the response back to the client."""
    ip, port = client.address
    print 'New connection from %s:%s' % (ip, port)
    #yield client.starttls_server()
    client.write('Hello %s, connecting from port %d\n' % (ip, port))
    remote = tls.TLSSocket()
    #remote = kaa.Socket()
    yield remote.connect('www.freevo.org:80')
    #yield remote.connect('urandom.ca:443')
    #try:
    #    yield remote.starttls_client()
    #except:
    #    print "TLS ERROR"
    #    return
    remote.write('GET / HTTP/1.0\n\n')
    # Stream the remote response through to the connected client.
    while remote.connected:
        data = yield remote.read()
        yield client.write(data)
    client.write('\n\nBye!\n')
    client.close()
from kaa.net import tls
# Plain (non-TLS) listening socket; each accepted connection is handed to
# the new_client coroutine above.
#server = tls.TLSSocket()
server = kaa.Socket()
server.signals['new-client'].connect(new_client)
server.listen(8080)
print "Connect to localhost:8080"
kaa.main.run()
|
import logging
import kaa
from kaa.net.tls import TLSSocket
log = logging.getLogger('tls').ensureRootHandler()
@kaa.coroutine()
def new_client(client):
    """Handle one incoming connection: greet it, fetch a remote page over a
    TLS socket, and proxy the response back to the client."""
    # peer is a tuple whose first two elements are (ip, port).
    ip, port = client.peer[:2]
    print 'New connection from %s:%s' % (ip, port)
    #yield client.starttls_server()
    client.write('Hello %s, connecting from port %d\n' % (ip, port))
    remote = TLSSocket()
    yield remote.connect('www.google.com:443')
    # Upgrade the client side of the remote connection to TLS before sending.
    yield remote.starttls_client()
    yield remote.write('GET / HTTP/1.0\n\n')
    # Stream the remote response through to the connected client.
    while remote.readable:
        data = yield remote.read()
        yield client.write(data)
    client.write('\n\nBye!\n')
    client.close()
# Plain (non-TLS) listening socket; each accepted connection is handed to
# the new_client coroutine above.
server = kaa.Socket()
server.signals['new-client'].connect(new_client)
server.listen(8080)
print "Connect to localhost:8080"
kaa.main.run()
|
Fix TLS support in socket test
|
Fix TLS support in socket test
|
Python
|
lgpl-2.1
|
freevo/kaa-base,freevo/kaa-base
|
91c620e228ad73e2e34efbd60813ed35b3f9ef46
|
tests/test_dtool_dataset_freeze.py
|
tests/test_dtool_dataset_freeze.py
|
"""Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import freeze
runner = CliRunner()
# Create an empty dataset
dataset_name = "my_dataset"
dataset = DataSet(dataset_name, data_directory="data")
dataset.persist_to_path(".")
# Add some files to it.
dest_dir = os.path.join(".", dataset.data_directory, "sample_files")
shutil.copytree(SAMPLE_FILES_DIR, dest_dir)
# At this point the manifest has not been updated.
assert len(dataset.identifiers) == 0
result = runner.invoke(freeze, ["."])
assert result.exit_code == 0
# Manifest has been updated.
assert len(dataset.identifiers) == 2
|
"""Test the ``dtool dataset create`` command."""
import os
import shutil
from click.testing import CliRunner
from dtoolcore import DataSet, ProtoDataSet
from . import chdir_fixture, tmp_dir_fixture # NOQA
from . import SAMPLE_FILES_DIR
def test_dataset_freeze_functional(chdir_fixture): # NOQA
from dtool_create.dataset import create, freeze
runner = CliRunner()
dataset_name = "my_dataset"
result = runner.invoke(create, [dataset_name])
assert result.exit_code == 0
# At this point we have a proto dataset
dataset_abspath = os.path.abspath(dataset_name)
dataset_uri = "disk:{}".format(dataset_abspath)
dataset = ProtoDataSet.from_uri(dataset_uri)
# Add a file to the proto dataset.
sample_file_abspath = os.path.join(dataset_abspath, "data", "hello.txt")
with open(sample_file_abspath, "w") as fh:
fh.write("hello world")
result = runner.invoke(freeze, [dataset_uri])
assert result.exit_code == 0
# Now we have a dataset.
dataset = DataSet.from_uri(dataset_uri)
# Manifest has been updated.
assert len(dataset.identifiers) == 1
|
Fix the freeze functional test
|
Fix the freeze functional test
|
Python
|
mit
|
jic-dtool/dtool-create
|
fc9296957122fe7499711c0b0801c96e286b6725
|
turnstile/utils.py
|
turnstile/utils.py
|
import sys
def import_class(import_str):
"""Returns a class from a string including module and class."""
mod_str, _sep, class_str = import_str.rpartition(':')
try:
__import__(mod_str)
return getattr(sys.modules[mod_str], class_str)
except (ImportError, ValueError, AttributeError) as exc:
# Convert it into an import error
raise ImportError("Failed to import %s: %s" % (import_str, exc))
|
import sys
def import_class(import_str):
"""Returns a class from a string including module and class."""
mod_str, _sep, class_str = import_str.rpartition(':')
try:
__import__(mod_str)
return getattr(sys.modules[mod_str], class_str)
except (ImportError, ValueError, AttributeError) as exc:
# Convert it into an import error
raise ImportError("Failed to import %s: %s" % (import_str, exc))
class ignore_except(object):
"""Context manager to ignore all exceptions."""
def __enter__(self):
"""Entry does nothing."""
pass
def __exit__(self, exc_type, exc_value, exc_traceback):
"""Return True to mark the exception as handled."""
return True
|
Add a context manager to ignore exceptions
|
Add a context manager to ignore exceptions
|
Python
|
apache-2.0
|
klmitch/turnstile
|
d18d4f72ed24177143a9dbcf74fc1c001235ded5
|
batch_effect.py
|
batch_effect.py
|
#!/usr/bin/env python
import argparse
import csv
import shutil
import subprocess
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Chain together Inkscape extensions")
parser.add_argument('--id', type=str, action='append', dest='ids', default=[],
help="id attribute of object to manipulate")
parser.add_argument('--csvpath', type=str, required=True,
help="Path to .csv file containing command lines")
parser.add_argument('svgpath', type=str, nargs='?', default='',
help="Path to temporary SVG file to use for input to the first extension")
args = parser.parse_args()
with open(args.csvpath, 'rb') as f:
# Make an argument list of the ids
id_args = []
for id in args.ids:
id_args.extend(('--id', id))
# Take input for the first call from temporary file or stdin
if args.svgpath:
stream = open(args.svgpath)
else:
stream = sys.stdin
# Execute all the calls
for row in csv.reader(f):
# Insert the ids into the call
call = row[:1] + id_args + row[1:]
# Make the call
p = subprocess.Popen(call, stdin=stream, stdout=subprocess.PIPE)
# Close our handle to the input pipe because we no longer need it
stream.close()
# Grab the output pipe for input into the next call
stream = p.stdout
# Send output from last call on stdout
shutil.copyfileobj(stream, sys.stdout)
|
#!/usr/bin/env python
import argparse
import csv
import shutil
import subprocess
import sys
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Chain together Inkscape extensions")
parser.add_argument('--id', type=str, action='append', dest='ids', default=[],
help="ID attributes of objects to manipulate. Passed to all extensions.")
parser.add_argument('--csvpath', type=str, required=True,
help="Path to .csv file containing command lines")
parser.add_argument('svgpath', type=str, nargs='?', default='',
help="Path to temporary SVG file to use for input to the first extension")
args = parser.parse_args()
with open(args.csvpath, 'rb') as f:
# Make an argument list of the ids
id_args = []
for id in args.ids:
id_args.extend(('--id', id))
# Take input for the first call from temporary file or stdin
if args.svgpath:
stream = open(args.svgpath)
else:
stream = sys.stdin
# Execute all the calls
for row in csv.reader(f):
# Insert the ids into the call
call = row[:1] + id_args + row[1:]
# Make the call
p = subprocess.Popen(call, stdin=stream, stdout=subprocess.PIPE)
# Close our handle to the input pipe because we no longer need it
stream.close()
# Grab the output pipe for input into the next call
stream = p.stdout
# Send output from last call on stdout
shutil.copyfileobj(stream, sys.stdout)
|
Clarify --id parameter help text
|
Clarify --id parameter help text
|
Python
|
mit
|
jturner314/inkscape-batch-effect
|
3672d178ac4f9a3f9308acf1e43e9eea663fe30a
|
OnlineParticipationDataset/pipelines.py
|
OnlineParticipationDataset/pipelines.py
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json
from datetime import datetime
from scrapy.exporters import JsonLinesItemExporter
class OnlineparticipationdatasetPipeline(object):
def process_item(self, item, spider):
return item
class JsonWriterPipeline(object):
def open_spider(self, spider):
self.file = open("downloads/items_"+spider.name+".json", 'wb')
self.exporter = JsonLinesItemExporter(self.file, encoding='utf-8', ensure_ascii=False)
self.exporter.start_exporting()
def close_spider(self, spider):
self.exporter.finish_exporting()
self.file.close()
def process_item(self, item, spider):
self.exporter.export_item(item)
return item
|
# -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: http://doc.scrapy.org/en/latest/topics/item-pipeline.html
import json,os
from datetime import datetime
from scrapy.exporters import JsonLinesItemExporter
path = "downloads"
class OnlineparticipationdatasetPipeline(object):
def process_item(self, item, spider):
return item
class JsonWriterPipeline(object):
def open_spider(self, spider):
if not os.path.isdir(path):
os.makedirs(path)
self.file = open("downloads/items_"+spider.name+".json", 'wb')
self.exporter = JsonLinesItemExporter(self.file, encoding='utf-8', ensure_ascii=False)
self.exporter.start_exporting()
def close_spider(self, spider):
self.exporter.finish_exporting()
self.file.close()
def process_item(self, item, spider):
self.exporter.export_item(item)
return item
|
Create path if it doesnt exists
|
Create path if it doesnt exists
|
Python
|
mit
|
Liebeck/OnlineParticipationDatasets
|
4fd6a98a887a59dabcc41361a6ba2791393d875e
|
test/tests/python-pip-requests-ssl/container.py
|
test/tests/python-pip-requests-ssl/container.py
|
import pip
pip.main(['install', '-q', 'requests'])
import requests
r = requests.get('https://google.com')
assert(r.status_code == 200)
|
import subprocess, sys
subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'requests'])
import requests
r = requests.get('https://google.com')
assert(r.status_code == 200)
|
Fix "python-pip-requests-ssl" test to no longer "import pip"
|
Fix "python-pip-requests-ssl" test to no longer "import pip"
(https://blog.python.org/2018/04/pip-10-has-been-released.html)
> In addition, the previously announced reorganisation of pip's internals has now taken place. Unless you are the author of code that imports the pip module (or a user of such code), this change will not affect you. If you are affected, please report the issue to the author of the offending code (refer them to https://mail.python.org/pipermail/distutils-sig/2017-October/031642.html for the details of the announcement).
|
Python
|
apache-2.0
|
docker-library/official-images,docker-flink/official-images,docker-library/official-images,31z4/official-images,davidl-zend/official-images,jperrin/official-images,infosiftr/stackbrew,neo-technology/docker-official-images,31z4/official-images,dinogun/official-images,docker-flink/official-images,neo-technology/docker-official-images,docker-solr/official-images,31z4/official-images,chorrell/official-images,dinogun/official-images,docker-solr/official-images,docker-library/official-images,infosiftr/stackbrew,davidl-zend/official-images,docker-library/official-images,neo-technology/docker-official-images,docker-flink/official-images,infosiftr/stackbrew,docker-solr/official-images,robfrank/official-images,docker-flink/official-images,thresheek/official-images,31z4/official-images,robfrank/official-images,robfrank/official-images,infosiftr/stackbrew,infosiftr/stackbrew,docker-flink/official-images,chorrell/official-images,chorrell/official-images,docker-solr/official-images,docker-flink/official-images,neo-technology/docker-official-images,thresheek/official-images,docker-library/official-images,docker-solr/official-images,jperrin/official-images,dinogun/official-images,thresheek/official-images,chorrell/official-images,davidl-zend/official-images,neo-technology/docker-official-images,31z4/official-images,docker-flink/official-images,davidl-zend/official-images,jperrin/official-images,thresheek/official-images,thresheek/official-images,neo-technology/docker-official-images,docker-library/official-images,chorrell/official-images,robfrank/official-images,31z4/official-images,chorrell/official-images,thresheek/official-images,docker-solr/official-images,neo-technology/docker-official-images,31z4/official-images,robfrank/official-images,robfrank/official-images,davidl-zend/official-images,neo-technology/docker-official-images,infosiftr/stackbrew,docker-library/official-images,thresheek/official-images,neo-technology/docker-official-images,jperrin/official-images,neo-technolog
y/docker-official-images,docker-library/official-images,docker-library/official-images,docker-library/official-images,infosiftr/stackbrew,chorrell/official-images,dinogun/official-images,31z4/official-images,chorrell/official-images,robfrank/official-images,docker-flink/official-images,jperrin/official-images,thresheek/official-images,docker-solr/official-images,davidl-zend/official-images,docker-flink/official-images,docker-library/official-images,dinogun/official-images,31z4/official-images,davidl-zend/official-images,davidl-zend/official-images,docker-solr/official-images,jperrin/official-images,jperrin/official-images,davidl-zend/official-images,jperrin/official-images,robfrank/official-images,chorrell/official-images,docker-flink/official-images,thresheek/official-images,31z4/official-images,docker-flink/official-images,jperrin/official-images,neo-technology/docker-official-images,docker-library/official-images,jperrin/official-images,infosiftr/stackbrew,dinogun/official-images,robfrank/official-images,jperrin/official-images,robfrank/official-images,robfrank/official-images,dinogun/official-images,31z4/official-images,31z4/official-images,docker-flink/official-images,chorrell/official-images,docker-library/official-images,davidl-zend/official-images,dinogun/official-images,infosiftr/stackbrew,docker-solr/official-images,dinogun/official-images,thresheek/official-images,docker-flink/official-images,jperrin/official-images,robfrank/official-images,docker-solr/official-images,thresheek/official-images,docker-solr/official-images,neo-technology/docker-official-images,thresheek/official-images,docker-solr/official-images,docker-solr/official-images,docker-library/official-images,robfrank/official-images,dinogun/official-images,jperrin/official-images,chorrell/official-images,infosiftr/stackbrew,infosiftr/stackbrew,dinogun/official-images,davidl-zend/official-images,31z4/official-images,infosiftr/stackbrew,neo-technology/docker-official-images,31z4/official-images,d
inogun/official-images,chorrell/official-images,infosiftr/stackbrew,neo-technology/docker-official-images,davidl-zend/official-images,docker-solr/official-images,thresheek/official-images,davidl-zend/official-images,dinogun/official-images,chorrell/official-images,thresheek/official-images,infosiftr/stackbrew
|
e75201bb34f7e930c04e393ff630fdbc1549fd77
|
core/admin/migrations/versions/8f9ea78776f4_.py
|
core/admin/migrations/versions/8f9ea78776f4_.py
|
"""empty message
Revision ID: 8f9ea78776f4
Revises: 3b7eee912b41
Create Date: 2022-03-11 13:53:08.996055
"""
# revision identifiers, used by Alembic.
revision = '8f9ea78776f4'
down_revision = '3b7eee912b41'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('spam_mark_as_read', sa.Boolean(), nullable=False))
def downgrade():
op.drop_column('user', 'spam_mark_as_read')
|
"""empty message
Revision ID: 8f9ea78776f4
Revises: 3b7eee912b41
Create Date: 2022-03-11 13:53:08.996055
"""
# revision identifiers, used by Alembic.
revision = '8f9ea78776f4'
down_revision = '3b7eee912b41'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('user', sa.Column('spam_mark_as_read', sa.Boolean(), nullable=False, server_default=sa.sql.expression.false()))
def downgrade():
op.drop_column('user', 'spam_mark_as_read')
|
Add default to column spam_mark_as_read
|
Add default to column spam_mark_as_read
|
Python
|
mit
|
kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io,kaiyou/freeposte.io
|
9c44a1c97b1ba60fd7340c15dd82ebe1ceb6d8c3
|
icekit/project/settings/_production.py
|
icekit/project/settings/_production.py
|
from ._base import *
SITE_PUBLIC_PORT = None # Default: SITE_PORT
# DJANGO ######################################################################
CACHES['default'].update({
# 'BACKEND': 'django_redis.cache.RedisCache',
'BACKEND': 'redis_lock.django_cache.RedisCache',
'LOCATION': 'redis://redis:6379/1',
})
# EMAIL_HOST = ''
# EMAIL_HOST_USER = ''
LOGGING['handlers']['logfile']['backupCount'] = 100
MIDDLEWARE_CLASSES = (
('django.middleware.cache.UpdateCacheMiddleware', ) +
MIDDLEWARE_CLASSES +
('django.middleware.cache.FetchFromCacheMiddleware', )
)
TEMPLATES_DJANGO['OPTIONS']['loaders'] = [
(
'django.template.loaders.cached.Loader',
TEMPLATES_DJANGO['OPTIONS']['loaders'],
),
]
# CELERY EMAIL ################################################################
CELERY_EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# STORAGES ####################################################################
# AWS_ACCESS_KEY_ID = ''
AWS_STORAGE_BUCKET_NAME = SITE_SLUG
ENABLE_S3_MEDIA = True
|
from ._base import *
SITE_PUBLIC_PORT = None # Default: SITE_PORT
# DJANGO ######################################################################
CACHES['default'].update({
# 'BACKEND': 'django_redis.cache.RedisCache',
'BACKEND': 'redis_lock.django_cache.RedisCache',
'LOCATION': 'redis://redis:6379/1',
})
# EMAIL_HOST = ''
# EMAIL_HOST_USER = ''
LOGGING['handlers']['logfile']['backupCount'] = 100
# CELERY EMAIL ################################################################
CELERY_EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# STORAGES ####################################################################
# AWS_ACCESS_KEY_ID = ''
AWS_STORAGE_BUCKET_NAME = SITE_SLUG
ENABLE_S3_MEDIA = True
|
Disable cache middleware and template loader by default. Avoid premature optimisation.
|
Disable cache middleware and template loader by default. Avoid premature optimisation.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
a244623642cdf26bd6615cdc7ff2540c9361d10d
|
tmapi/models/typed.py
|
tmapi/models/typed.py
|
from django.db import models
from construct import Construct
class Typed (Construct, models.Model):
"""Indicates that a Topic Maps construct is typed. `Association`s,
`Role`s, `Occurrence`s, and `Name`s are typed."""
type = models.ForeignKey('Topic', related_name='typed_%(class)ss')
class Meta:
abstract = True
app_label = 'tmapi'
def get_type (self):
"""Returns the type of this construct.
:rtype: the `Topic` that represents the type
"""
return self.type
def set_type (self, construct_type):
"""Sets the type of this construct. Any previous type is overridden.
:param construct_type: the `Topic` that should define the
nature of this construct
"""
self.type = construct_type
self.save()
|
from django.db import models
from tmapi.exceptions import ModelConstraintException
from construct import Construct
class Typed (Construct, models.Model):
"""Indicates that a Topic Maps construct is typed. `Association`s,
`Role`s, `Occurrence`s, and `Name`s are typed."""
type = models.ForeignKey('Topic', related_name='typed_%(class)ss')
class Meta:
abstract = True
app_label = 'tmapi'
def get_type (self):
"""Returns the type of this construct.
:rtype: the `Topic` that represents the type
"""
return self.type
def set_type (self, construct_type):
"""Sets the type of this construct. Any previous type is overridden.
:param construct_type: the `Topic` that should define the
nature of this construct
"""
if construct_type is None:
raise ModelConstraintException
self.type = construct_type
self.save()
|
Raise an exception when setting a construct's type to None.
|
Raise an exception when setting a construct's type to None.
|
Python
|
apache-2.0
|
ajenhl/django-tmapi
|
a4931218fbb00d646dbc8de6f8861e4647ef7ab4
|
lib/rapidsms/tests/test_backend_irc.py
|
lib/rapidsms/tests/test_backend_irc.py
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestLog(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestBackendIRC(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
|
Rename test class (sloppy cut n' paste job)
|
Rename test class (sloppy cut n' paste job)
|
Python
|
bsd-3-clause
|
catalpainternational/rapidsms,caktus/rapidsms,ken-muturi/rapidsms,rapidsms/rapidsms-core-dev,catalpainternational/rapidsms,caktus/rapidsms,lsgunth/rapidsms,peterayeni/rapidsms,rapidsms/rapidsms-core-dev,lsgunth/rapidsms,dimagi/rapidsms,ken-muturi/rapidsms,peterayeni/rapidsms,peterayeni/rapidsms,lsgunth/rapidsms,ehealthafrica-ci/rapidsms,peterayeni/rapidsms,unicefuganda/edtrac,ehealthafrica-ci/rapidsms,dimagi/rapidsms-core-dev,unicefuganda/edtrac,ehealthafrica-ci/rapidsms,ken-muturi/rapidsms,unicefuganda/edtrac,dimagi/rapidsms-core-dev,eHealthAfrica/rapidsms,dimagi/rapidsms,catalpainternational/rapidsms,caktus/rapidsms,eHealthAfrica/rapidsms,catalpainternational/rapidsms,eHealthAfrica/rapidsms,lsgunth/rapidsms
|
865651b0d23274d0dcbd9e3123ea9497a06172cf
|
docker_scripts/lib/common.py
|
docker_scripts/lib/common.py
|
# -*- coding: utf-8 -*-
import docker
import os
import sys
import requests
DEFAULT_TIMEOUT_SECONDS = 600
def docker_client():
# Default timeout 10 minutes
try:
timeout = int(os.getenv('DOCKER_TIMEOUT', 600))
except ValueError as e:
print("Provided timeout value: %s cannot be parsed as integer, exiting." %
os.getenv('DOCKER_TIMEOUT'))
sys.exit(1)
if not timeout > 0:
print(
"Provided timeout value needs to be greater than zero, currently: %s, exiting." % timeout)
sys.exit(1)
# Default base url for the connection
base_url = os.getenv('DOCKER_CONNECTION', 'unix://var/run/docker.sock')
try:
client = docker.Client(base_url=base_url, timeout=timeout)
except docker.errors.DockerException as e:
print("Error while creating the Docker client: %s" % e)
print(
"Please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.")
sys.exit(1)
if client and valid_docker_connection(client):
return client
else:
print(
"Could not connect to the Docker daemon, please make sure the Docker daemon is running.")
if os.environ.get('DOCKER_CONNECTION'):
print(
"If Docker daemon is running, please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.")
sys.exit(1)
def valid_docker_connection(client):
try:
return client.ping()
except requests.exceptions.ConnectionError:
return False
|
# -*- coding: utf-8 -*-
import docker
import os
import sys
import requests
DEFAULT_TIMEOUT_SECONDS = 600
def docker_client():
# Default timeout 10 minutes
try:
timeout = int(os.getenv('DOCKER_TIMEOUT', 600))
except ValueError as e:
print("Provided timeout value: %s cannot be parsed as integer, exiting." %
os.getenv('DOCKER_TIMEOUT'))
sys.exit(1)
if not timeout > 0:
print(
"Provided timeout value needs to be greater than zero, currently: %s, exiting." % timeout)
sys.exit(1)
# Default base url for the connection
base_url = os.getenv('DOCKER_CONNECTION', 'unix://var/run/docker.sock')
try:
client = docker.AutoVersionClient(base_url=base_url, timeout=timeout)
except docker.errors.DockerException as e:
print("Error while creating the Docker client: %s" % e)
print(
"Please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.")
sys.exit(1)
if client and valid_docker_connection(client):
return client
else:
print(
"Could not connect to the Docker daemon, please make sure the Docker daemon is running.")
if os.environ.get('DOCKER_CONNECTION'):
print(
"If Docker daemon is running, please make sure that you specified valid parameters in the 'DOCKER_CONNECTION' environment variable.")
sys.exit(1)
def valid_docker_connection(client):
try:
return client.ping()
except requests.exceptions.ConnectionError:
return False
|
Use AutoVersionClient to fix client incompatibity issues
|
Use AutoVersionClient to fix client incompatibity issues
Fixes #35
|
Python
|
mit
|
lichia/docker-scripts,jpopelka/docker-scripts,goldmann/docker-scripts,goldmann/docker-squash,TomasTomecek/docker-scripts
|
b888e6e6fac1a8dca0c8b64134de0380f9c5096b
|
mopidy_beets/__init__.py
|
mopidy_beets/__init__.py
|
from __future__ import unicode_literals
import os
from mopidy import ext, config
from mopidy.exceptions import ExtensionError
__version__ = '1.0.4'
class BeetsExtension(ext.Extension):
dist_name = 'Mopidy-Beets'
ext_name = 'beets'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(BeetsExtension, self).get_config_schema()
schema['hostname'] = config.Hostname()
schema['port'] = config.Port()
return schema
def validate_environment(self):
try:
import requests # noqa
except ImportError as e:
raise ExtensionError('Library requests not found', e)
def get_backend_classes(self):
from .actor import BeetsBackend
return [BeetsBackend]
|
from __future__ import unicode_literals
import os
from mopidy import ext, config
__version__ = '1.0.4'
class BeetsExtension(ext.Extension):
dist_name = 'Mopidy-Beets'
ext_name = 'beets'
version = __version__
def get_default_config(self):
conf_file = os.path.join(os.path.dirname(__file__), 'ext.conf')
return config.read(conf_file)
def get_config_schema(self):
schema = super(BeetsExtension, self).get_config_schema()
schema['hostname'] = config.Hostname()
schema['port'] = config.Port()
return schema
def get_backend_classes(self):
from .actor import BeetsBackend
return [BeetsBackend]
|
Remove env check as Mopidy checks deps automatically
|
ext: Remove env check as Mopidy checks deps automatically
|
Python
|
mit
|
mopidy/mopidy-beets
|
9d7b39708a2abd953aedf6c211fe242d86caed78
|
pymacaroons/field_encryptors/secret_box_encryptor.py
|
pymacaroons/field_encryptors/secret_box_encryptor.py
|
from base64 import standard_b64encode, standard_b64decode
import nacl.bindings
import nacl.utils
from nacl.secret import SecretBox
from pymacaroons.field_encryptors.base_field_encryptor import (
BaseFieldEncryptor
)
from pymacaroons.utils import (
truncate_or_pad, convert_to_bytes, convert_to_string
)
class SecretBoxEncryptor(BaseFieldEncryptor):
def __init__(self, signifier=None, nonce=None):
super(SecretBoxEncryptor, self).__init__(
signifier=signifier or 'sbe::'
)
self.nonce = (nonce or
nacl.utils.random(nacl.bindings.crypto_secretbox_NONCEBYTES))
def encrypt(self, signature, field_data):
encrypt_key = truncate_or_pad(signature)
box = SecretBox(key=encrypt_key)
encrypted = box.encrypt(convert_to_bytes(field_data), nonce=self.nonce)
return self._signifier + standard_b64encode(encrypted)
def decrypt(self, signature, field_data):
key = truncate_or_pad(signature)
box = SecretBox(key=key)
encoded = convert_to_bytes(field_data[len(self.signifier):])
decrypted = box.decrypt(standard_b64decode(encoded))
return convert_to_string(decrypted)
|
from base64 import standard_b64encode, standard_b64decode
import nacl.bindings
import nacl.utils
from nacl.secret import SecretBox
from pymacaroons.field_encryptors.base_field_encryptor import (
BaseFieldEncryptor
)
from pymacaroons.utils import (
truncate_or_pad, convert_to_bytes, convert_to_string
)
class SecretBoxEncryptor(BaseFieldEncryptor):
def __init__(self, signifier=None, nonce=None):
super(SecretBoxEncryptor, self).__init__(
signifier=signifier or 'sbe::'
)
self.nonce = nonce or nacl.utils.random(
nacl.bindings.crypto_secretbox_NONCEBYTES
)
def encrypt(self, signature, field_data):
encrypt_key = truncate_or_pad(signature)
box = SecretBox(key=encrypt_key)
encrypted = box.encrypt(convert_to_bytes(field_data), nonce=self.nonce)
return self._signifier + standard_b64encode(encrypted)
def decrypt(self, signature, field_data):
key = truncate_or_pad(signature)
box = SecretBox(key=key)
encoded = convert_to_bytes(field_data[len(self.signifier):])
decrypted = box.decrypt(standard_b64decode(encoded))
return convert_to_string(decrypted)
|
Break how flake8 wants me to break
|
Break how flake8 wants me to break
|
Python
|
mit
|
matrix-org/pymacaroons,matrix-org/pymacaroons
|
158987eebbcd2d58270cf55cc42aa6e2e5738390
|
pathvalidate/__init__.py
|
pathvalidate/__init__.py
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from ._common import _validate_null_string
from ._app import validate_excel_sheet_name
from ._app import sanitize_excel_sheet_name
from ._file import validate_filename
from ._file import validate_file_path
from ._file import sanitize_filename
from ._file import sanitize_file_path
from ._symbol import replace_symbol
from ._var_name import validate_python_var_name
from ._var_name import sanitize_python_var_name
|
# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <gogogo.vm@gmail.com>
"""
from __future__ import absolute_import
from ._error import NullNameError
from ._error import InvalidCharError
from ._common import _validate_null_string
from ._app import validate_excel_sheet_name
from ._app import sanitize_excel_sheet_name
from ._file import validate_filename
from ._file import validate_file_path
from ._file import sanitize_filename
from ._file import sanitize_file_path
from ._symbol import replace_symbol
from ._var_name import validate_python_var_name
from ._var_name import sanitize_python_var_name
|
Add imports for error classes
|
Add imports for error classes
|
Python
|
mit
|
thombashi/pathvalidate
|
b916f1e3ad294c780c782e93222f018fa57ee981
|
apps/bluebottle_utils/models.py
|
apps/bluebottle_utils/models.py
|
from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
address_line1 = models.CharField(max_length=100, blank=True)
address_line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField()
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
|
from django.db import models
from django_countries import CountryField
class Address(models.Model):
"""
A postal address.
"""
address_line1 = models.CharField(max_length=100, blank=True)
address_line2 = models.CharField(max_length=100, blank=True)
city = models.CharField(max_length=100, blank=True)
state = models.CharField(max_length=100, blank=True)
country = CountryField(blank=True)
zip_code = models.CharField(max_length=20, blank=True)
def __unicode__(self):
return self.address_line1[:80]
class Meta:
abstract = True
|
Allow country to be empty in Address model.
|
Allow country to be empty in Address model.
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
bc8e064e41d43a4579c8111f1480b55e660ca186
|
pep8ify/fixes/fix_tabs.py
|
pep8ify/fixes/fix_tabs.py
|
from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
SPACES = ' ' * 4
class FixTabs(BaseFix):
'''
For new projects, spaces-only are strongly recommended over tabs. Most
editors have features that make this easy to do.
'''
def match(self, node):
if node.prefix.count('\t') or (isinstance(node, Leaf)
and node.value.count('\t')):
return True
return False
def transform(self, node, results):
new_prefix = node.prefix.replace('\t', SPACES)
new_value = node.value.replace('\t', SPACES)
if node.prefix != new_prefix or node.value != new_value:
node.prefix = new_prefix
node.value = new_value
node.changed()
|
from __future__ import unicode_literals
from lib2to3.fixer_base import BaseFix
from lib2to3.pytree import Leaf
from .utils import SPACES
class FixTabs(BaseFix):
    '''
    For new projects, spaces-only are strongly recommended over tabs. Most
    editors have features that make this easy to do.
    '''
    def match(self, node):
        # Match when the prefix (leading whitespace) contains a tab, or
        # when a Leaf's token value itself contains a tab.
        if node.prefix.count('\t') or (isinstance(node, Leaf)
            and node.value.count('\t')):
            return True
        return False
    def transform(self, node, results):
        # Expand tabs using SPACES, the project-wide indent unit shared
        # via .utils so all fixers agree on the replacement width.
        new_prefix = node.prefix.replace('\t', SPACES)
        new_value = node.value.replace('\t', SPACES)
        if node.prefix != new_prefix or node.value != new_value:
            node.prefix = new_prefix
            node.value = new_value
            # Only flag the node as changed when something was rewritten.
            node.changed()
|
Use globally defined number of spaces.
|
Clean-up: Use globally defined number of spaces.
|
Python
|
apache-2.0
|
spulec/pep8ify
|
64533459ffa6c33d4708bbc6ff0de9f295ef771d
|
permissions/tests/base.py
|
permissions/tests/base.py
|
# Shared scaffolding for the permissions test suite.
from django.test import TestCase as BaseTestCase
from django.test import RequestFactory
from permissions import PermissionsRegistry as BasePermissionsRegistry
class PermissionsRegistry(BasePermissionsRegistry):
    # Test registry that builds model instances directly from kwargs
    # instead of fetching them (no database round trip).
    def _get_model_instance(self, model, **kwargs):
        return model(**kwargs)
class Model:
    # Minimal model stand-in: every keyword argument becomes an
    # instance attribute.
    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)
class TestCase(BaseTestCase):
    # Base test case: each test gets a fresh registry and request factory.
    def setUp(self):
        self.registry = PermissionsRegistry()
        self.request_factory = RequestFactory()
|
# Shared scaffolding for the permissions test suite.
from django.test import TestCase as BaseTestCase
from django.test import RequestFactory
from permissions import PermissionsRegistry as BasePermissionsRegistry
class PermissionsRegistry(BasePermissionsRegistry):
    # Test registry that builds model instances directly from kwargs
    # instead of fetching them (no database round trip).
    def _get_model_instance(self, model, **kwargs):
        return model(**kwargs)
class Model:
    # Minimal model stand-in: every keyword argument becomes an
    # instance attribute.
    def __init__(self, **kwargs):
        for k, v in kwargs.items():
            setattr(self, k, v)
class User(Model):
    # Mock authenticated user; carries a (default empty) permissions list.
    def __init__(self, **kwargs):
        kwargs.setdefault('permissions', [])
        super(User, self).__init__(**kwargs)
    def is_anonymous(self):
        return False
class AnonymousUser(User):
    # Mock unauthenticated user, distinguished only by is_anonymous().
    def is_anonymous(self):
        return True
class TestCase(BaseTestCase):
    # Base test case: each test gets a fresh registry and request factory.
    def setUp(self):
        self.registry = PermissionsRegistry()
        self.request_factory = RequestFactory()
|
Add mock User and Anonymous user classes for testing
|
Add mock User and Anonymous user classes for testing
|
Python
|
mit
|
PSU-OIT-ARC/django-perms,wylee/django-perms
|
80264289fa7fc4085069df011cd8499c64767dc5
|
elang/tools/pre-commit.py
|
elang/tools/pre-commit.py
|
#
# Pre-commit check
#
# Git pre-commit hook (Python 2): runs cpplint over every staged,
# non-deleted file and exits non-zero when any file has lint errors,
# which blocks the commit.
import pipes
import sys
def main():
    # Ask git for staged files as "<status>\t<path>" lines.
    git_pipe = pipes.Template()
    git_pipe.prepend('git diff --cached --name-status', '.-')
    diff_output = git_pipe.open('files', 'r')
    lines = diff_output.readlines()
    exit_code = 0
    for line in lines:
        line = line.rstrip();
        if len(line) == 0:
            continue
        words = line.split()
        # Deleted files have nothing to lint.
        if words[0] == 'D':
            continue
        cpplint_pipe = pipes.Template()
        command_line = 'cpplint %(name)s 2>&1' % {'name': words[1]}
        cpplint_pipe.prepend(command_line, '.-');
        outputs = cpplint_pipe.open('files', 'r').readlines()
        # cpplint's final line reports the error total; clean files pass.
        if outputs[len(outputs) - 1] == 'Total errors found: 0\n':
            continue
        exit_code = 1
        for output in outputs:
            output = output.rstrip()
            print output
    diff_output.close()
    sys.exit(exit_code)
if __name__ == '__main__':
    main()
|
#
# Pre-commit check
#
import pipes
import sys
# Directory prefixes whose files are exempt from cpplint checks.
EXCLUDE_DIRS = [
    'base/',
    'build/',
    'testing/',
    'third_party/',
]
def shouldCheck(path):
    # Return True when `path` lies outside every excluded directory.
    if any(path.startswith(prefix) for prefix in EXCLUDE_DIRS):
        return False
    return True
def main():
    # Lint every staged, non-deleted, non-excluded file; exit non-zero
    # (blocking the commit) when cpplint reports any error.
    git_pipe = pipes.Template()
    git_pipe.prepend('git diff --cached --name-status', '.-')
    diff_output = git_pipe.open('files', 'r')
    lines = diff_output.readlines()
    exit_code = 0
    for line in lines:
        line = line.rstrip();
        if len(line) == 0:
            continue
        words = line.split()
        # Deleted files have nothing to lint.
        if words[0] == 'D':
            continue
        cpplint_pipe = pipes.Template()
        # Skip files under EXCLUDE_DIRS (generated / vendored code).
        if not shouldCheck(words[1]):
            continue
        command_line = 'cpplint %(name)s 2>&1' % {'name': words[1]}
        cpplint_pipe.prepend(command_line, '.-');
        outputs = cpplint_pipe.open('files', 'r').readlines()
        # cpplint's final line reports the error total; clean files pass.
        if outputs[len(outputs) - 1] == 'Total errors found: 0\n':
            continue
        exit_code = 1
        for output in outputs:
            output = output.rstrip()
            print output
    diff_output.close()
    sys.exit(exit_code)
if __name__ == '__main__':
    main()
|
Exclude base/, build/, third_party/ and tools/ from cpplint.
|
tools: Exclude base/, build/, third_party/ and tools/ from cpplint.
|
Python
|
apache-2.0
|
eval1749/elang,eval1749/elang,eval1749/elang,eval1749/elang,eval1749/elang
|
cefaa6c8f0fd3c26be2bf6fba75d01b2f5095a34
|
strapmin/widgets.py
|
strapmin/widgets.py
|
from django import forms
from django.forms.util import flatatt
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.utils.safestring import mark_safe
class RichTextEditorWidget(forms.Textarea):
    """Textarea widget rendered as a CKEditor rich-text editor."""
    class Media:
        # Admin-bundled CKEditor assets loaded with the widget.
        js = ('admin/js/ckeditor/ckeditor.js',
              'admin/js/ckeditor/jquery-ckeditor.js')
    def render(self, name, value, attrs=None):
        """Render the widget via the ckeditor/widget.html template.

        Fix: the original used a mutable default argument (attrs={}),
        which is shared across calls; attrs=None with a fallback is
        behaviorally equivalent for callers and safe.
        """
        if value is None:
            value = ''
        final_attrs = self.build_attrs(attrs or {}, name=name)
        return mark_safe(render_to_string('ckeditor/widget.html', {
            'final_attrs': flatatt(final_attrs),
            'value': force_text(value),
            'id': final_attrs['id'],
        }))
|
from django import forms
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.utils.safestring import mark_safe
try:
    # Django >= 1.7 renamed forms.util to forms.utils; fall back for
    # older releases.
    from django.forms.utils import flatatt
except ImportError:
    from django.forms.util import flatatt
class RichTextEditorWidget(forms.Textarea):
    """Textarea widget rendered as a CKEditor rich-text editor."""
    class Media:
        # Admin-bundled CKEditor assets loaded with the widget.
        js = ('admin/js/ckeditor/ckeditor.js',
              'admin/js/ckeditor/jquery-ckeditor.js')
    def render(self, name, value, attrs=None):
        """Render the widget via the ckeditor/widget.html template.

        Fix: the original used a mutable default argument (attrs={}),
        which is shared across calls; attrs=None with a fallback is
        behaviorally equivalent for callers and safe.
        """
        if value is None:
            value = ''
        final_attrs = self.build_attrs(attrs or {}, name=name)
        return mark_safe(render_to_string('ckeditor/widget.html', {
            'final_attrs': flatatt(final_attrs),
            'value': force_text(value),
            'id': final_attrs['id'],
        }))
|
Fix flatatt import path for Django 1.9
|
Fix flatatt import path for Django 1.9
|
Python
|
bsd-2-clause
|
knyghty/strapmin,knyghty/strapmin,knyghty/strapmin
|
61909686132143db127deb31d582e44f5b198729
|
project/gym_app/admin.py
|
project/gym_app/admin.py
|
# Django admin registrations for all gym_app models (default ModelAdmin).
from django.contrib import admin
from gym_app.models import Task, Athlete, PersonalTrainer, BodyScreening, WorkoutPlan, Tracker, MailBox, Message
from django.contrib.auth.models import Permission
# Register your models here.
admin.site.register(Task)
admin.site.register(Permission)
admin.site.register(Athlete)
admin.site.register(PersonalTrainer)
admin.site.register(BodyScreening)
admin.site.register(WorkoutPlan)
admin.site.register(Tracker)
admin.site.register(MailBox)
admin.site.register(Message)
|
# Django admin registrations: only Task and Permission are editable in
# the admin; the other gym_app models are intentionally not registered.
from django.contrib import admin
from gym_app.models import Task
from django.contrib.auth.models import Permission
# Register your models here.
admin.site.register(Task)
admin.site.register(Permission)
|
Remove permission to edit tables
|
Remove permission to edit tables
|
Python
|
mit
|
brunoliveira8/managyment,brunoliveira8/managyment,brunoliveira8/managyment
|
752cc60f8c41d098ff8772ed400ac3ae209d9e0f
|
lib/exp/featx/__init__.py
|
lib/exp/featx/__init__.py
|
__all__ = []
from lib.exp.featx.base import Feats
from lib.exp.tools.slider import Slider
from lib.exp.tools.video import Video
from lib.exp.prepare import Prepare
class Featx(Feats):
    # Feature-extraction facade: computes and persists features for
    # slide images (prefix "s") and video frames (prefix "f").
    def __init__(self, root, name):
        Feats.__init__(self, root, name)
    def get_slide_feats(self):
        # Extract features from every slide (grayscale, resized).
        ss = Slider(self.root, self.name)
        imgl = ss.get_slides(None, gray=True, resize=True)
        self.feats(imgl, prefix="s")
    def get_frame_feats(self):
        # Extract features only from the frames selected by Prepare.
        pp = Prepare(self.root, self.name)
        vv = Video(self.root, self.name)
        imgl = vv.get_frames(pp.frame_ids(), gray=True)
        self.feats(imgl, prefix="f")
    def get_feats_pair(self, sid, fid):
        """
        Get features by given `slide`, `frame` pairs
        """
        # Stored keys follow "<prefix>_<zero-padded id>_{kps,des}";
        # kps/des presumably keypoints and descriptors -- confirm in Feats.
        sk = self.load("s_{:03d}_kps".format(sid))
        sd = self.load("s_{:03d}_des".format(sid))
        fk = self.load("f_{:03d}_kps".format(fid))
        fd = self.load("f_{:03d}_des".format(fid))
        return dict(sk=sk, sd=sd, fk=fk, fd=fd)
|
__all__ = []
from lib.exp.featx.base import Feats
from lib.exp.tools.slider import Slider
from lib.exp.tools.video import Video
from lib.exp.pre import Reducer
class Featx(Feats):
    # Feature-extraction facade: computes and persists features for
    # slide images (prefix "s") and video frames (prefix "f").
    def __init__(self, root, name):
        Feats.__init__(self, root, name)
    def get_slide_feats(self):
        # Extract features from every slide (grayscale, resized).
        ss = Slider(self.root, self.name)
        imgl = ss.get_slides(None, gray=True, resize=True)
        self.feats(imgl, prefix="s")
    def get_frame_feats(self):
        # Extract features only from the frames selected by Reducer.
        rr = Reducer(self.root, self.name)
        vv = Video(self.root, self.name)
        imgl = vv.get_frames(rr.frame_ids(), gray=True)
        self.feats(imgl, prefix="f")
    def get_feats_pair(self, sid, fid):
        """
        Get features by given `slide`, `frame` pairs
        """
        # Stored keys follow "<prefix>_<zero-padded id>_{kps,des}";
        # kps/des presumably keypoints and descriptors -- confirm in Feats.
        sk = self.load("s_{:03d}_kps".format(sid))
        sd = self.load("s_{:03d}_des".format(sid))
        fk = self.load("f_{:03d}_kps".format(fid))
        fd = self.load("f_{:03d}_des".format(fid))
        return dict(sk=sk, sd=sd, fk=fk, fd=fd)
|
Use `reducer` to replace `prepare`
|
Use `reducer` to replace `prepare`
|
Python
|
agpl-3.0
|
speed-of-light/pyslider
|
159e1e210480c0037b3a550e70b77dbfce34bbca
|
ptyme/ptyme.py
|
ptyme/ptyme.py
|
#!/bin/env python3
from sys import argv
def main():
    parseArgs()
    print("Nope.")
    print(argv)
def parseArgs():
    # Parse a duration argument of the form NhNmNs (e.g. "1h30m10s")
    # from argv[1]; otherwise fall through to command handling.
    # NOTE(review): assumes all three h/m/s markers are present in
    # order -- a partial form like "30m" raises IndexError; confirm.
    if len(argv) > 1:
        time = argv[1].split('h')
        print(time)
        if 'm' not in time[0] and 'n' not in time[0]:
            hours = time[0]
        else:
            hours = 0  # fix: original assigned a misspelled "house"
        # Fix: advance through the remainder (time[1]); the original
        # re-split time[0], discarding everything after the 'h'.
        time = time[1].split('m')
        print(time)
        if 's' not in time[0]:
            minutes = time[0]
        else:
            minutes = 0
        time = time[1].split('s')
        if time:
            seconds = time[0]
        else:
            seconds = 0
    else:
        print("commands go here")
if __name__ == "__main__":
    main()
|
#!/bin/env python3
from sys import argv
def main():
    parseArgs()
    print("Nope.")
    print(argv)
# go through via characters
def parseArgsChar():
    # Placeholder for a character-by-character parser.
    # Fix: the original wrote "pass()" -- `pass` is a statement, not a
    # callable, so the module did not even compile.
    pass
# while this works, it only works when _h_m_s format
# might want to not do that
def parseArgs():
    # Parse a duration argument of the form NhNmNs (e.g. "1h30m10s")
    # from argv[1]; otherwise fall through to command handling.
    if len(argv) > 1:
        time = argv[1].split('h')
        if 'm' not in time[0] and 'n' not in time[0]:
            hours = time[0]
        else:
            hours = 0
        print(time)
        print(hours)
        time = time[1].split('m')
        if 's' not in time[0]:
            minutes = time[0]
        else:
            minutes = 0
        print(time)
        print(minutes)
        time = time[1].split('s')
        if time:
            seconds = time[0]
        else:
            seconds = 0
        print(time)
        print(seconds)
    else:
        print("commands go here")
if __name__ == "__main__":
    main()
|
Update current parse, add alt parse frame
|
Update current parse, add alt parse frame
|
Python
|
mit
|
jabocg/ptyme
|
dff5a8650c5d7ed5b5bab12b36ac5d61541dbb4e
|
python/day3.py
|
python/day3.py
|
# Advent of Code day 3 (Python 2): count valid triangles read from stdin.
import sys
def read_sides(line):
    # One whitespace-separated line -> list of three integer side lengths.
    return map(int, line.split())
def valid_triangle((a, b, c)):
    # Triangle inequality: each pair of sides must exceed the third.
    # NOTE: tuple parameter unpacking and bare print are Python 2 only.
    return a + b > c and b + c > a and a + c > b
if __name__ == '__main__':
    print len(filter(valid_triangle, map(read_sides, sys.stdin)))
|
# Advent of Code day 3 part 2 (Python 2): sides are listed in columns,
# so transpose the input before grouping rows into triangles of three.
import sys
import itertools
def grouper(iterable, n, fillvalue=None):
    # Chunk `iterable` into tuples of length n, padding the last one.
    args = [iter(iterable)] * n
    return itertools.izip_longest(fillvalue=fillvalue, *args)
def transpose(xs):
    # Swap rows and columns of a sequence of equal-length rows.
    return zip(*xs)
def read_sides(line):
    # One whitespace-separated line -> list of three integer side lengths.
    return map(int, line.split())
def valid_triangle((a, b, c)):
    # Triangle inequality: each pair of sides must exceed the third.
    # NOTE: tuple parameter unpacking and bare print are Python 2 only.
    return a + b > c and b + c > a and a + c > b
if __name__ == '__main__':
    print len(filter(valid_triangle,
        grouper(
            itertools.chain.from_iterable(
                transpose(map(read_sides, sys.stdin))), 3)))
|
Implement part 2 of day 3 Python solution.
|
Implement part 2 of day 3 Python solution.
|
Python
|
mit
|
jonathanj/advent2016
|
65f0ef0ae523059db6ecf3856c9e2695cad81d38
|
montage/__init__.py
|
montage/__init__.py
|
# Package surface: re-export the Montage command wrappers.
# NOTE(review): implicit-relative (Python 2 style) imports of the
# sibling commands/wrappers modules -- confirm against package layout.
from commands import *
from wrappers import *
__version__ = '0.9.1'
|
# Package surface: re-export the Montage command wrappers.
from commands import *
from wrappers import *
# Fix: the PATH scan below uses os.environ/os.path but the original
# never imported os, raising NameError at import time.
import os
__version__ = '0.9.1'
# Check whether Montage is installed: mProject must be on PATH,
# otherwise fail fast at import instead of on first wrapper call.
installed = False
for dir in os.environ['PATH'].split(':'):
    if os.path.exists(dir + '/mProject'):
        installed = True
        break
if not installed:
    raise Exception("Montage commands are not in your PATH")
|
Check whether Montage commands are available
|
Check whether Montage commands are available
|
Python
|
bsd-3-clause
|
astrofrog/montage-wrapper,astrofrog/python-montage,astrofrog/montage-wrapper,jat255/montage-wrapper,vterron/montage-wrapper,astropy/montage-wrapper
|
a2d77c167ea8ae3a62183a56b10cd121dc476481
|
openfisca_france/conf/cache_blacklist.py
|
openfisca_france/conf/cache_blacklist.py
|
# When using openfisca for a large population, keeping too many variables
# in cache makes performance drop.  The variables below are intermediate
# results that do not need to be cached in those use cases.
cache_blacklist = {
    'aide_logement_loyer_retenu',
    'aide_logement_charges',
    'aide_logement_R0',
    'aide_logement_taux_famille',
    'aide_logement_taux_loyer',
    'aide_logement_participation_personelle',
}
|
# When using openfisca for a large population, keeping too many variables
# in cache makes performance drop.  The variables below are intermediate
# results that do not need to be cached in those use cases.
cache_blacklist = {
    'aide_logement_loyer_retenu',
    'aide_logement_charges',
    'aide_logement_R0',
    'aide_logement_taux_famille',
    'aide_logement_taux_loyer',
    'aide_logement_participation_personelle',
    'aide_logement_loyer_seuil_degressivite',
    'aide_logement_loyer_seuil_suppression',
    'aide_logement_montant_brut_avant_degressivite',
}
|
Add intermediary variables in cache blacklist
|
Add intermediary variables in cache blacklist
|
Python
|
agpl-3.0
|
antoinearnoud/openfisca-france,antoinearnoud/openfisca-france,sgmap/openfisca-france,sgmap/openfisca-france
|
f34330817414f72494aec359c169e5d6d9d1568f
|
examples/quotes/quotes.py
|
examples/quotes/quotes.py
|
# networkzero demo: advertise this node, then forever exchange nursery
# rhymes with every other advertised node.  Runs entirely at module
# level (prompts for a name and never returns).
import sys
print(sys.version_info)
import random
import time
import networkzero as nw0
quotes = [
    "Humpty Dumpty sat on a wall",
    "Hickory Dickory Dock",
    "Baa Baa Black Sheep",
    "Old King Cole was a merry old sould",
]
my_name = input("Name: ")
nw0.advertise(my_name)
while True:
    # All peers except ourselves.
    services = [(name, address) for (name, address) in nw0.discover_all() if name != my_name]
    for name, address in services:
        # wait_for_s=0: poll without blocking; topic is falsy when no
        # notification was pending.
        topic, message = nw0.wait_for_notification(address, "quote", wait_for_s=0)
        if topic:
            print("%s says: %s" % (name, message))
            quote = random.choice(quotes)
            nw0.send_notification(address, "quote", quote)
    time.sleep(0.5)
|
# networkzero demo: advertise this node, then forever exchange nursery
# rhymes with every other advertised node.
import sys
print(sys.version_info)
import random
import time
import networkzero as nw0
quotes = [
    "Humpty Dumpty sat on a wall",
    "Hickory Dickory Dock",
    "Baa Baa Black Sheep",
    "Old King Cole was a merry old sould",
]
def main(address_pattern=None):
    # address_pattern is passed straight to nw0.advertise, allowing a
    # wildcard address to be supplied on the command line.
    my_name = input("Name: ")
    nw0.advertise(my_name, address_pattern)
    while True:
        # All peers except ourselves.
        services = [(name, address) for (name, address) in nw0.discover_all() if name != my_name]
        for name, address in services:
            # wait_for_s=0: poll without blocking; topic is falsy when
            # no notification was pending.
            topic, message = nw0.wait_for_notification(address, "quote", wait_for_s=0)
            if topic:
                print("%s says: %s" % (name, message))
                quote = random.choice(quotes)
                nw0.send_notification(address, "quote", quote)
        time.sleep(0.5)
if __name__ == '__main__':
    main(*sys.argv[1:])
|
Allow for a wildcard address
|
Allow for a wildcard address
|
Python
|
mit
|
tjguk/networkzero,tjguk/networkzero,tjguk/networkzero
|
6fe391b2e2f9b88a6835a6636a5d58810852ab5e
|
pyhole/tests/test_log.py
|
pyhole/tests/test_log.py
|
# Copyright 2011-2016 Josh Kearney
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pyhole Log Unit Tests"""
import os
import unittest
from pyhole.core import logger
from pyhole.core import utils
class TestLogger(unittest.TestCase):
    # Smoke test for pyhole's logger setup and log-file creation.
    def test_logger(self):
        # Log files are written under the user's pyhole home directory.
        test_log_dir = utils.get_home_directory() + "logs/"
        try:
            # NOTE(jk0): If the configuration file doesn't exist, the config
            # class will generate it and raise a SystemExit.
            logger.setup_logger(name="test")
        except SystemExit:
            logger.setup_logger(name="test")
        test_log = logger.get_logger("TEST")
        self.assertEqual("TEST", test_log.name)
        # Level 0 (NOTSET) means the logger inherits its effective level.
        self.assertEqual(test_log.level, 0)
        # Remove the log file created by setup_logger above.
        os.unlink(test_log_dir + "test.log")
|
# Copyright 2011-2016 Josh Kearney
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pyhole Log Unit Tests"""
import os
import unittest
from pyhole.core import logger
from pyhole.core import utils
class TestLogger(unittest.TestCase):
    # Smoke test for pyhole's logger setup and log-file creation.
    def test_logger(self):
        # Log files are written under the user's pyhole home directory.
        test_log_dir = utils.get_home_directory() + "logs/"
        try:
            # NOTE(jk0): If the configuration file doesn't exist, the config
            # class will generate it and raise a SystemExit.
            logger.setup_logger("test")
        except SystemExit:
            logger.setup_logger("test")
        test_log = logger.get_logger("TEST")
        self.assertEqual("TEST", test_log.name)
        # Level 0 (NOTSET) means the logger inherits its effective level.
        self.assertEqual(test_log.level, 0)
        # Remove the log file created by setup_logger above.
        os.unlink(test_log_dir + "test.log")
|
Use setup_logger properly in tests.
|
Use setup_logger properly in tests.
|
Python
|
apache-2.0
|
jk0/pyhole,jk0/pyhole,jk0/pyhole
|
f868a9181d659c2440a50e6e325ad2ae5b99f5c8
|
project_recalculate/models/resource_calendar.py
|
project_recalculate/models/resource_calendar.py
|
# -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
    _inherit = 'resource.calendar'
    @api.v7
    def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
                                 leaves=None, compute_leaves=False,
                                 resource_id=None, default_interval=None,
                                 context=None):
        """Count the days between start_dt and end_dt (inclusive) that
        contain at least one working interval for calendar `id`.

        Defaults to today when either bound is missing.  With id=None
        every day counts as a working day.
        """
        if start_dt is None:
            start_dt = datetime.now().replace(hour=0, minute=0, second=0)
        if end_dt is None:
            end_dt = datetime.now().replace(hour=23, minute=59, second=59)
        days = 0
        current = start_dt
        while current <= end_dt:
            if id is None:
                # No calendar: treat every day as working.
                days += 1
            else:
                # Clamp the day's window so the last day respects end_dt.
                end_day = current.replace(hour=23, minute=59, second=59)
                end = end_dt if end_day > end_dt else end_day
                working_intervals = self.get_working_intervals_of_day(
                    cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
                    compute_leaves=compute_leaves, resource_id=resource_id,
                    default_interval=default_interval, context=context)
                if working_intervals:
                    days += 1
            next = current + timedelta(days=1)
            current = next
        return days
|
# -*- coding: utf-8 -*-
# See README.rst file on addon root folder for license details
from openerp import models, api
from datetime import datetime, timedelta
class ResourceCalendar(models.Model):
    _inherit = 'resource.calendar'
    @api.v7
    def get_working_days_of_date(self, cr, uid, id, start_dt=None, end_dt=None,
                                 leaves=None, compute_leaves=False,
                                 resource_id=None, default_interval=None,
                                 context=None):
        """Count the days between start_dt and end_dt (inclusive) that
        contain at least one working interval for calendar `id`.

        Defaults to today when either bound is missing.  With id=None
        every day counts as a working day.  The context timezone is
        forced to UTC so interval computation matches the naive
        datetimes used here.
        """
        context = context or {}
        context['tz'] = 'UTC'
        if start_dt is None:
            start_dt = datetime.now().replace(hour=0, minute=0, second=0)
        if end_dt is None:
            end_dt = datetime.now().replace(hour=23, minute=59, second=59)
        days = 0
        current = start_dt
        while current <= end_dt:
            if id is None:
                # No calendar: treat every day as working.
                days += 1
            else:
                # Clamp the day's window so the last day respects end_dt.
                end_day = current.replace(hour=23, minute=59, second=59)
                end = end_dt if end_day > end_dt else end_day
                working_intervals = self.get_working_intervals_of_day(
                    cr, uid, id, start_dt=current, end_dt=end, leaves=leaves,
                    compute_leaves=compute_leaves, resource_id=resource_id,
                    default_interval=default_interval, context=context)
                if working_intervals:
                    days += 1
            next = current + timedelta(days=1)
            current = next
        return days
|
Define UTC as tz in get_working_days_of_date method
|
[FIX] Define UTC as tz in get_working_days_of_date method
|
Python
|
agpl-3.0
|
Endika/project,NeovaHealth/project-service,OCA/project-service,Antiun/project,eezee-it/project-service,Antiun/project-service,acsone/project-service,dreispt/project-service,xpansa/project-service,ddico/project,acsone/project,akretion/project-service,sergiocorato/project-service,dreispt/project
|
1a830d0581f2baed76cb48eeee5f32d465737657
|
src/artgraph/plugins/infobox.py
|
src/artgraph/plugins/infobox.py
|
from artgraph.node import NodeTypes
from artgraph.plugins import Plugin
class InfoboxPlugin(Plugin):
    """Extracts artist details and associated acts from a Wikipedia
    'Infobox musical artist' template."""
    def __init__(self, node):
        self._node = node
    @staticmethod
    def get_target_node_type():
        return NodeTypes.ARTIST
    def get_nodes(self):
        """Return AssociatedActRelationship objects for every act linked
        from the artist's infobox; also updates the artist's name in the
        database when a birth_name parameter is present."""
        from artgraph.node import Node, NodeTypes
        from artgraph.relationship import AssociatedActRelationship
        relationships = []
        wikicode = self.get_wikicode(self._node.get_dbtitle())
        if wikicode:
            templates = wikicode.filter_templates()
            for t in templates:
                if t.name.matches('Infobox musical artist'):
                    # Fill in current node info
                    if t.has('birth_name'):
                        name = str(t.get('birth_name').value)
                        db = self.get_artistgraph_connection()
                        cursor = db.cursor()
                        cursor.execute("UPDATE artist SET name = %s WHERE artistID = %s", (name, self._node.get_id()))
                        db.commit()
                        db.close()
                    # Fix: guard before t.get() -- fetching a missing
                    # parameter fails, so skip infoboxes that list no
                    # associated acts instead of crashing.
                    if not t.has('associated_acts'):
                        continue
                    associated_acts = t.get('associated_acts')
                    for w in associated_acts.value.filter_wikilinks():
                        relationships.append(AssociatedActRelationship(self._node, Node(str(w.title), NodeTypes.ARTIST)))
        return relationships
|
from artgraph.node import NodeTypes
from artgraph.plugins import Plugin
class InfoboxPlugin(Plugin):
    # Extracts artist details and associated acts from a Wikipedia
    # 'Infobox musical artist' template.
    def __init__(self, node):
        self._node = node
    @staticmethod
    def get_target_node_type():
        return NodeTypes.ARTIST
    def get_nodes(self):
        """
        Get features by given `slide`, `frame` pairs
        """
        from artgraph.node import Node, NodeTypes
        from artgraph.relationship import AssociatedActRelationship
        relationships = []
        wikicode = self.get_wikicode(self._node.get_dbtitle())
        if wikicode:
            templates = wikicode.filter_templates()
            for t in templates:
                if t.name.matches('Infobox musical artist'):
                    # Fill in current node info
                    if t.has('birth_name'):
                        name = str(t.get('birth_name').value)
                        db = self.get_artistgraph_connection()
                        cursor = db.cursor()
                        cursor.execute("UPDATE artist SET name = %s WHERE artistID = %s", (name, self._node.get_id()))
                        db.commit()
                        db.close()
                    # Skip infoboxes without an associated_acts
                    # parameter (t.get on a missing parameter fails).
                    if not t.has('associated_acts'):
                        continue
                    associated_acts = t.get('associated_acts')
                    for w in associated_acts.value.filter_wikilinks():
                        relationships.append(AssociatedActRelationship(self._node, Node(str(w.title), NodeTypes.ARTIST)))
        return relationships
|
Check if there are associated acts before querying for them
|
Check if there are associated acts before querying for them
|
Python
|
mit
|
dMaggot/ArtistGraph
|
a5441719e8f12cc16189b2ca110c878decfed120
|
airflow/operators/mysql_operator.py
|
airflow/operators/mysql_operator.py
|
import logging
from airflow.hooks import MySqlHook
from airflow.models import BaseOperator
from airflow.utils import apply_defaults
class MySqlOperator(BaseOperator):
    """
    Executes sql code in a specific MySQL database

    :param mysql_conn_id: reference to a specific mysql database
    :type mysql_conn_id: string
    :param sql: the sql code to be executed
    :type sql: Can receive a str representing a sql statement,
        a list of str (sql statements), or reference to a template file.
        Template reference are recognized by str ending in '.sql'
    """
    # 'sql' is templated; files ending in .sql are rendered as templates.
    template_fields = ('sql',)
    template_ext = ('.sql',)
    ui_color = '#ededed'
    @apply_defaults
    def __init__(
            self, sql, mysql_conn_id='mysql_default', parameters=None,
            *args, **kwargs):
        super(MySqlOperator, self).__init__(*args, **kwargs)
        self.mysql_conn_id = mysql_conn_id
        self.sql = sql
        self.parameters = parameters
    def execute(self, context):
        # Run the (already templated) SQL through the configured hook.
        logging.info('Executing: ' + str(self.sql))
        hook = MySqlHook(mysql_conn_id=self.mysql_conn_id)
        hook.run(self.sql, parameters=self.parameters)
|
import logging
from airflow.hooks import MySqlHook
from airflow.models import BaseOperator
from airflow.utils import apply_defaults
class MySqlOperator(BaseOperator):
    """
    Executes sql code in a specific MySQL database

    :param mysql_conn_id: reference to a specific mysql database
    :type mysql_conn_id: string
    :param sql: the sql code to be executed
    :type sql: Can receive a str representing a sql statement,
        a list of str (sql statements), or reference to a template file.
        Template reference are recognized by str ending in '.sql'
    """
    # 'sql' is templated; files ending in .sql are rendered as templates.
    template_fields = ('sql',)
    template_ext = ('.sql',)
    ui_color = '#ededed'
    @apply_defaults
    def __init__(
            self, sql, mysql_conn_id='mysql_default', parameters=None,
            autocommit=False, *args, **kwargs):
        super(MySqlOperator, self).__init__(*args, **kwargs)
        self.mysql_conn_id = mysql_conn_id
        self.sql = sql
        # Whether the hook should run with autocommit enabled.
        self.autocommit = autocommit
        self.parameters = parameters
    def execute(self, context):
        # Run the (already templated) SQL through the configured hook.
        logging.info('Executing: ' + str(self.sql))
        hook = MySqlHook(mysql_conn_id=self.mysql_conn_id)
        hook.run(
            self.sql,
            autocommit=self.autocommit,
            parameters=self.parameters)
|
Allow auto-commit option for Mysql Operator
|
Allow auto-commit option for Mysql Operator
|
Python
|
apache-2.0
|
jlowin/airflow,lxneng/incubator-airflow,cjqian/incubator-airflow,dhuang/incubator-airflow,gritlogic/incubator-airflow,forevernull/incubator-airflow,modsy/incubator-airflow,jesusfcr/airflow,ronfung/incubator-airflow,nathanielvarona/airflow,holygits/incubator-airflow,akosel/incubator-airflow,wxiang7/airflow,fenglu-g/incubator-airflow,NielsZeilemaker/incubator-airflow,mtagle/airflow,gtoonstra/airflow,holygits/incubator-airflow,ronfung/incubator-airflow,sekikn/incubator-airflow,subodhchhabra/airflow,wxiang7/airflow,malmiron/incubator-airflow,Twistbioscience/incubator-airflow,jbhsieh/incubator-airflow,NielsZeilemaker/incubator-airflow,zack3241/incubator-airflow,Tagar/incubator-airflow,jfantom/incubator-airflow,modsy/incubator-airflow,juvoinc/airflow,saguziel/incubator-airflow,cademarkegard/airflow,d-lee/airflow,danielvdende/incubator-airflow,Acehaidrey/incubator-airflow,artwr/airflow,artwr/airflow,vijaysbhat/incubator-airflow,sid88in/incubator-airflow,lxneng/incubator-airflow,Twistbioscience/incubator-airflow,wndhydrnt/airflow,lxneng/incubator-airflow,wileeam/airflow,zack3241/incubator-airflow,holygits/incubator-airflow,jwi078/incubator-airflow,aminghadersohi/airflow,gilt/incubator-airflow,ronfung/incubator-airflow,cfei18/incubator-airflow,MetrodataTeam/incubator-airflow,Twistbioscience/incubator-airflow,cademarkegard/airflow,airbnb/airflow,mattuuh7/incubator-airflow,dgies/incubator-airflow,airbnb/airflow,alexvanboxel/airflow,gilt/incubator-airflow,hamedhsn/incubator-airflow,mtdewulf/incubator-airflow,KL-WLCR/incubator-airflow,plypaul/airflow,skudriashev/incubator-airflow,wolfier/incubator-airflow,mylons/incubator-airflow,mattuuh7/incubator-airflow,DEVELByte/incubator-airflow,dud225/incubator-airflow,DEVELByte/incubator-airflow,dhuang/incubator-airflow,griffinqiu/airflow,stverhae/incubator-airflow,zodiac/incubator-airflow,dmitry-r/incubator-airflow,andyxhadji/incubator-airflow,nathanielvarona/airflow,r39132/airflow,andrewmchen/incubator-airflow,andyxhadji/incubator-airfl
ow,Fokko/incubator-airflow,alexvanboxel/airflow,jiwang576/incubator-airflow,DinoCow/airflow,bolkedebruin/airflow,easytaxibr/airflow,hamedhsn/incubator-airflow,vijaysbhat/incubator-airflow,jhsenjaliya/incubator-airflow,MortalViews/incubator-airflow,owlabs/incubator-airflow,dud225/incubator-airflow,dud225/incubator-airflow,caseyching/incubator-airflow,N3da/incubator-airflow,aminghadersohi/airflow,asnir/airflow,OpringaoDoTurno/airflow,mrares/incubator-airflow,jfantom/incubator-airflow,yiqingj/airflow,mrares/incubator-airflow,kerzhner/airflow,mrkm4ntr/incubator-airflow,jiwang576/incubator-airflow,bolkedebruin/airflow,wndhydrnt/airflow,dud225/incubator-airflow,ronfung/incubator-airflow,spektom/incubator-airflow,ty707/airflow,AllisonWang/incubator-airflow,gilt/incubator-airflow,caseyching/incubator-airflow,ty707/airflow,Chedi/airflow,yiqingj/airflow,wolfier/incubator-airflow,vineet-rh/incubator-airflow,hgrif/incubator-airflow,aminghadersohi/airflow,zoyahav/incubator-airflow,criccomini/airflow,jesusfcr/airflow,mylons/incubator-airflow,mrares/incubator-airflow,preete-dixit-ck/incubator-airflow,biln/airflow,OpringaoDoTurno/airflow,DinoCow/airflow,dgies/incubator-airflow,plypaul/airflow,jlowin/airflow,neovintage/airflow,edgarRd/incubator-airflow,skudriashev/incubator-airflow,nathanielvarona/airflow,lyft/incubator-airflow,sdiazb/airflow,DEVELByte/incubator-airflow,CloverHealth/airflow,moritzpein/airflow,saguziel/incubator-airflow,modsy/incubator-airflow,andrewmchen/incubator-airflow,wileeam/airflow,owlabs/incubator-airflow,rishibarve/incubator-airflow,dmitry-r/incubator-airflow,Fokko/incubator-airflow,yiqingj/airflow,wooga/airflow,forevernull/incubator-airflow,sdiazb/airflow,wndhydrnt/airflow,MetrodataTeam/incubator-airflow,artwr/airflow,ledsusop/airflow,sergiohgz/incubator-airflow,mtustin-handy/airflow,sid88in/incubator-airflow,cfei18/incubator-airflow,zodiac/incubator-airflow,cfei18/incubator-airflow,Acehaidrey/incubator-airflow,edgarRd/incubator-airflow,zoyahav/incubator-ai
rflow,skudriashev/incubator-airflow,dmitry-r/incubator-airflow,malmiron/incubator-airflow,Fokko/incubator-airflow,mrares/incubator-airflow,sekikn/incubator-airflow,btallman/incubator-airflow,N3da/incubator-airflow,dgies/incubator-airflow,easytaxibr/airflow,RealImpactAnalytics/airflow,andyxhadji/incubator-airflow,lyft/incubator-airflow,cfei18/incubator-airflow,nathanielvarona/airflow,sdiazb/airflow,CloverHealth/airflow,mtdewulf/incubator-airflow,criccomini/airflow,saguziel/incubator-airflow,fenglu-g/incubator-airflow,asnir/airflow,apache/incubator-airflow,jbhsieh/incubator-airflow,Acehaidrey/incubator-airflow,yati-sagade/incubator-airflow,vineet-rh/incubator-airflow,yati-sagade/incubator-airflow,mrkm4ntr/incubator-airflow,adamhaney/airflow,brandsoulmates/incubator-airflow,kerzhner/airflow,mtagle/airflow,mistercrunch/airflow,adrpar/incubator-airflow,apache/incubator-airflow,zodiac/incubator-airflow,ProstoMaxim/incubator-airflow,NielsZeilemaker/incubator-airflow,stverhae/incubator-airflow,plypaul/airflow,DinoCow/airflow,yoziru-desu/airflow,Tagar/incubator-airflow,jiwang576/incubator-airflow,mistercrunch/airflow,jwi078/incubator-airflow,janczak10/incubator-airflow,Acehaidrey/incubator-airflow,aminghadersohi/airflow,hgrif/incubator-airflow,danielvdende/incubator-airflow,Twistbioscience/incubator-airflow,andyxhadji/incubator-airflow,moritzpein/airflow,mistercrunch/airflow,asnir/airflow,plypaul/airflow,d-lee/airflow,sergiohgz/incubator-airflow,adamhaney/airflow,Acehaidrey/incubator-airflow,yk5/incubator-airflow,sdiazb/airflow,dhuang/incubator-airflow,ledsusop/airflow,mtdewulf/incubator-airflow,malmiron/incubator-airflow,N3da/incubator-airflow,rishibarve/incubator-airflow,gritlogic/incubator-airflow,brandsoulmates/incubator-airflow,yiqingj/airflow,criccomini/airflow,vineet-rh/incubator-airflow,stverhae/incubator-airflow,jfantom/incubator-airflow,CloverHealth/airflow,subodhchhabra/airflow,wndhydrnt/airflow,wileeam/airflow,ProstoMaxim/incubator-airflow,btallman/incubator-airf
low,MortalViews/incubator-airflow,mtustin-handy/airflow,biln/airflow,MortalViews/incubator-airflow,btallman/incubator-airflow,apache/incubator-airflow,wileeam/airflow,adrpar/incubator-airflow,hgrif/incubator-airflow,airbnb/airflow,r39132/airflow,brandsoulmates/incubator-airflow,neovintage/airflow,opensignal/airflow,gritlogic/incubator-airflow,apache/airflow,biln/airflow,gilt/incubator-airflow,N3da/incubator-airflow,biln/airflow,zack3241/incubator-airflow,danielvdende/incubator-airflow,cademarkegard/airflow,Acehaidrey/incubator-airflow,Tagar/incubator-airflow,opensignal/airflow,cjqian/incubator-airflow,yati-sagade/incubator-airflow,juvoinc/airflow,bolkedebruin/airflow,asnir/airflow,yk5/incubator-airflow,RealImpactAnalytics/airflow,KL-WLCR/incubator-airflow,danielvdende/incubator-airflow,fenglu-g/incubator-airflow,cfei18/incubator-airflow,mrkm4ntr/incubator-airflow,juvoinc/airflow,griffinqiu/airflow,wooga/airflow,OpringaoDoTurno/airflow,apache/airflow,yoziru-desu/airflow,mattuuh7/incubator-airflow,spektom/incubator-airflow,modsy/incubator-airflow,saguziel/incubator-airflow,NielsZeilemaker/incubator-airflow,AllisonWang/incubator-airflow,gtoonstra/airflow,mylons/incubator-airflow,Tagar/incubator-airflow,vijaysbhat/incubator-airflow,d-lee/airflow,jlowin/airflow,jbhsieh/incubator-airflow,nathanielvarona/airflow,andrewmchen/incubator-airflow,DinoCow/airflow,holygits/incubator-airflow,KL-WLCR/incubator-airflow,cjqian/incubator-airflow,opensignal/airflow,cjqian/incubator-airflow,criccomini/airflow,akosel/incubator-airflow,mattuuh7/incubator-airflow,MetrodataTeam/incubator-airflow,bolkedebruin/airflow,r39132/airflow,lyft/incubator-airflow,d-lee/airflow,apache/airflow,brandsoulmates/incubator-airflow,Fokko/incubator-airflow,mtustin-handy/airflow,sergiohgz/incubator-airflow,wxiang7/airflow,neovintage/airflow,jesusfcr/airflow,caseyching/incubator-airflow,edgarRd/incubator-airflow,lyft/incubator-airflow,griffinqiu/airflow,adrpar/incubator-airflow,ProstoMaxim/incubator-airflow,sek
ikn/incubator-airflow,gritlogic/incubator-airflow,sergiohgz/incubator-airflow,ProstoMaxim/incubator-airflow,adamhaney/airflow,hamedhsn/incubator-airflow,yoziru-desu/airflow,subodhchhabra/airflow,airbnb/airflow,gtoonstra/airflow,Chedi/airflow,sid88in/incubator-airflow,AllisonWang/incubator-airflow,vijaysbhat/incubator-airflow,jwi078/incubator-airflow,zoyahav/incubator-airflow,caseyching/incubator-airflow,jhsenjaliya/incubator-airflow,jesusfcr/airflow,cademarkegard/airflow,griffinqiu/airflow,preete-dixit-ck/incubator-airflow,danielvdende/incubator-airflow,CloverHealth/airflow,adamhaney/airflow,rishibarve/incubator-airflow,mtagle/airflow,juvoinc/airflow,skudriashev/incubator-airflow,OpringaoDoTurno/airflow,stverhae/incubator-airflow,jgao54/airflow,forevernull/incubator-airflow,RealImpactAnalytics/airflow,janczak10/incubator-airflow,preete-dixit-ck/incubator-airflow,btallman/incubator-airflow,andrewmchen/incubator-airflow,mtustin-handy/airflow,janczak10/incubator-airflow,jfantom/incubator-airflow,wooga/airflow,hamedhsn/incubator-airflow,cfei18/incubator-airflow,malmiron/incubator-airflow,dhuang/incubator-airflow,mtdewulf/incubator-airflow,yk5/incubator-airflow,jhsenjaliya/incubator-airflow,ty707/airflow,ty707/airflow,jiwang576/incubator-airflow,Chedi/airflow,danielvdende/incubator-airflow,nathanielvarona/airflow,hgrif/incubator-airflow,gtoonstra/airflow,wooga/airflow,DEVELByte/incubator-airflow,jwi078/incubator-airflow,bolkedebruin/airflow,wolfier/incubator-airflow,wolfier/incubator-airflow,dgies/incubator-airflow,yati-sagade/incubator-airflow,vineet-rh/incubator-airflow,sekikn/incubator-airflow,kerzhner/airflow,zack3241/incubator-airflow,RealImpactAnalytics/airflow,artwr/airflow,AllisonWang/incubator-airflow,jgao54/airflow,sid88in/incubator-airflow,jgao54/airflow,jbhsieh/incubator-airflow,dmitry-r/incubator-airflow,mrkm4ntr/incubator-airflow,alexvanboxel/airflow,jlowin/airflow,akosel/incubator-airflow,MortalViews/incubator-airflow,subodhchhabra/airflow,moritzpein/airfl
ow,KL-WLCR/incubator-airflow,apache/airflow,apache/airflow,mtagle/airflow,easytaxibr/airflow,owlabs/incubator-airflow,mistercrunch/airflow,easytaxibr/airflow,ledsusop/airflow,janczak10/incubator-airflow,owlabs/incubator-airflow,apache/airflow,ledsusop/airflow,MetrodataTeam/incubator-airflow,zodiac/incubator-airflow,kerzhner/airflow,lxneng/incubator-airflow,adrpar/incubator-airflow,Chedi/airflow,opensignal/airflow,rishibarve/incubator-airflow,alexvanboxel/airflow,jhsenjaliya/incubator-airflow,wxiang7/airflow,yoziru-desu/airflow,apache/incubator-airflow,r39132/airflow,fenglu-g/incubator-airflow,mylons/incubator-airflow,spektom/incubator-airflow,akosel/incubator-airflow,zoyahav/incubator-airflow,jgao54/airflow,yk5/incubator-airflow,edgarRd/incubator-airflow,neovintage/airflow,spektom/incubator-airflow,moritzpein/airflow,preete-dixit-ck/incubator-airflow,forevernull/incubator-airflow
|
21d9b2f89a7eb9a6801a48c2586cc360e6be47c3
|
LTA_to_UVFITS.py
|
LTA_to_UVFITS.py
|
def lta_to_uvfits():
lta_files = glob.glob('*.lta*')
#flag_files = glob.glob('*.FLAGS*')
for i in range(len(lta_files)):
lta_file_name = lta_files[i]
uvfits_file_name = lta_file_name +'.UVFITS'
spam.convert_lta_to_uvfits( lta_file_name, uvfits_file_name )
|
def lta_to_uvfits():
lta_files = glob.glob('*.lta*')
#flag_files = glob.glob('*.FLAGS*')
for i in range(len(lta_files)):
lta_file_name = lta_files[i]
uvfits_file_name = lta_file_name +'.UVFITS'
spam.convert_lta_to_uvfits( lta_file_name, uvfits_file_name )
return lta_files
|
Return LTA files to use as argument in main thread code
|
Return LTA files to use as argument in main thread code
|
Python
|
mit
|
NCRA-TIFR/gadpu,NCRA-TIFR/gadpu
|
dee7b02d0cdd6969b4228086ab9af77ad1da60ef
|
asymmetric_jwt_auth/models.py
|
asymmetric_jwt_auth/models.py
|
from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from cryptography.hazmat.primitives.serialization import load_ssh_public_key
from cryptography.hazmat.backends import default_backend
def validate_public_key(value):
try:
load_ssh_public_key(value.encode('utf-8'), default_backend())
except Exception as e:
raise ValidationError('Public key is invalid: %s' % e)
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key", validators=[validate_public_key])
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
|
from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from cryptography.hazmat.primitives.serialization import load_pem_public_key, load_ssh_public_key
from cryptography.hazmat.backends import default_backend
def validate_public_key(value):
is_valid = False
exc = None
for load in (load_pem_public_key, load_ssh_public_key):
if not is_valid:
try:
load(value.encode('utf-8'), default_backend())
is_valid = True
except Exception as e:
exc = e
if not is_valid:
raise ValidationError('Public key is invalid: %s' % exc)
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key", validators=[validate_public_key])
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
|
Allow PEM format keys through validation
|
Allow PEM format keys through validation
|
Python
|
isc
|
crgwbr/asymmetric_jwt_auth,crgwbr/asymmetric_jwt_auth
|
5aef0b64477248f6cdadfd864a6d05cbc6939f09
|
trex/serializers.py
|
trex/serializers.py
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import HyperlinkedModelSerializer
from trex.models.project import Project, Entry
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("name", "description", "active", "created", "entries")
class EntryDetailSerializer(HyperlinkedModelSerializer):
class Meta:
model = Entry
fields = ("date", "duration", "description", "state", "user", "created")
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework.serializers import (
HyperlinkedModelSerializer, HyperlinkedIdentityField,
)
from trex.models.project import Project, Entry
class ProjectSerializer(HyperlinkedModelSerializer):
class Meta:
model = Project
fields = ("url", "name", "description", "active", "created")
class ProjectDetailSerializer(HyperlinkedModelSerializer):
entries = HyperlinkedIdentityField(view_name="project-entries-list")
class Meta:
model = Project
fields = ("name", "description", "active", "created", "entries")
class EntryDetailSerializer(HyperlinkedModelSerializer):
class Meta:
model = Entry
fields = ("date", "duration", "description", "state", "user", "created")
|
Use HyperlinkedIdentityField for entries in ProjectDetailSerializer
|
Use HyperlinkedIdentityField for entries in ProjectDetailSerializer
|
Python
|
mit
|
bjoernricks/trex,bjoernricks/trex
|
90ab0bfbac851a52f0e48f5186a727692e699a6f
|
geodj/youtube.py
|
geodj/youtube.py
|
from gdata.youtube.service import YouTubeService, YouTubeVideoQuery
class YoutubeMusic:
def __init__(self):
self.service = YouTubeService()
def search(self, artist):
query = YouTubeVideoQuery()
query.vq = artist
query.orderby = 'viewCount'
query.racy = 'exclude'
query.categories.append("/Music")
feed = self.service.YouTubeQuery(query)
results = []
for entry in feed.entry:
if not self.is_valid_entry(artist, entry):
continue
results.append({
'url': entry.media.player.url,
'duration': int(entry.media.duration.seconds),
})
return results
def is_valid_entry(self, artist, entry):
duration = int(entry.media.duration.seconds)
if entry.rating is not None and float(entry.rating.average) < 3:
return False
if duration < (2 * 60) or duration > (9 * 60):
return False
if artist.lower() not in entry.media.title.text.lower():
return False
return True
|
from gdata.youtube.service import YouTubeService, YouTubeVideoQuery
from django.utils.encoding import smart_str
class YoutubeMusic:
def __init__(self):
self.service = YouTubeService()
def search(self, artist):
query = YouTubeVideoQuery()
query.vq = artist
query.orderby = 'viewCount'
query.racy = 'exclude'
query.categories.append("/Music")
feed = self.service.YouTubeQuery(query)
results = []
for entry in feed.entry:
if not self.is_valid_entry(artist, entry):
continue
results.append({
'url': entry.media.player.url,
'title': smart_str(entry.media.title.text),
'duration': int(entry.media.duration.seconds),
})
return {'artist': smart_str(artist), 'results': results}
def is_valid_entry(self, artist, entry):
duration = int(entry.media.duration.seconds)
if entry.rating is not None and float(entry.rating.average) < 3:
return False
if duration < (2 * 60) or duration > (9 * 60):
return False
if smart_str(artist).lower() not in smart_str(entry.media.title.text).lower():
return False
return True
|
Use smart_str and include artist in results
|
Use smart_str and include artist in results
|
Python
|
mit
|
6/GeoDJ,6/GeoDJ
|
8815507f8e334238d269468e08c22f4415e58528
|
spacy/lang/es/__init__.py
|
spacy/lang/es/__init__.py
|
# coding: utf8
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lemmatizer import LOOKUP
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...lemmatizerlookup import Lemmatizer
from ...attrs import LANG
from ...util import update_exc
class Spanish(Language):
lang = 'es'
class Defaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'es'
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
tag_map = dict(TAG_MAP)
stop_words = set(STOP_WORDS)
@classmethod
def create_lemmatizer(cls, nlp=None):
return Lemmatizer(LOOKUP)
__all__ = ['Spanish']
|
# coding: utf8
from __future__ import unicode_literals
from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
from .tag_map import TAG_MAP
from .stop_words import STOP_WORDS
from .lemmatizer import LOOKUP
from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...language import Language
from ...lemmatizerlookup import Lemmatizer
from ...attrs import LANG
from ...util import update_exc
class SpanishDefaults(Language.Defaults):
lex_attr_getters = dict(Language.Defaults.lex_attr_getters)
lex_attr_getters[LANG] = lambda text: 'es'
tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
tag_map = dict(TAG_MAP)
stop_words = set(STOP_WORDS)
@classmethod
def create_lemmatizer(cls, nlp=None):
return Lemmatizer(LOOKUP)
class Spanish(Language):
lang = 'es'
Defaults = SpanishDefaults
__all__ = ['Spanish']
|
Move SpanishDefaults out of Language class, for pickle
|
Move SpanishDefaults out of Language class, for pickle
|
Python
|
mit
|
recognai/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy,honnibal/spaCy,spacy-io/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,honnibal/spaCy,aikramer2/spaCy,spacy-io/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,aikramer2/spaCy,explosion/spaCy,recognai/spaCy,honnibal/spaCy,aikramer2/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy
|
13da95de0f2fb17ec6cbf005be33db3b3d348831
|
motivation_text/models.py
|
motivation_text/models.py
|
from django.db import models
from patient.models import Patient
from django.utils.encoding import smart_unicode
class MotivationText(models.Model):
patient = models.ForeignKey(Patient, null=False)
text = models.TextField(default='', blank=False)
time_created = models.DateTimeField(null=False, auto_now_add=True, auto_now=False)
def __unicode__(self):
return smart_unicode(
"Motivational text for "
+ self.patient.user.first_name + " " + self.patient.user.last_name
+ " created at " + str(self.time_created)
)
class Meta():
ordering = ['-id']
TEXT_INFORMATION = 'I'
TEXT_MOTIVATION = 'M'
TYPES = [
(TEXT_INFORMATION, 'InformationText'),
(TEXT_MOTIVATION, 'MotivationText'),
]
type = models.CharField(max_length=1, choices=TYPES, null=False, default='M')
|
from django.db import models
from patient.models import Patient
from django.utils.encoding import smart_unicode
class MotivationText(models.Model):
patient = models.ForeignKey(Patient, null=False)
text = models.TextField(default='', blank=False)
time_created = models.DateTimeField(null=False, auto_now_add=True, auto_now=False)
def __unicode__(self):
return smart_unicode(
("InformationText" if self.type == 'I' else 'MotivationText')
+ " for " + self.patient.user.get_full_name()
+ " created at " + str(self.time_created)
)
class Meta():
ordering = ['-id']
TEXT_INFORMATION = 'I'
TEXT_MOTIVATION = 'M'
TYPES = [
(TEXT_INFORMATION, 'InformationText'),
(TEXT_MOTIVATION, 'MotivationText'),
]
type = models.CharField(max_length=1, choices=TYPES, null=False, default='M')
|
Improve unicode for motivational texts
|
Improve unicode for motivational texts
|
Python
|
mit
|
sigurdsa/angelika-api
|
f42ba1bebb0e7f92222d8a66f94e2550b4dde9e1
|
helpers/custom_filters.py
|
helpers/custom_filters.py
|
import json
def strslice(s, length):
if not isinstance(s, basestring):
s = str(s)
return s[:length]
def urlencode(s):
if isinstance(s, unicode):
s = s.encode('utf-8')
import urllib
return urllib.quote(s)
def json_filter(data):
return json.dumps(data)
def datetimeformat(value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
filters = {
'strslice': strslice,
'urlencode': urlencode,
'json': json_filter,
'datetime': datetimeformat,
}
|
import json
def strslice(s, length):
if not isinstance(s, basestring):
s = str(s)
return s[:length]
def urlencode(s):
if isinstance(s, unicode):
s = s.encode('utf-8')
import urllib
return urllib.quote(s)
def json_filter(data):
return json.dumps(data)
def datetimeformat(value, format='%H:%M / %d-%m-%Y'):
return value.strftime(format)
def neat_time(dt):
"""Return the time in dt as a neat string.
Examples:
>>> neat_time(time(7, 30))
7:30AM
>>> neat_time(time(14, 00))
2PM
"""
if dt.minute:
timestring = dt.strftime('%I:%M%p')
else:
timestring = dt.strftime('%I%p')
if timestring[0] == '0':
timestring = timestring[1:]
return timestring
filters = {
'strslice': strslice,
'urlencode': urlencode,
'json': json_filter,
'datetime': datetimeformat,
'neattime': neat_time,
}
|
Add a neattime custom filter to give me pretty times.
|
Add a neattime custom filter to give me pretty times.
|
Python
|
agpl-3.0
|
watchcat/cbu-rotterdam,codeforamerica/Change-By-Us,watchcat/cbu-rotterdam,watchcat/cbu-rotterdam,localprojects/Change-By-Us,watchcat/cbu-rotterdam,localprojects/Change-By-Us,codeforeurope/Change-By-Us,codeforeurope/Change-By-Us,watchcat/cbu-rotterdam,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,codeforeurope/Change-By-Us,codeforamerica/Change-By-Us,localprojects/Change-By-Us,localprojects/Change-By-Us,codeforamerica/Change-By-Us
|
3af22fd5583ee110f731b9e1ebecba67ebee2bd4
|
sendwithus/exceptions.py
|
sendwithus/exceptions.py
|
class SendwithusError(Exception):
"""Base class for Sendwithus API errors"""
class AuthenticationError(SendwithusError):
"""API Authentication Failed"""
class APIError(SendwithusError):
"""4xx - Invalid Request (Client error)"""
class ServerError(SendwithusError):
"""5xx - Failed Request (Server error)"""
|
class SendwithusError(Exception):
"""Base class for Sendwithus API errors"""
def __init__(self, content=None):
self.content = content
class AuthenticationError(SendwithusError):
"""API Authentication Failed"""
class APIError(SendwithusError):
"""4xx - Invalid Request (Client error)"""
class ServerError(SendwithusError):
"""5xx - Failed Request (Server error)"""
|
Add a constructor to SendwithusError that stores content
|
Add a constructor to SendwithusError that stores content
|
Python
|
apache-2.0
|
sendwithus/sendwithus_python
|
93512f1837a4e72752d4ffa07ac49e1f2cd5a7f6
|
opensimplex_test.py
|
opensimplex_test.py
|
import random
import time
from PIL import Image # Depends on the Pillow lib
from opensimplex import OpenSimplexNoise
WIDTH = 512
HEIGHT = 512
FEATURE_SIZE = 24
def main():
random.seed(time.time())
seed = random.randint(0, 100000)
simplex = OpenSimplexNoise(seed)
im = Image.new('L', (WIDTH, HEIGHT))
for y in range(0, HEIGHT):
for x in range(0, WIDTH):
#value = simplex.noise2d(x / FEATURE_SIZE, y / FEATURE_SIZE)
value = simplex.noise2d(x * 0.05, y * 0.05)
color = int((value + 1) * 128)
im.putpixel((x, y), color)
im.show()
if __name__ == '__main__':
main()
|
from PIL import Image # Depends on the Pillow lib
from opensimplex import OpenSimplexNoise
WIDTH = 512
HEIGHT = 512
FEATURE_SIZE = 24
def main():
simplex = OpenSimplexNoise()
im = Image.new('L', (WIDTH, HEIGHT))
for y in range(0, HEIGHT):
for x in range(0, WIDTH):
#value = simplex.noise2d(x / FEATURE_SIZE, y / FEATURE_SIZE)
value = simplex.noise2d(x * 0.05, y * 0.05)
color = int((value + 1) * 128)
im.putpixel((x, y), color)
im.show()
if __name__ == '__main__':
main()
|
Use default seed for the test.
|
Use default seed for the test.
|
Python
|
mit
|
lmas/opensimplex,antiface/opensimplex
|
9249dc161e9fdd64e15a42f644232c43cb6875b2
|
src/dependenpy/plugins.py
|
src/dependenpy/plugins.py
|
# -*- coding: utf-8 -*-
"""dependenpy plugins module."""
try:
from archan import Provider, Argument, DSM as ArchanDSM
from .dsm import DSM as DependenpyDSM
from .helpers import guess_depth
class InternalDependencies(Provider):
"""Dependenpy provider for Archan."""
identifier = 'dependenpy.InternalDependencies'
name = 'Internal Dependencies'
description = 'Provide matrix data about internal dependencies ' \
'in a set of packages.'
arguments = (
Argument('packages', list, 'The list of packages to check for.'),
Argument('enforce_init', bool, default=True,
description='Whether to assert presence of '
'__init__.py files in directories.'),
Argument('depth', int, 'The depth of the matrix to generate.'),
)
def get_dsm(self, packages, enforce_init=True, depth=None):
"""
Provide matrix data for internal dependencies in a set of packages.
Args:
*packages (list): the list of packages to check for.
enforce_init (bool):
whether to assert presence of __init__.py files
in directories.
depth (int): the depth of the matrix to generate.
Returns:
archan.DSM: instance of archan DSM.
"""
dsm = DependenpyDSM(*packages, enforce_init=enforce_init)
if depth is None:
depth = guess_depth(packages)
matrix = dsm.as_matrix(depth=depth)
return ArchanDSM(data=matrix.data, entities=matrix.keys)
except ImportError:
class InternalDependencies(object):
"""Empty dependenpy provider."""
|
# -*- coding: utf-8 -*-
"""dependenpy plugins module."""
try:
from archan import Provider, Argument, DesignStructureMatrix as ArchanDSM
from .dsm import DSM as DependenpyDSM
from .helpers import guess_depth
class InternalDependencies(Provider):
"""Dependenpy provider for Archan."""
identifier = 'dependenpy.InternalDependencies'
name = 'Internal Dependencies'
description = 'Provide matrix data about internal dependencies ' \
'in a set of packages.'
argument_list = (
Argument('packages', list, 'The list of packages to check for.'),
Argument('enforce_init', bool, default=True,
description='Whether to assert presence of '
'__init__.py files in directories.'),
Argument('depth', int, 'The depth of the matrix to generate.'),
)
def get_data(self, packages, enforce_init=True, depth=None):
"""
Provide matrix data for internal dependencies in a set of packages.
Args:
*packages (list): the list of packages to check for.
enforce_init (bool):
whether to assert presence of __init__.py files
in directories.
depth (int): the depth of the matrix to generate.
Returns:
archan.DSM: instance of archan DSM.
"""
dsm = DependenpyDSM(*packages, enforce_init=enforce_init)
if depth is None:
depth = guess_depth(packages)
matrix = dsm.as_matrix(depth=depth)
return ArchanDSM(data=matrix.data, entities=matrix.keys)
except ImportError:
class InternalDependencies(object):
"""Empty dependenpy provider."""
|
Update archan provider for archan 3.0
|
Update archan provider for archan 3.0
|
Python
|
isc
|
Pawamoy/dependenpy,Pawamoy/dependenpy
|
5da820b85f9e55a54639856bdd698c35b866833c
|
fireplace/cards/gvg/neutral_epic.py
|
fireplace/cards/gvg/neutral_epic.py
|
from ..utils import *
##
# Minions
# Hobgoblin
class GVG_104:
def OWN_MINION_SUMMON(self, minion):
if minion.atk == 1:
return [Buff(minion, "GVG_104a")]
|
from ..utils import *
##
# Minions
# Hobgoblin
class GVG_104:
def OWN_CARD_PLAYED(self, card):
if card.type == CardType.MINION and card.atk == 1:
return [Buff(card, "GVG_104a")]
|
Fix Hobgoblin to trigger only on cards played
|
Fix Hobgoblin to trigger only on cards played
|
Python
|
agpl-3.0
|
smallnamespace/fireplace,jleclanche/fireplace,liujimj/fireplace,Meerkov/fireplace,amw2104/fireplace,butozerca/fireplace,oftc-ftw/fireplace,Ragowit/fireplace,NightKev/fireplace,smallnamespace/fireplace,Meerkov/fireplace,liujimj/fireplace,butozerca/fireplace,Ragowit/fireplace,beheh/fireplace,oftc-ftw/fireplace,amw2104/fireplace
|
48b2460c718af88e8140b108d4a9acd9258ade8c
|
gargoyle/__init__.py
|
gargoyle/__init__.py
|
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
from pkgutil import extend_path
__path__ = extend_path(__path__, __name__)
|
__import__('pkg_resources').declare_namespace(__name__)
|
Change to vanilla namespace package
|
Change to vanilla namespace package
|
Python
|
apache-2.0
|
disqus/gutter,disqus/gutter,kalail/gutter,kalail/gutter,kalail/gutter
|
c083481eed1578551daab7ece2e34b3ff4aece82
|
accelerator/migrations/0044_add_sitetree_sidenav_toggle.py
|
accelerator/migrations/0044_add_sitetree_sidenav_toggle.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-03-20 18:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0043_remove_exclude_fields'),
]
operations = [
migrations.RemoveField(
model_name='programfamily',
name='side_navigation',
),
migrations.AddField(
model_name='programfamily',
name='use_site_tree_side_nav',
field=models.BooleanField(default=False, help_text='Show the new-style side navigation'),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2019-03-20 18:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accelerator', '0043_remove_exclude_fields'),
]
help_text = 'Show the new-style side navigation'
operations = [
migrations.RemoveField(
model_name='programfamily',
name='side_navigation',
),
migrations.AddField(
model_name='programfamily',
name='use_site_tree_side_nav',
field=models.BooleanField(default=False,
help_text=help_text),
),
]
|
Fix style on migration - waste of time, but whatever
|
Fix style on migration - waste of time, but whatever
|
Python
|
mit
|
masschallenge/django-accelerator,masschallenge/django-accelerator
|
9cc4ce152ba8d683db1371a963dae2621f1b8fc6
|
dbaas/dbaas/celeryconfig.py
|
dbaas/dbaas/celeryconfig.py
|
import os
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
'DBAAS_NOTIFICATION_BROKER_URL', 'redis://localhost:%s/0' % REDIS_PORT)
CELERYD_TASK_TIME_LIMIT = 10800
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
CELERYD_PREFETCH_MULTIPLIER = 1
|
import os
from django.conf import settings
REDIS_PORT = os.getenv('DBAAS_NOTIFICATION_BROKER_PORT', '6379')
BROKER_URL = os.getenv(
'DBAAS_NOTIFICATION_BROKER_URL', 'redis://{}:{}/0'.format(settings.REDIS_HOST, REDIS_PORT))
CELERYD_TASK_TIME_LIMIT = 10800
CELERY_TRACK_STARTED = True
CELERY_IGNORE_RESULT = False
CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend'
CELERYBEAT_MAX_LOOP_INTERVAL = 5
CELERY_TIMEZONE = os.getenv('DJANGO_TIME_ZONE', 'America/Sao_Paulo')
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYD_LOG_FORMAT = "[%(asctime)s: %(processName)s %(name)s %(levelname)s] %(message)s"
CELERY_ALWAYS_EAGER = False
CELERYD_LOG_COLOR = False
CELERYD_PREFETCH_MULTIPLIER = 1
|
Fix celery config to respect the host configured on settings
|
Fix celery config to respect the host configured on settings
|
Python
|
bsd-3-clause
|
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
|
a8ec60daaee52603a1c3bab879a5eee9f0fd931b
|
ddd/dataobjects/datatype.py
|
ddd/dataobjects/datatype.py
|
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
def __init__(self,basetype='',conversion=None,unit='-',constant=False):
self.basetype=basetype
if not conversion:
self.conversion=DddConversion(type='binary',fraction=1)
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
|
'''
Created on 24.04.2016
@author: killian
'''
from objects import dddobject,DataObject
from conversions import DddConversion
@dddobject('datatype')
class DddDatatype(DataObject):
def __init__(self,basetype='',bitsize=8,signed=False,conversion=None,unit='-',constant=False):
self.basetype=basetype
self.bitsize=bitsize
self.signed=signed
if not conversion:
self.conversion=DddConversion(type='1to1')
else:
self.conversion=conversion
self.unit=unit
self.constant=constant
def getJsonDict(self,hashed=False):
tmp = DataObject.getJsonDict(self,False)
tmp.update({'basetype':self.basetype,
'bitsize':self.bitsize,
'signed':self.signed,
'unit':self.unit,
'constant':self.constant,
'conversion':self.conversion})
return tmp
def get_name(self):
return self.basetype.upper()+'_'+self.conversion.get_name()
def accept(self,visitor):
visitor.pre_order(self)
self.conversion.accept(visitor)
visitor.post_order(self)
|
Split basetype of DddDatatype into basetype,bitsize,signed
|
Split basetype of DddDatatype into basetype,bitsize,signed
|
Python
|
mit
|
toesus/ddd,Sauci/ddd,toesus/ddd,Sauci/ddd,Sauci/ddd
|
6bd59ef149ec32f7d0a64ea2ad50a2729aceb6f5
|
fluent_contents/models/mixins.py
|
fluent_contents/models/mixins.py
|
from django.core.cache import cache
class CachedModelMixin(object):
"""
Mixin to add cache expiration to a model.
"""
clear_cache_on_add = False
def save(self, *args, **kwargs):
is_new = not self.pk or self._state.adding
super(CachedModelMixin, self).save(*args, **kwargs)
if not is_new or self.clear_cache_on_add:
self.clear_cache()
save.alters_data = True
def delete(self, *args, **kwargs):
deleted_pk = self.pk
super(CachedModelMixin, self).delete(*args, **kwargs)
# Temporary restore to allow get_cache_keys() / plugin.get_output_cache_keys() to read the PK
self.pk = deleted_pk
self.clear_cache()
self.pk = None
# Must restore these options, or risk removing with a template print statement.
delete.alters_data = True
def clear_cache(self):
"""
Delete the cache keys associated with this model.
"""
cache.delete_many(self.get_cache_keys())
clear_cache.alters_data = True
def get_cache_keys(self):
"""
Get a list of all cache keys associated with this model.
"""
raise NotImplementedError("Implement get_cache_keys() or clear_cache()")
|
from django.core.cache import cache
class CachedModelMixin(object):
"""
Mixin to add cache expiration to a model.
"""
clear_cache_on_add = False
def save(self, *args, **kwargs):
is_new = not self.pk or self._state.adding
super(CachedModelMixin, self).save(*args, **kwargs)
if not is_new or self.clear_cache_on_add:
self.clear_cache()
save.alters_data = True
def delete(self, *args, **kwargs):
deleted_pk = self.pk
collector_result = super(CachedModelMixin, self).delete(*args, **kwargs)
# Temporary restore to allow get_cache_keys() / plugin.get_output_cache_keys() to read the PK
self.pk = deleted_pk
self.clear_cache()
self.pk = None
return collector_result
# Must restore these options, or risk removing with a template print statement.
delete.alters_data = True
def clear_cache(self):
"""
Delete the cache keys associated with this model.
"""
cache.delete_many(self.get_cache_keys())
clear_cache.alters_data = True
def get_cache_keys(self):
"""
Get a list of all cache keys associated with this model.
"""
raise NotImplementedError("Implement get_cache_keys() or clear_cache()")
|
Make sure our CachedModelMixin.delete() also returns the collector results
|
Make sure our CachedModelMixin.delete() also returns the collector results
|
Python
|
apache-2.0
|
edoburu/django-fluent-contents,django-fluent/django-fluent-contents,edoburu/django-fluent-contents,edoburu/django-fluent-contents,django-fluent/django-fluent-contents,django-fluent/django-fluent-contents
|
a88f0ab8be669f573f2869617717653bb008e11b
|
sheldon/bot.py
|
sheldon/bot.py
|
# -*- coding: utf-8 -*-
"""
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
__author__ = 'Lises team'
__version__ = '0.1'
__email__ = 'zhidkovseva@gmail.com'
class Sheldon():
pass
class Plugin():
pass
class Adapter():
pass
|
# -*- coding: utf-8 -*-
"""
@author: Lises team
@contact: zhidkovseva@gmail.com
@license: The MIT license
Copyright (C) 2015
"""
# Python 2 compatibility
if sys.version_info[0] == 2:
# Exceptions for bot
from exceptions import *
# Tool for loading plugins and adapters
from modules import *
# Tool for loading config from project folder
from config import *
else:
# Exceptions for bot
from .exceptions import *
# Tool for loading plugins and adapters
from .modules import *
# Tool for loading config from project folder
from .config import *
class Sheldon():
"""
Main class of the bot.
Run script creating new instance of this class and run it.
"""
def __init__(self):
"""
Function for loading bot.
:return:
"""
# Creating empty lists for plugins and adapters
self.plugins = []
self.adapters = []
def load_plugins(self):
"""
Function for collecting and loading plugins from plugins folder.
:return:
"""
class Plugin():
pass
class Adapter():
pass
|
Update structure of Sheldon class
|
Update structure of Sheldon class
|
Python
|
mit
|
lises/sheldon
|
caa4dcdcf7e936f352eea22513433d8f8deca2ab
|
sahara/tests/unit/utils/test_hashabledict.py
|
sahara/tests/unit/utils/test_hashabledict.py
|
# Copyright (c) 2013 Hortonworks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import testtools
from sahara.utils import hashabledict as h
class HashableDictTest(testtools.TestCase):
def test_is_hashable(self):
hd = h.HashableDict()
hd['one'] = 'oneValue'
self.assertTrue(isinstance(hd, collections.Hashable))
|
# Copyright (c) 2013 Hortonworks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import collections
import testtools
from sahara.utils import hashabledict as h
class HashableDictTest(testtools.TestCase):
def test_is_hashable_collection(self):
dct = h.HashableDict(one='oneValue')
self.assertIsInstance(dct, collections.Hashable)
def test_hash_consistency(self):
dct1 = h.HashableDict(one='oneValue')
dct2 = h.HashableDict(one='oneValue')
self.assertEqual(hash(dct1), hash(dct2))
|
Improve unit test for HashableDict
|
Improve unit test for HashableDict
We have HashableDict introduced to network info storing, but hash
function of this implementation was never tested in unit tests.
Change-Id: Id48c9172ca63e19b397dc131d85ed631874142cd
|
Python
|
apache-2.0
|
openstack/sahara,esikachev/sahara-backup,ekasitk/sahara,henaras/sahara,egafford/sahara,tellesnobrega/sahara,ekasitk/sahara,egafford/sahara,henaras/sahara,zhangjunli177/sahara,zhangjunli177/sahara,esikachev/sahara-backup,esikachev/sahara-backup,openstack/sahara,henaras/sahara,ekasitk/sahara,crobby/sahara,crobby/sahara,crobby/sahara,tellesnobrega/sahara,zhangjunli177/sahara
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.