commit
stringlengths 40
40
| old_file
stringlengths 4
236
| new_file
stringlengths 4
236
| old_contents
stringlengths 1
3.26k
| new_contents
stringlengths 16
4.43k
| subject
stringlengths 16
624
| message
stringlengths 17
3.29k
| lang
stringclasses 5
values | license
stringclasses 13
values | repos
stringlengths 5
91.5k
|
|---|---|---|---|---|---|---|---|---|---|
6086b970e6c37ca4f343291a35bbb9e533109c1c
|
flask_wiki/backend/routes.py
|
flask_wiki/backend/routes.py
|
from flask_wiki.backend.backend import api
from flask_wiki.backend.views import PageView
api.add_resource(PageView, '/pages-list', endpoint='pages-list')
|
from flask_wiki.backend.backend import api
from flask_wiki.backend.views import PageView, PageDetail
api.add_resource(PageView, '/pages-list', endpoint='pages-list')
api.add_resource(PageDetail, '/pages/<slug>', endpoint='page-detail')
|
Support for page-detail url added.
|
Support for page-detail url added.
|
Python
|
bsd-2-clause
|
gcavalcante8808/flask-wiki,gcavalcante8808/flask-wiki,gcavalcante8808/flask-wiki
|
76a2248ffe8c64b15a6f7d307b6d7c726e97165c
|
alerts/cloudtrail_logging_disabled.py
|
alerts/cloudtrail_logging_disabled.py
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
#
# Contributors:
# Brandon Myers bmyers@mozilla.com
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch
class AlertCloudtrailLoggingDisabled(AlertTask):
def main(self):
search_query = SearchQuery(minutes=30)
search_query.add_must([
TermMatch('_type', 'cloudtrail'),
TermMatch('eventName', 'StopLogging'),
])
search_query.add_must_not(TermMatch('errorCode', 'AccessDenied'))
self.filtersManual(search_query)
self.searchEventsSimple()
self.walkEvents()
def onEvent(self, event):
category = 'AWSCloudtrail'
tags = ['cloudtrail', 'aws']
severity = 'CRITICAL'
summary = 'Cloudtrail Logging Disabled: ' + event['_source']['requestParameters']['name']
return self.createAlertDict(summary, category, tags, [event], severity)
|
#!/usr/bin/env python
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
# Copyright (c) 2017 Mozilla Corporation
#
# Contributors:
# Brandon Myers bmyers@mozilla.com
from lib.alerttask import AlertTask
from query_models import SearchQuery, TermMatch
class AlertCloudtrailLoggingDisabled(AlertTask):
def main(self):
search_query = SearchQuery(minutes=30)
search_query.add_must([
TermMatch('_type', 'cloudtrail'),
TermMatch('eventName', 'StopLogging'),
])
search_query.add_must_not(TermMatch('errorCode', 'AccessDenied'))
self.filtersManual(search_query)
self.searchEventsSimple()
self.walkEvents()
def onEvent(self, event):
category = 'AWSCloudtrail'
tags = ['cloudtrail', 'aws', 'cloudtrailpagerduty']
severity = 'CRITICAL'
summary = 'Cloudtrail Logging Disabled: ' + event['_source']['requestParameters']['name']
return self.createAlertDict(summary, category, tags, [event], severity)
|
Send Cloudtrail logging disabled alert to MOC
|
Send Cloudtrail logging disabled alert to MOC
|
Python
|
mpl-2.0
|
mozilla/MozDef,Phrozyn/MozDef,ameihm0912/MozDef,gdestuynder/MozDef,ameihm0912/MozDef,mpurzynski/MozDef,mozilla/MozDef,gdestuynder/MozDef,jeffbryner/MozDef,jeffbryner/MozDef,mpurzynski/MozDef,mpurzynski/MozDef,Phrozyn/MozDef,jeffbryner/MozDef,ameihm0912/MozDef,mozilla/MozDef,ameihm0912/MozDef,jeffbryner/MozDef,gdestuynder/MozDef,mozilla/MozDef,Phrozyn/MozDef,gdestuynder/MozDef,mpurzynski/MozDef,Phrozyn/MozDef
|
6099451fe088fe74945bbeedeeee66896bd7ff3d
|
voctocore/lib/sources/__init__.py
|
voctocore/lib/sources/__init__.py
|
import logging
from lib.config import Config
from lib.sources.decklinkavsource import DeckLinkAVSource
from lib.sources.imgvsource import ImgVSource
from lib.sources.tcpavsource import TCPAVSource
from lib.sources.testsource import TestSource
from lib.sources.videoloopsource import VideoLoopSource
log = logging.getLogger('AVSourceManager')
sources = {}
def spawn_source(name, port, has_audio=True, has_video=True,
force_num_streams=None):
kind = Config.getSourceKind(name)
if kind == 'img':
sources[name] = ImgVSource(name)
elif kind == 'decklink':
sources[name] = DeckLinkAVSource(name, has_audio, has_video)
elif kind == 'test':
sources[name] = TestSource(name, has_audio, has_video)
elif kind == 'videoloop':
sources[name] = VideoLoopSource(name)
elif kind == 'tcp':
sources[name] = TCPAVSource(name, port, has_audio, has_video,
force_num_streams)
else:
log.warning('Unknown source kind "%s", defaulting to "tcp"', kind)
return sources[name]
def restart_source(name):
assert False, "restart_source() not implemented"
|
import logging
from lib.config import Config
from lib.sources.decklinkavsource import DeckLinkAVSource
from lib.sources.imgvsource import ImgVSource
from lib.sources.tcpavsource import TCPAVSource
from lib.sources.testsource import TestSource
from lib.sources.videoloopsource import VideoLoopSource
log = logging.getLogger('AVSourceManager')
sources = {}
def spawn_source(name, port, has_audio=True, has_video=True,
force_num_streams=None):
kind = Config.getSourceKind(name)
if kind == 'img':
sources[name] = ImgVSource(name)
elif kind == 'decklink':
sources[name] = DeckLinkAVSource(name, has_audio, has_video)
elif kind == 'videoloop':
sources[name] = VideoLoopSource(name)
elif kind == 'tcp':
sources[name] = TCPAVSource(name, port, has_audio, has_video,
force_num_streams)
else:
if kind != 'test':
log.warning('Unknown value "%s" in attribute "kind" in definition of source %s (see section [source.%s] in configuration). Falling back to kind "test".', kind, name, name)
sources[name] = TestSource(name, has_audio, has_video)
return sources[name]
def restart_source(name):
assert False, "restart_source() not implemented"
|
Use test sources as the default in configuration (and improve warning message, when falling back to)
|
Use test sources as the default in configuration (and improve warning message, when falling back to)
|
Python
|
mit
|
voc/voctomix,voc/voctomix
|
3d9d1b10149655030d172de38f9caeb5906d093c
|
source/lucidity/__init__.py
|
source/lucidity/__init__.py
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
from .template import Template
|
# :coding: utf-8
# :copyright: Copyright (c) 2013 Martin Pengelly-Phillips
# :license: See LICENSE.txt.
import os
import uuid
import imp
from .template import Template
def discover_templates(paths=None, recursive=True):
'''Search *paths* for mount points and load templates from them.
*paths* should be a list of filesystem paths to search for mount points.
If not specified will try to use value from environment variable
:envvar:`LUCIDITY_TEMPLATE_PATH`.
A mount point is a Python file that defines a 'register' function. The
function should return a list of instantiated
:py:class:`~lucidity.template.Template` objects.
If *recursive* is True (the default) then all directories under a path
will also be searched.
'''
templates = []
if paths is None:
paths = os.environ.get('LUCIDITY_TEMPLATE_PATH', '').split(os.pathsep)
for path in paths:
for base, directories, filenames in os.walk(path):
for filename in filenames:
_, extension = os.path.splitext(filename)
if extension != '.py':
continue
module_path = os.path.join(base, filename)
module_name = uuid.uuid4().hex
module = imp.load_source(module_name, module_path)
try:
registered = module.register()
except AttributeError:
pass
else:
if registered:
templates.extend(registered)
if not recursive:
del directories[:]
return templates
|
Add helper method to load templates from disk.
|
Add helper method to load templates from disk.
|
Python
|
apache-2.0
|
4degrees/lucidity,nebukadhezer/lucidity,BigRoy/lucidity
|
0e2e30382def1f911987ca22fce5adc6c6b73fb6
|
airship/__init__.py
|
airship/__init__.py
|
import os
import json
from flask import Flask, render_template
def channels_json(station, escaped=False):
channels = [{"name": channel} for channel in station.channels()]
jsonbody = json.dumps(channels)
if escaped:
jsonbody = jsonbody.replace("</", "<\\/")
return jsonbody
def make_airship(station):
app = Flask(__name__)
@app.route("/")
def index():
return render_template("index.html", channels_json=channels_json(station, True))
@app.route("/channels")
def list_channels():
return channels_json(station)
@app.route("/grefs/<channel>")
def list_grefs(channel):
return
return app
|
import os
import json
from flask import Flask, render_template
def jsonate(obj, escaped):
jsonbody = json.dumps(obj)
if escaped:
jsonbody = jsonbody.replace("</", "<\\/")
return jsonbody
def channels_json(station, escaped=False):
channels = [{"name": channel} for channel in station.channels()]
return jsonate(channels, escaped)
def grefs_json(station, channel, escaped=False):
grefs = [{"name": gref} for gref in station.grefs(channel)]
return jsonate(grefs, escaped)
def make_airship(station):
app = Flask(__name__)
@app.route("/")
def index():
return render_template("index.html", channels_json=channels_json(station, True))
@app.route("/channels")
def list_channels():
return channels_json(station)
@app.route("/grefs/<channel>")
def list_grefs(channel):
return grefs_json(station, channel)
return app
|
Fix the grefs route in the airship server
|
Fix the grefs route in the airship server
|
Python
|
mit
|
richo/airship,richo/airship,richo/airship
|
6f83b42ae9aaf9cd23bc8d15b66157a75bbc3aed
|
util/createCollector.py
|
util/createCollector.py
|
import os
import sys
import subprocesses
THIS_SCRIPT_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
fuzzManagerPath = os.path.abspath(os.path.join(THIS_SCRIPT_DIRECTORY, os.pardir, os.pardir, 'FuzzManager'))
if not os.path.exists(fuzzManagerPath):
print "Please check out Lithium and FuzzManager side-by-side with funfuzz. Links in https://github.com/MozillaSecurity/funfuzz/#setup"
sys.exit(2)
sys.path.append(fuzzManagerPath)
from Collector.Collector import Collector
def createCollector(tool):
assert tool == "DOMFuzz" or tool == "jsfunfuzz"
sigCacheDir = os.path.join(subprocesses.normExpUserPath("~"), "fuzzsigcache")
if not os.path.exists(sigCacheDir):
os.mkdir(sigCacheDir)
collector = Collector(tool=tool, sigCacheDir=sigCacheDir)
return collector
def printCrashInfo(crashInfo):
if crashInfo.createShortSignature() != "No crash detected":
print
print "crashInfo:"
print " Short Signature: " + crashInfo.createShortSignature()
print " Class name: " + crashInfo.__class__.__name__ # "NoCrashInfo", etc
print " Stack trace: " + repr(crashInfo.backtrace)
print
def printMatchingSignature(match):
print "Matches signature in FuzzManager:"
print " Signature description: " + match[1].get('shortDescription')
print " Signature file: " + match[0]
print
|
import os
import sys
THIS_SCRIPT_DIRECTORY = os.path.dirname(os.path.abspath(__file__))
fuzzManagerPath = os.path.abspath(os.path.join(THIS_SCRIPT_DIRECTORY, os.pardir, os.pardir, 'FuzzManager'))
if not os.path.exists(fuzzManagerPath):
print "Please check out Lithium and FuzzManager side-by-side with funfuzz. Links in https://github.com/MozillaSecurity/funfuzz/#setup"
sys.exit(2)
sys.path.append(fuzzManagerPath)
from Collector.Collector import Collector
def createCollector(tool):
assert tool == "DOMFuzz" or tool == "jsfunfuzz"
collector = Collector(tool=tool)
return collector
def printCrashInfo(crashInfo):
if crashInfo.createShortSignature() != "No crash detected":
print
print "crashInfo:"
print " Short Signature: " + crashInfo.createShortSignature()
print " Class name: " + crashInfo.__class__.__name__ # "NoCrashInfo", etc
print " Stack trace: " + repr(crashInfo.backtrace)
print
def printMatchingSignature(match):
print "Matches signature in FuzzManager:"
print " Signature description: " + match[1].get('shortDescription')
print " Signature file: " + match[0]
print
|
Use the signature (cache) directory specified in .fuzzmanagerconf
|
Use the signature (cache) directory specified in .fuzzmanagerconf
|
Python
|
mpl-2.0
|
nth10sd/funfuzz,MozillaSecurity/funfuzz,MozillaSecurity/funfuzz,MozillaSecurity/funfuzz,nth10sd/funfuzz,nth10sd/funfuzz
|
28add39cbd964d9a26ff8f12c1ee3668b765c7a7
|
perforce/p4login.py
|
perforce/p4login.py
|
#!/usr/bin/env python3
"""Script to automate logging into Perforce.
Use P4API to log in to the server.
"""
import P4
def main():
"""Log in to the Perforce server."""
# Yep, pretty much that easy.
p4 = P4.P4()
p4.connect()
p4.run_login()
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
"""Script to automate logging into Perforce."""
import subprocess
import sys
def main():
"""Log in to the Perforce server."""
# Yep, pretty much that easy.
result = subprocess.check_output(['p4', 'set', '-q', 'P4PASSWD'])
passwd = result.strip().split('=')[1]
proc = subprocess.Popen(['p4', 'login'], stdin=subprocess.PIPE)
proc.communicate(passwd)
sys.exit(proc.returncode)
if __name__ == "__main__":
main()
|
Use p4 cli instead of p4 api
|
Use p4 cli instead of p4 api
|
Python
|
bsd-3-clause
|
nlfiedler/devscripts,nlfiedler/devscripts
|
ad42da9cb3c944f5bd5e953f947a0be96a4b8e17
|
astropy/samp/tests/test_hub_proxy.py
|
astropy/samp/tests/test_hub_proxy.py
|
from astropy.samp import conf
from astropy.samp.hub import SAMPHubServer
from astropy.samp.hub_proxy import SAMPHubProxy
def setup_module(module):
conf.use_internet = False
class TestHubProxy:
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False, mode='multiple', pool_size=1)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect(hub=self.hub, pool_size=1)
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
def test_custom_lockfile(tmpdir):
lockfile = tmpdir.join('.samptest').realpath().strpath
hub = SAMPHubServer(web_profile=False, lockfile=lockfile, pool_size=1)
hub.start()
proxy = SAMPHubProxy()
proxy.connect(hub=hub, pool_size=1)
hub.stop()
|
from astropy.samp import conf
from astropy.samp.hub import SAMPHubServer
from astropy.samp.hub_proxy import SAMPHubProxy
def setup_module(module):
conf.use_internet = False
class TestHubProxy:
def setup_method(self, method):
self.hub = SAMPHubServer(web_profile=False, mode='multiple', pool_size=1)
self.hub.start()
self.proxy = SAMPHubProxy()
self.proxy.connect(hub=self.hub, pool_size=1)
def teardown_method(self, method):
if self.proxy.is_connected:
self.proxy.disconnect()
self.hub.stop()
def test_is_connected(self):
assert self.proxy.is_connected
def test_disconnect(self):
self.proxy.disconnect()
def test_ping(self):
self.proxy.ping()
def test_registration(self):
result = self.proxy.register(self.proxy.lockfile["samp.secret"])
self.proxy.unregister(result['samp.private-key'])
def test_custom_lockfile(tmp_path):
lockfile = str(tmp_path / '.samptest')
hub = SAMPHubServer(web_profile=False, lockfile=lockfile, pool_size=1)
hub.start()
proxy = SAMPHubProxy()
proxy.connect(hub=hub, pool_size=1)
hub.stop()
|
Replace `tmpdir` with `tmp_path` in `samp` tests
|
Replace `tmpdir` with `tmp_path` in `samp` tests
|
Python
|
bsd-3-clause
|
pllim/astropy,mhvk/astropy,lpsinger/astropy,lpsinger/astropy,mhvk/astropy,larrybradley/astropy,pllim/astropy,lpsinger/astropy,lpsinger/astropy,lpsinger/astropy,astropy/astropy,pllim/astropy,astropy/astropy,larrybradley/astropy,pllim/astropy,astropy/astropy,mhvk/astropy,larrybradley/astropy,larrybradley/astropy,astropy/astropy,astropy/astropy,mhvk/astropy,pllim/astropy,mhvk/astropy,larrybradley/astropy
|
b4a92b80d2cfe316d89dbecdf1026486d5288fe0
|
simulator-perfect.py
|
simulator-perfect.py
|
#!/usr/bin/env python3
import timer
import sys
import utils
def simulate():
# A set of files already in the storage
seen = set()
# The size of the all uploads combined (deduplicated or not)
total_in = 0
# The size of the data sent to the service
data_in = 0
tmr = timer.Timer()
for (i, (hsh, size)) in enumerate(utils.read_upload_stream()):
total_in += size
if hsh not in seen:
data_in += size
seen.add(hsh)
if (i + 1) % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, mem[%s]" % (
utils.num_fmt(i),
1 - data_in / total_in,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
dedup_percentage = 1 - data_in / total_in
print("Simulation complete. stored=%s, uploaded=%s, dedup_percentage=%f" % (
utils.sizeof_fmt(data_in), utils.sizeof_fmt(total_in), dedup_percentage),
file=sys.stderr)
if __name__ == "__main__":
simulate()
|
#!/usr/bin/env python3
import timer
import sys
import utils
def simulate():
# A set of files already in the storage
seen = set()
# The size of the all uploads combined (deduplicated or not)
total_in = 0
# The size of the data sent to the service
data_in = 0
tmr = timer.Timer()
for (i, (hsh, size)) in enumerate(utils.read_upload_stream()):
total_in += size
if hsh not in seen:
data_in += size
seen.add(hsh)
if (i + 1) % utils.REPORT_FREQUENCY == 0:
print("%s uploads, percentage %.4f, time %s, mem[%s]" % (
utils.num_fmt(i),
1 - data_in / total_in,
tmr.elapsed_str,
utils.get_mem_info()
), file=sys.stderr)
print("%i,%i" % (data_in, total_in))
dedup_percentage = 1 - data_in / total_in
print("Simulation complete. stored=%s, uploaded=%s, dedup_percentage=%f" % (
utils.sizeof_fmt(data_in), utils.sizeof_fmt(total_in), dedup_percentage),
file=sys.stderr)
if __name__ == "__main__":
simulate()
|
Make perfect simulator print data after each upload
|
Make perfect simulator print data after each upload
|
Python
|
apache-2.0
|
sjakthol/dedup-simulator,sjakthol/dedup-simulator
|
1a871cf3bf1fd40342e490599361d57017cdcc65
|
backend/breach/tests/test_strategy.py
|
backend/breach/tests/test_strategy.py
|
from mock import patch
from breach.tests.base import RuptureTestCase
from breach.strategy import Strategy
class StrategyTestCase(RuptureTestCase):
@patch('breach.strategy.Sniffer')
def test_first_round(self, Sniffer):
strategy0 = Strategy(self.victim)
work0 = strategy0.get_work()
self.assertEqual(
work0['url'],
'https://di.uoa.gr/?breach=^testsecret0^1^3^2^5^4^7^6^9^8^'
)
self.assertTrue('amount' in work0)
self.assertTrue('timeout' in work0)
strategy1 = Strategy(self.victim)
work1 = strategy1.get_work()
self.assertEqual(
work1['url'],
'https://di.uoa.gr/?breach=^testsecret1^0^3^2^5^4^7^6^9^8^'
)
def test_same_round_same_batch(self):
pass
def test_same_round_different_batch(self):
pass
def test_advance_round(self):
pass
|
from mock import patch
from breach.tests.base import RuptureTestCase
from breach.strategy import Strategy
class StrategyTestCase(RuptureTestCase):
@patch('breach.strategy.Sniffer')
def test_first_round(self, Sniffer):
strategy0 = Strategy(self.victim)
work0 = strategy0.get_work()
self.assertEqual(
work0['url'],
'https://di.uoa.gr/?breach=^testsecret0^1^'
)
self.assertTrue('amount' in work0)
self.assertTrue('timeout' in work0)
strategy1 = Strategy(self.victim)
work1 = strategy1.get_work()
self.assertEqual(
work1['url'],
'https://di.uoa.gr/?breach=^testsecret1^0^'
)
def test_same_round_same_batch(self):
pass
def test_same_round_different_batch(self):
pass
def test_advance_round(self):
pass
|
Update first round test, create huffman based on knownalphabet
|
Update first round test, create huffman based on knownalphabet
|
Python
|
mit
|
dionyziz/rupture,dimriou/rupture,esarafianou/rupture,esarafianou/rupture,dimkarakostas/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dimkarakostas/rupture,dionyziz/rupture,esarafianou/rupture,dimkarakostas/rupture,dimkarakostas/rupture,dimriou/rupture,dimriou/rupture,dimriou/rupture,dimriou/rupture,dionyziz/rupture,dionyziz/rupture
|
91b01e37897ea20f6486118e4dd595439f81006b
|
ktane/Model/Modules/WiresModule.py
|
ktane/Model/Modules/WiresModule.py
|
from enum import Enum
from .AbstractModule import AbstractModule, ModuleState
class WireColors(Enum):
MISSING = 'missing'
BLACK = 'black'
RED = 'red'
WHITE = 'white'
BLUE = 'blue'
YELLOW = 'yellow'
def get_correct_wire(sequence, boolpar):
wires_count = get_wires_count(sequence)
def get_wires_count(sequence):
return len([1 for x in sequence if x != WireColors.MISSING.value])
def get_nth_wire_position(sequence, n):
NotImplementedError
class WiresModule(AbstractModule):
def export_to_string(self):
raise NotImplementedError
def import_from_string(self, string):
raise NotImplementedError
def translate_to_commands(self):
raise NotImplementedError
def __init__(self):
super().__init__()
self.name = "WiresModule"
self.type_number = 10
self.state = ModuleState.Armed
|
from enum import Enum
from .AbstractModule import AbstractModule, ModuleState
class WireColors(Enum):
MISSING = 'missing'
BLACK = 'black'
RED = 'red'
WHITE = 'white'
BLUE = 'blue'
YELLOW = 'yellow'
def get_correct_wire(sequence, boolpar):
wires_count = get_wires_count(sequence)
def get_wires_count(sequence):
return len([1 for x in sequence if x != WireColors.MISSING.value])
def get_nth_wire_position(sequence, n):
counter = 0
for idx, value in enumerate(sequence):
if value != WireColors.MISSING.value:
counter += 1
if counter == n:
return idx
return None
class WiresModule(AbstractModule):
def export_to_string(self):
raise NotImplementedError
def import_from_string(self, string):
raise NotImplementedError
def translate_to_commands(self):
raise NotImplementedError
def __init__(self):
super().__init__()
self.name = "WiresModule"
self.type_number = 10
self.state = ModuleState.Armed
|
Implement Wires helper method get_nth_wire_position
|
Implement Wires helper method get_nth_wire_position
|
Python
|
mit
|
hanzikl/ktane-controller
|
d8d77d4dd98d9287be8a98f0024e5f458bef2b66
|
tests/test_time.py
|
tests/test_time.py
|
from immobilus import immobilus
from immobilus.logic import _datetime_to_utc_timestamp
from datetime import datetime
from time import time
def test_time_function():
dt = datetime(1970, 1, 1)
assert _datetime_to_utc_timestamp(dt) == 0.0
assert type(_datetime_to_utc_timestamp(dt)) is float
assert time() != _datetime_to_utc_timestamp(dt)
with immobilus(dt):
assert time() == _datetime_to_utc_timestamp(dt)
assert time() != _datetime_to_utc_timestamp(dt)
|
from immobilus import immobilus
from immobilus.logic import _datetime_to_utc_timestamp
from datetime import datetime
from time import time
def test_time_function():
dt = datetime(1970, 1, 1)
timestamp = _datetime_to_utc_timestamp(dt)
assert timestamp == 0.0
assert type(timestamp) is float
assert time() != timestamp
with immobilus(dt):
assert time() == timestamp
assert time() != timestamp
|
Tidy test - reuse timestamp
|
Tidy test - reuse timestamp
|
Python
|
apache-2.0
|
pokidovea/immobilus
|
a9f55a57559a6647c451d38893624be4109be23b
|
Spiders.py
|
Spiders.py
|
'''
Created on 2 сент. 2016 г.
@author: garet
'''
class BaseSpider():
def __init__(self):
pass
def AddUrls(self, urls):
pass
def Routing(self, url):
pass
def SaveCache(self, url, data=None):
pass
def GetCache(self, url):
pass
def Run(self):
pass
|
'''
Created on 2 сент. 2016 г.
@author: garet
'''
import queue
import sqlite3
class BaseSpider():
def __init__(self):
pass
def AddUrls(self, urls):
pass
def Routing(self, url):
pass
def SaveCache(self, url, data=None):
pass
def GetCache(self, url):
pass
def Run(self):
pass
class QueueUrls():
def __init__(self):
self._urls_queue = queue.Queue()
self._urls_set = set()
def AddUrls(self, urls):
for url in urls:
if url not in self._urls_set:
self._urls_queue.put(url)
self._urls_set.add(url)
pass
def ExistUrl(self, url):
if url in self._urls_set:
return True
return False
def GetUrl(self):
return self._urls_queue.get()
class SqliteCache():
def __init__(self, db_name):
self.db_name = db_name
def InitDB(self):
file = self.db_name + '.sqlite'
self._db = sqlite3.connect(file)
self._cursor = self._db.cursor()
# Create table
sql = """
CREATE TABLE IF NOT EXISTS tbl_urls
(
url text primary key not null,
html text,
time timestamp DEFAULT CURRENT_TIMESTAMP
);"""
self._cursor.execute(sql)
def Get(self, url):
if self._cursor == None:
self.InitDB()
sql = """SELECT * FROM tbl_urls WHERE url=?;"""
self._cursor.execute(sql, (url,))
return self._cursor.fetchone()
def Set(self, url, data):
if self._cursor == None:
self.InitDB()
sql = """INSERT OR REPLACE INTO tbl_urls(url, html)
VALUES (?,?);"""
self._cursor.execute(sql, (url, data) )
self._db.commit()
|
Add SqliteCache for html raw data. Add QueueUrls for list urls.
|
Add SqliteCache for html raw data. Add QueueUrls for list urls.
|
Python
|
bsd-3-clause
|
SaltusVita/ReoGrab
|
20eb711953a8981e7b73b59613018514157e352a
|
spyder_terminal/__init__.py
|
spyder_terminal/__init__.py
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 2, 3)
__version__ = '.'.join(map(str, VERSION_INFO))
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Contributors
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Spyder Terminal Plugin."""
from .terminalplugin import TerminalPlugin as PLUGIN_CLASS
PLUGIN_CLASS
VERSION_INFO = (0, 3, 0, 'dev0')
__version__ = '.'.join(map(str, VERSION_INFO))
|
Set development version number to v0.3.0.dev0
|
Set development version number to v0.3.0.dev0
|
Python
|
mit
|
spyder-ide/spyder-terminal,andfoy/spyder-terminal,spyder-ide/spyder-terminal,andfoy/spyder-terminal,spyder-ide/spyder-terminal,andfoy/spyder-terminal,spyder-ide/spyder-terminal
|
caf18b1cd8923e6d070d2652f9969dabba50e81b
|
lotteryResult.py
|
lotteryResult.py
|
#!/usr/bin/env python
import sys
import json
import requests
import hashlib
def hashToNumber(txhash,total):
result = long(txhash, 16) % total
return result
def getBlocktxs(blockhash, number, total, startnum):
url = "https://blockexplorer.com/api/block/" + blockhash
params = dict()
resp = requests.get(url=url, params=params)
data = json.loads(resp.text)
if "tx" in data:
if len(data["tx"]) >= number :
print ("%d Transactions for %d results." % (len(data["tx"]), number) )
for i in range(number):
txhash=data["tx"][i];
r = hashToNumber (txhash, total) + startnum
print ( "result %d is %d" % (i, r) )
else:
print ("only %d Transactions for %d results." % (len(data["tx"]), number) )
else:
print "invalid block data"
def main():
if len(sys.argv) == 5:
blockhash = sys.argv[1]
number = sys.argv[2]
total= sys.argv[3]
startnum = sys.argv[4]
getBlocktxs(blockhash, int(number), int(total), int(startnum))
else:
print "usage: ./lotteryResult.py blockhash number total startnum"
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
import sys
import json
import requests
def hashToNumber(txhash, total):
result = long(txhash, 16) % total
return result
def getBlocktxs(blockhash, number, total, startnum):
url = "https://blockexplorer.com/api/block/" + blockhash
params = dict()
resp = requests.get(url=url, params=params, timeout=5)
data = json.loads(resp.text)
if "tx" in data:
if len(data["tx"]) >= number:
print ("%d Transactions for %d results." % (len(data["tx"]), number))
for i in range(number):
txhash=data["tx"][i]
r = hashToNumber(txhash, total) + startnum
print ("result %d is %d" % (i, r))
else:
print ("only %d Transactions for %d results." % (len(data["tx"]), number))
else:
print "invalid block data"
def main():
if len(sys.argv) == 5:
blockhash = sys.argv[1]
number = sys.argv[2]
total= sys.argv[3]
startnum = sys.argv[4]
getBlocktxs(blockhash, int(number), int(total), int(startnum))
else:
print "usage: ./lotteryResult.py blockhash number total startnum"
if __name__ == '__main__':
main()
|
Format code with pep8 and add timeout to requests
|
Format code with pep8 and add timeout to requests
|
Python
|
mit
|
planetcoder/readerLottery
|
5ec99974a6611cc5993bf56f3f0f4e299a89e29d
|
txircd/modules/cmd_pass.py
|
txircd/modules/cmd_pass.py
|
from twisted.words.protocols import irc
from txircd.modbase import Command, Module
class PassCommand(Command, Module):
def onUse(self, user, params):
if user.registered == 0:
user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return
if not params:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "PASS", ":Not enough parameters")
return
user.password = params[0]
def onRegister(self, user):
if self.ircd.server_password and self.ircd.server_password != user.password:
user.sendMessage("ERROR", ":Closing link: ({}@{}) [Access denied]".format(user.username, user.hostname), to=None, prefix=None)
return False
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.passcmd = PassCommand()
def spawn():
return {
"actions": {
"register": [self.passcmd]
},
"commands": {
"PASS": self.passcmd
}
}
def cleanup():
self.ircd.actions.remove(self.passcmd)
del self.ircd.commands["PASS"]
del self.passcmd
|
from twisted.words.protocols import irc
from txircd.modbase import Command, Module
class PassCommand(Command, Module):
def onUse(self, user, params):
if user.registered == 0:
user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return
if not params:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "PASS", ":Not enough parameters")
return
user.password = params[0]
def onRegister(self, user):
if self.ircd.server_password and self.ircd.server_password != user.password:
user.sendMessage("ERROR", ":Closing link: ({}@{}) [Access denied]".format(user.username, user.hostname), to=None, prefix=None)
return False
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
self.passcmd = PassCommand()
def spawn():
return {
"actions": {
"register": [self.passcmd.onRegister]
},
"commands": {
"PASS": self.passcmd
}
}
def cleanup():
self.ircd.actions.remove(self.passcmd)
del self.ircd.commands["PASS"]
del self.passcmd
|
Add the function (not class) to actions as is now required
|
Add the function (not class) to actions as is now required
|
Python
|
bsd-3-clause
|
DesertBus/txircd,Heufneutje/txircd,ElementalAlchemist/txircd
|
1dd681517fd1831f3990caa043ea8220f5d1bb90
|
app/app.py
|
app/app.py
|
#!/usr/bin/env python3.5
import os,time,asyncio,json
from datetime import datetime
from aiohttp import web
import logging;logging.basicConfig(level=logging.INFO)
from tools.log import Log
from tools.httptools import Middleware,Route
from tools.template import Template
from models import *
from tools.config import Config
@Route.get('/')
def index():
user=yield from User.findall()
print(user)
return Template.render('index.html')
@Route.get('/user/{id}/comment/{comment}')
def user(id,comment):
return '<h1>%s,%s</h1>'%(id,comment)
@asyncio.coroutine
def init(loop):
print(Middleware.allmiddlewares())
app=web.Application(loop=loop,middlewares=Middleware.allmiddlewares())
Template(app)
Route.register_route(app)
pool=yield from create_pool(loop)
srv=yield from loop.create_server(app.make_handler(),'127.0.0.1',8000)
logging.info('server started at http://127.0.0.1:8000')
Log.info("server startd at http://127.0.0.1:8000")
return srv
if __name__=="__main__":
loop=asyncio.get_event_loop()
loop.run_until_complete(init(loop))
loop.run_forever()
|
#!/usr/bin/env python3.5
import os,time,asyncio,json
from datetime import datetime
from aiohttp import web
import logging;logging.basicConfig(level=logging.INFO)
from tools.log import Log
from tools.httptools import Middleware,Route
from tools.template import Template
from models import *
from tools.config import Config
@Route.get('/')
def index():
user=yield from User.findall()
print(user)
return Template('index.html').render()
@Route.get('/user/{id}/comment/{comment}')
def user(id,comment):
return '<h1>%s,%s</h1>'%(id,comment)
@asyncio.coroutine
def init(loop):
print(Middleware.allmiddlewares())
app=web.Application(loop=loop,middlewares=Middleware.allmiddlewares())
Template.init(app)
Route.register_route(app)
pool=yield from create_pool(loop)
srv=yield from loop.create_server(app.make_handler(),'127.0.0.1',8000)
logging.info('server started at http://127.0.0.1:8000')
Log.info("server startd at http://127.0.0.1:8000")
return srv
if __name__=="__main__":
loop=asyncio.get_event_loop()
loop.run_until_complete(init(loop))
loop.run_forever()
|
Change Template() to Template.init() in init function
|
Change Template() to Template.init() in init function
|
Python
|
mit
|
free-free/pyblog,free-free/pyblog,free-free/pyblog,free-free/pyblog
|
178474ceb7227313d039666db3c235c2ee18251e
|
astropy/tests/image_tests.py
|
astropy/tests/image_tests.py
|
import matplotlib
from matplotlib import pyplot as plt
from astropy.utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
ROOT = "http://{server}/testing/astropy/2018-10-24T12:38:34.134556/{mpl_version}/"
IMAGE_REFERENCE_DIR = (ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x') + ',' +
ROOT.format(server='www.astropy.org/astropy-data', mpl_version=MPL_VERSION[:3] + '.x'))
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
|
import matplotlib
from matplotlib import pyplot as plt
from astropy.utils.decorators import wraps
MPL_VERSION = matplotlib.__version__
# The developer versions of the form 3.1.x+... contain changes that will only
# be included in the 3.2.x release, so we update this here.
if MPL_VERSION[:3] == '3.1' and '+' in MPL_VERSION:
MPL_VERSION = '3.2'
ROOT = "http://{server}/testing/astropy/2018-10-24T12:38:34.134556/{mpl_version}/"
IMAGE_REFERENCE_DIR = (ROOT.format(server='data.astropy.org', mpl_version=MPL_VERSION[:3] + '.x') + ',' +
ROOT.format(server='www.astropy.org/astropy-data', mpl_version=MPL_VERSION[:3] + '.x'))
def ignore_matplotlibrc(func):
# This is a decorator for tests that use matplotlib but not pytest-mpl
# (which already handles rcParams)
@wraps(func)
def wrapper(*args, **kwargs):
with plt.style.context({}, after_reset=True):
return func(*args, **kwargs)
return wrapper
|
Use 3.2.x reference images for developer version of Matplotlib
|
Use 3.2.x reference images for developer version of Matplotlib
|
Python
|
bsd-3-clause
|
pllim/astropy,StuartLittlefair/astropy,mhvk/astropy,stargaser/astropy,stargaser/astropy,mhvk/astropy,mhvk/astropy,saimn/astropy,aleksandr-bakanov/astropy,astropy/astropy,lpsinger/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,MSeifert04/astropy,StuartLittlefair/astropy,saimn/astropy,bsipocz/astropy,dhomeier/astropy,mhvk/astropy,pllim/astropy,larrybradley/astropy,dhomeier/astropy,MSeifert04/astropy,astropy/astropy,astropy/astropy,astropy/astropy,saimn/astropy,pllim/astropy,saimn/astropy,dhomeier/astropy,StuartLittlefair/astropy,stargaser/astropy,larrybradley/astropy,stargaser/astropy,StuartLittlefair/astropy,saimn/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,bsipocz/astropy,bsipocz/astropy,larrybradley/astropy,lpsinger/astropy,lpsinger/astropy,larrybradley/astropy,dhomeier/astropy,MSeifert04/astropy,lpsinger/astropy,astropy/astropy,pllim/astropy,lpsinger/astropy,mhvk/astropy,pllim/astropy,aleksandr-bakanov/astropy,bsipocz/astropy,MSeifert04/astropy,larrybradley/astropy
|
41a83c6742f0e688dad5a98761c0f0415c77bac9
|
outgoing_mail.py
|
outgoing_mail.py
|
#!/usr/bin/env python
#
# Copyright 2010 Eric Entzel <eric@ubermac.net>
#
from google.appengine.api import mail
from google.appengine.ext.webapp import template
import os
from_address = '"EventBot" <admin@myeventbot.com>'
def send(to, template_name, values):
path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name)
message = mail.EmailMessage(sender=from_address, to=to)
message.subject = template.render(path + '.subject', values)
message.body = template.render(path + '.body', values)
message.send()
|
#!/usr/bin/env python
#
# Copyright 2010 Eric Entzel <eric@ubermac.net>
#
from google.appengine.api import mail
from google.appengine.ext.webapp import template
from google.appengine.api import memcache
from datetime import datetime
import os
from_address = '"EventBot" <admin@myeventbot.com>'
email_interval = 10
def send(to, template_name, values):
"""Send an email to the specified address using a template. No
more than one email per EMAIL_INTERVAL seconds will be sent to any
given address.
"""
last_action = memcache.get(to, namespace='last_action')
if last_action != None:
return
path = os.path.join(os.path.dirname(__file__), 'email_templates', template_name)
message = mail.EmailMessage(sender=from_address, to=to)
message.subject = template.render(path + '.subject', values)
message.body = template.render(path + '.body', values)
message.send()
memcache.set(to, datetime.now(), time=email_interval, namespace='last_action')
|
Use memcache to rate-limit outgoing emails.
|
Use memcache to rate-limit outgoing emails.
|
Python
|
mit
|
eentzel/myeventbot,eentzel/myeventbot,eentzel/myeventbot,eentzel/myeventbot,eentzel/myeventbot
|
cf7b2bb0569431e97cc316dc41924c78806af5a9
|
drivers/vnfm/gvnfm/gvnfmadapter/driver/pub/config/config.py
|
drivers/vnfm/gvnfm/gvnfmadapter/driver/pub/config/config.py
|
# Copyright 2017 ZTE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [MSB]
MSB_SERVICE_IP = '127.0.0.1'
MSB_SERVICE_PORT = '10080'
# [register]
REG_TO_MSB_WHEN_START = True
REG_TO_MSB_REG_URL = "/openoapi/microservices/v1/services"
REG_TO_MSB_REG_PARAM = {
"serviceName": "ztevmanagerdriver",
"version": "v1",
"url": "/openoapi/ztevmanagerdriver/v1",
"protocol": "REST",
"visualRange": "1",
"nodes": [{
"ip": "127.0.0.1",
"port": "8410",
"ttl": 0
}]
}
|
# Copyright 2017 ZTE Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [MSB]
MSB_SERVICE_IP = '127.0.0.1'
MSB_SERVICE_PORT = '10080'
# [register]
REG_TO_MSB_WHEN_START = True
REG_TO_MSB_REG_URL = "/openoapi/microservices/v1/services"
REG_TO_MSB_REG_PARAM = {
"serviceName": "gvnfmdriver",
"version": "v1",
"url": "/openoapi/gvnfmdriver/v1",
"protocol": "REST",
"visualRange": "1",
"nodes": [{
"ip": "127.0.0.1",
"port": "8484",
"ttl": 0
}]
}
|
Add code framework of gvnfm-driver
|
Add code framework of gvnfm-driver
Change-Id: Ibb0dd98a73860f538599328b718040df5f3f7007
Issue-Id: NFVO-132
Signed-off-by: fujinhua <302f4934d283b6f50163b4a7fd9b6c869e0ad64e@zte.com.cn>
|
Python
|
apache-2.0
|
open-o/nfvo,open-o/nfvo,open-o/nfvo,open-o/nfvo,open-o/nfvo
|
c4c71dd65675f904c34a0d86a80d5abe7bafdbb1
|
txircd/modules/cmd_user.py
|
txircd/modules/cmd_user.py
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class UserCommand(Command):
def onUse(self, user, params):
if user.registered == 0:
self.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return
if params and len(params) < 4:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Not enough parameters")
if not user.username:
user.registered -= 1
user.username = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
if not user.username:
user.registered += 1
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Your username is not valid")
return
user.realname = params[3]
if user.registered == 0:
user.register()
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn():
return {
"commands": {
"USER": UserCommand()
}
}
def cleanup():
del self.ircd.commands["USER"]
|
from twisted.words.protocols import irc
from txircd.modbase import Command
class UserCommand(Command):
def onUse(self, user, data):
if not user.username:
user.registered -= 1
user.username = data["ident"]
user.realname = data["gecos"]
if user.registered == 0:
user.register()
def processParams(self, user, params):
if user.registered == 0:
user.sendMessage(irc.ERR_ALREADYREGISTRED, ":Unauthorized command (already registered)")
return {}
if params and len(params) < 4:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Not enough parameters")
return {}
ident = filter(lambda x: x in string.ascii_letters + string.digits + "-_", params[0])[:12]
if not ident:
user.sendMessage(irc.ERR_NEEDMOREPARAMS, "USER", ":Your username is not valid")
return {}
return {
"user": user,
"ident": ident,
"gecos": params[3]
}
def Spawner(object):
def __init__(self, ircd):
self.ircd = ircd
def spawn():
return {
"commands": {
"USER": UserCommand()
}
}
def cleanup():
del self.ircd.commands["USER"]
|
Update the USER command to take advantage of core capabilities as well
|
Update the USER command to take advantage of core capabilities as well
|
Python
|
bsd-3-clause
|
ElementalAlchemist/txircd,Heufneutje/txircd,DesertBus/txircd
|
2c082afb4024cafb530ffab6a62cc6602e75e092
|
stock_request_picking_type/models/stock_request_order.py
|
stock_request_picking_type/models/stock_request_order.py
|
# Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockRequestOrder(models.Model):
_inherit = 'stock.request.order'
@api.model
def _get_default_picking_type(self):
return self.env['stock.picking.type'].search([
('code', '=', 'stock_request_order'),
('warehouse_id.company_id', 'in',
[self.env.context.get('company_id', self.env.user.company_id.id),
False])],
limit=1).id
picking_type_id = fields.Many2one(
'stock.picking.type', 'Operation Type',
default=_get_default_picking_type, required=True)
|
# Copyright 2019 Open Source Integrators
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class StockRequestOrder(models.Model):
_inherit = 'stock.request.order'
@api.model
def _get_default_picking_type(self):
return self.env['stock.picking.type'].search([
('code', '=', 'stock_request_order'),
('warehouse_id.company_id', 'in',
[self.env.context.get('company_id', self.env.user.company_id.id),
False])],
limit=1).id
picking_type_id = fields.Many2one(
'stock.picking.type', 'Operation Type',
default=_get_default_picking_type, required=True)
@api.onchange('warehouse_id')
def onchange_warehouse_picking_id(self):
if self.warehouse_id:
picking_type_id = self.env['stock.picking.type'].\
search([('code', '=', 'stock_request_order'),
('warehouse_id', '=', self.warehouse_id.id)], limit=1)
if picking_type_id:
self._origin.write({'picking_type_id': picking_type_id.id})
|
Synchronize Picking Type and Warehouse
|
[IMP] Synchronize Picking Type and Warehouse
[IMP] User write()
|
Python
|
agpl-3.0
|
Vauxoo/stock-logistics-warehouse,Vauxoo/stock-logistics-warehouse,Vauxoo/stock-logistics-warehouse
|
be0a078aa004470a450dddfa5a8e770b2e0ad97c
|
disk/datadog_checks/disk/__init__.py
|
disk/datadog_checks/disk/__init__.py
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from .__about__ import __version__
from .disk import Disk
all = [
'__version__', 'Disk'
]
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
from .__about__ import __version__ # NOQA F401
from .disk import Disk # NOQA F401
all = [
'__version__', 'Disk'
]
|
Fix flake8 issues and ignore unused
|
[Disk] Fix flake8 issues and ignore unused
|
Python
|
bsd-3-clause
|
DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core,DataDog/integrations-core
|
a4f41648cd0318694d551b067309539df475c2d7
|
tests/test_function_calls.py
|
tests/test_function_calls.py
|
from thinglang.runner import run
def test_function_calls():
assert run("""
thing Program
does start
number n = 1
number m = 2
Output.write("before n=", n, " m=", m)
self.say_hello()
Output.write("after n=", n, " m=", m)
does say_hello
number n = 3
Output.write("hello", n)
""").output == """
before n= 1 m= 2
hello 3
after n= 1 m= 2
""".strip()
|
from thinglang.runner import run
def test_zero_arg_function_calls():
assert run("""
thing Program
does start
number n = 1
number m = 2
Output.write("before n=", n, " m=", m)
self.say_hello()
Output.write("after n=", n, " m=", m)
does say_hello
number n = 3
Output.write("hello", n)
""").output == """
before n= 1 m= 2
hello 3
after n= 1 m= 2
""".strip()
def test_multi_arg_function_calls():
assert run("""
thing Program
does start
text arg_val = "some value"
self.say_hello(1, "hello", arg_val)
does say_hello with arg1, arg2, arg3
Output.write("in say_hello", arg1, arg2, arg3)
""").output == """
in say_hello 1 hello some value
""".strip()
|
Test for method argument calls
|
Test for method argument calls
|
Python
|
mit
|
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
|
ebbc68da19755097b2131d60bc9757ecb4dc6d4c
|
bundles/auth/models/token.py
|
bundles/auth/models/token.py
|
import hashlib
import random
import string
from ext.aboard.model import *
def set_value(token):
"""Randomly create and return a value."""
value = str(token.user) + "_" + str(token.timestamp)
len_rand = random.randint(20, 40)
to_pick = string.digits + string.ascii_letters + \
"_-+^$"
for i in range(len_rand):
value += random.choice(to_pick)
print("Private value", value)
# Hash the value
hashed = hashlib.sha512(value.encode())
value = hashed.hexdigest()
print("Public value", value)
return value
class Token(Model):
"""A token model."""
id = None
user = Integer()
timestamp = Integer()
value = String(pkey=True, default=set_value)
|
import hashlib
import random
import string
from ext.aboard.model import *
class Token(Model):
"""A token model."""
id = None
user = Integer()
timestamp = Integer()
value = String(pkey=True)
def __init__(self, user=None, timestamp=None):
value = None
if user and timestamp:
value = Token.get_token_value(user, timestamp)
Model.__init__(self, user=user, timestamp=timestamp, value=value)
@staticmethod
def get_token_value(user, timestamp):
"""Randomly create and return a token value."""
value = str(user) + "_" + str(timestamp)
len_rand = random.randint(20, 40)
to_pick = string.digits + string.ascii_letters + \
"_-+^$"
for i in range(len_rand):
value += random.choice(to_pick)
print("Private value", value)
# Hash the value
hashed = hashlib.sha512(value.encode())
value = hashed.hexdigest()
print("Public value", value)
return value
|
Use the Model constructor to generate a default value
|
[user] Use the Model constructor to generate a default value
|
Python
|
bsd-3-clause
|
v-legoff/pa-poc2,v-legoff/pa-poc2
|
1b40a51e371d10cc37f4d8f8c7557dbc741d690f
|
butterfly/ImageLayer/HDF5.py
|
butterfly/ImageLayer/HDF5.py
|
from Datasource import Datasource
import numpy as np
import h5py
class HDF5(Datasource):
pass
@classmethod
def load_tile(ds, query):
Sk,Sj,Si = query.all_scales
path = query.OUTPUT.INFO.PATH.VALUE
(K0,J0,I0),(K1,J1,I1) = query.source_bounds
with h5py.File(path) as fd:
vol = fd[fd.keys()[0]]
return vol[::Sk,::Sj,::Si]
|
from Datasource import Datasource
import numpy as np
import h5py
class HDF5(Datasource):
pass
@classmethod
def load_tile(ds, query):
Sk,Sj,Si = query.all_scales
path = query.OUTPUT.INFO.PATH.VALUE
z0,y0,x0 = query.index_zyx*query.blocksize
z1,y1,x1 = query.index_zyx*query.blocksize + query.blocksize
with h5py.File(path) as fd:
vol = fd[fd.keys()[0]]
return vol[z0:z1:Sk,y0:y1:Sj,x0:x1:Si]
|
Fix loading a whole tile into memory.
|
Fix loading a whole tile into memory.
|
Python
|
mit
|
Rhoana/butterfly,Rhoana/butterfly,Rhoana/butterfly2,Rhoana/butterfly,Rhoana/butterfly
|
78c5580d349d6bec0715a36c13437177a726f7ad
|
tests/test_isim.py
|
tests/test_isim.py
|
import pytest
def test_isim():
import os
import shutil
import tempfile
import yaml
from fusesoc.edatools import get_edatool
from edalize_common import compare_files, files, param_gen, tests_dir, vpi
(parameters, args) = param_gen(['plusarg', 'vlogdefine', 'vlogparam'])
work_root = tempfile.mkdtemp(prefix='isim_')
eda_api_file = os.path.join(work_root, 'test_isim_0.eda.yml')
with open(eda_api_file,'w') as f:
f.write(yaml.dump({'name' : 'test_isim_0',
'files' : files,
'parameters' : parameters,
'tool_options' : {'isim' : {
'fuse_options' : ['some', 'fuse_options'],
'isim_options' : ['a', 'few', 'isim_options']}},
'toplevel' : 'top_module',
'vpi' : vpi}))
backend = get_edatool('isim')(eda_api_file=eda_api_file)
backend.configure(args)
ref_dir = os.path.join(tests_dir, __name__)
compare_files(ref_dir, work_root,
['config.mk',
'Makefile',
'run_test_isim_0.tcl',
'test_isim_0.prj'])
dummy_exe = 'test_isim_0'
shutil.copy(os.path.join(ref_dir, dummy_exe),
os.path.join(work_root, dummy_exe))
backend.run([])
compare_files(ref_dir, work_root, ['run.cmd'])
|
import pytest
def test_isim():
import os
import shutil
from edalize_common import compare_files, setup_backend, tests_dir
ref_dir = os.path.join(tests_dir, __name__)
paramtypes = ['plusarg', 'vlogdefine', 'vlogparam']
name = 'test_isim_0'
tool = 'isim'
tool_options = {
'fuse_options' : ['some', 'fuse_options'],
'isim_options' : ['a', 'few', 'isim_options'],
}
(backend, args, work_root) = setup_backend(paramtypes, name, tool, tool_options)
backend.configure(args)
compare_files(ref_dir, work_root,
['config.mk',
'Makefile',
'run_test_isim_0.tcl',
'test_isim_0.prj'])
dummy_exe = 'test_isim_0'
shutil.copy(os.path.join(ref_dir, dummy_exe),
os.path.join(work_root, dummy_exe))
backend.run([])
compare_files(ref_dir, work_root, ['run.cmd'])
|
Reduce code duplication in isim test
|
Reduce code duplication in isim test
|
Python
|
bsd-2-clause
|
olofk/fusesoc,olofk/fusesoc,lowRISC/fusesoc,lowRISC/fusesoc
|
1e60c603321729c71895ac5dc19adc669cce4a72
|
tests/udev_test.py
|
tests/udev_test.py
|
#!/usr/bin/python
import unittest
import mock
class UdevTest(unittest.TestCase):
def setUp(self):
import blivet.udev
blivet.udev.os = mock.Mock()
blivet.udev.log = mock.Mock()
def test_udev_get_device(self):
import blivet.udev
devices = blivet.udev.global_udev.list_devices(subsystem="block")
for device in devices:
self.assertNotEqual(blivet.udev.get_device(device.sys_path), None)
def udev_settle_test(self):
import blivet.udev
blivet.udev.util = mock.Mock()
blivet.udev.settle()
self.assertTrue(blivet.udev.util.run_program.called)
def udev_trigger_test(self):
import blivet.udev
blivet.udev.util = mock.Mock()
blivet.udev.trigger()
self.assertTrue(blivet.udev.util.run_program.called)
if __name__ == "__main__":
unittest.main()
|
#!/usr/bin/python
import unittest
import mock
class UdevTest(unittest.TestCase):
def setUp(self):
import blivet.udev
self._blivet_os = blivet.udev.os
self._blivet_log = blivet.udev.log
self._blivet_util = blivet.udev.util
blivet.udev.os = mock.Mock()
blivet.udev.log = mock.Mock()
blivet.udev.util = mock.Mock()
def tearDown(self):
import blivet.udev
blivet.udev.log = self._blivet_log
blivet.udev.os = self._blivet_os
blivet.udev.util = self._blivet_util
def test_udev_get_device(self):
import blivet.udev
devices = blivet.udev.global_udev.list_devices(subsystem="block")
for device in devices:
self.assertNotEqual(blivet.udev.get_device(device.sys_path), None)
def udev_settle_test(self):
import blivet.udev
blivet.udev.settle()
self.assertTrue(blivet.udev.util.run_program.called)
def udev_trigger_test(self):
import blivet.udev
blivet.udev.trigger()
self.assertTrue(blivet.udev.util.run_program.called)
if __name__ == "__main__":
unittest.main()
|
Clean up mocking done by udev tests when finished.
|
Clean up mocking done by udev tests when finished.
|
Python
|
lgpl-2.1
|
dwlehman/blivet,rvykydal/blivet,AdamWill/blivet,rhinstaller/blivet,vpodzime/blivet,AdamWill/blivet,vojtechtrefny/blivet,vojtechtrefny/blivet,vpodzime/blivet,rvykydal/blivet,rhinstaller/blivet,dwlehman/blivet,jkonecny12/blivet,jkonecny12/blivet
|
c3029a3796437add90cdd6c0033be70fe5766a3a
|
mapit/middleware/__init__.py
|
mapit/middleware/__init__.py
|
import re
from .view_error import *
class JSONPMiddleware(object):
def process_response(self, request, response):
# If the response is a redirect, the callback will be dealt
# on the next request:
if response.status_code == 302:
return response
else:
if request.GET.get('callback') and re.match('[a-zA-Z0-9_$.]+$', request.GET.get('callback')):
response.content = request.GET.get('callback').encode('utf-8') + b'(' + response.content + b')'
response.status_code = 200 # Must return OK for JSONP to be processed
return response
|
import re
from .view_error import *
class JSONPMiddleware(object):
def process_response(self, request, response):
# If the response is a redirect, the callback will be dealt
# on the next request:
if response.status_code == 302:
return response
else:
cb = request.GET.get('callback')
if cb and re.match('[a-zA-Z0-9_$.]+$', cb):
cb = cb.encode('utf-8')
response.content = b'typeof ' + cb + b" === 'function' && " + cb + b'(' + response.content + b')'
response.status_code = 200 # Must return OK for JSONP to be processed
return response
|
Include typeof check in JSONP callback response.
|
Include typeof check in JSONP callback response.
This is more robust, and helps against attacks such as Rosetta Flash:
https://miki.it/blog/2014/7/8/abusing-jsonp-with-rosetta-flash/
|
Python
|
agpl-3.0
|
opencorato/mapit,chris48s/mapit,opencorato/mapit,Code4SA/mapit,Code4SA/mapit,opencorato/mapit,Code4SA/mapit,chris48s/mapit,chris48s/mapit
|
9c2075f13e2aa8ff7a5c4644208e8de17ebefbab
|
finding-geodesic-basins-with-scipy.py
|
finding-geodesic-basins-with-scipy.py
|
# IPython log file
import numpy as np
from scipy import sparse
from skimage import graph
from skimage.graph import _mcp
image = np.array([[1, 1, 2, 2], [2, 1, 1, 3], [3, 2, 1, 2], [2, 2, 2, 1]])
mcp = graph.MCP_Geometric(image)
destinations = [[0, 0], [3, 3]]
costs, traceback = mcp.find_costs(destinations)
offsets = _mcp.make_offsets(2, True)
indices = np.indices(traceback.shape)
offsets.append([0, 0])
offsets_arr = np.array(offsets)
offset_to_neighbor = offsets_arr[traceback]
neighbor_index = indices - offset_to_neighbor.transpose((2, 0, 1))
ids = np.arange(traceback.size).reshape(image.shape)
neighbor_ids = np.ravel_multi_index(tuple(neighbor_index), traceback.shape)
g = sparse.coo_matrix((
np.ones(traceback.size),
(ids.flat, neighbor_ids.flat),
)).tocsr()
basins = sparse.csgraph.connected_components(g)[1].reshape((4, 4))
print(basins)
|
# IPython log file
# See https://stackoverflow.com/questions/62135639/mcp-geometrics-for-calculating-marketsheds/62144556
import numpy as np
from scipy import sparse
from skimage import graph
from skimage.graph import _mcp
image = np.array([[1, 1, 2, 2], [2, 1, 1, 3], [3, 2, 1, 2], [2, 2, 2, 1]])
mcp = graph.MCP_Geometric(image)
destinations = [[0, 0], [3, 3]]
costs, traceback = mcp.find_costs(destinations)
offsets = _mcp.make_offsets(2, True)
indices = np.indices(traceback.shape)
offsets.append([0, 0])
offsets_arr = np.array(offsets)
offset_to_neighbor = offsets_arr[traceback]
neighbor_index = indices - offset_to_neighbor.transpose((2, 0, 1))
ids = np.arange(traceback.size).reshape(image.shape)
neighbor_ids = np.ravel_multi_index(tuple(neighbor_index), traceback.shape)
g = sparse.coo_matrix((
np.ones(traceback.size),
(ids.flat, neighbor_ids.flat),
)).tocsr()
basins = sparse.csgraph.connected_components(g)[1].reshape((4, 4))
print(basins)
|
Add link to SO question
|
Add link to SO question
|
Python
|
bsd-3-clause
|
jni/useful-histories
|
897b637ca9de93b7107cd6d6ab76ed0cb485aba9
|
classifiers/ppmc.py
|
classifiers/ppmc.py
|
__author__ = 'sharvey'
from classifiers import Classifier
from corpus.mysql.reddit import RedditMySQLCorpus
from ppm import Trie
class RedditPPM(Classifier):
corpus = None
trie = None
user = None
reddit = None
order = 5
def __init__(self, corpus):
self.corpus = corpus
def train(self, corpus_type, user, reddit, char_count, order=5):
if (self.trie is not None):
del self.trie
self.trie = Trie(order)
self.reddit = reddit
self.user = user
document = self.corpus.get_train_documents(corpus_type, user, reddit, char_count).encode('utf-8')
for c in document:
self.trie.add(c)
def test(self, corpus_type, reddit, char_count):
documents = self.corpus.get_test_documents(corpus_type, reddit)
results = []
for row in documents:
test_bits = 0
newtrie = self.trie.duplicate()
document = row['text'].encode('utf-8')
for c in document:
newtrie.add(c)
test_bits += newtrie.bit_encoding
del newtrie
results.append({'id': row['id'],
'label': (self.user == row['username']),
'score': test_bits/(len(document)*8)})
return results
def run_reddit_experiment(corpus_type, char_count, reddits, mysql_opts):
corpus = RedditMySQLCorpus()
corpus.setup(**mysql_opts)
|
__author__ = 'sharvey'
from classifiers import Classifier
from corpus.mysql.reddit import RedditMySQLCorpus
from ppm import Trie
class RedditPPM(Classifier):
corpus = None
trie = None
user = None
reddit = None
order = 5
def __init__(self, corpus):
self.corpus = corpus
def train(self, corpus_type, user, reddit, char_count, order=5):
if (self.trie is not None):
del self.trie
self.trie = Trie(order)
self.reddit = reddit
self.user = user
document = self.corpus.get_train_documents(corpus_type, user, reddit, char_count).encode('utf-8')
for c in document:
self.trie.add(c)
def test(self, corpus_type, reddit, char_count):
documents = self.corpus.get_test_documents(corpus_type, reddit)
results = []
for row in documents:
test_bits = 0
newtrie = self.trie.duplicate()
document = row['text'].encode('utf-8')
for c in document:
newtrie.add(c)
test_bits += newtrie.bit_encoding
del newtrie
results.append({'id': row['id'],
'username': row['username'],
'label': (self.user == row['username']),
'score': test_bits/(len(document)*8)})
return results
|
Add field for test result return
|
Add field for test result return
|
Python
|
mit
|
worldwise001/stylometry
|
552caa1d1fefcc48107eae02091aaca4a39123b4
|
src/zeit/content/cp/field.py
|
src/zeit/content/cp/field.py
|
import zc.form.field
import zope.schema.interfaces
class DynamicCombination(zc.form.field.Combination):
def __init__(self, type_field, type_interface, **kw):
self.type_field = type_field
self.type_field.__name__ = "combination_00"
self.fields = (type_field,)
self.type_interface = type_interface
super(zc.form.field.Combination, self).__init__(**kw)
def generate_fields(self, selector):
fields = []
field = self.type_interface[selector]
if zope.schema.interfaces.ICollection.providedBy(field):
fields.extend(field.value_type.fields)
else:
fields.append(field)
fields = [x.bind(self.context) for x in fields]
for ix, field in enumerate(fields):
field.__name__ = "combination_%02d" % (ix + 1)
return fields
def _validate(self, value):
# XXX I hope we can get away with no validation here, since all input
# happens through widgets and so should be valid anyway. Otherwise we
# have to synthesize fields here too, like DynamicCombinationWidget.
pass
|
import zc.form.field
import zc.form.interfaces
import zope.schema.interfaces
class DynamicCombination(zc.form.field.Combination):
def __init__(self, type_field, type_interface, **kw):
self.type_field = type_field
self.type_field.__name__ = "combination_00"
self.fields = (type_field,)
self.type_interface = type_interface
super(zc.form.field.Combination, self).__init__(**kw)
def generate_fields(self, selector):
result = []
field = self.type_interface[selector]
if zope.schema.interfaces.ICollection.providedBy(field):
field = field.value_type
if zc.form.interfaces.ICombinationField.providedBy(field):
result.extend(field.fields)
else:
result.append(field)
result = [x.bind(self.context) for x in result]
for ix, field in enumerate(result):
field.__name__ = "combination_%02d" % (ix + 1)
return result
def _validate(self, value):
# XXX I hope we can get away with no validation here, since all input
# happens through widgets and so should be valid anyway. Otherwise we
# have to synthesize fields here too, like DynamicCombinationWidget.
pass
|
Support sequences with value_type other than combination
|
TMS-227: Support sequences with value_type other than combination
|
Python
|
bsd-3-clause
|
ZeitOnline/zeit.content.cp,ZeitOnline/zeit.content.cp
|
0b4097394fd05da204624d1c6093176feb158bb1
|
ajaxuploader/backends/thumbnail.py
|
ajaxuploader/backends/thumbnail.py
|
import os
from sorl.thumbnail import get_thumbnail
from ajaxuploader.backends.local import LocalUploadBackend
class ThumbnailUploadBackend(LocalUploadBackend):
def __init__(self, dimension):
self._dimension = dimension
def upload_complete(self, request, filename):
thumbnail = get_thumbnail(self._filename, self._dimension)
os.unlink(self._filename)
return {"path": thumbnail.name}
|
import os
from django.conf import settings
from sorl.thumbnail import get_thumbnail
from ajaxuploader.backends.local import LocalUploadBackend
class ThumbnailUploadBackend(LocalUploadBackend):
DIMENSION = "100x100"
def upload_complete(self, request, filename):
thumbnail = get_thumbnail(self._path, self.DIMENSION)
os.unlink(self._path)
return {"path": settings.MEDIA_URL + thumbnail.name}
|
Use dimension as a constant, so we keep same interface for all backends; also returns full path to the place where image was saved
|
Use dimension as a constant, so we keep same interface for all backends; also returns full path to the place where image was saved
|
Python
|
bsd-3-clause
|
OnlyInAmerica/django-ajax-uploader,derek-adair/django-ajax-uploader,derek-adair/django-ajax-uploader,skoczen/django-ajax-uploader,brilliant-org/django-ajax-uploader,derek-adair/django-ajax-uploader,brilliant-org/django-ajax-uploader,skoczen/django-ajax-uploader,OnlyInAmerica/django-ajax-uploader,brilliant-org/django-ajax-uploader
|
a3c4f151a9a44aae3528492d4a00a1815c52cda6
|
website_membership_contact_visibility/models/res_partner.py
|
website_membership_contact_visibility/models/res_partner.py
|
# -*- coding: utf-8 -*-
# © 2016 Michael Viriyananda
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agp
from openerp import fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
website_membership_published = fields.Boolean(
string='Visible In The Website',
copy=False,
default=True)
|
# -*- coding: utf-8 -*-
# © 2016 Michael Viriyananda
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agp
from openerp import fields, models
class ResPartner(models.Model):
_inherit = 'res.partner'
website_membership_published = fields.Boolean(
string='Visible Contact Info On The Website',
copy=False,
default=True)
|
Change the label of "website_membership_published" into "Visible Contact Info On The Website"
|
Change the label of "website_membership_published" into "Visible Contact Info On The Website"
|
Python
|
agpl-3.0
|
open-synergy/vertical-association
|
b56c2063dbb8ea6145048eb8a74bfd2693b2b6f4
|
app.py
|
app.py
|
#!/usr/bin/env python
from flask import Flask
app = Flask(__name__)
@app.route("/ping")
def hello():
return "pong"
if __name__ == "__main__":
app.run()
|
#!/usr/bin/env python
from flask import Flask
app = Flask(__name__)
@app.route("/ping")
def hello():
return "pong"
# Returns larger sample JSON from http://json.org/example.html to exercise performance with larger payloads
@app.route("/bigger")
def big_response():
return '''{
"glossary": {
"title": "example glossary",
"GlossDiv": {
"title": "S",
"GlossList": {
"GlossEntry": {
"ID": "SGML",
"SortAs": "SGML",
"GlossTerm": "Standard Generalized Markup Language",
"Acronym": "SGML",
"Abbrev": "ISO 8879:1986",
"GlossDef": {
"para": "A meta-markup language, used to create markup languages such as DocBook.",
"GlossSeeAlso": ["GML", "XML"]
},
"GlossSee": "markup"
}
}
}
}
}'''
if __name__ == "__main__":
app.run()
|
Add bigger response payload option of 512B
|
Add bigger response payload option of 512B
|
Python
|
apache-2.0
|
svanoort/python-client-benchmarks,svanoort/python-client-benchmarks
|
d7cb9bdd63b381b81bf89c5e3c1cc3031c5928d9
|
run.py
|
run.py
|
"""
Entry point for running the sqmpy application standalone
"""
import os
from gevent import monkey
monkey.patch_all()
from sqmpy.factory import create_app
# This line added to support heroku deployment
port = int(os.environ.get("PORT", 3000))
app = create_app('../config.py')
app.run(host='0.0.0.0', port=port,
ssl_context=('server.crt', 'server.key'),
debug=True,
threaded=True)
|
"""
Entry point for running the sqmpy application standalone
"""
import os
from gevent import monkey
monkey.patch_all()
from sqmpy.factory import create_app
# This line added to support heroku deployment
port = int(os.environ.get("PORT", 3000))
# Workaround for passing ssh options to underlying library. Since we want
# to avoid any question upon ssh initialization, therefore we have tp add
# this StrictHostKeyChecking=no to ~/.ssh/config, otherwise we will get
# an error when connecting to new host, since there is no way currently to
# pass this option programmatically.
# Pass the correct config file and create the app instance
app = create_app('../config.py')
# If pyOpenSSL is installed it is possible to use adhoc certificates:
# app.run(host='0.0.0.0', port=port, ssl_context='adhoc')
app.run(host='0.0.0.0', port=port, ssl_context=('server.crt', 'server.key'))
|
Add comments and more gitignore
|
Add comments and more gitignore
|
Python
|
bsd-3-clause
|
mehdisadeghi/sqmpy,mehdisadeghi/sqmpy,mehdisadeghi/sqmpy,simphony/sqmpy,simphony/sqmpy,simphony/sqmpy
|
ca758b2813ae77b795c4318d7d5566cd47ab0ec7
|
postgres/operations.py
|
postgres/operations.py
|
from django.db.migrations.operations.base import Operation
from django.db import connection
from psycopg2.extras import register_composite
class LoadSQLFromScript(Operation):
def __init__(self, filename):
self.filename = filename
@property
def reversible(self):
return False
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute(open(self.filename).read().replace('%', '%%'))
class CreateCompositeType(Operation):
def __init__(self, name=None, fields=None):
self.name = name
self.fields = fields
@property
def reversible(self):
return True
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('CREATE TYPE %s AS (%s)' % (
self.name, ", ".join(["%s %s" % field for field in self.fields])
))
def state_backwards(self, app_label, state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('DROP TYPE %s' % self.name)
|
from django.db.migrations.operations.base import Operation
from django.db import connection
from psycopg2.extras import register_composite
from .fields.composite import composite_type_created
class LoadSQLFromScript(Operation):
def __init__(self, filename):
self.filename = filename
@property
def reversible(self):
return False
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute(open(self.filename).read().replace('%', '%%'))
class CreateCompositeType(Operation):
def __init__(self, name=None, fields=None):
self.name = name
self.fields = fields
@property
def reversible(self):
return True
def state_forwards(self, app_label, state):
pass
def database_forwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('CREATE TYPE %s AS (%s)' % (
self.name, ", ".join(["%s %s" % field for field in self.fields])
))
composite_type_created.send(sender=self.__class__, db_type=self.name)
def state_backwards(self, app_label, state):
pass
def database_backwards(self, app_label, schema_editor, from_state, to_state):
schema_editor.execute('DROP TYPE %s' % self.name)
|
Send a signal after creation of composite field.
|
Send a signal after creation of composite field.
|
Python
|
bsd-3-clause
|
wlanslovenija/django-postgres
|
7f51b7a74df8e2c8d6756b8c3e95f7fbf47b291b
|
hashbrown/utils.py
|
hashbrown/utils.py
|
from django.conf import settings
from .models import Switch
def is_active(label, user=None):
defaults = getattr(settings, 'HASHBROWN_SWITCH_DEFAULTS', {})
globally_active = defaults[label].get(
'globally_active',
False) if label in defaults else False
description = defaults[label].get(
'description',
'') if label in defaults else ''
switch, created = Switch.objects.get_or_create(
label=label, defaults={
'globally_active': globally_active,
'description': description,
})
if created:
return switch.globally_active
if switch.globally_active or (
user and user.available_switches.filter(pk=switch.pk).exists()
):
return True
return False
|
from django.conf import settings
from .models import Switch
SETTINGS_KEY = 'HASHBROWN_SWITCH_DEFAULTS'
def is_active(label, user=None):
defaults = getattr(settings, SETTINGS_KEY, {})
globally_active = defaults[label].get(
'globally_active',
False) if label in defaults else False
description = defaults[label].get(
'description',
'') if label in defaults else ''
switch, created = Switch.objects.get_or_create(
label=label, defaults={
'globally_active': globally_active,
'description': description,
})
if created:
return switch.globally_active
if switch.globally_active or (
user and user.available_switches.filter(pk=switch.pk).exists()
):
return True
return False
|
Use a constant for the 'HASHBROWN_SWITCH_DEFAULTS' settings key so it is easier to re-use.
|
Use a constant for the 'HASHBROWN_SWITCH_DEFAULTS' settings key so it is easier to re-use.
|
Python
|
bsd-2-clause
|
potatolondon/django-hashbrown
|
df57b55c8ffa2a1948d7442d041415a3f19bbca0
|
python/Cloudbot/bbm.py
|
python/Cloudbot/bbm.py
|
from cloudbot import hook
@hook.command("bbmstaff")
def bbmStaff(text, message, chan):
if chan in ("#bbm-bots", "#bbm-dev", "#bbm-packs", "#builtbrokenmodding", "#builtbroken"):
message("Owners: Dmodoomsirius, DarkGuardsman");
message("textureArtist: Morton0000");
message("Developers: Snow, Hennamann")
else:
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
@hook.command("sponsor")
def sponsor(text, message, chan):
if chan in ("#BBM-bots", "#BBM-Dev", "#BBM-Packs", "#BuiltBrokenModding", "#BuiltBroken"):
message("BuiltBroken servers both Beta test servers and build server");
message("is sponsored by Akliz.");
message("http://www.akliz.net/bbm")
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
|
from cloudbot import hook
bbmChannels = ["#bbm-bots","#bbm-dev","#builtbroken","#builtbrokenmodding","#bbm-packs","#icbm","#artillects "]
@hook.command("bbmstaff")
def bbmStaff(text, message, chan):
if any(x in chan for x in bbmChannels):
message("Owners: Dmodoomsirius, DarkGuardsman");
#message("Texture Artist: Morton0000");
message("Senior Developers: Kolatra")
message("Junior Developers: Kolatra, shobu9, TheCowGod, Hennamann")
else:
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
@hook.command("sponsor")
def sponsor(text, message, chan):
if any(x in chan for x in bbmChannels):
message("BuildBrokenModding servers both Beta test servers and build server");
message("is sponsored by Akliz.");
message("http://www.akliz.net/bbm")
else:
message("Command can only be run in Official BBM Channels. Join #bbm-bots to run the command.")
@hook.command("bbmchan")
def chans(text, message):
message("The official BuiltBroken Channels are: " + " , ".join(bbmChannels))
@hook.command("bbmhelp")
def bbmhelp(text, message):
message("If you are looking for who is the staff type .bbmstaff")
message ("if you are looking for our sponsors please type .sponsor")
message("If you are looking for our official channels please do .bbmchan")
|
Update and add more commands.
|
Update and add more commands.
|
Python
|
unknown
|
dmodoomsirius/DmodCode,dmodoomsirius/DmodCode,dsirius/DmodCode,dmodoomsirius/DmodCode,dsirius/DmodCode,dsirius/DmodCode
|
373fd6e9332ca225c1939b5bba675161bdec3596
|
bika/lims/upgrade/__init__.py
|
bika/lims/upgrade/__init__.py
|
# see https://gist.github.com/malthe/704910
import imp
import sys
def create_modules(module_path):
path = ""
module = None
for element in module_path.split('.'):
path += element
try:
module = __import__(path)
except ImportError:
new = imp.new_module(path)
if module is not None:
setattr(module, element, new)
module = new
sys.modules[path] = module
__import__(path)
path += "."
return module
def stub(module_path, class_name, base_class, meta_class=type):
module = create_modules(module_path)
cls = meta_class(class_name, (base_class, ), {})
setattr(module, class_name, cls)
def skip_pre315(portal):
# Hack prevent out-of-date upgrading
# Related: PR #1484
# https://github.com/bikalabs/Bika-LIMS/pull/1484
qi = portal.portal_quickinstaller
info = qi.upgradeInfo('bika.lims')
if info['installedVersion'] > '315':
return True
|
# see https://gist.github.com/malthe/704910
import imp
import sys
def create_modules(module_path):
path = ""
module = None
for element in module_path.split('.'):
path += element
try:
module = __import__(path)
except ImportError:
new = imp.new_module(path)
if module is not None:
setattr(module, element, new)
module = new
sys.modules[path] = module
__import__(path)
path += "."
return module
def stub(module_path, class_name, base_class, meta_class=type):
module = create_modules(module_path)
cls = meta_class(class_name, (base_class, ), {})
setattr(module, class_name, cls)
def skip_pre315(portal):
# Hack prevent out-of-date upgrading
# Related: PR #1484
# https://github.com/bikalabs/Bika-LIMS/pull/1484
qi = portal.portal_quickinstaller
info = qi.upgradeInfo('bika.lims')
if info['installedVersion'] > '315':
return True
return False
|
Add return False to be sure all works as expected
|
Add return False to be sure all works as expected
|
Python
|
agpl-3.0
|
labsanmartin/Bika-LIMS,labsanmartin/Bika-LIMS,veroc/Bika-LIMS,labsanmartin/Bika-LIMS,veroc/Bika-LIMS,veroc/Bika-LIMS,rockfruit/bika.lims,rockfruit/bika.lims
|
6168ce884a1234910bace1a026402a21501b499c
|
buildbot_travis/steps/base.py
|
buildbot_travis/steps/base.py
|
from buildbot.process import buildstep
from buildbot.process.buildstep import SUCCESS, FAILURE, EXCEPTION
from buildbot.process.properties import Properties
from twisted.internet import defer
from ..travisyml import TravisYml
class ConfigurableStep(buildstep.LoggingBuildStep):
"""
Base class for a step which can be tuned by changing settings in .travis.yml
"""
@defer.inlineCallbacks
def getStepConfig(self):
log = self.addLog(".travis.yml")
cmd = self.cmd = buildstep.RemoteShellCommand(workdir="build", command=["cat", ".travis.yml"])
cmd.useLog(log, False, "stdio")
yield self.runCommand(cmd)
self.cmd = None
if cmd.rc != 0:
raise buildstep.BuildStepFailed()
config = TravisYml()
config.parse(log.getText())
defer.returnValue(config)
|
from buildbot.process import buildstep
from buildbot.process.buildstep import SUCCESS, FAILURE, EXCEPTION
from buildbot.process.properties import Properties
from twisted.internet import defer
from ..travisyml import TravisYml
class ConfigurableStep(buildstep.LoggingBuildStep):
"""
Base class for a step which can be tuned by changing settings in .travis.yml
"""
@defer.inlineCallbacks
def getStepConfig(self):
config = TravisYml()
struct = self.build.getProperty(".travis.yml", None)
if struct:
config.parse(struct)
defer.returnValue(config)
log = self.addLog(".travis.yml")
cmd = self.cmd = buildstep.RemoteShellCommand(workdir="build", command=["cat", ".travis.yml"])
cmd.useLog(log, False, "stdio")
yield self.runCommand(cmd)
self.cmd = None
if cmd.rc != 0:
raise buildstep.BuildStepFailed()
config = TravisYml()
config.parse(log.getText())
self.build.setProperty(".travis.yml", config.config, ".VCS")
defer.returnValue(config)
|
Save .travis.yml into build properties
|
Save .travis.yml into build properties
|
Python
|
unknown
|
tardyp/buildbot_travis,isotoma/buildbot_travis,tardyp/buildbot_travis,buildbot/buildbot_travis,buildbot/buildbot_travis,isotoma/buildbot_travis,tardyp/buildbot_travis,tardyp/buildbot_travis,buildbot/buildbot_travis
|
dbfa6398ae84d6920181a750f1447fd1b9a9c521
|
tests/test_packet.py
|
tests/test_packet.py
|
# coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Tests for Packet.
"""
import os
import json
import pytest
from laniakea.core.providers.packet import PacketManager
@pytest.fixture
def packet():
with open(os.path.join(os.getcwd(), 'laniakea/examples/packet.json')) as fo:
conf = json.loads(fo.read())
return PacketManager(conf)
def test_list_projects(packet):
for plan in packet.list_projects():
assert hasattr(plan, 'name')
assert hasattr(plan, 'id')
|
# coding: utf-8
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
"""
Tests for Packet.
"""
import os
import json
import pytest
from laniakea.core.providers.packet import PacketManager
#@pytest.fixture
#def packet():
# with open(os.path.join(os.getcwd(), 'laniakea/examples/packet.json')) as fo:
# conf = json.loads(fo.read())
# return PacketManager(conf)
#def test_list_projects(packet):
# for plan in packet.list_projects():
# assert hasattr(plan, 'name')
# assert hasattr(plan, 'id')
|
Disable dummy Packet test temporarily
|
Disable dummy Packet test temporarily
|
Python
|
mpl-2.0
|
nth10sd/laniakea,MozillaSecurity/laniakea,MozillaSecurity/laniakea,nth10sd/laniakea
|
1239623e7e23d7c51e864f715c0908ef2c0d2765
|
tests/test_reduce.py
|
tests/test_reduce.py
|
import mr_streams as ms
import unittest
# :::: auxilary functions ::::
def sum_reduction(x,y):
return x + y
class TestMisc(unittest.TestCase):
def test_sum_reduce(self):
_ = ms.stream([1,2,3,4,5]).reduce(sum_reduction)
assert _ is 15
def test_reduce_with_one_element(self):
_ = ms.stream([1]).reduce(sum_reduction)
assert _ is 1
def test_empty_reduce(self):
try:
_ = ms.stream([]).reduce(sum_reduction)
except ms.IllegalStreamOperationException:
pass
|
import mr_streams as ms
import unittest
# :::: auxilary functions ::::
def sum_reduction(x,y):
return x + y
class TestMisc(unittest.TestCase):
def test_sum_reduce(self):
_ = ms.stream([1,2,3,4,5]).reduce(sum_reduction)
assert _ is 15
def test_initializer(self):
_ = ms.stream([1]).reduce(sum_reduction, initializer= 1)
assert _ is 2
def test_reduce_with_one_element(self):
_ = ms.stream([1]).reduce(sum_reduction)
assert _ is 1
def test_empty_reduce(self):
try:
_ = ms.stream([]).reduce(sum_reduction)
except ms.IllegalStreamOperationException:
pass
|
Refactor reduce to handle edge-case streams of length 0 and 1.
|
Refactor reduce to handle edge-case streams of length 0 and 1.
|
Python
|
mit
|
caffeine-potent/Streamer-Datastructure
|
9a97b9df87f06268ab1075726835da95f4852052
|
romanesco/format/tree/nested_to_vtktree.py
|
romanesco/format/tree/nested_to_vtktree.py
|
from romanesco.format import dict_to_vtkarrays, dict_to_vtkrow
import vtk
vtk_builder = vtk.vtkMutableDirectedGraph()
node_fields = input["node_fields"]
edge_fields = input["edge_fields"]
dict_to_vtkarrays(input["node_data"], node_fields, vtk_builder.GetVertexData())
if "children" in input and len(input["children"]) > 0:
dict_to_vtkarrays(input["children"][0]["edge_data"], edge_fields,
vtk_builder.GetEdgeData())
def process_node(vtknode, node):
if "children" in node:
for n in node["children"]:
vtkchild = vtk_builder.AddVertex()
vtkparentedge = vtk_builder.AddGraphEdge(vtknode, vtkchild).GetId()
dict_to_vtkrow(n["node_data"], vtk_builder.GetVertexData())
if "edge_data" in n:
dict_to_vtkrow(n["edge_data"], vtk_builder.GetEdgeData())
process_node(vtkchild, n)
vtk_builder.AddVertex()
dict_to_vtkrow(input["node_data"], vtk_builder.GetVertexData())
process_node(0, input)
output = vtk.vtkTree()
output.ShallowCopy(vtk_builder)
|
from romanesco.format import dict_to_vtkarrays, dict_to_vtkrow
import vtk
vtk_builder = vtk.vtkMutableDirectedGraph()
node_fields = input["node_fields"]
edge_fields = input["edge_fields"]
dict_to_vtkarrays(input["node_data"], node_fields, vtk_builder.GetVertexData())
if "children" in input and len(input["children"]) > 0:
dict_to_vtkarrays(input["children"][0]["edge_data"], edge_fields,
vtk_builder.GetEdgeData())
def process_node(vtknode, node):
if "children" in node:
for n in node["children"]:
vtkchild = vtk_builder.AddVertex()
vtkparentedge = vtk_builder.AddGraphEdge(vtknode, vtkchild).GetId()
dict_to_vtkrow(n["node_data"], vtk_builder.GetVertexData())
dict_to_vtkrow(n["edge_data"], vtk_builder.GetEdgeData())
process_node(vtkchild, n)
vtk_builder.AddVertex()
dict_to_vtkrow(input["node_data"], vtk_builder.GetVertexData())
process_node(0, input)
output = vtk.vtkTree()
output.ShallowCopy(vtk_builder)
|
Revert "tolerate missing edge data"
|
Revert "tolerate missing edge data"
This reverts commit 93f1f6b24b7e8e61dbbfebe500048db752bc9fed.
|
Python
|
apache-2.0
|
Kitware/romanesco,Kitware/romanesco,Kitware/romanesco,Kitware/romanesco,girder/girder_worker,girder/girder_worker,girder/girder_worker
|
47831156874d31dcf9b8b61118399cb5ac77632c
|
PyFVCOM/__init__.py
|
PyFVCOM/__init__.py
|
"""
The FVCOM Python toolbox (PyFvcom)
"""
__version__ = '1.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import numpy so we have it across the board.
import numpy as np
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import grid_tools
from PyFVCOM import img2xyz
from PyFVCOM import ll2utm
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
# External TAPPY now instead of my bundled version. Requires my forked version
# of TAPPY from https://github.com/pwcazenave/tappy or
# http://gitlab.em.pml.ac.uk/pica/tappy.
from tappy import tappy
# For backwards-compatibility.
process_FVCOM_results = process_results
read_FVCOM_results = read_results
|
"""
The FVCOM Python toolbox (PyFvcom)
"""
__version__ = '1.2'
__author__ = 'Pierre Cazenave'
__credits__ = ['Pierre Cazenave']
__license__ = 'MIT'
__maintainer__ = 'Pierre Cazenave'
__email__ = 'pica@pml.ac.uk'
import inspect
from warnings import warn
# Import numpy so we have it across the board.
import numpy as np
# Import everything!
from PyFVCOM import buoy_tools
from PyFVCOM import cst_tools
from PyFVCOM import ctd_tools
from PyFVCOM import grid_tools
from PyFVCOM import ll2utm
from PyFVCOM import ocean_tools
from PyFVCOM import stats_tools
from PyFVCOM import tide_tools
from PyFVCOM import tidal_ellipse
from PyFVCOM import process_results
from PyFVCOM import read_results
# External TAPPY now instead of my bundled version. Requires my forked version
# of TAPPY from https://github.com/pwcazenave/tappy or
# http://gitlab.em.pml.ac.uk/pica/tappy.
from tappy import tappy
# For backwards-compatibility.
process_FVCOM_results = process_results
read_FVCOM_results = read_results
|
Remove the (dodgy) function to convert from an image to data.
|
Remove the (dodgy) function to convert from an image to data.
|
Python
|
mit
|
pwcazenave/PyFVCOM
|
751f40ef23250cf9fad1374359393588edee477a
|
back/blog/models/base.py
|
back/blog/models/base.py
|
from sqlalchemy.ext.declarative import declared_attr
from blog.lib.database import db
class ModelMixin(object):
"""A base mixin for all models."""
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
def __str__(self):
return '<{} (id={})>'.format(self.__class__.__name__, self.id_)
def __repr__(self):
return str(self)
id_ = db.Column('id', db.Integer, primary_key=True)
def get_dictionary(self):
d = {}
for column in self.__table__.columns:
key = 'id_' if column.key == 'id' else column.key
d[key] = getattr(self, key)
return d
def update(self, d):
for column in self.__table__.columns:
if column.key == 'id_':
continue
setattr(
self, column.key, d.get(
column.key, getattr(self, column.key)
)
)
|
from sqlalchemy.ext.declarative import declared_attr
from blog.lib.database import db
class ModelMixin(object):
"""A base mixin for all models."""
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
def __str__(self):
return '<{} (id={})>'.format(self.__class__.__name__, self.id_)
def __repr__(self):
return str(self)
id_ = db.Column('id', db.Integer, primary_key=True)
def get_dictionary(self):
d = {}
for column in self.__table__.columns:
if column.key == 'id':
d['id'] = getattr(self, 'id_')
else:
d[column.key] = getattr(self, column.key)
return d
def update(self, d):
for column in self.__table__.columns:
if column.key == 'id_':
continue
setattr(
self, column.key, d.get(
column.key, getattr(self, column.key)
)
)
|
Return "id" key to front instead of "id_".
|
Return "id" key to front instead of "id_".
|
Python
|
mit
|
astex/living-with-django,astex/living-with-django,astex/living-with-django
|
564d54c377bf6a8c16cae3681934cc7ba5007c76
|
bundledApps/wailEndpoint.py
|
bundledApps/wailEndpoint.py
|
import tornado.ioloop
import tornado.web
import requests
host = 'localhost'
waybackPort = '8080'
archiveConfigFile = '/Applications/WAIL.app/config/archive.json'
class MainHandler(tornado.web.RequestHandler):
def get(self):
iwa = isWaybackAccessible()
print iwa
self.write(iwa)
def make_app():
return tornado.web.Application([
(r"/", MainHandler),
])
def isWaybackAccessible():
try:
r = requests.get('http://' + host + ':' + waybackPort)
with open(archiveConfigFile, 'r') as myfile:
data=myfile.read()
return data
except requests.exceptions.ConnectionError as e:
return ''
if __name__ == "__main__":
app = make_app()
app.listen(8888)
tornado.ioloop.IOLoop.current().start()
|
import tornado.ioloop
import tornado.web
import requests
host = 'localhost'
waybackPort = '8080'
# Use a separate JSON file that only queries the local WAIL instance for MemGator
archiveConfigFile = '/Applications/WAIL.app/config/archive.json'
class MainHandler(tornado.web.RequestHandler):
def get(self):
iwa = isWaybackAccessible()
print iwa
self.write(iwa)
def make_app():
return tornado.web.Application([
(r"/", MainHandler),
])
def isWaybackAccessible():
try:
r = requests.get('http://' + host + ':' + waybackPort)
with open(archiveConfigFile, 'r') as myfile:
data=myfile.read()
return data
except requests.exceptions.ConnectionError as e:
return ''
if __name__ == "__main__":
app = make_app()
app.listen(8888)
tornado.ioloop.IOLoop.current().start()
|
Add comment to justify separate JSON file existence
|
Add comment to justify separate JSON file existence
|
Python
|
mit
|
machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail,machawk1/wail
|
b32f4955665b8618a9623f6898a15d4da40dc58e
|
dxtbx/command_line/print_header.py
|
dxtbx/command_line/print_header.py
|
def print_header():
import sys
from dxtbx.format.Registry import Registry
# this will do the lookup for every frame - this is strictly not needed
# if all frames are from the same instrument
for arg in sys.argv[1:]:
format = Registry.find(arg)
i = format(arg)
print 'Beam:'
print i.get_beam()
print 'Goniometer:'
print i.get_goniometer()
print 'Detector:'
print i.get_detector()
print 'Scan:'
print i.get_scan()
print 'Total Counts:'
print sum(i.get_raw_data())
if __name__ == '__main__':
print_header()
|
def print_header():
import sys
from dxtbx.format.Registry import Registry
# this will do the lookup for every frame - this is strictly not needed
# if all frames are from the same instrument
for arg in sys.argv[1:]:
format = Registry.find(arg)
print 'Using header reader: %s' % format.__name__
i = format(arg)
print 'Beam:'
print i.get_beam()
print 'Goniometer:'
print i.get_goniometer()
print 'Detector:'
print i.get_detector()
print 'Scan:'
print i.get_scan()
print 'Total Counts:'
print sum(i.get_raw_data())
if __name__ == '__main__':
print_header()
|
Print the Format class used
|
Print the Format class used
|
Python
|
bsd-3-clause
|
dials/dials,dials/dials,dials/dials,dials/dials,dials/dials
|
c790055fa7e6810703599bc0124507133b8a55fc
|
crispy_forms/compatibility.py
|
crispy_forms/compatibility.py
|
import sys
try:
basestring
except:
basestring = str # Python3
PY2 = sys.version_info[0] == 2
if not PY2:
text_type = str
binary_type = bytes
string_types = (str,)
integer_types = (int,)
else:
text_type = unicode
binary_type = str
string_types = basestring
integer_types = (int, long)
try:
# avoid RemovedInDjango19Warning by using lru_cache where available
from django.utils.lru_cache import lru_cache as memoize
except:
from django.utils.functional import memoize
|
import sys
try:
basestring
except:
basestring = str # Python3
PY2 = sys.version_info[0] == 2
if not PY2:
text_type = str
binary_type = bytes
string_types = (str,)
integer_types = (int,)
else:
text_type = unicode
binary_type = str
string_types = basestring
integer_types = (int, long)
try:
# avoid RemovedInDjango19Warning by using lru_cache where available
from django.utils.lru_cache import lru_cache
def memoize(function, *args):
return lru_cache()(function)
except:
from django.utils.functional import memoize
|
Fix lru_cache import as memoize
|
Fix lru_cache import as memoize
Thanks to @jcomeauictx for the heads up
|
Python
|
mit
|
scuml/django-crispy-forms,VishvajitP/django-crispy-forms,saydulk/django-crispy-forms,alanwj/django-crispy-forms,schrd/django-crispy-forms,bouttier/django-crispy-forms,smirolo/django-crispy-forms,saydulk/django-crispy-forms,IanLee1521/django-crispy-forms,zixan/django-crispy-forms,Stranger6667/django-crispy-forms,RamezIssac/django-crispy-forms,maraujop/django-crispy-forms,alanwj/django-crispy-forms,iris-edu/django-crispy-forms,dzhuang/django-crispy-forms,ngenovictor/django-crispy-forms,damienjones/django-crispy-forms,VishvajitP/django-crispy-forms,iris-edu-int/django-crispy-forms,RamezIssac/django-crispy-forms,django-crispy-forms/django-crispy-forms,damienjones/django-crispy-forms,schrd/django-crispy-forms,davidszotten/django-crispy-forms,spectras/django-crispy-forms,IanLee1521/django-crispy-forms,avsd/django-crispy-forms,carltongibson/django-crispy-forms,agepoly/django-crispy-forms,zixan/django-crispy-forms,dzhuang/django-crispy-forms,scuml/django-crispy-forms,avsd/django-crispy-forms,jtyoung/django-crispy-forms,iris-edu/django-crispy-forms,Stranger6667/django-crispy-forms,ngenovictor/django-crispy-forms,tarunlnmiit/django-crispy-forms,impulse-cloud/django-crispy-forms,django-crispy-forms/django-crispy-forms,tarunlnmiit/django-crispy-forms,dessibelle/django-crispy-forms,carltongibson/django-crispy-forms,spectras/django-crispy-forms,maraujop/django-crispy-forms,treyhunner/django-crispy-forms,davidszotten/django-crispy-forms,impulse-cloud/django-crispy-forms,jtyoung/django-crispy-forms,iris-edu-int/django-crispy-forms,treyhunner/django-crispy-forms,dessibelle/django-crispy-forms,bouttier/django-crispy-forms,agepoly/django-crispy-forms,smirolo/django-crispy-forms
|
3a18c25ef019a9a54475419bfabc4b6e2776df9c
|
lib/unsubscribe.py
|
lib/unsubscribe.py
|
from lxml.html import fromstring as lxml_from_string
from unidecode import unidecode
UNSUBSCRIBE_MARKERS = [
# English
"unsub", "blacklist", "opt-out", "opt out",
# French
"desinscription", "desinscrire", "desabonner", "desabonnement",
"ne souhaitez plus", "ne plus recevoir", "cesser de recevoir"
]
def FindUnsubscribeLink(message):
unsubscribe_link = None
unsubscribe_links = []
for part in message.walk():
if part.get_content_type() == 'text/html':
html = part.get_payload(decode=True)
doc = lxml_from_string(html)
for element, attribute, link, pos in doc.iterlinks():
link_content = unidecode(element.text_content()).lower()
link = link.lower()
unsubscribe_links.append((repr(link_content)[0:100], link[0:100]))
for pattern in UNSUBSCRIBE_MARKERS:
if (pattern in link_content) or (pattern in link):
unsubscribe_link = link
return unsubscribe_link, unsubscribe_links
|
from lxml.html import fromstring as lxml_from_string
from unidecode import unidecode
UNSUBSCRIBE_MARKERS = [
# English
"unsub", "blacklist", "opt-out", "opt out", "removealert", "removeme",
# French
"desinscription", "desinscrire", "desabonner", "desabonnement",
"ne souhaitez plus", "ne plus recevoir", "cesser de recevoir"
]
def FindUnsubscribeLink(message):
unsubscribe_link = None
unsubscribe_links = []
for part in message.walk():
if part.get_content_type() == 'text/html':
html = part.get_payload(decode=True)
doc = lxml_from_string(html)
for element, attribute, link, pos in doc.iterlinks():
link_content = unidecode(element.text_content()).lower()
unsubscribe_links.append((repr(link_content)[0:100], link[0:100]))
for pattern in UNSUBSCRIBE_MARKERS:
if (pattern in link_content) or (pattern in link.lower()):
unsubscribe_link = link
return unsubscribe_link, unsubscribe_links
|
Fix a bug with uppercase links
|
Fix a bug with uppercase links
|
Python
|
mit
|
sylvinus/reclaim-my-gmail-inbox
|
593e826b24d83997a5be450be1401e16ec17c07c
|
application.py
|
application.py
|
#!/usr/bin/env python
from __future__ import print_function
import os
from flask.ext.script import Manager, Server
from flask.ext.migrate import Migrate, MigrateCommand
from app import create_app, db
application = create_app(os.getenv('DM_ENVIRONMENT') or 'development')
manager = Manager(application)
manager.add_command("runserver", Server(port=5000))
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
@manager.command
def list_routes():
"""List URLs of all application routes."""
for rule in sorted(application.url_map.iter_rules(), key=lambda r: r.rule):
print("{:10} {}".format(", ".join(rule.methods - set(['OPTIONS', 'HEAD'])), rule.rule))
if __name__ == '__main__':
manager.run()
|
#!/usr/bin/env python
from __future__ import print_function
import os
from dmutils import init_manager
from flask.ext.migrate import Migrate, MigrateCommand
from app import create_app, db
application = create_app(os.getenv('DM_ENVIRONMENT') or 'development')
manager = init_manager(application, 5000, ['./json_schemas'])
migrate = Migrate(application, db)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
Use new dmutils init_manager to set up reload on schema changes
|
Use new dmutils init_manager to set up reload on schema changes
|
Python
|
mit
|
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
|
39603bde90ebad7e0d70e41403a9a971867dcbac
|
backend/breach/views.py
|
backend/breach/views.py
|
from django.shortcuts import render
from django.http import HttpResponse
def get_work(request):
return HttpResponse('Not implemented')
def work_completed(request):
return HttpResponse('Not implemented')
|
from django.http import HttpResponse
from django.views.decorators.csrf import csrf_exempt
def get_work(request):
    """Hand a unit of work to a client (not implemented yet)."""
    return HttpResponse('Not implemented')
@csrf_exempt
def work_completed(request):
    """Accept a completed-work report (not implemented yet).

    ``csrf_exempt`` allows non-browser clients to POST without a Django
    CSRF token.
    """
    return HttpResponse('Not implemented')
|
Allow POST request to work_completed view
|
Allow POST request to work_completed view
|
Python
|
mit
|
esarafianou/rupture,esarafianou/rupture,dimkarakostas/rupture,esarafianou/rupture,dionyziz/rupture,dimriou/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture,dionyziz/rupture,dimriou/rupture,esarafianou/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture,dimriou/rupture,dionyziz/rupture,dimkarakostas/rupture
|
a633fd37a4d795e7b565254ef10aaa0f2ad77f31
|
vcontrol/rest/machines/shutdown.py
|
vcontrol/rest/machines/shutdown.py
|
from ..helpers import get_allowed
import subprocess
import web
class ShutdownMachineR:
"""
This endpoint is for shutting down a running machine.
"""
allow_origin, rest_url = get_allowed.get_allowed()
def GET(self, machine):
web.header('Access-Control-Allow-Origin', self.allow_origin)
try:
out = subprocess.check_output("/usr/local/bin/docker-machine stop "+machine, shell=True)
except:
out = "unable to stop machine"
return str(out)
|
from ..helpers import get_allowed
import subprocess
import web
class ShutdownMachineR:
    """
    This endpoint is for shutting down a running machine.
    """
    allow_origin, rest_url = get_allowed.get_allowed()
    def GET(self, machine):
        """Stop the docker machine named *machine*.

        Returns the docker-machine output on success, or an error string
        when the machine could not be stopped.
        """
        try:
            web.header('Access-Control-Allow-Origin', self.allow_origin)
        except Exception as e:  # pragma: no cover
            # `e.message` is not a reliable attribute (and is gone in
            # Python 3); print the exception object itself instead.
            print(e)
        try:
            # Pass argv as a list with shell=False so a crafted machine
            # name cannot inject shell commands.
            out = subprocess.check_output(
                ["/usr/local/bin/docker-machine", "stop", machine])
        except Exception:
            # Narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt still propagate; the endpoint
            # deliberately degrades to an error string on any failure.
            out = "unable to stop machine"
        return str(out)
|
Put the web.header function in a try/except block
|
Put the web.header function in a try/except block
|
Python
|
apache-2.0
|
cglewis/vcontrol,CyberReboot/vcontrol,CyberReboot/vcontrol,cglewis/vcontrol,CyberReboot/vcontrol,cglewis/vcontrol
|
633c3a356a0ed88c00fbb1a5c972171de2255890
|
dinosaurs/transaction/database.py
|
dinosaurs/transaction/database.py
|
from peewee import *
db = SqliteDatabase('emails.db')
class Transaction(Model):
cost = FloatField()
address = CharField()
tempPass = CharField()
domain = CharField(index=True)
email = CharField(primary_key=True, unique=True)
is_complete = BooleanField(default=False, index=True)
class Meta:
database = db
|
from datetime import datetime
from peewee import *
from dinosaurs import settings
from dinosaurs.transaction.coin import generate_address
db = SqliteDatabase(settings.database)
class Transaction(Model):
    """A pending email purchase, keyed by email address.

    Records the payment address, cost and start time so that stale
    transactions can be expired after a four-minute window.
    """
    cost = FloatField()
    address = CharField()
    started = DateField()  # NOTE(review): stores a date; a DateTimeField
                           # may be intended since sub-day arithmetic is
                           # done below — confirm with the schema.
    tempPass = CharField()
    domain = CharField(index=True)
    email = CharField(primary_key=True, unique=True)
    is_complete = BooleanField(default=False, index=True)

    class Meta:
        database = db

    def __init__(self, *args, **kwargs):
        # Stamp every new row with its creation time and a fresh
        # payment address.
        kwargs['started'] = datetime.now()
        kwargs['address'] = generate_address()
        super(Transaction, self).__init__(*args, **kwargs)

    @property
    def expired(self):
        # timedelta has no ``minutes`` attribute (only days/seconds/
        # microseconds); compare elapsed seconds against the 4-minute
        # window instead.
        return (datetime.now() - self.started).total_seconds() > 4 * 60

    @property
    def seconds_left(self):
        # ``total_seconds`` must be called; the old code returned the
        # bound method object instead of a number.
        return (datetime.now() - self.started).total_seconds()
|
Update what a transaction is
|
Update what a transaction is
|
Python
|
mit
|
chrisseto/dinosaurs.sexy,chrisseto/dinosaurs.sexy
|
820ddf412d09f10977b4bec525d478cc55fe443b
|
math/prime_test.py
|
math/prime_test.py
|
'''
prime_test(n) returns a True if n is a prime number else it returns False
'''
def prime_test(n):
if n <= 1:
return False
if n==2 or n==3:
return True
if n%2==0 or n%3==0:
return False
j = 5
while(j*j <= n):
if n%(j)==0 or n%(j+2)==0:
return False
j += 6
return True
def prime_test(n):
# prime numbers are greater than 1
if num > 1:
# check for factors
for i in range(2,num):
if (num % i) == 0:
print(num,"is not a prime number")
print(i,"times",num//i,"is",num)
break
else:
print(num,"is a prime number")
# if input number is less than
# or equal to 1, it is not prime
else:
print(num,"is not a prime number")
|
'''
prime_test(n) returns a True if n is a prime number else it returns False
'''
def prime_test(n):
    """Return True if ``n`` is a prime number, else False.

    Uses the 6k +/- 1 optimisation: after ruling out multiples of 2 and
    3, every remaining prime candidate has the form 6k - 1 or 6k + 1.
    """
    if n < 2:
        return False
    if n < 4:
        # 2 and 3 are prime.
        return True
    if n % 2 == 0 or n % 3 == 0:
        return False
    candidate = 5
    while candidate * candidate <= n:
        if n % candidate == 0 or n % (candidate + 2) == 0:
            return False
        candidate += 6
    return True
def prime_test(n):
    """Return True if ``n`` is a prime number, otherwise False.

    Simple trial division over every candidate divisor in 2..n-1.
    """
    # Primes are greater than 1; anything smaller is not prime.
    if n <= 1:
        return False
    # Fix: the old body referred to an undefined name ``num`` instead of
    # the parameter ``n``, so every call raised NameError.
    for candidate in range(2, n):
        if n % candidate == 0:
            # Found a factor, so n is composite.
            return False
    return True
|
Change the return type to boolean
|
Change the return type to boolean
|
Python
|
mit
|
amaozhao/algorithms,keon/algorithms
|
64a653b6bd6c9aae2492f3ee838bda1fafe639d6
|
upnpy/utils.py
|
upnpy/utils.py
|
# -*- coding: utf-8 -*-
"""
utils.py
~~~~~~~~
Defines utility functions used by UPnPy.
"""
def camelcase_to_underscore(text):
"""
Convert a camelCasedString to one separated_by_underscores. Treats
neighbouring capitals as acronyms and doesn't separated them, e.g. URL does
not become u_r_l. That would be stupid.
:param text: The string to convert.
"""
outstr = []
for char in text:
if char.is_lower():
outstr.append(char)
elif outstr[-1].is_lower():
outstr.append('_')
outstr.append(char.lower())
else:
outstr.append(char.lower())
return ''.join(outstr)
|
# -*- coding: utf-8 -*-
"""
utils.py
~~~~~~~~
Defines utility functions used by UPnPy.
"""
def camelcase_to_underscore(text):
    """Convert a camelCasedString to one separated_by_underscores.

    An underscore is inserted before any non-lowercase character that
    follows a lowercase one, and all output is lower-cased. Because each
    capital is lower-cased before the next comparison, consecutive
    capitals are also split (e.g. "URL" becomes "u_r_l").

    :param text: The string to convert.
    """
    pieces = []
    for ch in text:
        if ch.islower():
            pieces.append(ch)
        else:
            # Start a new word only when there is a preceding character
            # and it is lowercase.
            if pieces and pieces[-1].islower():
                pieces.append('_')
            pieces.append(ch.lower())
    return ''.join(pieces)
|
Correct an AttributeError and a potential IndexErr
|
Correct an AttributeError and a potential IndexErr
|
Python
|
mit
|
WenhaoYu/upnpy,Lukasa/upnpy
|
41c49a44c5f1bc9747b22b6d1f1088f1354a2cd5
|
nes/cpu/decoder.py
|
nes/cpu/decoder.py
|
from sqlite3 import Connection, Row
class Decoder:
def __init__(self):
self.conn = Connection('nes.sqlite')
self.conn.row_factory = Row
def __del__(self):
self.conn.close()
def decode(self, opcode):
c = self.conn.cursor()
c.execute('select * from instruction where opcode=?', [opcode])
row = c.fetchone()
return dict(zip(row.keys(), row))
|
from sqlite3 import Connection, Row
class Decoder:
    """Looks up CPU instruction metadata by opcode in a local sqlite DB."""

    def __init__(self):
        # Rows come back as sqlite3.Row so they can be turned into dicts.
        self.conn = Connection('nes.sqlite')
        self.conn.row_factory = Row

    def __del__(self):
        self.conn.close()

    def decode(self, opcode):
        """Return the instruction record for *opcode* as a dict.

        Raises NotImplementedError for opcodes missing from the table
        (undocumented opcodes).
        """
        cursor = self.conn.cursor()
        cursor.execute('select * from instruction where opcode=?', [opcode])
        row = cursor.fetchone()
        if not row:
            raise NotImplementedError('Undocumented Opcode: ' + str(opcode))
        return dict(zip(row.keys(), row))
|
Raise an exception when it's an undocumented opcode.
|
Raise an exception when it's an undocumented opcode.
|
Python
|
mit
|
Hexadorsimal/pynes
|
6829e9b4cf87b8d8d8b6e5a1c3aaf881f66393cf
|
healthcheck/contrib/django/status_endpoint/views.py
|
healthcheck/contrib/django/status_endpoint/views.py
|
import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
class JsonResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data)
super(JsonResponse, self).__init__(content=data, **kwargs)
class JsonResponseServerError(JsonResponse):
status_code = 500
@require_http_methods(['GET'])
def status(request):
checks = []
if getattr(settings, 'STATUS_CHECK_DBS', True):
checks.append(DjangoDBsHealthCheck())
files_to_check = getattr(settings, 'STATUS_CHECK_FILES', None)
if files_to_check:
checks.append(FilesDontExistHealthCheck(
files_to_check, check_id="quiesce file doesn't exist"))
ok, details = HealthChecker(checks)()
if ok and not details:
details = 'There were no checks.'
if not ok:
return JsonResponseServerError(json.dumps(details))
return JsonResponse(details)
|
import json
from django.conf import settings
from django.views.decorators.http import require_http_methods
from django.http import HttpResponse
from healthcheck.healthcheck import (
DjangoDBsHealthCheck, FilesDontExistHealthCheck, HealthChecker)
class JsonResponse(HttpResponse):
def __init__(self, data, **kwargs):
kwargs.setdefault('content_type', 'application/json')
data = json.dumps(data)
super(JsonResponse, self).__init__(content=data, **kwargs)
class JsonResponseServerError(JsonResponse):
status_code = 500
@require_http_methods(['GET'])
def status(request):
    """Health-check endpoint: run the configured checks and report JSON.

    Responds 200 with the check details when everything passes, 500 with
    the failure details otherwise.
    """
    checks = []
    # The DB connectivity check is on by default; settings may disable it.
    if getattr(settings, 'STATUS_CHECK_DBS', True):
        checks.append(DjangoDBsHealthCheck())
    files_to_check = getattr(settings, 'STATUS_CHECK_FILES', None)
    if files_to_check:
        # Presence of a "quiesce" file fails the check (instance draining).
        checks.append(FilesDontExistHealthCheck(
            files_to_check, check_id="quiesce file doesn't exist"))
    ok, details = HealthChecker(checks)()
    if ok and not details:
        details = 'There were no checks.'
    if not ok:
        return JsonResponseServerError(details)
    return JsonResponse(details)
|
Remove duplicated JSON encoding for error messages
|
Remove duplicated JSON encoding for error messages
|
Python
|
mit
|
yola/healthcheck
|
089e8c74106f3a19b229d085d73c932df6fe4e7d
|
application.py
|
application.py
|
from canis import siriusxm, spotify, oauth
def main():
try:
current = siriusxm.get_currently_playing('siriusxmu')
spotify_id = spotify.id_for_song(current)
print(current, spotify_id)
except Exception, e:
print "Error {}".format(e)
if __name__ == "__main__":
oauth.app.run(debug=True)
main()
|
from canis import siriusxm, spotify, oauth
def main():
try:
current = siriusxm.get_currently_playing('siriusxmu')
spotify_id = spotify.id_for_song(current)
print(current, spotify_id)
except Exception, e:
print "Error {}".format(e)
if __name__ == "__main__":
oauth.app.run()
main()
|
Remove debug mode on flask
|
Remove debug mode on flask
|
Python
|
mit
|
maxgoedjen/canis
|
697833caade1323ddb9a0b4e51031f1d494262cd
|
201705/migonzalvar/biggest_set.py
|
201705/migonzalvar/biggest_set.py
|
#!/usr/bin/env python3
from contextlib import contextmanager
import time
from main import has_subset_sum_zero
class Duration:
def __init__(self, elapsed=None):
self.elapsed = elapsed
@contextmanager
def less_than(secs):
duration = Duration()
tic = time.time()
yield duration
elapsed = time.time() - tic
print(f'Duration: {elapsed} seconds')
if elapsed >= secs:
print('Limit reached. Stopping.')
raise SystemExit(0)
def do():
for n in range(1, 100, 10):
source = range(1, n)
print(f'Length: {n} items')
with less_than(300):
result = has_subset_sum_zero(source)
print(f'Result: {result}')
print('Continue...')
print()
if __name__ == '__main__':
do()
|
#!/usr/bin/env python3
from contextlib import contextmanager
import time
from main import has_subset_sum_zero
class Duration:
def __init__(self, elapsed=None):
self.elapsed = elapsed
@contextmanager
def less_than(secs):
    """Time the managed block, storing the elapsed seconds on the
    yielded Duration object.

    NOTE(review): ``secs`` is currently unused inside this manager;
    callers compare ``duration.elapsed`` against their own limit.
    """
    duration = Duration()
    tic = time.time()
    yield duration
    elapsed = time.time() - tic
    duration.elapsed = elapsed
def nosolution_case(N):
    """Return N strictly positive ints; no subset can sum to zero."""
    return range(1, N + 1)
def negative_worst_case(N):
    """Return N ints (N-1 negatives plus their absolute sum).

    Only the full set sums to zero, which makes the subset search do
    maximal work.
    """
    negatives = list(range(-N + 1, 0))
    return negatives + [abs(sum(negatives))]
def positive_worst_case(N):
    """Return N ints (the negated sum followed by N-1 positives).

    Only the full set sums to zero, which makes the subset search do
    maximal work.
    """
    tail = list(range(1, N))
    return [-sum(tail)] + tail
def do():
    """Benchmark ``has_subset_sum_zero`` over several input strategies.

    For each strategy, grows the input size until a run exceeds the
    300-second budget, printing the result and timing of every run.
    """
    limit_secs = 300
    strategies = [nosolution_case, negative_worst_case, positive_worst_case]
    for strategy in strategies:
        print(f'## Using {strategy.__name__}')
        print()
        for n in range(1, 100, 10):
            # Build the input with the current strategy; the previous
            # code ignored the strategy and always used range(1, n).
            source = strategy(n)
            print(f'Length: {n} items')
            with less_than(limit_secs) as duration:
                result = has_subset_sum_zero(source)
            print(f'Result: {result}')
            print(f'Duration: {duration.elapsed} seconds')
            if duration.elapsed >= limit_secs:
                # Fix: the old comparison referenced an undefined name
                # ``secs`` and would have raised NameError on timeout.
                print('Limit reached. Stopping.')
                break
            print('Continue searching...')
            print()
if __name__ == '__main__':
do()
|
Use several strategies for performance
|
Use several strategies for performance
|
Python
|
bsd-3-clause
|
VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,VigoTech/reto,vigojug/reto,vigojug/reto,vigojug/reto,vigojug/reto,VigoTech/reto,vigojug/reto,vigojug/reto,vigojug/reto,vigojug/reto,VigoTech/reto,VigoTech/reto,vigojug/reto,vigojug/reto
|
0c785e349c2000bbf3b22671071a66eaca4d82d0
|
astropy/io/votable/__init__.py
|
astropy/io/votable/__init__.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This package reads and writes data formats used by the Virtual
Observatory (VO) initiative, particularly the VOTable XML format.
"""
from .table import (
parse, parse_single_table, validate, from_table, is_votable)
from .exceptions import (
VOWarning, VOTableChangeWarning, VOTableSpecWarning, UnimplementedWarning,
IOWarning, VOTableSpecError)
__all__ = [
'parse', 'parse_single_table', 'validate', 'from_table',
'is_votable', 'VOWarning', 'VOTableChangeWarning', 'VOTableSpecWarning',
'UnimplementedWarning', 'IOWarning', 'VOTableSpecError']
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This package reads and writes data formats used by the Virtual
Observatory (VO) initiative, particularly the VOTable XML format.
"""
from .table import (
parse, parse_single_table, validate, from_table, is_votable, writeto)
from .exceptions import (
VOWarning, VOTableChangeWarning, VOTableSpecWarning, UnimplementedWarning,
IOWarning, VOTableSpecError)
__all__ = [
'parse', 'parse_single_table', 'validate', 'from_table',
'is_votable', 'writeto', 'VOWarning', 'VOTableChangeWarning',
'VOTableSpecWarning', 'UnimplementedWarning', 'IOWarning',
'VOTableSpecError']
|
Put astropy.io.votable.writeto in the top-level namespace
|
Put astropy.io.votable.writeto in the top-level namespace
|
Python
|
bsd-3-clause
|
DougBurke/astropy,AustereCuriosity/astropy,funbaker/astropy,joergdietrich/astropy,StuartLittlefair/astropy,larrybradley/astropy,tbabej/astropy,mhvk/astropy,pllim/astropy,stargaser/astropy,lpsinger/astropy,joergdietrich/astropy,lpsinger/astropy,AustereCuriosity/astropy,kelle/astropy,saimn/astropy,DougBurke/astropy,bsipocz/astropy,mhvk/astropy,pllim/astropy,StuartLittlefair/astropy,astropy/astropy,saimn/astropy,dhomeier/astropy,StuartLittlefair/astropy,tbabej/astropy,joergdietrich/astropy,larrybradley/astropy,aleksandr-bakanov/astropy,funbaker/astropy,lpsinger/astropy,tbabej/astropy,AustereCuriosity/astropy,larrybradley/astropy,larrybradley/astropy,kelle/astropy,lpsinger/astropy,dhomeier/astropy,bsipocz/astropy,bsipocz/astropy,pllim/astropy,astropy/astropy,lpsinger/astropy,StuartLittlefair/astropy,joergdietrich/astropy,kelle/astropy,pllim/astropy,astropy/astropy,aleksandr-bakanov/astropy,MSeifert04/astropy,joergdietrich/astropy,stargaser/astropy,saimn/astropy,tbabej/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,mhvk/astropy,saimn/astropy,mhvk/astropy,dhomeier/astropy,DougBurke/astropy,dhomeier/astropy,funbaker/astropy,DougBurke/astropy,funbaker/astropy,mhvk/astropy,MSeifert04/astropy,larrybradley/astropy,stargaser/astropy,stargaser/astropy,bsipocz/astropy,kelle/astropy,saimn/astropy,astropy/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,kelle/astropy,MSeifert04/astropy,MSeifert04/astropy,astropy/astropy,tbabej/astropy,StuartLittlefair/astropy,pllim/astropy,AustereCuriosity/astropy
|
fb1f03c7d46d9274f144a767830cf9c81078e8c8
|
kovfig.py
|
kovfig.py
|
#! /usr/bin/env python
# coding:utf-8
from os import path
# the number of loop for train IBM Model 2
loop_count = 10
phrase_model_file = path.join(
path.abspath(path.dirname(__file__)),
"phrase.model"
)
bigram_model_file = path.join(
path.abspath(path.dirname(__file__)),
"bigram.model"
)
if __name__ == '__main__':
print("{} = {}".format(
"loop_count",
loop_count))
print("{} = {}".format(
"phrase_model_file",
phrase_model_file))
print("{} = {}".format(
"bigram_model_file",
bigram_model_file))
|
#! /usr/bin/env python
# coding:utf-8
from os import path
# the number of loop for train IBM Model 2
LOOP_COUNT = 10
PHRASE_MODEL_FILE = path.join(
path.abspath(path.dirname(__file__)),
"phrase.model"
)
BIGRAM_MODEL_FILE = path.join(
path.abspath(path.dirname(__file__)),
"bigram.model"
)
if __name__ == '__main__':
print("{} = {}".format(
"LOOP_COUNT",
LOOP_COUNT))
print("{} = {}".format(
"phrase_model_file",
PHRASE_MODEL_FILE))
print("{} = {}".format(
"bigram_model_file",
BIGRAM_MODEL_FILE))
|
Use upper case variable for global vars
|
Use upper case variable for global vars
|
Python
|
mit
|
kenkov/kovlive
|
b9b9382a62b00aa00255fbc9271ef5ec2db8c295
|
fabfile.py
|
fabfile.py
|
from fabric.api import (
cd,
env,
put,
run,
sudo,
task
)
PRODUCTION_IP = '54.154.235.243'
PROJECT_DIRECTORY = '/home/ubuntu/ztm/'
COMPOSE_FILE = 'compose-production.yml'
@task
def production():
env.run = sudo
env.hosts = [
'ubuntu@' + PRODUCTION_IP + ':22',
]
def create_project_directory():
run('mkdir -p ' + PROJECT_DIRECTORY)
def update_compose_file():
put('./' + COMPOSE_FILE, PROJECT_DIRECTORY)
@task
def deploy():
create_project_directory()
update_compose_file()
with cd(PROJECT_DIRECTORY):
env.run('docker-compose -f ' + COMPOSE_FILE + ' pull')
env.run('docker-compose -f ' + COMPOSE_FILE + ' up -d')
|
from datetime import datetime
from fabric.api import (
cd,
env,
put,
run,
sudo,
task
)
PRODUCTION_IP = '54.154.235.243'
PROJECT_DIRECTORY = '/home/ubuntu/ztm/'
BACKUP_DIRECTORY = '/home/ubuntu/backup/'
COMPOSE_FILE = 'compose-production.yml'
@task
def production():
env.run = sudo
env.hosts = [
'ubuntu@' + PRODUCTION_IP + ':22',
]
def create_project_directory():
run('mkdir -p ' + PROJECT_DIRECTORY)
def update_compose_file():
put('./' + COMPOSE_FILE, PROJECT_DIRECTORY)
@task
def do_backup():
backup_time = datetime.now().strftime('%Y-%m-%d_%H%M')
with cd(BACKUP_DIRECTORY):
command = 'tar -cjvf ztm-' + backup_time + \
'.tar.bz2 ' + PROJECT_DIRECTORY
env.run(command)
command = 's3cmd sync ' + BACKUP_DIRECTORY + ' ' \
's3://zendesk-tickets-machine'
run(command)
@task
def deploy():
create_project_directory()
update_compose_file()
with cd(PROJECT_DIRECTORY):
env.run('docker-compose -f ' + COMPOSE_FILE + ' pull')
env.run('docker-compose -f ' + COMPOSE_FILE + ' up -d')
|
Add S3 command for performing backup data
|
Add S3 command for performing backup data
|
Python
|
mit
|
prontotools/zendesk-tickets-machine,prontotools/zendesk-tickets-machine,prontotools/zendesk-tickets-machine,prontotools/zendesk-tickets-machine
|
873b82225d287dcca9b9bc0e0c3746233d15d947
|
utilities.py
|
utilities.py
|
"""
Various utilities
"""
import pprint
def load_state(path):
"""
Load an n-puzzle state from a file into an array and return it.
"""
result = []
with open(path) as f:
for line in f:
result.append(line.split())
return result
def print_state(state):
"""
Prittily returns a puzzle state (a 2D array)
"""
return pprint.pformat(state)
print print_state(load_state('/home/benjamin/npuzz/puzzle_states/1'))
|
"""
Various utilities
"""
import pprint
def load_state(path):
    """
    Load an n-puzzle state from a file into a 2D array and return it.

    Each whitespace-separated token on a line becomes one square:
    numeric tokens are stored as ints, while non-numeric tokens (the
    '*' blank marker) are kept as strings.
    """
    result = []
    with open(path) as f:
        for line in f:
            # One row per line of the file.
            result.append([])
            for square in line.split():
                try:
                    result[-1].append(int(square))
                except ValueError: #if its a * character
                    result[-1].append(square)
    return result
def print_state(state):
"""
Prittily returns a puzzle state (a 2D array)
"""
return pprint.pformat(state)
def check_goal(state):
    """
    Returns True if state is the goal state. Otherwise, returns False.
    state is expected to be a 2D array (n x n).
    """
    size = len(state[0])
    for row in range(size):
        for col in range(size):
            value = state[row][col]
            target = row * size + col + 1
            if value == target:
                continue
            # The final square is the blank, written as '*'.
            if row == col == size - 1 and value == '*':
                continue
            return False
    return True
print check_goal(load_state('/home/benjamin/npuzz/puzzle_states/1'))
|
Add function to check for goal state.
|
Add function to check for goal state.
|
Python
|
mit
|
bandrebandrebandre/npuzz
|
422bb9ebfcff9826cf58d17a20df61cea21fdd77
|
app/supplier_constants.py
|
app/supplier_constants.py
|
# Here we define a set of hardcoded keys that we use when denormalizing data from Supplier/ContactInformation tables
# into the SupplierFramework.declaration field. These are used only by the API and by the
# `digitalmarketplace-scripts/scripts/generate-framework-agreement-*-pages`, which generates framework agreement
# signature pages for successful suppliers to sign. These agreements are populated with some of the details below.
KEY_DUNS_NUMBER = 'supplierDunsNumber'
KEY_ORGANISATION_SIZE = 'supplierOrganisationSize'
KEY_REGISTERED_NAME = 'supplierRegisteredName'
KEY_REGISTRATION_BUILDING = 'supplierRegisteredBuilding'
KEY_REGISTRATION_COUNTRY = 'supplierRegisteredCountry'
KEY_REGISTRATION_NUMBER = 'supplierCompanyRegistrationNumber'
KEY_REGISTRATION_POSTCODE = 'supplierRegisteredPostcode'
KEY_REGISTRATION_TOWN = 'supplierRegisteredTown'
KEY_TRADING_NAME = 'supplierTradingName'
KEY_TRADING_STATUS = 'supplierTradingStatus'
KEY_VAT_NUMBER = 'supplierVatNumber'
|
# Here we define a set of hardcoded keys that we use when denormalizing data from Supplier/ContactInformation tables
# into the SupplierFramework.declaration field. These are used only by the API and by the
# `digitalmarketplace-scripts/scripts/generate-framework-agreement-*-pages`, which generates framework agreement
# signature pages for successful suppliers to sign. These agreements are populated with some of the details below.
KEY_DUNS_NUMBER = 'supplierDunsNumber'
KEY_ORGANISATION_SIZE = 'supplierOrganisationSize'
KEY_REGISTERED_NAME = 'supplierRegisteredName'
KEY_REGISTRATION_BUILDING = 'supplierRegisteredBuilding'
KEY_REGISTRATION_COUNTRY = 'supplierRegisteredCountry'
KEY_REGISTRATION_NUMBER = 'supplierCompanyRegistrationNumber'
KEY_REGISTRATION_POSTCODE = 'supplierRegisteredPostcode'
KEY_REGISTRATION_TOWN = 'supplierRegisteredTown'
KEY_TRADING_NAME = 'supplierTradingName'
KEY_TRADING_STATUS = 'supplierTradingStatus'
|
Remove VAT number from supplier constants
|
Remove VAT number from supplier constants
|
Python
|
mit
|
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
|
99909048bc702e21e980bb1167caf9217aa31196
|
steel/fields/strings.py
|
steel/fields/strings.py
|
import codecs
from steel.fields import Field
from steel.fields.mixin import Fixed
__all__ = ['Bytes', 'String', 'FixedBytes', 'FixedString']
class Bytes(Field):
"A stream of bytes that should be left unconverted"
def encode(self, value):
# Nothing to do here
return value
def decode(self, value):
# Nothing to do here
return value
class String(Field):
"A string that gets converted using a specified encoding"
def __init__(self, *args, encoding, **kwargs):
# Bail out early if the encoding isn't valid
codecs.lookup(encoding)
self.encoding = encoding
super(String, self).__init__(*args, **kwargs)
def encode(self, value):
return value.encode(self.encoding)
def decode(self, value):
return value.decode(self.encoding)
class FixedBytes(Fixed, Bytes):
"A stream of bytes that will always be set to the same value"
# The mixin does the heavy lifting
pass
class FixedString(Fixed, String):
"A stream of bytes that will always be set to the same value"
# The mixin does the heavy lifting
pass
|
import codecs
from steel.fields import Field
from steel.fields.mixin import Fixed
__all__ = ['Bytes', 'String', 'FixedBytes', 'FixedString']
class Bytes(Field):
"A stream of bytes that should be left unconverted"
def encode(self, value):
# Nothing to do here
return value
def decode(self, value):
# Nothing to do here
return value
class String(Field):
"A string that gets converted using a specified encoding"
def __init__(self, *args, encoding, **kwargs):
# Bail out early if the encoding isn't valid
codecs.lookup(encoding)
self.encoding = encoding
super(String, self).__init__(*args, **kwargs)
def encode(self, value):
return value.encode(self.encoding)
def decode(self, value):
return value.decode(self.encoding)
class FixedBytes(Fixed, Bytes):
"A stream of bytes that will always be set to the same value"
# The mixin does the heavy lifting
pass
class FixedString(Fixed, String):
"A string that will always be set to the same value"
# The mixin does the heavy lifting
pass
|
Fix the docstring for FixedString
|
Fix the docstring for FixedString
|
Python
|
bsd-3-clause
|
gulopine/steel-experiment
|
be00af0a0e87af5b4c82107d2f1356f378b65cb4
|
obj_sys/management/commands/tag_the_file.py
|
obj_sys/management/commands/tag_the_file.py
|
import os
from optparse import make_option
from django.core.management import BaseCommand
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
from obj_sys.models_ufs_obj import UfsObj
class FileTagger(DjangoCmdBase):
option_list = BaseCommand.option_list + (
make_option('--tags',
action='store',
dest='tags',
type='string',
help='Tags separated with ","'),
make_option('--file_path',
action='store',
dest='file_path',
type='string',
help='Path of the file to be tagged'),
make_option('--log-file',
action='store',
dest='log_file',
help='Log file destination'),
make_option('--log-std',
action='store_true',
dest='log_std',
help='Redirect stdout and stderr to the logging system'),
)
def msg_loop(self):
# enum_method = enum_git_repo
# pull_all_in_enumerable(enum_method)
if os.path.exists(self.options["file_path"]):
new_file_ufs_obj = UfsObj.objects.get_or_create(full_path=self.options["file_path"])
new_file_ufs_obj.tags = self.options["tags"]
Command = FileTagger
|
import os
from optparse import make_option
from django.core.management import BaseCommand
from djangoautoconf.cmd_handler_base.msg_process_cmd_base import DjangoCmdBase
from obj_sys.models_ufs_obj import UfsObj
class FileTagger(DjangoCmdBase):
option_list = BaseCommand.option_list + (
make_option('--tags',
action='store',
dest='tags',
type='string',
help='Tags separated with ","'),
make_option('--file_path',
action='store',
dest='file_path',
type='string',
help='Path of the file to be tagged'),
make_option('--log-file',
action='store',
dest='log_file',
help='Log file destination'),
make_option('--log-std',
action='store_true',
dest='log_std',
help='Redirect stdout and stderr to the logging system'),
)
def msg_loop(self):
# enum_method = enum_git_repo
# pull_all_in_enumerable(enum_method)
if os.path.exists(self.options["file_path"]):
new_file_ufs_obj, is_created = UfsObj.objects.get_or_create(full_path=self.options["file_path"])
new_file_ufs_obj.tags = self.options["tags"]
Command = FileTagger
|
Fix the issue that get_or_create returns a tuple instead of one object.
|
Fix the issue that get_or_create returns a tuple instead of one object.
|
Python
|
bsd-3-clause
|
weijia/obj_sys,weijia/obj_sys
|
fffb98874066d5762b815987d7e6768a2e9cb03c
|
tests/daemon_uid_gid.py
|
tests/daemon_uid_gid.py
|
#!/usr/bin/env python
from os import getuid, geteuid, getgid, getegid
from sys import argv
from time import sleep
from daemonize import Daemonize
pid = argv[1]
log = argv[2]
def main():
uids = getuid(), geteuid()
gids = getgid(), getegid()
with open(log, "w") as f:
f.write(" ".join(map(str, uids + gids)))
daemon = Daemonize(app="test_app", pid=pid, action=main, user="nobody", group="nobody", keep_fds=[1, 2])
daemon.start()
|
#!/usr/bin/env python
import os.path
from os import getuid, geteuid, getgid, getegid
from sys import argv
from time import sleep

from daemonize import Daemonize

pid = argv[1]
log = argv[2]


def main():
    """Write the daemon's real/effective uid and gid to the log file."""
    uids = getuid(), geteuid()
    gids = getgid(), getegid()
    with open(log, "w") as f:
        f.write(" ".join(map(str, uids + gids)))


# Fix: ``os`` was referenced below without being imported (only names
# *from* os were imported), so this line raised NameError.
# Debian-based distros name the unprivileged group "nogroup".
group = "nogroup" if os.path.exists("/etc/debian_version") else "nobody"
daemon = Daemonize(app="test_app", pid=pid, action=main, user="nobody", group=group)
daemon.start()
|
Support debian based distributives in tests
|
Support debian based distributives in tests
|
Python
|
mit
|
thesharp/daemonize
|
329f4cd5123440baf537db30340fd3d33d7bbbf1
|
games/management/commands/makelove.py
|
games/management/commands/makelove.py
|
from django.core.management.base import BaseCommand
from games import models, bundle
def package_love(stdout, game, release):
if release.get_asset('love') is not None:
stdout.write(u"SKIPPING {}".format(release))
return
upload = release.get_asset('uploaded')
if upload is None:
stdout.write(u"NO UPLOAD {}".format(release))
return
identity = bundle.detect_identity(upload.blob) or game.slug
config = bundle.game_config(game.uuid, identity, release.version)
prefix = "build/love8"
if release.love_version == "0.9.0":
prefix = "build/love9"
# Detect version, fail if not specified
love = bundle.inject_code(game, upload.blob, config)
slug = game.slug
name = game.name
# Create binaries
love_file = bundle.blobify(bundle.package_love, game, love, prefix,
name, slug, release.version)
release.add_asset(love_file, tag='love')
stdout.write(u"FINISHED {}".format(release))
class Command(BaseCommand):
help = 'Backfill LOVE files for all games'
def handle(self, *args, **options):
for game in models.Game.objects.all():
for release in game.release_set.all():
package_love(self.stdout, game, release)
|
import zipfile
from django.core.management.base import BaseCommand
from games import models, bundle
def package_love(stdout, game, release):
if release.get_asset('love') is not None:
stdout.write(u"SKIPPING {}".format(release))
return
upload = release.get_asset('uploaded')
if upload is None:
stdout.write(u"NO UPLOAD {}".format(release))
return
try:
identity = bundle.detect_identity(upload.blob) or game.slug
except zipfile.BadZipfile:
stdout.write(u"BAD ZIP {}".format(release))
return
config = bundle.game_config(game.uuid, identity, release.version)
prefix = "build/love8"
if release.love_version == "0.9.0":
prefix = "build/love9"
# Detect version, fail if not specified
love = bundle.inject_code(game, upload.blob, config)
slug = game.slug
name = game.name
# Create binaries
love_file = bundle.blobify(bundle.package_love, game, love, prefix,
name, slug, release.version)
release.add_asset(love_file, tag='love')
stdout.write(u"FINISHED {}".format(release))
class Command(BaseCommand):
help = 'Backfill LOVE files for all games'
def handle(self, *args, **options):
for game in models.Game.objects.all():
for release in game.release_set.all():
package_love(self.stdout, game, release)
|
Make sure that uploaded files are zipfiles
|
Make sure that uploaded files are zipfiles
|
Python
|
mit
|
stackmachine/bearweb,stackmachine/bearweb,stackmachine/bearweb,stackmachine/bearweb,stackmachine/bearweb,stackmachine/bearweb
|
52430087413e24c94a532e67a2c77248ecc0598c
|
saleor/core/extensions/checks.py
|
saleor/core/extensions/checks.py
|
import importlib
from typing import List
from django.conf import settings
from django.core.checks import Error, register
@register()
def check_extensions(app_configs, **kwargs):
"""Confirm a correct import of plugins and manager."""
errors = []
check_manager(errors)
plugins = settings.PLUGINS or []
for plugin_path in plugins:
check_single_plugin(plugin_path, errors)
return errors
def check_manager(errors: List[Error]):
if not hasattr(settings, "EXTENSIONS_MANAGER") or not settings.EXTENSIONS_MANAGER:
errors.append(Error("Settings should contain EXTENSIONS_MANAGER env"))
return
manager_path, _, manager_name = settings.EXTENSIONS_MANAGER.rpartition(".")
try:
manager_module = importlib.import_module(manager_path)
except ModuleNotFoundError:
errors.append(Error("Extension Manager path: %s doesn't exist" % manager_path))
else:
manager_class = getattr(manager_module, manager_name, None)
if not manager_class:
errors.append(
Error(
"Extension Manager %s doesn't exists in specific path %s"
% (manager_name, str(manager_module))
)
)
def check_single_plugin(plugin_path: str, errors: List[Error]):
if not plugin_path:
errors.append(Error("Wrong plugin_path %s" % plugin_path))
return
plugin_path, _, plugin_name = plugin_path.rpartition(".")
try:
plugin_module = importlib.import_module(plugin_path)
except ModuleNotFoundError:
errors.append(Error("Plugin with path: %s doesn't exist" % plugin_path))
else:
plugin_class = getattr(plugin_module, plugin_name, None)
if not plugin_class:
errors.append(
Error(
"Plugin %s doesn't exists in specific path %s"
% (plugin_name, str(plugin_module))
)
)
|
from typing import List
from django.conf import settings
from django.core.checks import Error, register
from django.utils.module_loading import import_string
@register()
def check_extensions(app_configs, **kwargs):
"""Confirm a correct import of plugins and manager."""
errors = []
check_manager(errors)
plugins = settings.PLUGINS or []
for plugin_path in plugins:
check_single_plugin(plugin_path, errors)
return errors
def check_manager(errors: List[Error]):
if not hasattr(settings, "EXTENSIONS_MANAGER") or not settings.EXTENSIONS_MANAGER:
errors.append(Error("Settings should contain EXTENSIONS_MANAGER env"))
return
try:
import_string(settings.EXTENSIONS_MANAGER)
except ImportError:
errors.append(
Error(
"Extension Manager path: %s doesn't exist" % settings.EXTENSIONS_MANAGER
)
)
def check_single_plugin(plugin_path: str, errors: List[Error]):
if not plugin_path:
errors.append(Error("Wrong plugin_path %s" % plugin_path))
return
try:
import_string(plugin_path)
except ImportError:
errors.append(Error("Plugin with path: %s doesn't exist" % plugin_path))
|
Use django helper to validate manager and plugins paths
|
Use django helper to validate manager and plugins paths
|
Python
|
bsd-3-clause
|
mociepka/saleor,mociepka/saleor,mociepka/saleor,maferelo/saleor,maferelo/saleor,maferelo/saleor
|
6f8a19c46a1d8b6b31039f212e733cd660551de7
|
mws/apis/__init__.py
|
mws/apis/__init__.py
|
from .feeds import Feeds
from .finances import Finances
from .inbound_shipments import InboundShipments
from .inventory import Inventory
from .merchant_fulfillment import MerchantFulfillment
from .offamazonpayments import OffAmazonPayments
from .orders import Orders
from .products import Products
from .recommendations import Recommendations
from .reports import Reports
from .sellers import Sellers
from .outbound_shipments import OutboundShipments
__all__ = [
'Feeds',
'Finances',
'InboundShipments',
'Inventory',
'MerchantFulfillment',
'OffAmazonPayments',
'Orders',
'OutboundShipments',
'Products',
'Recommendations',
'Reports',
'Sellers',
]
|
from .feeds import Feeds
from .finances import Finances
from .inbound_shipments import InboundShipments
from .inventory import Inventory
from .merchant_fulfillment import MerchantFulfillment
from .offamazonpayments import OffAmazonPayments
from .orders import Orders
from .outbound_shipments import OutboundShipments
from .products import Products
from .recommendations import Recommendations
from .reports import Reports
from .sellers import Sellers
from .subscriptions import Subscriptions
__all__ = [
'Feeds',
'Finances',
'InboundShipments',
'Inventory',
'MerchantFulfillment',
'OffAmazonPayments',
'Orders',
'OutboundShipments',
'Products',
'Recommendations',
'Reports',
'Sellers',
'Subscriptions',
]
|
Include the new Subscriptions stub
|
Include the new Subscriptions stub
|
Python
|
unlicense
|
Bobspadger/python-amazon-mws,GriceTurrble/python-amazon-mws
|
22935ee89217ac1f8b8d3c921571381336069584
|
lctools/lc.py
|
lctools/lc.py
|
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
import libcloud.security
from config import get_config
def get_lc(profile, resource=None):
if resource is None:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
else:
pkg_name = 'libcloud.%s' % resource
Provider = __import__(pkg_name + ".types",
globals(), locals(), ['Provider'], -1).Provider
get_driver = __import__(pkg_name + ".providers",
globals(), locals(), ['get_driver'], -1).get_driver
conf = get_config(profile)
libcloud.security.VERIFY_SSL_CERT = conf.get('verify_ssl_certs') == 'true'
extra_kwargs = {}
extra = conf.get("extra")
if extra != "":
extra_kwargs = eval(extra)
if not isinstance(extra_kwargs, dict):
raise Exception('Extra arguments should be a Python dict')
driver = get_driver(getattr(Provider, conf.get('driver').upper()))
conn = driver(conf.get('access_id'), conf.get('secret_key'), **extra_kwargs)
return conn
|
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
import libcloud.security
from config import get_config
def get_lc(profile, resource=None):
if resource is None:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
else:
pkg_name = 'libcloud.%s' % resource
Provider = __import__(pkg_name + ".types",
globals(), locals(), ['Provider'], -1).Provider
get_driver = __import__(pkg_name + ".providers",
globals(), locals(), ['get_driver'], -1).get_driver
conf = get_config(profile)
libcloud.security.VERIFY_SSL_CERT = conf.get('verify_ssl_certs') == 'true'
extra_kwargs = {}
extra = conf.get("extra")
if extra != "":
extra_kwargs = eval(extra)
if not isinstance(extra_kwargs, dict):
raise Exception('Extra arguments should be a Python dict')
# a hack because libcloud driver names for Rackspace doesn't match
# for loadbalancers and compute
driver_name = conf.get('driver').upper()
if 'loadbalancer' == resource and 'RACKSPACE' == driver_name:
driver_name += "_US"
driver = get_driver(getattr(Provider, driver_name))
conn = driver(conf.get('access_id'), conf.get('secret_key'), **extra_kwargs)
return conn
|
Add a hack to overcome driver name inconsistency in libcloud.
|
Add a hack to overcome driver name inconsistency in libcloud.
|
Python
|
apache-2.0
|
novel/lc-tools,novel/lc-tools
|
c541e85f8b1dccaabd047027e89791d807550ee5
|
fade/config.py
|
fade/config.py
|
#!/usr/bin/env python
"""
See LICENSE.txt file for copyright and license details.
"""
import os
basedir = os.path.abspath(os.path.dirname(__file__))
WTF_CSRF_ENABLED = True
SECRET_KEY = '3124534675689780'
# TODO: switch this to postgresql
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
|
#!/usr/bin/env python
"""
See LICENSE.txt file for copyright and license details.
"""
import os
basedir = os.path.abspath(os.path.dirname(__file__))
WTF_CSRF_ENABLED = True
SECRET_KEY = '3124534675689780'
dbuser = 'rockwolf'
dbpass = ''
dbhost = 'testdb'
dbname = 'finance'
SQLALCHEMY_DATABASE_URI = 'postgresql://'
+ dbuser
+ ':'
+ dbpass
+ '@'
+ dbhost
+ '/'
+ dbname
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'database')
|
Switch database connection string to pg
|
Switch database connection string to pg
|
Python
|
bsd-3-clause
|
rockwolf/python,rockwolf/python,rockwolf/python,rockwolf/python,rockwolf/python,rockwolf/python
|
6c8feca973703cf87a82cfa954fa3c7a3f152c72
|
manage.py
|
manage.py
|
from project import app, db
from project import models
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
if __name__ == '__main__':
manager.run()
|
from project import app, db
from project import models
from flask.ext.script import Manager
from flask.ext.migrate import Migrate, MigrateCommand
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
@manager.command
def create_db():
"""Creates the db tables."""
db.create_all()
@manager.command
def drop_db():
"""Drops the db tables."""
db.drop_all()
@manager.command
def create_admin():
"""Creates the admin user."""
db.session.add(User("ad@min.com", "admin"))
db.session.commit()
if __name__ == '__main__':
manager.run()
|
Create create_db, drop_db and create_admin functions
|
Create create_db, drop_db and create_admin functions
|
Python
|
mit
|
dylanshine/streamschool,dylanshine/streamschool
|
e6f3bd9c61be29560e09f5d5d9c7e355ec14c2e3
|
manage.py
|
manage.py
|
#!/usr/bin/env python
import sys
if __name__ == "__main__":
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
#!/usr/bin/env python
import sys
import os
if __name__ == "__main__":
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'settings')
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
|
Set a default settings module
|
Set a default settings module
|
Python
|
bsd-3-clause
|
wagnerand/olympia,andymckay/addons-server,kumar303/addons-server,andymckay/olympia,aviarypl/mozilla-l10n-addons-server,aviarypl/mozilla-l10n-addons-server,wagnerand/addons-server,lavish205/olympia,Prashant-Surya/addons-server,harry-7/addons-server,harikishen/addons-server,mstriemer/olympia,mozilla/addons-server,andymckay/addons-server,mozilla/addons-server,mstriemer/addons-server,eviljeff/olympia,lavish205/olympia,diox/olympia,andymckay/olympia,mstriemer/olympia,aviarypl/mozilla-l10n-addons-server,harikishen/addons-server,mstriemer/olympia,harry-7/addons-server,wagnerand/olympia,wagnerand/olympia,psiinon/addons-server,kumar303/addons-server,mozilla/olympia,atiqueahmedziad/addons-server,atiqueahmedziad/addons-server,mozilla/addons-server,lavish205/olympia,psiinon/addons-server,mstriemer/addons-server,Revanth47/addons-server,diox/olympia,eviljeff/olympia,kumar303/addons-server,aviarypl/mozilla-l10n-addons-server,kumar303/addons-server,bqbn/addons-server,tsl143/addons-server,andymckay/olympia,harikishen/addons-server,mstriemer/addons-server,eviljeff/olympia,atiqueahmedziad/addons-server,tsl143/addons-server,Prashant-Surya/addons-server,mozilla/addons-server,harry-7/addons-server,psiinon/addons-server,kumar303/olympia,bqbn/addons-server,andymckay/olympia,wagnerand/addons-server,eviljeff/olympia,wagnerand/addons-server,mstriemer/addons-server,mstriemer/olympia,kumar303/olympia,mozilla/olympia,andymckay/addons-server,diox/olympia,mozilla/olympia,bqbn/addons-server,Revanth47/addons-server,Prashant-Surya/addons-server,Revanth47/addons-server,wagnerand/olympia,atiqueahmedziad/addons-server,andymckay/addons-server,tsl143/addons-server,harry-7/addons-server,bqbn/addons-server,Prashant-Surya/addons-server,harikishen/addons-server,wagnerand/addons-server,kumar303/olympia,diox/olympia,lavish205/olympia,mozilla/olympia,Revanth47/addons-server,kumar303/olympia,tsl143/addons-server,psiinon/addons-server
|
34f0e697ba4d6a787f0f4fc294163a09a52c185f
|
tests/test_arrayfire.py
|
tests/test_arrayfire.py
|
import arrayfire
# We're going to test several arrayfire behaviours that we rely on
from asserts import *
import afnumpy as af
import numpy as np
def test_cast():
a = afnumpy.random.rand(2,3)
# Check that device_ptr does not cause a copy
assert a.d_array.device_ptr() == a.d_array.device_ptr()
# Check that cast does not cause a copy
assert arrayfire.cast(a.d_array, a.d_array.dtype()).device_ptr() == a.d_array.device_ptr()
|
import arrayfire
# We're going to test several arrayfire behaviours that we rely on
from asserts import *
import afnumpy as af
import numpy as np
def test_af_cast():
a = afnumpy.arrayfire.randu(2,3)
# Check that device_ptr does not cause a copy
assert a.device_ptr() == a.device_ptr()
# Check that cast does not cause a copy
assert arrayfire.cast(a, a.dtype()).device_ptr() == a.device_ptr()
def test_cast():
a = afnumpy.random.rand(2,3)
# Check that device_ptr does not cause a copy
assert a.d_array.device_ptr() == a.d_array.device_ptr()
# Check that cast does not cause a copy
assert arrayfire.cast(a.d_array, a.d_array.dtype()).device_ptr() == a.d_array.device_ptr()
|
Add a pure arrayfire cast test to check for seg faults
|
Add a pure arrayfire cast test to check for seg faults
|
Python
|
bsd-2-clause
|
FilipeMaia/afnumpy,daurer/afnumpy
|
aac31b69da5ec3a3622ca7752e8273886b344683
|
sublist/sublist.py
|
sublist/sublist.py
|
SUPERLIST = "superlist"
SUBLIST = "sublist"
EQUAL = "equal"
UNEQUAL = "unequal"
def check_lists(a, b):
if a == b:
return EQUAL
elif is_sublist(a, b):
return SUBLIST
elif is_sublist(b, a):
return SUPERLIST
else:
return UNEQUAL
def is_sublist(a, b):
return a in [b[i:i + len(a)] for i in range(len(b) - len(a) + 1)]
|
SUPERLIST = "superlist"
SUBLIST = "sublist"
EQUAL = "equal"
UNEQUAL = "unequal"
VERY_UNLIKELY_STRING = "ꗲꅯḪꍙ"
def check_lists(a, b):
if a == b:
return EQUAL
_a = VERY_UNLIKELY_STRING.join(map(str, a))
_b = VERY_UNLIKELY_STRING.join(map(str, b))
if _a in _b:
return SUBLIST
elif _b in _a:
return SUPERLIST
else:
return UNEQUAL
|
Switch back to the substring method - it's faster
|
Switch back to the substring method - it's faster
|
Python
|
agpl-3.0
|
CubicComet/exercism-python-solutions
|
6eae274fc200df9319e82abf99d0f2314a17a2af
|
formlibrary/migrations/0005_auto_20171204_0203.py
|
formlibrary/migrations/0005_auto_20171204_0203.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-04 10:03
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import uuid
class Migration(migrations.Migration):
dependencies = [
('workflow', '0007_auto_20171204_0203'),
('formlibrary', '0004_customform_created_by'),
]
operations = [
migrations.AddField(
model_name='customform',
name='form_uuid',
field=models.CharField(default='', max_length=255, verbose_name='CustomForm UUID'),
),
migrations.AddField(
model_name='customform',
name='silo_id',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='customform',
name='workflowlevel1',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel1'),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-04 10:03
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('workflow', '0007_auto_20171204_0203'),
('formlibrary', '0004_customform_created_by'),
]
operations = [
migrations.AddField(
model_name='customform',
name='form_uuid',
field=models.CharField(default='', max_length=255, verbose_name='CustomForm UUID'),
),
migrations.AddField(
model_name='customform',
name='silo_id',
field=models.IntegerField(default=0),
),
migrations.AddField(
model_name='customform',
name='workflowlevel1',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='workflow.WorkflowLevel1'),
),
]
|
Split migration script of customform
|
Split migration script of customform
|
Python
|
apache-2.0
|
toladata/TolaActivity,toladata/TolaActivity,toladata/TolaActivity,toladata/TolaActivity
|
265edc24561bdacfae2412680048c203f7f78c14
|
calendarapp.py
|
calendarapp.py
|
from kivy.app import App
class CalendarApp(App):
"""Basic App to hold the calendar widget."""
def build(self):
return self.root
|
import kivy
kivy.require('1.8.0')
from kivy.config import Config
Config.set('graphics', 'width', '360')
Config.set('graphics', 'height', '640')
from kivy.app import App
class CalendarApp(App):
"""Basic App to hold the calendar widget."""
def build(self):
return self.root
|
Set the window size to emulate a mobile device
|
Set the window size to emulate a mobile device
|
Python
|
mit
|
hackebrot/garden.calendar
|
401aafee6979cc95692555548b1fc10dea44a44e
|
product/api/views.py
|
product/api/views.py
|
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from .serializers import ProductSerializer
from ..models import Product
from django.http import Http404
from rest_framework.views import APIView
class ProductDetail(APIView):
permission_classes = (IsAuthenticated,)
"""
Retrieve a product instance.
"""
def get_object(self, slug):
try:
return Product.objects.get(code=slug)
except Product.DoesNotExist:
raise Http404
def get(self, request, slug, format=None):
snippet = self.get_object(slug)
serializer = ProductSerializer(snippet)
return Response(serializer.data)
|
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from .serializers import ProductSerializer
from ..models import Product
from django.http import Http404
from rest_framework.views import APIView
class ProductDetail(APIView):
permission_classes = (IsAuthenticated,)
"""
Retrieve a product instance.
"""
def get_object(self, code):
try:
return Product.get_by_code(code=code)
except Product.DoesNotExist:
raise Http404
def get(self, request, slug, format=None):
snippet = self.get_object(slug)
serializer = ProductSerializer(snippet)
return Response(serializer.data)
|
Use remote fallback for API request
|
Use remote fallback for API request
|
Python
|
bsd-3-clause
|
KlubJagiellonski/pola-backend,KlubJagiellonski/pola-backend,KlubJagiellonski/pola-backend,KlubJagiellonski/pola-backend
|
17bd35d7a2b442faebdb39aad07294612d8e7038
|
nflh/games.py
|
nflh/games.py
|
from datetime import datetime
GAME_VIDEO_BASE_URL = "http://www.nfl.com/feeds-rs/videos/byGameCenter/{0}.json"
LIVE_UPDATE_BASE_URL = "http://www.nfl.com/liveupdate/game-center/{0}/{0}_gtd.json"
class Game(object):
def __init__(self, id_, h, v):
self.id_ = id_
self.date = self.id_[:-2]
self.home = h
self.vis = v
self.latest_play_id = ""
self.latest_clip_id = ""
def is_today(self):
return self.date == str((datetime.today()).strftime('%Y%m%d'))
def video_url(self):
return GAME_VIDEO_BASE_URL.format(self.id_)
def live_update_url(self):
return LIVE_UPDATE_BASE_URL.format(self.id_)
|
from datetime import datetime
GAME_VIDEO_BASE_URL = "http://www.nfl.com/feeds-rs/videos/byGameCenter/{0}.json"
LIVE_UPDATE_BASE_URL = "http://www.nfl.com/liveupdate/game-center/{0}/{0}_gtd.json"
class Game(object):
def __init__(self, id_, h, v):
self.id_ = id_
self.date = self.id_[:-2]
self.home = h
self.vis = v
self.latest_play_id = ""
self.latest_clip_id = ""
self.videos = {}
def is_today(self):
return self.date == str((datetime.today()).strftime('%Y%m%d'))
def video_url(self):
return GAME_VIDEO_BASE_URL.format(self.id_)
def live_update_url(self):
return LIVE_UPDATE_BASE_URL.format(self.id_)
|
Add videos dict to Games.
|
Add videos dict to Games.
|
Python
|
apache-2.0
|
twbarber/nfl-highlight-bot
|
c36b0639190de6517260d6b6e8e5991976336760
|
shared/btr3baseball/DatasourceRepository.py
|
shared/btr3baseball/DatasourceRepository.py
|
import json
resource_package = __name__
resource_path_format = 'datasource/{}.json'
class DatasourceRepository:
def __init__(self):
self.availableSources = json.loads(pkg_resources.resource_string(resource_package, resource_path_format.format('all')))['available']
self.data = {}
for source in availableSources:
self.data[source] = json.loads(pkg_resources.resource_string(resource_package, resource_path_format.format(source)))
def listDatasources(self):
return self.availableSources
def getDatasource(self, sourceId):
if sourceId in self.data:
return self.data[sourceId]
else:
return None
|
import pkg_resources
import json
resource_package = __name__
resource_path_format = 'datasource/{}.json'
class DatasourceRepository:
def __init__(self):
self.availableSources = json.loads(pkg_resources.resource_string(resource_package, resource_path_format.format('all')))['available']
self.data = {}
for source in availableSources:
self.data[source] = json.loads(pkg_resources.resource_string(resource_package, resource_path_format.format(source)))
def listDatasources(self):
return self.availableSources
def getDatasource(self, sourceId):
if sourceId in self.data:
return self.data[sourceId]
else:
return None
|
Add pkg_resources back, working forward
|
Add pkg_resources back, working forward
|
Python
|
apache-2.0
|
bryantrobbins/baseball,bryantrobbins/baseball,bryantrobbins/baseball,bryantrobbins/baseball,bryantrobbins/baseball
|
e9949cdf609aeb99cfe97c37638c6cb80c947198
|
longclaw/longclawshipping/wagtail_hooks.py
|
longclaw/longclawshipping/wagtail_hooks.py
|
from wagtail.contrib.modeladmin.options import (
ModelAdmin, modeladmin_register
)
from longclaw.longclawshipping.models import ShippingCountry
class ShippingCountryModelAdmin(ModelAdmin):
model = ShippingCountry
menu_order = 200
menu_icon = 'site'
add_to_settings_menu = False
exclude_from_explorer = True
list_display = ('country', 'country_code', 'shipping_rates')
def flag(self, obj):
return obj.country.flag
def country_code(self, obj):
return obj.country.alpha3
def shipping_rates(self, obj):
return ", ".join(str(rate) for rate in obj.shipping_rates.all())
modeladmin_register(ShippingCountryModelAdmin)
|
from wagtail.contrib.modeladmin.options import (
ModelAdmin, modeladmin_register
)
from longclaw.longclawshipping.models import ShippingCountry
class ShippingCountryModelAdmin(ModelAdmin):
model = ShippingCountry
menu_label = 'Shipping'
menu_order = 200
menu_icon = 'site'
add_to_settings_menu = False
exclude_from_explorer = True
list_display = ('country', 'country_code', 'shipping_rates')
def flag(self, obj):
return obj.country.flag
def country_code(self, obj):
return obj.country.alpha3
def shipping_rates(self, obj):
return ", ".join(str(rate) for rate in obj.shipping_rates.all())
modeladmin_register(ShippingCountryModelAdmin)
|
Rename shipping label in model admin
|
Rename shipping label in model admin
|
Python
|
mit
|
JamesRamm/longclaw,JamesRamm/longclaw,JamesRamm/longclaw,JamesRamm/longclaw
|
8eddab84f27d5c068f5da477e05736c222cac4e2
|
mass/utils.py
|
mass/utils.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Helper functions.
"""
# built-in modules
import json
# 3rd-party modules
from botocore.client import Config
# local modules
from mass.exception import UnsupportedScheduler
from mass.input_handler import InputHandler
def submit(job, protocol=None, priority=1, scheduler='swf'):
"""Submit mass job to SWF with specific priority.
"""
if scheduler != 'swf':
raise UnsupportedScheduler(scheduler)
from mass.scheduler.swf import config
import boto3
client = boto3.client(
'swf',
region_name=config.REGION,
config=Config(connect_timeout=config.CONNECT_TIMEOUT,
read_timeout=config.READ_TIMEOUT))
handler = InputHandler(protocol)
res = client.start_workflow_execution(
domain=config.DOMAIN,
workflowId=job.title,
workflowType=config.WORKFLOW_TYPE_FOR_JOB,
taskList={'name': config.DECISION_TASK_LIST},
taskPriority=str(priority),
input=json.dumps({
'protocol': protocol,
'body': handler.save(
data=job,
job_title=job.title,
task_title=job.title
)
}),
executionStartToCloseTimeout=str(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT),
tagList=[job.title],
taskStartToCloseTimeout=str(config.DECISION_TASK_START_TO_CLOSE_TIMEOUT),
childPolicy=config.WORKFLOW_CHILD_POLICY)
return job.title, res['runId']
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Helper functions.
"""
# built-in modules
import json
# 3rd-party modules
from botocore.client import Config
# local modules
from mass.exception import UnsupportedScheduler
from mass.input_handler import InputHandler
def submit(job, protocol=None, priority=1, scheduler='swf'):
"""Submit mass job to SWF with specific priority.
"""
if scheduler != 'swf':
raise UnsupportedScheduler(scheduler)
from mass.scheduler.swf import config
import boto3
client = boto3.client(
'swf',
region_name=config.REGION,
config=Config(connect_timeout=config.CONNECT_TIMEOUT,
read_timeout=config.READ_TIMEOUT))
handler = InputHandler(protocol)
res = client.start_workflow_execution(
domain=config.DOMAIN,
workflowId=job.title,
workflowType=config.WORKFLOW_TYPE_FOR_JOB,
taskList={'name': config.DECISION_TASK_LIST},
taskPriority=str(priority),
input=json.dumps({
'protocol': protocol,
'body': handler.save(
data=job,
genealogy=[job.title]
)
}),
executionStartToCloseTimeout=str(config.WORKFLOW_EXECUTION_START_TO_CLOSE_TIMEOUT),
tagList=[job.title],
taskStartToCloseTimeout=str(config.DECISION_TASK_START_TO_CLOSE_TIMEOUT),
childPolicy=config.WORKFLOW_CHILD_POLICY)
return job.title, res['runId']
|
Use [job.title] as genealogy of input_handler.save while submit job.
|
Use [job.title] as genealogy of input_handler.save while submit job.
|
Python
|
apache-2.0
|
badboy99tw/mass,KKBOX/mass,KKBOX/mass,badboy99tw/mass,KKBOX/mass,badboy99tw/mass
|
facaa380b9b0fbb8f5d6d4d7c6c24257235cbb65
|
plugin.py
|
plugin.py
|
# -*- coding: utf-8 -*-
"""Load and Unload all GitGutter modules.
This module exports __all__ modules, which Sublime Text needs to know about.
The list of __all__ exported symbols is defined in modules/__init__.py.
"""
try:
from .modules import *
except ValueError:
from modules import *
def plugin_loaded():
"""Plugin loaded callback."""
try:
# Reload 'modules' once after upgrading to ensure GitGutter is ready
# for use instantly again. (Works with ST3 and python3 only!)
from package_control import events
if events.post_upgrade(__package__):
from .modules.reload import reload_package
reload_package(__package__)
except ImportError:
# Fail silently if package control isn't installed.
pass
|
# -*- coding: utf-8 -*-
"""Load and Unload all GitGutter modules.
This module exports __all__ modules, which Sublime Text needs to know about.
The list of __all__ exported symbols is defined in modules/__init__.py.
"""
try:
from .modules import *
except ValueError:
from modules import *
except ImportError:
# Failed to import at least one module. This can happen after upgrade due
# to internal structure changes.
import sublime
sublime.message_dialog(
"GitGutter failed to reload some of its modules.\n"
"Please restart Sublime Text!")
def plugin_loaded():
"""Plugin loaded callback."""
try:
# Reload 'modules' once after upgrading to ensure GitGutter is ready
# for use instantly again. (Works with ST3 and python3 only!)
from package_control import events
if events.post_upgrade(__package__):
from .modules.reload import reload_package
reload_package(__package__)
except ImportError:
# Fail silently if package control isn't installed.
pass
|
Handle module reload exceptions gracefully
|
Enhancement: Handle module reload exceptions gracefully
In some rare cases if the internal module structure has changed the 'reload' module can't recover all modules and will fail with ImportError. This is the situation we need to advice a restart of Sublime Text.
|
Python
|
mit
|
jisaacks/GitGutter
|
a07ac44d433981b7476ab3b57339797edddb368c
|
lenet_slim.py
|
lenet_slim.py
|
import tensorflow as tf
slim = tf.contrib.slim
def le_net(images, num_classes=10, scope='LeNet'):
with tf.variable_scope(scope, 'LeNet', [images, num_classes]):
net = slim.conv2d(images, 32, [5, 5], scope='conv1')
net = slim.max_pool2d(net, [2, 2], 2, scope='pool1')
net = slim.conv2d(net, 64, [5, 5], scope='conv2')
net = slim.max_pool2d(net, [2, 2], 2, scope='pool2')
gap = tf.reduce_mean(net, (1, 2))
with tf.variable_scope('GAP'):
gap_w = tf.get_variable('W', shape=[64, 10], initializer=tf.random_normal_initializer(0., 0.01))
logits = tf.matmul(gap, gap_w)
return logits, net
def le_net_arg_scope(weight_decay=0.0):
with slim.arg_scope(
[slim.conv2d, slim.fully_connected],
weights_regularizer=slim.l2_regularizer(weight_decay),
weights_initializer=tf.truncated_normal_initializer(stddev=0.1),
activation_fn=tf.nn.relu) as sc:
return sc
|
import tensorflow as tf
slim = tf.contrib.slim
def le_net(images, num_classes=10, scope='LeNet'):
with tf.variable_scope(scope, 'LeNet', [images, num_classes]):
net = slim.conv2d(images, 32, [5, 5], scope='conv1')
net = slim.max_pool2d(net, [2, 2], 2, scope='pool1')
net = slim.conv2d(net, 64, [5, 5], scope='conv2')
net = slim.max_pool2d(net, [2, 2], 2, scope='pool2')
gap = tf.reduce_mean(net, (1, 2))
with tf.variable_scope('GAP'):
gap_w = tf.get_variable('W', shape=[64, num_classes], initializer=tf.random_normal_initializer(0., 0.01))
logits = tf.matmul(gap, gap_w)
return logits, net
def le_net_arg_scope(weight_decay=0.0):
with slim.arg_scope(
[slim.conv2d, slim.fully_connected],
weights_regularizer=slim.l2_regularizer(weight_decay),
weights_initializer=tf.truncated_normal_initializer(stddev=0.1),
activation_fn=tf.nn.relu) as sc:
return sc
|
Fix the shape of gap_w
|
Fix the shape of gap_w
|
Python
|
mit
|
philipperemy/tensorflow-class-activation-mapping
|
7f48dde064acbf1c192ab0bf303ac8e80e56e947
|
wafer/kv/models.py
|
wafer/kv/models.py
|
from django.contrib.auth.models import Group
from django.db import models
from jsonfield import JSONField
class KeyValue(models.Model):
group = models.ForeignKey(Group, on_delete=models.CASCADE)
key = models.CharField(max_length=64, db_index=True)
value = JSONField()
def __unicode__(self):
return u'KV(%s, %s, %r)' % (self.group.name, self.key, self.value)
def __str__(self):
return 'KV(%s, %s, %r)' % (self.group.name, self.key, self.value)
|
from django.contrib.auth.models import Group
from django.db import models
from django.utils.encoding import python_2_unicode_compatible
from jsonfield import JSONField
@python_2_unicode_compatible
class KeyValue(models.Model):
group = models.ForeignKey(Group, on_delete=models.CASCADE)
key = models.CharField(max_length=64, db_index=True)
value = JSONField()
def __str__(self):
return 'KV(%s, %s, %r)' % (self.group.name, self.key, self.value)
|
Use @python_2_unicode_compatible rather than repeating methods
|
Use @python_2_unicode_compatible rather than repeating methods
|
Python
|
isc
|
CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
|
9be09ccf5749fae1d7a72663d592de5a88a755eb
|
archive/archive_api/src/responses.py
|
archive/archive_api/src/responses.py
|
# -*- encoding: utf-8
import json
from flask import Response, jsonify
class ContextResponse(Response):
"""
This class adds the "@context" parameter to JSON responses before
they're sent to the user.
For an explanation of how this works/is used, read
https://blog.miguelgrinberg.com/post/customizing-the-flask-response-class
"""
context_url = "https://api.wellcomecollection.org/storage/v1/context.json"
def __init__(self, response, **kwargs):
# Here we unmarshal the response as provided by Flask-RESTPlus, add
# the @context parameter, then repack it.
rv = json.loads(response)
# The @context may already be provided if we've been through the
# force_type method below.
if "@context" in rv:
return super(ContextResponse, self).__init__(response, **kwargs)
else:
rv["@context"] = self.context_url
return super(ContextResponse, self).__init__(json.dumps(rv), **kwargs)
@classmethod
def force_type(cls, rv, environ=None):
# All of our endpoints should be returning a dictionary to be
# serialised as JSON.
assert isinstance(rv, dict)
assert "@context" not in rv, rv
rv["@context"] = cls.context_url
return super(ContextResponse, cls).force_type(jsonify(rv), environ)
|
# -*- encoding: utf-8
import json
from flask import Response, jsonify
from werkzeug.wsgi import ClosingIterator
class ContextResponse(Response):
"""
This class adds the "@context" parameter to JSON responses before
they're sent to the user.
For an explanation of how this works/is used, read
https://blog.miguelgrinberg.com/post/customizing-the-flask-response-class
"""
context_url = "https://api.wellcomecollection.org/storage/v1/context.json"
def __init__(self, response, *args, **kwargs):
"""
Unmarshal the response as provided by Flask-RESTPlus, add the
@context parameter, then repack it.
"""
if isinstance(response, ClosingIterator):
response = b''.join([char for char in response])
rv = json.loads(response)
# The @context may already be provided if we've been through the
# force_type method below.
if "@context" in rv:
return super(ContextResponse, self).__init__(response, **kwargs)
else:
rv["@context"] = self.context_url
json_string = json.dumps(rv)
return super(ContextResponse, self).__init__(json_string, **kwargs)
@classmethod
def force_type(cls, rv, environ=None):
# All of our endpoints should be returning a dictionary to be
# serialised as JSON.
assert isinstance(rv, dict)
assert "@context" not in rv, rv
rv["@context"] = cls.context_url
return super(ContextResponse, cls).force_type(jsonify(rv), environ)
|
Handle a Werkzeug ClosingIterator (as exposed by the tests)
|
Handle a Werkzeug ClosingIterator (as exposed by the tests)
|
Python
|
mit
|
wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api,wellcometrust/platform-api
|
939e5721300013b2977375f28897a6a573509112
|
xml4h/exceptions.py
|
xml4h/exceptions.py
|
"""
Custom *xml4h* exceptions.
"""
class BaseXml4hException(Exception):
"""
Base exception class for all non-standard exceptions raised by *xml4h*.
"""
pass
class FeatureUnavailableException(BaseXml4hException):
"""
User has attempted to use a feature that is available in some *xml4h*
implementations/adapters, but is not available in the current one.
"""
pass
class IncorrectArgumentTypeException(ValueError, BaseXml4hException):
"""
Richer flavour of a ValueError that describes exactly what argument
types are expected.
"""
def __init__(self, arg, expected_types):
msg = (u'Argument %s is not one of the expected types: %s'
% (arg, expected_types))
super(IncorrectArgumentTypeException, self).__init__(msg)
|
"""
Custom *xml4h* exceptions.
"""
class Xml4hException(Exception):
"""
Base exception class for all non-standard exceptions raised by *xml4h*.
"""
pass
class FeatureUnavailableException(Xml4hException):
"""
User has attempted to use a feature that is available in some *xml4h*
implementations/adapters, but is not available in the current one.
"""
pass
class IncorrectArgumentTypeException(ValueError, Xml4hException):
"""
Richer flavour of a ValueError that describes exactly what argument
types are expected.
"""
def __init__(self, arg, expected_types):
msg = (u'Argument %s is not one of the expected types: %s'
% (arg, expected_types))
super(IncorrectArgumentTypeException, self).__init__(msg)
|
Rename base exception class; less ugly
|
Rename base exception class; less ugly
|
Python
|
mit
|
jmurty/xml4h,pipermerriam/xml4h,czardoz/xml4h
|
a23e211ebdee849543cd7c729a8dafc145ea6b5c
|
TorGTK/var.py
|
TorGTK/var.py
|
from gi.repository import Gtk
import tempfile
import os.path
version = "0.2.2"
# Define default port numbers
default_socks_port = 19050
default_control_port = 19051
# Tor process descriptor placeholder
tor_process = None
# Tor logfile location placeholder
tor_logfile_dir = tempfile.mkdtemp()
tor_logfile_location = os.path.join(tor_logfile_dir, "tor_log")
# User preferences location placeholder
home_dir = os.path.expanduser("~")
prefs_dir = os.path.join(home_dir, ".local", "share", "torgtk")
prefs_file = os.path.join(prefs_dir, "config")
# Define object dictionary
objs = { }
objs["menuMain"] = Gtk.Menu()
# Define error message types
InfoBox = Gtk.MessageType.INFO
ErrorBox = Gtk.MessageType.ERROR
|
from gi.repository import Gtk
import tempfile
import os.path
import platform
version = "0.2.2"
# Define default port numbers
default_socks_port = 19050
default_control_port = 19051
# Tor process descriptor placeholder
tor_process = None
# Tor logfile location placeholder
tor_logfile_dir = tempfile.mkdtemp()
tor_logfile_location = os.path.join(tor_logfile_dir, "tor_log")
# User preferences location placeholder
if platform.system() == "Windows":
prefs_dir = os.path.join(os.getenv("APPDATA"), "torgtk")
prefs_file = os.path.join(prefs_dir, "config")
else:
home_dir = os.path.expanduser("~")
prefs_dir = os.path.join(home_dir, ".local", "share", "torgtk")
prefs_file = os.path.join(prefs_dir, "config")
# Define object dictionary
objs = { }
objs["menuMain"] = Gtk.Menu()
# Define error message types
InfoBox = Gtk.MessageType.INFO
ErrorBox = Gtk.MessageType.ERROR
|
Add OS detection (mainly Windows vs Unix) to preferences directory selection
|
Add OS detection (mainly Windows vs Unix) to preferences directory selection
|
Python
|
bsd-2-clause
|
neelchauhan/TorGTK,neelchauhan/TorNova
|
3e54119f07b0fdcbbe556e86de3c161a3eb20ddf
|
mwikiircbot.py
|
mwikiircbot.py
|
import ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
bot.joinchan(chan)
def main(cmd, args):
if len(args) < 1:
print("Usage: `" + cmd + " <host> <channel> [<channel> ...]` (for full arguments, see the readme)")
return
else:
Handler(host=args[0])
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
|
import ircbotframe
import sys
class Handler:
def __init__(self, host, port=6667, name="MediaWiki", description="MediaWiki recent changes bot", channels=[]):
self.channels = channels
self.bot = ircbotframe.ircBot(host, port, name, description)
self.bot.bind("376", self.endMOTD)
self.bot.start()
def endMOTD(self, sender, headers, message):
for chan in self.channels:
self.bot.joinchan(chan)
def main(cmd, args):
if len(args) < 2:
print("Usage: " + cmd + " <host> <channel> [<channel> ...]")
return
elif len(args) > 1:
Handler(host=args[0], channels=args[1:])
if __name__ == "__main__":
if __name__ == '__main__':
main(sys.argv[0], sys.argv[1:] if len(sys.argv) > 1 else [])
|
Fix bot not joining any channels
|
Fix bot not joining any channels
Also removed unnecessary usage comment.
|
Python
|
mit
|
fenhl/mwikiircbot
|
e91a923efd7cff36368059f47ffbd52248362305
|
me_api/middleware/me.py
|
me_api/middleware/me.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from flask import Blueprint, jsonify
from me_api.configs import Config
me = Blueprint('me', __name__)
@me.route('/')
def index():
routers = []
for module in Config.modules['modules'].values():
routers.append(module['path'])
return jsonify(me=Config.me, routers=routers)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from flask import Blueprint, jsonify
from me_api.configs import Config
me = Blueprint('me', __name__)
@me.route('/')
def index():
routers = [module_config['path'] for module_config in
Config.modules['modules'].values()]
return jsonify(me=Config.me, routers=routers)
|
Improve the way that get all the routers
|
Improve the way that get all the routers
|
Python
|
mit
|
lord63/me-api
|
850fba4b07e4c444aa8640c6f4c3816f8a3259ea
|
website_medical_patient_species/controllers/main.py
|
website_medical_patient_species/controllers/main.py
|
# -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp import http
from openerp.http import request
from openerp.addons.website_medical.controllers.main import (
WebsiteMedical
)
class WebsiteMedical(WebsiteMedical):
def _inject_medical_detail_vals(self, patient_id=0, **kwargs):
vals = super(WebsiteMedical, self)._inject_medical_detail_vals(
patient_id,
**kwargs
)
species_ids = request.env['medical.patient.species'].search([])
vals.update({
'species': species_ids,
})
return vals
|
# -*- coding: utf-8 -*-
# Copyright 2016 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp.http import request
from openerp.addons.website_medical.controllers.main import (
WebsiteMedical
)
class WebsiteMedical(WebsiteMedical):
def _inject_medical_detail_vals(self, patient_id=0, **kwargs):
vals = super(WebsiteMedical, self)._inject_medical_detail_vals(
patient_id,
**kwargs
)
species_ids = request.env['medical.patient.species'].search([])
vals.update({
'species': species_ids,
})
return vals
|
Fix lint * Remove stray import to fix lint
|
[FIX] website_medical_patient_species: Fix lint
* Remove stray import to fix lint
|
Python
|
agpl-3.0
|
laslabs/vertical-medical,laslabs/vertical-medical
|
bd1df334d68c82b0fc57b4c20da7844155382f83
|
numpy-array-of-tuple.py
|
numpy-array-of-tuple.py
|
# Numpy converts a list of tuples *not* into an array of tuples, but into a 2D
# array instead.
list_of_tuples = [(1, 2), (3, 4)]
import numpy as np
print('list of tuples:', list_of_tuples, 'type:', type(list_of_tuples))
A = np.array(list_of_tuples)
print('numpy array of tuples:', A, 'type:', type(A))
# It makes computing unique rows trickier than it should:
unique_A, indices_to_A = np.unique(list_of_tuples, return_inverse=True)
print('naive numpy unique:', unique_A, 'and indices:', indices_to_A) # WRONG!
# Workaround to do np.unique by row (http://stackoverflow.com/a/8024764/3438463)
A_by_row = np.empty(len(list_of_tuples), object)
A_by_row[:] = list_of_tuples
unique_A, indices_to_A = np.unique(A_by_row, return_inverse=True)
print('unique tuples:', unique_A, 'and indices:', indices_to_A)
|
# Numpy converts a list of tuples *not* into an array of tuples, but into a 2D
# array instead.
import numpy as np # 1.11.1
list_of_tuples = [(1, 2), (3, 4)]
print('list of tuples:', list_of_tuples, 'type:', type(list_of_tuples))
A = np.array(list_of_tuples)
print('numpy array of tuples:', A, 'type:', type(A))
# It makes computing unique rows trickier than it should:
unique_A, indices_to_A = np.unique(list_of_tuples, return_inverse=True)
print('naive numpy unique:', unique_A, 'and indices:', indices_to_A) # WRONG!
# Workaround to do np.unique by row (http://stackoverflow.com/a/8024764/3438463)
A_by_row = np.empty(len(list_of_tuples), object)
A_by_row[:] = list_of_tuples
unique_A, indices_to_A = np.unique(A_by_row, return_inverse=True)
print('unique tuples:', unique_A, 'and indices:', indices_to_A)
|
Update numpy array of tuples with np version
|
Update numpy array of tuples with np version
|
Python
|
mit
|
cmey/surprising-snippets,cmey/surprising-snippets
|
717339f2cb2aed818729a407009a30de53b62a2c
|
oocgcm/test/test_eos.py
|
oocgcm/test/test_eos.py
|
import os
import numpy as np
import xarray as xr
from . import TestCase, assert_equal,assert_allclose,requires_numba
from oocgcm.oceanfuncs.eos import misc
@requires_numba
def test_numpy_spice():
assert_allclose(misc._spice(15,33),0.54458641375)
|
import os
import numpy as np
import xarray as xr
from . import TestCase, assert_equal,assert_allclose,requires_numba,has_numba
if has_numba:
from oocgcm.oceanfuncs.eos import misc
@requires_numba
def test_numpy_spice():
assert_allclose(misc._spice(15,33),0.54458641375)
|
Fix has_numba for travis build
|
Fix has_numba for travis build
|
Python
|
apache-2.0
|
lesommer/oocgcm
|
d60ce9b23bcf2f8c60b2a8ce75eeba8779345b8b
|
Orange/tests/__init__.py
|
Orange/tests/__init__.py
|
import os
import unittest
from Orange.widgets.tests import test_setting_provider, \
test_settings_handler, test_context_handler, \
test_class_values_context_handler, test_domain_context_handler
from Orange.widgets.data.tests import test_owselectcolumns
try:
from Orange.widgets.tests import test_widget
run_widget_tests = True
except ImportError:
run_widget_tests = False
def suite():
test_dir = os.path.dirname(__file__)
all_tests = [
unittest.TestLoader().discover(test_dir),
]
load = unittest.TestLoader().loadTestsFromModule
all_tests.extend([
load(test_setting_provider),
load(test_settings_handler),
load(test_context_handler),
load(test_class_values_context_handler),
load(test_domain_context_handler),
load(test_owselectcolumns)
])
if run_widget_tests:
all_tests.extend([
load(test_widget),
])
return unittest.TestSuite(all_tests)
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
import os
import unittest
from Orange.widgets.tests import test_setting_provider, \
test_settings_handler, test_context_handler, \
test_class_values_context_handler, test_domain_context_handler
from Orange.widgets.data.tests import test_owselectcolumns
try:
from Orange.widgets.tests import test_widget
run_widget_tests = True
except ImportError:
run_widget_tests = False
def suite():
test_dir = os.path.dirname(__file__)
all_tests = [
unittest.TestLoader().discover(test_dir),
]
load = unittest.TestLoader().loadTestsFromModule
all_tests.extend([
load(test_setting_provider),
load(test_settings_handler),
load(test_context_handler),
load(test_class_values_context_handler),
load(test_domain_context_handler),
load(test_owselectcolumns)
])
if run_widget_tests:
all_tests.extend([
#load(test_widget), # does not run on travis
])
return unittest.TestSuite(all_tests)
test_suite = suite()
if __name__ == '__main__':
unittest.main(defaultTest='suite')
|
Disable widget test. (does not run on travis)
|
Disable widget test. (does not run on travis)
|
Python
|
bsd-2-clause
|
marinkaz/orange3,cheral/orange3,qPCR4vir/orange3,kwikadi/orange3,kwikadi/orange3,kwikadi/orange3,cheral/orange3,qusp/orange3,marinkaz/orange3,cheral/orange3,qPCR4vir/orange3,qusp/orange3,qPCR4vir/orange3,marinkaz/orange3,marinkaz/orange3,kwikadi/orange3,cheral/orange3,kwikadi/orange3,qPCR4vir/orange3,qPCR4vir/orange3,qPCR4vir/orange3,cheral/orange3,marinkaz/orange3,cheral/orange3,kwikadi/orange3,qusp/orange3,marinkaz/orange3,qusp/orange3
|
ed1a14ef8f2038950b7e56c7ae5c21daa1d6618a
|
ordered_model/models.py
|
ordered_model/models.py
|
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.db import models
class OrderedModel(models.Model):
"""
An abstract model that allows objects to be ordered relative to each other.
Provides an ``order`` field.
"""
order = models.PositiveIntegerField(editable=False, db_index=True)
class Meta:
abstract = True
ordering = ('order',)
def save(self, *args, **kwargs):
if not self.id:
qs = self.__class__.objects.order_by('-order')
try:
self.order = qs[0].order + 1
except IndexError:
self.order = 0
super(OrderedModel, self).save(*args, **kwargs)
def _move(self, up, qs=None):
if qs is None:
qs = self.__class__._default_manager
if up:
qs = qs.order_by('-order').filter(order__lt=self.order)
else:
qs = qs.filter(order__gt=self.order)
try:
replacement = qs[0]
except IndexError:
# already first/last
return
self.order, replacement.order = replacement.order, self.order
self.save()
replacement.save()
def move(self, direction, qs=None):
self._move(direction == 'up', qs)
def move_down(self):
"""
Move this object down one position.
"""
return self._move(up=False)
def move_up(self):
"""
Move this object up one position.
"""
return self._move(up=True)
|
from django.contrib.contenttypes.models import ContentType
from django.core.urlresolvers import reverse
from django.db import models
from django.db.models import Max
class OrderedModel(models.Model):
"""
An abstract model that allows objects to be ordered relative to each other.
Provides an ``order`` field.
"""
order = models.PositiveIntegerField(editable=False, db_index=True)
class Meta:
abstract = True
ordering = ('order',)
def save(self, *args, **kwargs):
if not self.id:
c = self.__class__.objects.all().aggregate(Max('order')).get('order__max')
self.order = c and c + 1 or 0
super(OrderedModel, self).save(*args, **kwargs)
def _move(self, up, qs=None):
if qs is None:
qs = self.__class__._default_manager
if up:
qs = qs.order_by('-order').filter(order__lt=self.order)
else:
qs = qs.filter(order__gt=self.order)
try:
replacement = qs[0]
except IndexError:
# already first/last
return
self.order, replacement.order = replacement.order, self.order
self.save()
replacement.save()
def move(self, direction, qs=None):
self._move(direction == 'up', qs)
def move_down(self):
"""
Move this object down one position.
"""
return self._move(up=False)
def move_up(self):
"""
Move this object up one position.
"""
return self._move(up=True)
|
Use aggregate Max to fetch new order value.
|
Use aggregate Max to fetch new order value.
|
Python
|
bsd-3-clause
|
foozmeat/django-ordered-model,foozmeat/django-ordered-model,pombredanne/django-ordered-model,pombredanne/django-ordered-model,pombredanne/django-ordered-model,foozmeat/django-ordered-model
|
6443a0fed1b915745c591f425027d07216d28e12
|
podium/urls.py
|
podium/urls.py
|
"""podium URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from .talks import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^talks/', include('podium.talks.urls')),
url(r'^$', views.session_list_view),
]
|
"""podium URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from .talks import views
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^talks/', include('podium.talks.urls')),
url(r'^', include('podium.talks.urls')),
]
|
Use include, not a view, for the root URL.
|
Use include, not a view, for the root URL.
|
Python
|
mit
|
pyatl/podium-django,pyatl/podium-django,pyatl/podium-django
|
04c32537f7925aaeb54d8d7aa6da34ce85479c2c
|
mistraldashboard/test/helpers.py
|
mistraldashboard/test/helpers.py
|
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack_dashboard.test import helpers
from mistraldashboard.test.test_data import utils
def create_stubs(stubs_to_create={}):
return helpers.create_stubs(stubs_to_create)
class MistralTestsMixin(object):
def _setup_test_data(self):
super(MistralTestsMixin, self)._setup_test_data()
utils.load_test_data(self)
class TestCase(MistralTestsMixin, helpers.TestCase):
use_mox = False
pass
class APITestCase(MistralTestsMixin, helpers.APITestCase):
use_mox = False
pass
|
# Copyright 2015 Huawei Technologies Co., Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from openstack_dashboard.test import helpers
from mistraldashboard.test.test_data import utils
class MistralTestsMixin(object):
def _setup_test_data(self):
super(MistralTestsMixin, self)._setup_test_data()
utils.load_test_data(self)
class TestCase(MistralTestsMixin, helpers.TestCase):
pass
class APITestCase(MistralTestsMixin, helpers.APITestCase):
pass
|
Drop mox, no longer needed
|
Drop mox, no longer needed
The porting of mistral-dashboard is complete.
This fullfills the community goal "Remove Use of mox/mox3 for Testing"
set for Rocky: https://governance.openstack.org/tc/goals/rocky/mox_removal.html
Remove use_mox and remove dead code.
Change-Id: I59839fecd85caaf8b81129b7f890c4ed50d39db8
Signed-off-by: Chuck Short <61c7e57c8f71fbf1f6c3fcd85c16ccd0f494e116@redhat.com>
|
Python
|
apache-2.0
|
openstack/mistral-dashboard,openstack/mistral-dashboard,openstack/mistral-dashboard
|
087a706fb8cadf98e3bd515427665997ca2001ba
|
tests/pytests/functional/states/test_npm.py
|
tests/pytests/functional/states/test_npm.py
|
import pytest
from salt.exceptions import CommandExecutionError
@pytest.fixture(scope="module", autouse=True)
def install_npm(sminion):
try:
sminion.functions.pkg.install("npm")
# Just name the thing we're looking for
sminion.functions.npm # pylint: disable=pointless-statement
except (CommandExecutionError, AttributeError):
pytest.skip("Unable to install npm")
@pytest.mark.slow_test
@pytest.mark.destructive_test
@pytest.mark.requires_network
def test_removed_installed_cycle(sminion):
project_version = "pm2@5.1.0"
success = sminion.functions.npm.uninstall("pm2")
assert success, "Unable to uninstall pm2 in prep for tests"
ret = next(
iter(
sminion.functions.state.single(
"npm.installed", name=project_version
).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.installed " + project_version + ret["comment"]
ret = next(
iter(
sminion.functions.state.single("npm.removed", name=project_version).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.removed " + project_version
|
import pytest
from salt.exceptions import CommandExecutionError
@pytest.fixture(scope="module", autouse=True)
def install_npm(sminion):
try:
sminion.functions.state.single("pkg.installed", name="npm")
# Just name the thing we're looking for
sminion.functions.npm # pylint: disable=pointless-statement
except (CommandExecutionError, AttributeError) as e:
pytest.skip("Unable to install npm - " + str(e))
@pytest.mark.slow_test
@pytest.mark.destructive_test
@pytest.mark.requires_network
def test_removed_installed_cycle(sminion):
project_version = "pm2@5.1.0"
success = sminion.functions.npm.uninstall("pm2")
assert success, "Unable to uninstall pm2 in prep for tests"
ret = next(
iter(
sminion.functions.state.single(
"npm.installed", name=project_version
).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.installed " + project_version + ret["comment"]
ret = next(
iter(
sminion.functions.state.single("npm.removed", name=project_version).values()
)
)
success = ret["result"]
assert success, "Failed to states.npm.removed " + project_version
|
Use state.single to not upgrade npm
|
Use state.single to not upgrade npm
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.