commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
4752f704596613bbb80a649b275c79ce156b32ec | Fix library not found on linux | gpuopenanalytics/libgdf,gpuopenanalytics/libgdf,gpuopenanalytics/libgdf,gpuopenanalytics/libgdf | python/libgdf_cffi/__init__.py | python/libgdf_cffi/__init__.py | from __future__ import absolute_import
import os
import sys
from .wrapper import _libgdf_wrapper
from .wrapper import GDFError # re-exported
try:
from .libgdf_cffi import ffi
except ImportError:
pass
else:
def _get_lib_name():
if os.name == 'posix':
# TODO this will need to be changed when packaged for distribution
if sys.platform == 'darwin':
path = 'libgdf.dylib'
else:
path = 'libgdf.so'
else:
raise NotImplementedError('OS {} not supported'.format(os.name))
# Prefer local version of the library if it exists
localpath = os.path.join('.', path)
if os.path.isfile(localpath):
return localpath
else:
return path
libgdf_api = ffi.dlopen(_get_lib_name())
libgdf = _libgdf_wrapper(ffi, libgdf_api)
del _libgdf_wrapper
| from __future__ import absolute_import
import os, sys
from .wrapper import _libgdf_wrapper
from .wrapper import GDFError # re-exported
try:
from .libgdf_cffi import ffi
except ImportError:
pass
else:
def _get_lib_name():
if os.name == 'posix':
# TODO this will need to be changed when packaged for distribution
if sys.platform == 'darwin':
return './libgdf.dylib'
else:
return './libgdf.so'
raise NotImplementedError('OS {} not supported'.format(os.name))
libgdf_api = ffi.dlopen(_get_lib_name())
libgdf = _libgdf_wrapper(ffi, libgdf_api)
del _libgdf_wrapper
| apache-2.0 | Python |
70ae46aab717a7695239bc36ca2aa2db764e6d3e | Fix small mistake in CV example comment | Saurabh7/shogun,lisitsyn/shogun,Saurabh7/shogun,karlnapf/shogun,geektoni/shogun,lambday/shogun,geektoni/shogun,Saurabh7/shogun,besser82/shogun,sorig/shogun,shogun-toolbox/shogun,lisitsyn/shogun,geektoni/shogun,besser82/shogun,lisitsyn/shogun,shogun-toolbox/shogun,Saurabh7/shogun,sorig/shogun,shogun-toolbox/shogun,lambday/shogun,Saurabh7/shogun,karlnapf/shogun,Saurabh7/shogun,besser82/shogun,karlnapf/shogun,sorig/shogun,lambday/shogun,geektoni/shogun,Saurabh7/shogun,besser82/shogun,sorig/shogun,lambday/shogun,shogun-toolbox/shogun,lisitsyn/shogun,sorig/shogun,besser82/shogun,geektoni/shogun,Saurabh7/shogun,lisitsyn/shogun,lambday/shogun,shogun-toolbox/shogun,Saurabh7/shogun,geektoni/shogun,lambday/shogun,lisitsyn/shogun,shogun-toolbox/shogun,karlnapf/shogun,karlnapf/shogun,karlnapf/shogun,sorig/shogun,besser82/shogun | examples/undocumented/python_modular/evaluation_cross_validation_classification.py | examples/undocumented/python_modular/evaluation_cross_validation_classification.py | #!/usr/bin/env python
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Written (W) 2012 Heiko Strathmann
# Copyright (C) 2012 Berlin Institute of Technology and Max-Planck-Society
#
from numpy.random import randn
from numpy import *
# generate some overlapping training vectors
num_vectors=100
vec_distance=1
traindat=concatenate((randn(2,num_vectors)-vec_distance,
randn(2,num_vectors)+vec_distance), axis=1)
label_traindat=concatenate((-ones(num_vectors), ones(num_vectors)));
parameter_list = [[traindat,label_traindat]]
def evaluation_cross_validation_classification (traindat=traindat, label_traindat=label_traindat):
from modshogun import CrossValidation, CrossValidationResult
from modshogun import ContingencyTableEvaluation, ACCURACY
from modshogun import StratifiedCrossValidationSplitting
from modshogun import BinaryLabels
from modshogun import RealFeatures
from modshogun import LibLinear, L2R_L2LOSS_SVC
# training data
features=RealFeatures(traindat)
labels=BinaryLabels(label_traindat)
# classifier
classifier=LibLinear(L2R_L2LOSS_SVC)
# splitting strategy for 5 fold cross-validation (for classification its better
# to use "StratifiedCrossValidation", but the standard
# "CrossValidationSplitting" is also available
splitting_strategy=StratifiedCrossValidationSplitting(labels, 5)
# evaluation method
evaluation_criterium=ContingencyTableEvaluation(ACCURACY)
# cross-validation instance
cross_validation=CrossValidation(classifier, features, labels,
splitting_strategy, evaluation_criterium)
cross_validation.set_autolock(False)
# (optional) repeat x-val 10 times
cross_validation.set_num_runs(10)
# (optional) request 95% confidence intervals for results (not actually needed
# for this toy example)
cross_validation.set_conf_int_alpha(0.05)
# perform cross-validation and print(results)
result=cross_validation.evaluate()
#print("mean:", result.mean)
#if result.has_conf_int:
# print("[", result.conf_int_low, ",", result.conf_int_up, "] with alpha=", result.conf_int_alpha)
if __name__=='__main__':
print('Evaluation CrossValidationClassification')
evaluation_cross_validation_classification(*parameter_list[0])
| #!/usr/bin/env python
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# Written (W) 2012 Heiko Strathmann
# Copyright (C) 2012 Berlin Institute of Technology and Max-Planck-Society
#
from numpy.random import randn
from numpy import *
# generate some overlapping training vectors
num_vectors=100
vec_distance=1
traindat=concatenate((randn(2,num_vectors)-vec_distance,
randn(2,num_vectors)+vec_distance), axis=1)
label_traindat=concatenate((-ones(num_vectors), ones(num_vectors)));
parameter_list = [[traindat,label_traindat]]
def evaluation_cross_validation_classification (traindat=traindat, label_traindat=label_traindat):
from modshogun import CrossValidation, CrossValidationResult
from modshogun import ContingencyTableEvaluation, ACCURACY
from modshogun import StratifiedCrossValidationSplitting
from modshogun import BinaryLabels
from modshogun import RealFeatures
from modshogun import LibLinear, L2R_L2LOSS_SVC
# training data
features=RealFeatures(traindat)
labels=BinaryLabels(label_traindat)
# classifier
classifier=LibLinear(L2R_L2LOSS_SVC)
# splitting strategy for 5 fold cross-validation (for classification its better
# to use "StratifiedCrossValidation", but the standard
# "StratifiedCrossValidationSplitting" is also available
splitting_strategy=StratifiedCrossValidationSplitting(labels, 5)
# evaluation method
evaluation_criterium=ContingencyTableEvaluation(ACCURACY)
# cross-validation instance
cross_validation=CrossValidation(classifier, features, labels,
splitting_strategy, evaluation_criterium)
cross_validation.set_autolock(False)
# (optional) repeat x-val 10 times
cross_validation.set_num_runs(10)
# (optional) request 95% confidence intervals for results (not actually needed
# for this toy example)
cross_validation.set_conf_int_alpha(0.05)
# perform cross-validation and print(results)
result=cross_validation.evaluate()
#print("mean:", result.mean)
#if result.has_conf_int:
# print("[", result.conf_int_low, ",", result.conf_int_up, "] with alpha=", result.conf_int_alpha)
if __name__=='__main__':
print('Evaluation CrossValidationClassification')
evaluation_cross_validation_classification(*parameter_list[0])
| bsd-3-clause | Python |
414a991d850ee21f682015c3c277170842f44404 | fix translation string | SEL-Columbia/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,gmimano/commcaretest,gmimano/commcaretest,qedsoftware/commcare-hq,qedsoftware/commcare-hq,gmimano/commcaretest,dimagi/commcare-hq,puttarajubr/commcare-hq,SEL-Columbia/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq | corehq/apps/api/util.py | corehq/apps/api/util.py | from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from couchdbkit.exceptions import ResourceNotFound
def get_object_or_not_exist(cls, doc_id, domain):
"""
Given a Document class, id, and domain, get that object or raise
an ObjectDoesNotExist exception if it's not found, not the right
type, or doesn't belong to the domain.
"""
doc_type = getattr(cls, '_doc_type', cls.__name__)
try:
doc = cls.get(doc_id)
if doc and doc.domain == domain and doc.doc_type == doc_type:
return doc
except ResourceNotFound:
pass # covered by the below
except AttributeError:
# there's a weird edge case if you reference a form with a case id
# that explodes on the "version" property. might as well swallow that
# too.
pass
raise ObjectDoesNotExist(_("Could not find %(doc_type)s with id %(id)s") % \
{"doc_type": doc_type, "id": doc_id})
| from django.core.exceptions import ObjectDoesNotExist
from django.utils.translation import ugettext as _
from couchdbkit.exceptions import ResourceNotFound
def get_object_or_not_exist(cls, doc_id, domain):
"""
Given a Document class, id, and domain, get that object or raise
an ObjectDoesNotExist exception if it's not found, not the right
type, or doesn't belong to the domain.
"""
doc_type = getattr(cls, '_doc_type', cls.__name__)
try:
doc = cls.get(doc_id)
if doc and doc.domain == domain and doc.doc_type == doc_type:
return doc
except ResourceNotFound:
pass # covered by the below
except AttributeError:
# there's a weird edge case if you reference a form with a case id
# that explodes on the "version" property. might as well swallow that
# too.
pass
raise ObjectDoesNotExist(_("Could not find %s with id %s") % (doc_type, doc_id))
| bsd-3-clause | Python |
e6e144c38edb9121f7f03b5b8ed164692e4b36b9 | Remove failing test assertion. | wakermahmud/sync-engine,nylas/sync-engine,gale320/sync-engine,wakermahmud/sync-engine,Eagles2F/sync-engine,closeio/nylas,closeio/nylas,Eagles2F/sync-engine,jobscore/sync-engine,nylas/sync-engine,nylas/sync-engine,closeio/nylas,Eagles2F/sync-engine,ErinCall/sync-engine,gale320/sync-engine,ErinCall/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,gale320/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,wakermahmud/sync-engine,gale320/sync-engine,closeio/nylas,jobscore/sync-engine,Eagles2F/sync-engine,ErinCall/sync-engine,jobscore/sync-engine,nylas/sync-engine,jobscore/sync-engine,gale320/sync-engine,wakermahmud/sync-engine | tests/events/test_inviting.py | tests/events/test_inviting.py | from tests.util.base import event
def test_invite_generation(event, default_account):
from inbox.events.ical import generate_icalendar_invite
event.sequence_number = 1
event.participants = [{'email': 'helena@nylas.com'},
{'email': 'myles@nylas.com'}]
cal = generate_icalendar_invite(event)
assert cal['method'] == 'REQUEST'
for component in cal.walk():
if component.name == "VEVENT":
assert component.get('summary') == event.title
assert int(component.get('sequence')) == event.sequence_number
assert component.get('location') == event.location
attendees = component.get('attendee', [])
# the iCalendar python module doesn't return a list when
# there's only one attendee. Go figure.
if not isinstance(attendees, list):
attendees = [attendees]
for attendee in attendees:
email = unicode(attendee)
# strip mailto: if it exists
if email.lower().startswith('mailto:'):
email = email[7:]
assert email in ['helena@nylas.com', 'myles@nylas.com']
def test_message_generation(event, default_account):
from inbox.events.ical import generate_invite_message
event.title = 'A long walk on the beach'
event.participants = [{'email': 'helena@nylas.com'}]
msg = generate_invite_message('empty', event, "",
default_account)
# Check that we have an email with an HTML part, a plain text part, a
# text/calendar with METHOD=REQUEST and an attachment.
count = 0
for mimepart in msg.walk(with_self=msg.content_type.is_singlepart()):
format_type = mimepart.content_type.format_type
subtype = mimepart.content_type.subtype
if (format_type, subtype) in [('text', 'plain'), ('text', 'html'),
('text', 'calendar; method=request'),
('application', 'ics')]:
count += 1
assert count == 3
| from tests.util.base import event
def test_invite_generation(event, default_account):
from inbox.events.ical import generate_icalendar_invite
event.sequence_number = 1
event.participants = [{'email': 'helena@nylas.com'},
{'email': 'myles@nylas.com'}]
cal = generate_icalendar_invite(event)
assert cal['method'] == 'REQUEST'
for component in cal.walk():
if component.name == "VEVENT":
assert component.get('summary') == event.title
assert int(component.get('sequence')) == event.sequence_number
assert component.get('location') == event.location
attendees = component.get('attendee', [])
# the iCalendar python module doesn't return a list when
# there's only one attendee. Go figure.
if not isinstance(attendees, list):
attendees = [attendees]
for attendee in attendees:
email = unicode(attendee)
# strip mailto: if it exists
if email.lower().startswith('mailto:'):
email = email[7:]
assert email in ['helena@nylas.com', 'myles@nylas.com']
def test_message_generation(event, default_account):
from inbox.events.ical import generate_invite_message
event.title = 'A long walk on the beach'
event.participants = [{'email': 'helena@nylas.com'}]
msg = generate_invite_message('empty', event, "",
default_account)
# Check that we have an email with an HTML part, a plain text part, a
# text/calendar with METHOD=REQUEST and an attachment.
count = 0
for mimepart in msg.walk(with_self=msg.content_type.is_singlepart()):
format_type = mimepart.content_type.format_type
subtype = mimepart.content_type.subtype
if (format_type, subtype) in [('text', 'plain'), ('text', 'html'),
('text', 'calendar; method=request'),
('application', 'ics')]:
count += 1
if (format_type, subtype) == ('text', 'html'):
assert 'A long walk on the beach' in mimepart.body
if (format_type, subtype) == ('text', 'plain'):
assert '**A long walk on the beach**' in mimepart.body
assert count == 3
| agpl-3.0 | Python |
30198bad431b5dbaa9a5355d64aca1ff92beb54d | Update 01_Blinking_and_Fading_a_LED.py | userdw/RaspberryPi_3_Starter_Kit | 01_Blinking_and_Fading_a_LED/01_Blinking_and_Fading_a_LED.py | 01_Blinking_and_Fading_a_LED/01_Blinking_and_Fading_a_LED.py | import wiringpi # import library WiringPi-Python
from time import sleep # import library sleep
wiringpi.wiringPiSetup() # Must be called before using IO function
wiringpi.pinMode(1,1) # Set pin 9 to 1 (OUTPUT) / 0 (INPUT)
wiringpi.softPwmCreate(1,0,100) # Set PWM on pin 9, start value 0, max value 100
LED_MODULE = 1
def toogleLED(times):
for i in range (0,times,1):
print "on"
wiringpi.digitalWrite(LED_MODULE, not wiringpi.digitalRead(LED_MODULE))
sleep(1)
print "off"
wiringpi.digitalWrite(LED_MODULE, not wiringpi.digitalRead(LED_MODULE))
sleep(1)
def fadeLED(times):
for i in range (0,times,1):
for j in range (0,100,1):
wiringpi.softPwmWrite(LED_MODULE,j) # PWM pulse on pin 9
sleep(0.02)
for j in range (100,0,-1):
wiringpi.softPwmWrite(LED_MODULE,j) # PWM pulse on pin 9
sleep(0.02)
wiringpi.softPwmWrite(LED_MODULE,0)
try:
while True :
print "Start"
toogleLED(3)
sleep(2)
fadeLED(3)
sleep(1)
except KeyboardInterrupt :
wiringpi.digitalWrite(LED_MODULE,0)
wiringpi.softPwmWrite(LED_MODULE,0) # PWM pulse on pin 0
print "exit"
|
import wiringpi # import library WiringPi-Python
from time import sleep # import library sleep
wiringpi.wiringPiSetup() # Must be called before using IO function
wiringpi.pinMode(1,1) # Set pin 9 to 1 (OUTPUT) / 0 (INPUT)
wiringpi.softPwmCreate(1,0,100) # Set PWM on pin 9, start value 0, max value 100
LED_MODULE = 1
def toogleLED(times):
for i in range (0,times,1):
print "on"
wiringpi.digitalWrite(LED_MODULE, not wiringpi.digitalRead(LED_MODULE))
sleep(1)
print "off"
wiringpi.digitalWrite(LED_MODULE, not wiringpi.digitalRead(LED_MODULE))
sleep(1)
def fadeLED(times):
for i in range (0,times,1):
for j in range (0,100,1):
wiringpi.softPwmWrite(LED_MODULE,j) # PWM pulse on pin 9
sleep(0.02)
for j in range (100,0,-1):
wiringpi.softPwmWrite(LED_MODULE,j) # PWM pulse on pin 9
sleep(0.02)
wiringpi.softPwmWrite(LED_MODULE,0)
try:
while True :
print "Start"
toogleLED(3)
sleep(2)
fadeLED(3)
sleep(1)
except KeyboardInterrupt :
wiringpi.digitalWrite(LED_MODULE,0)
wiringpi.softPwmWrite(LED_MODULE,0) # PWM pulse on pin 0
print "exit"
| mit | Python |
76ef4b051dec7564150939a279f903a404191b1d | Fix reading calc report [skip CI] | gem/oq-svir-qgis,gem/oq-svir-qgis,gem/oq-svir-qgis,gem/oq-svir-qgis | svir/dialogs/show_full_report_dialog.py | svir/dialogs/show_full_report_dialog.py |
# -*- coding: utf-8 -*-
# /***************************************************************************
# Irmt
# A QGIS plugin
# OpenQuake Integrated Risk Modelling Toolkit
# -------------------
# begin : 2013-10-24
# copyright : (C) 2014 by GEM Foundation
# email : devops@openquake.org
# ***************************************************************************/
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from qgis.PyQt.QtWidgets import QDialog
from svir.utilities.utils import get_ui_class
FORM_CLASS = get_ui_class('ui_text_browser.ui')
class ShowFullReportDialog(QDialog, FORM_CLASS):
"""
Non-modal dialog to show the full report of a OQ-Engine calculation
"""
def __init__(self, filepath):
QDialog.__init__(self)
# Set up the user interface from Designer.
self.setupUi(self)
with open(filepath, 'r') as rst_file:
text = rst_file.read()
self.text_browser.setText(text)
|
# -*- coding: utf-8 -*-
# /***************************************************************************
# Irmt
# A QGIS plugin
# OpenQuake Integrated Risk Modelling Toolkit
# -------------------
# begin : 2013-10-24
# copyright : (C) 2014 by GEM Foundation
# email : devops@openquake.org
# ***************************************************************************/
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
from qgis.PyQt.QtWidgets import QDialog
from svir.utilities.utils import get_ui_class
FORM_CLASS = get_ui_class('ui_text_browser.ui')
class ShowFullReportDialog(QDialog, FORM_CLASS):
"""
Non-modal dialog to show the full report of a OQ-Engine calculation
"""
def __init__(self, filepath):
QDialog.__init__(self)
# Set up the user interface from Designer.
self.setupUi(self)
with open(filepath, 'rb') as rst_file:
text = rst_file.read()
self.text_browser.setText(text)
| agpl-3.0 | Python |
2a82d102d7bc7377a8766485e7f484392ac0d5a8 | Disable debug logging | beerfactory/hbmqtt | tests/plugins/test_manager.py | tests/plugins/test_manager.py | # Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
import unittest
import logging
import asyncio
from hbmqtt.plugins.manager import PluginManager
#formatter = "[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s"
#logging.basicConfig(level=logging.DEBUG, format=formatter)
class TestPlugin:
def __init__(self, manager):
pass
class EventTestPlugin:
def __init__(self, manager: PluginManager):
self.test_flag = False
self.coro_flag = False
@asyncio.coroutine
def on_test(self):
self.test_flag = True
@asyncio.coroutine
def test_coro(self):
self.coro_flag = True
class TestPluginManager(unittest.TestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
def test_load_plugin(self):
manager = PluginManager("hbmqtt.test.plugins", context=None)
self.assertTrue(len(manager._plugins) > 0)
def test_fire_event(self):
@asyncio.coroutine
def fire_event():
yield from manager.fire_event("test")
manager = PluginManager("hbmqtt.test.plugins", context=None, loop=self.loop)
self.loop.run_until_complete(fire_event())
plugin = manager.get_plugin("event_plugin")
self.assertTrue(plugin.object.test_flag)
def test_map_coro(self):
@asyncio.coroutine
def call_coro():
yield from manager.map_plugin_coro('test_coro')
manager = PluginManager("hbmqtt.test.plugins", context=None, loop=self.loop)
self.loop.run_until_complete(call_coro())
plugin = manager.get_plugin("event_plugin")
self.assertTrue(plugin.object.test_coro)
| # Copyright (c) 2015 Nicolas JOUANIN
#
# See the file license.txt for copying permission.
import unittest
import logging
import asyncio
from hbmqtt.plugins.manager import PluginManager
formatter = "[%(asctime)s] {%(filename)s:%(lineno)d} %(levelname)s - %(message)s"
logging.basicConfig(level=logging.DEBUG, format=formatter)
class TestPlugin:
def __init__(self, manager):
pass
class EventTestPlugin:
def __init__(self, manager: PluginManager):
self.test_flag = False
self.coro_flag = False
@asyncio.coroutine
def on_test(self):
self.test_flag = True
@asyncio.coroutine
def test_coro(self):
self.coro_flag = True
class TestPluginManager(unittest.TestCase):
def setUp(self):
self.loop = asyncio.new_event_loop()
def test_load_plugin(self):
manager = PluginManager("hbmqtt.test.plugins", context=None)
self.assertTrue(len(manager._plugins) > 0)
def test_fire_event(self):
@asyncio.coroutine
def fire_event():
yield from manager.fire_event("test")
manager = PluginManager("hbmqtt.test.plugins", context=None, loop=self.loop)
self.loop.run_until_complete(fire_event())
plugin = manager.get_plugin("event_plugin")
self.assertTrue(plugin.object.test_flag)
def test_map_coro(self):
@asyncio.coroutine
def call_coro():
yield from manager.map_plugin_coro('test_coro')
manager = PluginManager("hbmqtt.test.plugins", context=None, loop=self.loop)
self.loop.run_until_complete(call_coro())
plugin = manager.get_plugin("event_plugin")
self.assertTrue(plugin.object.test_coro)
| mit | Python |
b1dcf10e4c8cbc3f380cb812bfe65dea2a1753f3 | Fix test | QuantCrimAtLeeds/PredictCode | tests/sources/chicago_test.py | tests/sources/chicago_test.py | import pytest
import unittest.mock as mock
from tests.helpers import MockOpen
import os.path
import numpy as np
import open_cp.sources.chicago as chicago
def test_set_data_dir():
chicago.set_data_directory("..")
assert chicago.get_default_filename() == os.path.join("..", "chicago.csv")
@pytest.fixture
def string_data_snap():
dic = chicago._FIELDS["snapshot"]
return "\n".join([
",".join([dic["_DESCRIPTION_FIELD"], dic["_X_FIELD"], dic["_Y_FIELD"],
"other", dic["_TIME_FIELD"]]),
"THEFT, 789, 1012, ahgd, 01/01/2017 10:30:23 PM",
"ASSAULT, 12, 34, dgs, sgjhg",
"THEFT, 123, 456, as, 03/13/2016 02:53:30 AM"
])
def test_load_data(string_data_snap):
with mock.patch("builtins.open", MockOpen(string_data_snap)) as open_mock:
points = chicago.load("filename", {"THEFT"})
assert( open_mock.calls[0][0] == ("filename",) )
assert( len(points.timestamps) == 2 )
assert( points.timestamps[0] == np.datetime64("2016-03-13T02:53:30") )
assert( points.timestamps[1] == np.datetime64("2017-01-01T22:30:23") )
np.testing.assert_allclose( points.coords[:,0], np.array([123, 456]) * 1200 / 3937 )
np.testing.assert_allclose( points.coords[:,1], np.array([789, 1012]) * 1200 / 3937 )
def test_load_data_keep_in_feet(string_data_snap):
with mock.patch("builtins.open", MockOpen(string_data_snap)) as open_mock:
points = chicago.load("filename", {"THEFT"}, to_meters=False)
np.testing.assert_allclose( points.coords[:,0], [123, 456] )
np.testing.assert_allclose( points.coords[:,1], [789, 1012] )
| import pytest
import unittest.mock as mock
from tests.helpers import MockOpen
import os.path
import numpy as np
import open_cp.sources.chicago as chicago
def test_set_data_dir():
chicago.set_data_directory("..")
assert chicago.get_default_filename() == os.path.join("..", "chicago.csv")
@pytest.fixture
def string_data_snap():
dic = chicago._FIELDS["snapshot"]
return "\n".join([
",".join([dic["_DESCRIPTION_FIELD"], dic["_X_FIELD"], dic["_Y_FIELD"],
"other", dic["_TIME_FIELD"]]),
"THEFT, 789, 1012, ahgd, 01/01/2017 10:30:23 PM",
"ASSAULT, 12, 34, dgs, sgjhg",
"THEFT, 123, 456, as, 03/13/2016 02:53:30 AM"
])
def test_load_data(string_data_snap):
with mock.patch("builtins.open", MockOpen(string_data_snap)) as open_mock:
points = chicago.load("filename", {"THEFT"})
assert( open_mock.calls[0][0] == ("filename",) )
assert( len(points.timestamps) == 2 )
assert( points.timestamps[0] == np.datetime64("2016-03-13T02:53:30") )
assert( points.timestamps[1] == np.datetime64("2017-01-01T22:30:23") )
np.testing.assert_allclose( points.coords[:,0], np.array([123, 456]) / 3.28084 )
np.testing.assert_allclose( points.coords[:,1], np.array([789, 1012]) / 3.28084 )
def test_load_data_keep_in_feet(string_data_snap):
with mock.patch("builtins.open", MockOpen(string_data_snap)) as open_mock:
points = chicago.load("filename", {"THEFT"}, to_meters=False)
np.testing.assert_allclose( points.coords[:,0], [123, 456] )
np.testing.assert_allclose( points.coords[:,1], [789, 1012] )
| artistic-2.0 | Python |
60a73b46c537fc27f6ea92a91dec851361da12a2 | Move plugins to Custom | spacetelescope/stginga,pllim/stginga | stginga/plugin_info.py | stginga/plugin_info.py | """This module contains functions to handle ``stginga`` plugins.
See :ref:`stginga-run`.
"""
from __future__ import absolute_import, division, print_function
# GINGA
from ginga.misc.Bunch import Bunch
__all__ = ['load_plugins', 'show_plugin_install_info']
def load_plugins(ginga):
"""Load the ``stginga`` plugins.
Parameters
----------
ginga
The ginga app object that is provided to ``pre_gui_config`` in
``ginga_config.py``.
"""
stglobal_plugins, stlocal_plugins = _get_stginga_plugins()
# Add custom global plugins
for gplg in stglobal_plugins:
if gplg['module'] in ginga.global_plugins:
ginga.logger.info('Plugin {0} already loaded in Ginga. Not adding '
'again.'.format(gplg['module']))
else:
ginga.add_global_plugin(gplg)
# Add custom local plugins
for lplg in stlocal_plugins:
if lplg['module'] in ginga.local_plugins:
ginga.logger.info('Plugin {0} already loaded in Ginga. Not adding '
'again.'.format(lplg['module']))
else:
ginga.add_local_plugin(lplg)
def _get_stginga_plugins():
gpfx = 'stginga.plugins' # To load custom plugins in Ginga namespace
global_plugins = []
local_plugins = [
Bunch(module='BackgroundSub', workspace='dialogs', pfx=gpfx,
category='Custom', ptype='local'),
Bunch(module='BadPixCorr', workspace='dialogs', pfx=gpfx,
category='Custom', ptype='local'),
Bunch(module='DQInspect', workspace='dialogs', pfx=gpfx,
category='Custom', ptype='local'),
Bunch(module='SNRCalc', workspace='dialogs', pfx=gpfx,
category='Custom', ptype='local'),
]
return global_plugins, local_plugins
def show_plugin_install_info():
"""Print the documentation on how to install the ginga plugins."""
print('See http://stginga.readthedocs.org/en/latest/run.html')
| """This module contains functions to handle ``stginga`` plugins.
See :ref:`stginga-run`.
"""
from __future__ import absolute_import, division, print_function
# GINGA
from ginga.misc.Bunch import Bunch
__all__ = ['load_plugins', 'show_plugin_install_info']
def load_plugins(ginga):
"""Load the ``stginga`` plugins.
Parameters
----------
ginga
The ginga app object that is provided to ``pre_gui_config`` in
``ginga_config.py``.
"""
stglobal_plugins, stlocal_plugins = _get_stginga_plugins()
# Add custom global plugins
for gplg in stglobal_plugins:
if gplg['module'] in ginga.global_plugins:
ginga.logger.info('Plugin {0} already loaded in Ginga. Not adding '
'again.'.format(gplg['module']))
else:
ginga.add_global_plugin(gplg)
# Add custom local plugins
for lplg in stlocal_plugins:
if lplg['module'] in ginga.local_plugins:
ginga.logger.info('Plugin {0} already loaded in Ginga. Not adding '
'again.'.format(lplg['module']))
else:
ginga.add_local_plugin(lplg)
def _get_stginga_plugins():
gpfx = 'stginga.plugins' # To load custom plugins in Ginga namespace
global_plugins = []
local_plugins = [
Bunch(module='BackgroundSub', workspace='dialogs', pfx=gpfx,
category='stginga', ptype='local'),
Bunch(module='BadPixCorr', workspace='dialogs', pfx=gpfx,
category='stginga', ptype='local'),
Bunch(module='DQInspect', workspace='dialogs', pfx=gpfx,
category='stginga', ptype='local'),
Bunch(module='SNRCalc', workspace='dialogs', pfx=gpfx,
category='stginga', ptype='local'),
]
return global_plugins, local_plugins
def show_plugin_install_info():
"""Print the documentation on how to install the ginga plugins."""
print('See http://stginga.readthedocs.org/en/latest/run.html')
| bsd-3-clause | Python |
2b99ef66938ed71853c8746ca301704e2be5d0e4 | fix condition when normalize_memory tries to use integer as a string | D3DeFi/vmcli | lib/tools/__init__.py | lib/tools/__init__.py | from lib.constants import VM_MIN_MEM, VM_MAX_MEM
from lib.exceptions import VmCLIException
def normalize_memory(value):
"""Function converts passed value to integer, which will represent size in megabytes
as well as performs control whether the value sits between global limits."""
if isinstance(value, str):
if value.endswith('T'):
value = int(value.strip('T')) * 1024 * 1024
elif value.endswith('G'):
value = int(value.strip('G')) * 1024
elif value.endswith('M'):
value = int(value.strip('M'))
try:
value = int(value)
except ValueError:
raise VmCLIException('Unable to convert memory size to gigabytes. Aborting...')
if value < VM_MIN_MEM or value > VM_MAX_MEM:
raise VmCLIException('Memory must be between {}-{} megabytes'.format(VM_MIN_MEM, VM_MAX_MEM))
else:
return value
| from lib.constants import VM_MIN_MEM, VM_MAX_MEM
from lib.exceptions import VmCLIException
def normalize_memory(value):
"""Function converts passed value to integer, which will represent size in megabytes
as well as performs control whether the value sits between global limits."""
if value.endswith('T'):
value = int(value.strip('T')) * 1024 * 1024
elif value.endswith('G'):
value = int(value.strip('G')) * 1024
elif value.endswith('M'):
value = int(value.strip('M'))
else:
try:
value = int(value)
except ValueError:
raise VmCLIException('Unable to convert memory size to gigabytes. Aborting...')
if value < VM_MIN_MEM or value > VM_MAX_MEM:
raise VmCLIException('Memory must be between {}-{} megabytes'.format(VM_MIN_MEM, VM_MAX_MEM))
else:
return value
| apache-2.0 | Python |
b61dfffa600c05eed34c5bcc30a2da61f0f6bd73 | Update __version__.py | payplug/payplug-python | payplug/__version__.py | payplug/__version__.py | # -*- coding: utf-8 -*-
__version__ = '1.3.1'
| # -*- coding: utf-8 -*-
__version__ = '1.3.0'
| mit | Python |
0d66e594eeb09c1415964a00a4cfa2280464f840 | add allow empty and not in for states | buxx/synergine | synergine/core/simulation/MetaStates.py | synergine/core/simulation/MetaStates.py | from synergine.synergy.Simulation import Simulation
class MetaStates():
    """Convenience wrapper around the simulation list backend for
    object states (entries keyed by Simulation.STATE).

    The constructor parameter is named ``list`` in the original API;
    it shadows the builtin but is kept for backward compatibility.
    """
    def __init__(self, list):
        self._list = list

    # TODO: have_list etc
    def have_list(self, object_id, states):
        """Return True when the object carries every state in ``states``."""
        for state in states:
            if not self._list.have(Simulation.STATE, object_id, state, allow_empty=True):
                return False
        return True

    def have(self, object_id, state):
        """Return True when the object carries ``state``."""
        return self.have_list(object_id, [state])

    def dont_have_list(self, object_id, states):
        """Return True when the object carries none of ``states``."""
        for state in states:
            if self._list.have(Simulation.STATE, object_id, state, allow_empty=True):
                return False
        return True

    def dont_have(self, object_id, state):
        """Return True when the object does not carry ``state``."""
        return self.dont_have_list(object_id, [state])

    def have_one(self, object_id, states):
        """Return True when the object carries at least one of ``states``."""
        for state in states:
            if self._list.have(Simulation.STATE, object_id, state, allow_empty=True):
                return True
        return False

    def dont_have_one(self, object_id, states):
        """Return True when at least one of ``states`` is missing."""
        for state in states:
            if not self._list.have(Simulation.STATE, object_id, state, allow_empty=True):
                return True
        return False

    def add(self, object_id, state):
        """Attach ``state`` to the object."""
        self._list.add(Simulation.STATE, object_id, state)

    def add_list(self, object_id, states):
        """Attach every state in ``states`` to the object."""
        for state in states:
            self.add(object_id, state)

    def remove(self, object_id, state, allow_empty=False, allow_not_in=False):
        """Detach ``state``; the allow_* flags are forwarded to the backend."""
        self._list.remove(Simulation.STATE, object_id, state, allow_empty=allow_empty, allow_not_in=allow_not_in)

    def remove_list(self, object_id, states, allow_empty=False, allow_not_in=False):
        """Detach every state in ``states`` from the object."""
        for state in states:
            self.remove(object_id, state, allow_empty=allow_empty, allow_not_in=allow_not_in)

    def add_remove(self, object_id, state_add, state_remove):
        """Attach ``state_add`` then detach ``state_remove``."""
        self.add(object_id, state_add)
        self.remove(object_id, state_remove)

    def add_remove_lists(self, objec_id, states_add, states_remove):
        """Attach then detach whole lists of states.

        (The ``objec_id`` parameter name is a historical typo kept for
        backward compatibility.)
        """
        # Delegate to the list helpers instead of duplicating their loops.
        self.add_list(objec_id, states_add)
        self.remove_list(objec_id, states_remove)
| from synergine.synergy.Simulation import Simulation
class MetaStates():
def __init__(self, list):
self._list = list
# TODO: have_list etc
def have_list(self, object_id, states):
for state in states:
if not self._list.have(Simulation.STATE, object_id, state, allow_empty=True):
return False
return True
def have(self, object_id, state):
return self.have_list(object_id, [state])
def dont_have_list(self, object_id, states):
for state in states:
if self._list.have(Simulation.STATE, object_id, state, allow_empty=True):
return False
return True
def dont_have(self, object_id, state):
return self.dont_have_list(object_id, [state])
def have_one(self, object_id, states):
for state in states:
if self._list.have(Simulation.STATE, object_id, state, allow_empty=True):
return True
return False
def dont_have_one(self, object_id, states):
for state in states:
if not self._list.have(Simulation.STATE, object_id, state, allow_empty=True):
return True
return False
def add(self, object_id, state):
self._list.add(Simulation.STATE, object_id, state)
def add_list(self, object_id, states):
for state in states:
self.add(object_id, state)
def remove(self, object_id, state):
self._list.remove(Simulation.STATE, object_id, state)
def remove_list(self, object_id, states):
for state in states:
self.remove(object_id, state)
def add_remove(self, object_id, state_add, state_remove):
self.add(object_id, state_add)
self.remove(object_id, state_remove)
def add_remove_lists(self, objec_id, states_add, states_remove):
for state_add in states_add:
self.add(objec_id, state_add)
for state_remove in states_remove:
self.remove(objec_id, state_remove)
| apache-2.0 | Python |
88e839144f4a1dac1468e03f5cd506841caadc84 | Fix typo in 'job not found message' and jobs list output code | marcoacierno/django-schedulermanager | django_schedulermanager/management/commands/schedulejob.py | django_schedulermanager/management/commands/schedulejob.py | from django.core.management.base import BaseCommand
from django_schedulermanager.manager import manager
class Command(BaseCommand):
    """Management command that schedules one or more registered jobs."""
    help = 'Schedules a job'

    def add_arguments(self, parser):
        # Positional arguments
        parser.add_argument('jobs_name', nargs='+')

    def handle(self, *args, **options):
        jobs_to_schedule = options['jobs_name']
        for job in jobs_to_schedule:
            # Unknown job: report it together with the known job names.
            if job not in manager:
                self.stdout.write(
                    'Unable to find job {}. Available jobs: {}'.format(job, ','.join(manager.jobs.keys()))
                )
                continue
            # Already-scheduled jobs are skipped, not re-scheduled.
            if manager.is_scheduled(job):
                self.stdout.write('Job {} already started'.format(job))
                continue
            job_options = manager.get_options(job)
            # TODO: Implement settings override
            manager.schedule(job, job_options)
            self.stdout.write(self.style.SUCCESS('Successfully scheduled job {}!'.format(job)))
| from django.core.management.base import BaseCommand
from django_schedulermanager.manager import manager
class Command(BaseCommand):
help = 'Schedules a job'
def add_arguments(self, parser):
# Positional arguments
parser.add_argument('jobs_name', nargs='+')
def handle(self, *args, **options):
jobs_to_schedule = options['jobs_name']
for job in jobs_to_schedule:
if job not in manager:
self.stdout.write(
'Unable to find job {}. Avalable jobs: {}'.format(job, ','.join([job for job, _ in manager.jobs.items()]))
)
continue
if manager.is_scheduled(job):
self.stdout.write('Job {} already started'.format(job))
continue
job_options = manager.get_options(job)
# TODO: Implement settings override
manager.schedule(job, job_options)
self.stdout.write(self.style.SUCCESS('Successfully scheduled job {}!'.format(job)))
| mit | Python |
bd166b623a3a92c1ae21bc3e8a091b68dcba089a | Test cosmetics. | ofgulban/tetrahydra | tetrahydra/tests/test_core.py | tetrahydra/tests/test_core.py | """Test core functions."""
import pytest
import numpy as np
from tetrahydra.core import closure, perturb, power
def test_closure():
    """Test closure operator."""
    samples = np.random.random([2, 3])
    row_sums = np.sum(closure(samples), axis=1)
    # A closed composition sums to one along its last axis.
    assert row_sums == pytest.approx(np.ones(2))
def test_perturb():
    """Test perturbation operator."""
    samples = np.random.random([2, 3])
    factors = np.array([1., 2., 3.])  # perturbation values
    # Without re-closure, perturbation is element-wise multiplication.
    assert np.all(perturb(samples, factors, reclose=False) == samples * factors)
def test_power():
    """Test powering operator."""
    samples = np.random.random([2, 3])
    # Without re-closure, powering is element-wise exponentiation.
    assert np.all(power(samples, np.pi, reclose=False) == samples**np.pi)
| """Test core functions."""
import pytest
import numpy as np
from tetrahydra.core import closure, perturb, power
def test_closure():
"""Test closure operator."""
data = np.random.random([2, 3])
assert np.sum(closure(data), axis=1) == pytest.approx(np.ones(2))
def test_perturb():
"""Test perturbation operator."""
data = np.random.random([2, 3])
p_vals = np.array([1., 2., 3.]) # perturbation values
assert perturb(data, p_vals, reclose=False) == pytest.approx(data * p_vals)
def test_power():
"""Test powering operator."""
data = np.random.random([2, 3])
assert power(data, np.pi, reclose=False) == pytest.approx(data**np.pi)
| bsd-3-clause | Python |
fee4b8137961ed828ebcabae3a2e7270601a1ce1 | Fix version number | lightning-viz/lightning-python,garretstuber/lightning-python,garretstuber/lightning-python,peterkshultz/lightning-python,lightning-viz/lightning-python,peterkshultz/lightning-python,peterkshultz/lightning-python,garretstuber/lightning-python | lightning/__init__.py | lightning/__init__.py | from .main import Lightning
from .session import Session
from .visualization import Visualization
from .types.plots import *
from .types.images import *
from .types.streaming import *
from .types.three import *
__version__ = "1.0.8"
| from .main import Lightning
from .session import Session
from .visualization import Visualization
from .types.plots import *
from .types.images import *
from .types.streaming import *
from .types.three import *
__version__ = "1.0.6"
| mit | Python |
02535df1ce4ae43ba142a169810910d375605792 | Fix the space correcting code because restructured text still adds a bunch of paragraph tags | Justasic/StackSmash,Justasic/StackSmash | StackSmash/apps/lib/templatetags/restructuredtext_filter.py | StackSmash/apps/lib/templatetags/restructuredtext_filter.py | from django import template
from django.core.cache import cache
from docutils import nodes
from docutils.core import publish_parts
from docutils.parsers.rst import directives, Directive
from docutils.core import publish_cmdline, default_description
from pygments import highlight
from pygments.lexers import get_lexer_by_name, TextLexer
from pygments.formatters import HtmlFormatter
from django.utils.safestring import mark_safe
register = template.Library()
# Options
# ~~~~~~~
# Set to True if you want inline CSS styles instead of classes
INLINESTYLES = False
# The default formatter
DEFAULT = HtmlFormatter(noclasses=INLINESTYLES)
# Add name -> formatter pairs for every variant you want to use
VARIANTS = {
'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True),
}
class Pygments(Directive):
    """ Source code syntax highlighting directive.

    Used as ``.. sourcecode:: <lexer-name>`` (or ``.. code::``) with
    the code as directive content.
    """
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = True
    # One flag option per formatter variant (e.g. ``:linenos:``).
    option_spec = dict([(key, directives.flag) for key in VARIANTS])
    has_content = True

    def run(self):
        self.assert_has_content()
        try:
            lexer = get_lexer_by_name(self.arguments[0])
        except ValueError:
            # no lexer found - use the text one instead of an exception
            lexer = TextLexer()
        # take an arbitrary option if more than one is given
        # NOTE(review): ``self.options.keys()[0]`` only works on Python 2;
        # Python 3 would need ``list(self.options)[0]`` -- confirm target version.
        formatter = self.options and VARIANTS[self.options.keys()[0]] or DEFAULT
        parsed = highlight(u'\n'.join(self.content), lexer, formatter)
        return [nodes.raw('', parsed, format='html')]
# Add syntax highlighting to code blocks
directives.register_directive('sourcecode', Pygments)
directives.register_directive('code', Pygments)
# This is our restructuredtextify tag to use in templates.
# The tag accepts an object which MUST have get_cache_key
# as a callable function!
@register.filter(name='restructuredtextify', needs_autoescape=True)
def restructuredtextify(content, slug, autoescape=None):
    """Render ``content`` as reStructuredText, caching the HTML per slug.

    ``slug`` MUST expose a callable ``get_cache_key()`` used to build
    the cache key.
    """
    key = 'ss.lib.tag.%s' % slug.get_cache_key()
    parts = cache.get(key)
    if not parts:
        parts = publish_parts(
            source = content,
            writer_name = "html4css1",
            settings_overrides = {
                'cloak_email_addresses': True,
                'initial_header_level': 2,
            },
        )
        # XXX: Hacky!!
        # Because docutils adds its own paragraph tags, the ordered
        # substitutions below attempt to correct new lines and <p> tags.
        fragment = parts['fragment']
        for needle, replacement in (
                ('\n', '<br />'),
                ('<p></p>', ''),
                ('<p>\n</p>', ''),
                ('</p><br /><p>', '</p><p>'),
                ('</p>\n<br /><p>', '</p><p>'),
                ('</p><br />\n<p>', '</p><p>'),
                ('</p>\n<br />\n<p>', '</p><p>'),
                ('</p><br />', '</p>'),
                ('<p><br />', '</p>'),
                ('<br /></pre></div><br /><p>', '</pre></div><p>')):
            fragment = fragment.replace(needle, replacement)
        parts['fragment'] = fragment
        cache.set(key, parts)
    return mark_safe(parts['fragment'])
| from django import template
from django.core.cache import cache
from docutils import nodes
from docutils.core import publish_parts
from docutils.parsers.rst import directives, Directive
from docutils.core import publish_cmdline, default_description
from pygments import highlight
from pygments.lexers import get_lexer_by_name, TextLexer
from pygments.formatters import HtmlFormatter
from django.utils.safestring import mark_safe
register = template.Library()
# Options
# ~~~~~~~
# Set to True if you want inline CSS styles instead of classes
INLINESTYLES = False
# The default formatter
DEFAULT = HtmlFormatter(noclasses=INLINESTYLES)
# Add name -> formatter pairs for every variant you want to use
VARIANTS = {
'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True),
}
class Pygments(Directive):
""" Source code syntax hightlighting.
"""
required_arguments = 1
optional_arguments = 0
final_argument_whitespace = True
option_spec = dict([(key, directives.flag) for key in VARIANTS])
has_content = True
def run(self):
self.assert_has_content()
try:
lexer = get_lexer_by_name(self.arguments[0])
except ValueError:
# no lexer found - use the text one instead of an exception
lexer = TextLexer()
# take an arbitrary option if more than one is given
formatter = self.options and VARIANTS[self.options.keys()[0]] or DEFAULT
parsed = highlight(u'\n'.join(self.content), lexer, formatter)
return [nodes.raw('', parsed, format='html')]
# Add syntax highlighting to code blocks
directives.register_directive('sourcecode', Pygments)
directives.register_directive('code', Pygments)
# This is our restructuredtextify tag to use in templates.
# The tag accepts an object which MUST have get_cache_key
# as a callable function!
@register.filter(name='restructuredtextify', needs_autoescape=True)
def restructuredtextify(content, slug, autoescape=None):
key = 'ss.lib.tag.%s' % slug.get_cache_key()
parts = cache.get(key)
if not parts:
parts = publish_parts(
source = content,
writer_name = "html4css1",
settings_overrides = {
'cloak_email_addresses': True,
'initial_header_level': 2,
},
)
# XXX: Hacky!!
# Because docutils adds its own paragraph tags into shit, this
# mess below attempts to correct new lines and <p> tags.
parts['fragment'] = parts['fragment'].replace('\n', '<br />')
parts['fragment'] = parts['fragment'].replace('<p></p>', '')
parts['fragment'] = parts['fragment'].replace('<p>\n</p>', '')
parts['fragment'] = parts['fragment'].replace('</p><br /><p>', '</p><p>')
cache.set(key, parts)
return mark_safe(parts['fragment'])
| bsd-2-clause | Python |
b8597aa3d53a0d68b8ac1e61bce97804fa02ba3a | Add versioning support to ryu controller | pablomuri/Engine,pablomuri/Engine,fp7-netide/Engine,pablomuri/Engine,fp7-netide/Engine,fp7-netide/Engine,fp7-netide/Engine,fp7-netide/Engine | loader/controllers.py | loader/controllers.py | # coding=utf-8
"""
Copyright (c) 2015, NetIDE Consortium (Create-Net (CN), Telefonica Investigacion Y Desarrollo SA (TID), Fujitsu
Technology Solutions GmbH (FTS), Thales Communications & Security SAS (THALES), Fundacion Imdea Networks (IMDEA),
Universitaet Paderborn (UPB), Intel Research & Innovation Ireland Ltd (IRIIL), Fraunhofer-Institut für
Produktionstechnologie (IPT), Telcaria Ideas SL (TELCA) )
All rights reserved. This program and the accompanying materials
are made available under the terms of the Eclipse Public License v1.0
which accompanies this distribution, and is available at
http://www.eclipse.org/legal/epl-v10.html
Authors:
Gregor Best, gbe@mail.upb.de
"""
import subprocess
class Controller(object):
    """Abstract base class for SDN controllers managed by the loader."""
    name = None
    params = None

    def version(self):
        """Return the installed controller version string, or None."""
        return None

    def install(self):
        raise NotImplementedError()

    def start(self):
        raise NotImplementedError()


class RyuController(Controller):
    """Ryu controller driven through the ``ryu-manager`` command."""
    name = "ryu"
    params = "--ofp-tcp-listen-port={}"

    def __init__(self, port, entrypoint):
        self.port = port
        self.entrypoint = entrypoint

    def __str__(self):
        return 'RyuController(port={}, entrypoint={})'.format(self.port, self.entrypoint)

    def install(self):
        # TODO?
        pass

    def version(self):
        """ Returns either the version of the controller as a string or None if the controller is not installed"""
        try:
            v = subprocess.check_output(["ryu", "--version"], stderr=subprocess.STDOUT).decode("utf-8")
            # Output looks like "ryu <version>"; keep the part after the name.
            return v.strip().split(" ", 1)[1]
        except (subprocess.CalledProcessError, OSError):
            # OSError/FileNotFoundError covers "ryu binary not on PATH",
            # which previously escaped and contradicted the docstring.
            return None

    def start(self):
        """Launch ryu-manager on the configured port and return its PID."""
        cmdline = ["sudo", "ryu-manager", self.params.format(self.port)]
        cmdline.append(self.entrypoint)
        print('Launching "{}" now'.format(cmdline))
        return subprocess.Popen(cmdline).pid
| # coding=utf-8
"""
Copyright (c) 2015, NetIDE Consortium (Create-Net (CN), Telefonica Investigacion Y Desarrollo SA (TID), Fujitsu
Technology Solutions GmbH (FTS), Thales Communications & Security SAS (THALES), Fundacion Imdea Networks (IMDEA),
Universitaet Paderborn (UPB), Intel Research & Innovation Ireland Ltd (IRIIL), Fraunhofer-Institut für
Produktionstechnologie (IPT), Telcaria Ideas SL (TELCA) )
All rights reserved. This program and the accompanying materials
are made available under the terms of the Eclipse Public License v1.0
which accompanies this distribution, and is available at
http://www.eclipse.org/legal/epl-v10.html
Authors:
Gregor Best, gbe@mail.upb.de
"""
import subprocess
class Controller(object):
cmd = None
name = None
params = None
def valid(self):
raise NotImplementedError()
def install(self):
raise NotImplementedError()
def start(self):
raise NotImplementedError()
class RyuController(Controller):
name = "ryu"
cmd = "ryu-manager"
params = "--ofp-tcp-listen-port={}"
def __init__(self, port, entrypoint):
self.port = port
self.entrypoint = entrypoint
def install(self):
# TODO?
pass
def valid(self):
# TODO: check if self.cmd exists
return True
def start(self):
cmdline = ["sudo", self.cmd, self.params.format(self.port)]
cmdline.append(self.entrypoint)
print('Launching "{}" now'.format(cmdline))
return subprocess.Popen(cmdline).pid
# return -1
| epl-1.0 | Python |
d2d743248a97f7045d50e29541bb513efeed3f44 | Add missing numpy import to pytools.io | carthach/essentia,MTG/essentia,carthach/essentia,carthach/essentia,MTG/essentia,MTG/essentia,carthach/essentia,carthach/essentia,MTG/essentia,MTG/essentia | src/python/essentia/pytools/io.py | src/python/essentia/pytools/io.py | # Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
import essentia.standard
import tempfile
import numpy as np
def test_audiofile(filename=None, type='sin440', duration=1):
    """Create a dummy wav audio file.

    This can be useful for tests that require loading audio files.

    Args:
        filename (string): Filename (default=None). If not specified, a
            named temporary file will be created.
        type (string): The type of audio to generate: 'silence' or 'sin440'
            (default)
        duration (float): duration of audio in seconds (default 1 sec.)

    Returns:
        (string): Name of the audio file written.

    Raises:
        ValueError: if ``type`` is not a recognized audio type.
    """
    if type == 'sin440':
        # 440 Hz sine tone at a 44.1 kHz sampling rate.
        samples = np.sin(2*np.pi*np.arange(44100*duration)*440/44100).astype(np.float32)
    elif type == 'silence':
        samples = [0] * int(44100 * duration)
    else:
        # `raise (ValueError, ...)` raised a tuple, which is invalid on
        # Python 3; raise a proper exception instead.
        raise ValueError('Wrong audio type: {}'.format(type))

    if not filename:
        tf = tempfile.NamedTemporaryFile(delete=False, suffix=".wav")
        tf.close()
        filename = tf.name

    essentia.standard.MonoWriter(filename=filename)(samples)
    return filename
| # Copyright (C) 2006-2016 Music Technology Group - Universitat Pompeu Fabra
#
# This file is part of Essentia
#
# Essentia is free software: you can redistribute it and/or modify it under
# the terms of the GNU Affero General Public License as published by the Free
# Software Foundation (FSF), either version 3 of the License, or (at your
# option) any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the Affero GNU General Public License
# version 3 along with this program. If not, see http://www.gnu.org/licenses/
import essentia.standard
import tempfile
def test_audiofile(filename=None, type='sin440', duration=1):
"""Create a dummy wav audio file.
This can be useful for tests that require loading audio files.
Args:
filename (string): Filename (default=None). If not specified, a
named temporary file will be created.
type (string): The type of audio to generate: 'silence' or 'sin440'
(default)
duration (float): duration of audio in seconds (default 1 sec.)
Returns:
(string): Name of a temporary audio file
"""
if type == 'sin440':
samples = np.sin(2*np.pi*np.arange(44100*duration)*440/44100).astype(np.float32)
elif type == 'silence':
samples = [0] * int(44100 * duration)
else:
raise (ValueError, 'Wrong audio type:', type)
if not filename:
tf = tempfile.NamedTemporaryFile(delete=False, suffix=".wav")
tf.close()
filename = tf.name
essentia.standard.MonoWriter(filename=filename)(samples)
return filename
| agpl-3.0 | Python |
939e449887a9354a43944acb6137fac4ff7a5daf | Print % completion in 'importdata' command | rafidka/hadithhouse,hadithhouse/hadithhouse,rafidka/hadithhouse,hadithhouse/hadithhouse,rafidka/hadithhouse,rafidka/hadithhouse,hadithhouse/hadithhouse,rafidka/hadithhouse,hadithhouse/hadithhouse,hadithhouse/hadithhouse,rafidka/hadithhouse,hadithhouse/hadithhouse | HadithHouseWebsite/hadiths/management/commands/importdata.py | HadithHouseWebsite/hadiths/management/commands/importdata.py | import codecs
import os
from django.core.management.base import BaseCommand, CommandError
from hadiths.models import Hadith, Book
from hadiths import initial_data
class Command(BaseCommand):
    """Management command importing bundled data sets into the site."""
    help = 'Import data to the website'

    def add_arguments(self, parser):
        # Positional arguments
        parser.add_argument('dataname', nargs=1, type=str)

    def handle(self, *args, **options):
        data_name = options['dataname'][0]
        if data_name == 'holyquran':
            self.import_holy_quran()
        else:
            raise CommandError('Invalid data name specified: ' + data_name)

    def import_holy_quran(self):
        """Import Holy Quran verses from the bundled quran-uthmani.txt.

        Lines have the form ``chapter|verse_no|verse``; progress is
        printed as a percentage of the total verse count.
        """
        file_path = os.path.join(os.path.dirname(__file__), 'quran-uthmani.txt')
        holy_quran = Book.objects.get(title=initial_data.holy_quran)
        sura = None
        # The total number of verses in the Holy Quran is 6236, excluding
        # Basmalas at the beginning of Suras.
        total_verse_count = 6236
        perc, prev_perc = 0, 0
        # NOTE: a stray `return` here previously turned the whole import
        # into a no-op; it has been removed so the loop below runs.
        with codecs.open(file_path, 'r', 'utf-8') as file:
            for i, line in enumerate(file):
                if line.startswith('#') or line.isspace():
                    # Ignore comment and empty lines.
                    continue
                try:
                    chapter, verse_no, verse = line.split('|')
                    # Only re-fetch the chapter when it changes.
                    if sura is None or sura.number != chapter:
                        sura = holy_quran.chapters.get(number=chapter)
                    Hadith.objects.get_or_create(text=verse, book=holy_quran, chapter=sura)
                    perc = int(i*100/total_verse_count)
                    if perc != prev_perc:
                        self.stdout.write(str(perc) + '%')
                        self.stdout.flush()
                        prev_perc = perc
                except Exception as e:
                    # Keep importing the remaining verses; just report the
                    # offending line.  (A dangling `self.style.SUC` expression
                    # that sat here has been removed.)
                    self.stderr.write('Failed while processing the line: ' + line)
                    self.stderr.write('Exception was: ' + str(e))
        self.stdout.flush()
import os
from django.core.management.base import BaseCommand, CommandError
from hadiths.models import Hadith, Book
from hadiths import initial_data
class Command(BaseCommand):
help = 'Import data to the website'
def add_arguments(self, parser):
parser.add_argument('dataname', nargs=1, type=str)
def handle(self, *args, **options):
data_name = options['dataname'][0]
if data_name == 'holyquran':
self.import_holy_quran()
else:
raise CommandError('Invalid data name specified: ' + data_name)
def import_holy_quran(self):
file_path = os.path.join(os.path.dirname(__file__), 'quran-uthmani.txt')
holy_quran = Book.objects.get(title=initial_data.holy_quran)
sura = None
with codecs.open(file_path, 'r', 'utf-8') as file:
for line in file:
if line.startswith('#') or line.isspace():
# Ignore comment and empty lines.
continue
chapter, verse_no, verse = line.split('|')
if sura is None or sura.number != chapter:
sura = holy_quran.chapters.get(number=chapter)
h = Hadith.objects.get_or_create(text=verse, book=holy_quran, chapter=sura)
| mit | Python |
86227c6cbe7003ad39af1342fd5fc03d577675d5 | Add an additional nbgrader update test | jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jupyter/nbgrader | nbgrader/tests/apps/test_nbgrader_update.py | nbgrader/tests/apps/test_nbgrader_update.py | from os.path import join
from .. import run_nbgrader
from .base import BaseTestApp
class TestNbGraderUpdate(BaseTestApp):
    """End-to-end tests for the ``nbgrader update`` command."""

    def test_help(self):
        """Does the help display without error?"""
        run_nbgrader(["update", "--help-all"])

    def test_no_args(self):
        """Is there an error if no arguments are given?"""
        run_nbgrader(["update"], retcode=1)

    def test_update_assign(self, db, course_dir):
        with open("nbgrader_config.py", "a") as fh:
            fh.write("""c.NbGrader.db_assignments = [dict(name='ps1', duedate='2015-02-02 14:58:23.948203 PST')]\n""")
            fh.write("""c.NbGrader.db_students = [dict(id="foo"), dict(id="bar")]""")
        # The v0 notebook carries outdated cell metadata, so assign fails.
        self._copy_file(join("files", "test-v0.ipynb"), join(course_dir, "source", "ps1", "p1.ipynb"))
        run_nbgrader(["assign", "ps1", "--db", db], retcode=1)
        # now update the metadata
        run_nbgrader(["update", course_dir])
        # now assign should succeed
        run_nbgrader(["assign", "ps1", "--db", db])

    def test_update_autograde(self, db, course_dir):
        with open("nbgrader_config.py", "a") as fh:
            fh.write("""c.NbGrader.db_assignments = [dict(name='ps1', duedate='2015-02-02 14:58:23.948203 PST')]\n""")
            fh.write("""c.NbGrader.db_students = [dict(id="foo"), dict(id="bar")]""")
        self._copy_file(join("files", "test.ipynb"), join(course_dir, "source", "ps1", "p1.ipynb"))
        run_nbgrader(["assign", "ps1", "--db", db])
        # autograde should fail on old metadata, too
        self._copy_file(join("files", "test-v0.ipynb"), join(course_dir, "submitted", "foo", "ps1", "p1.ipynb"))
        run_nbgrader(["autograde", "ps1", "--db", db], retcode=1)
        # now update the metadata
        run_nbgrader(["update", course_dir])
        # now autograde should succeed
        run_nbgrader(["autograde", "ps1", "--db", db])

    def test_update_autograde_old_assign(self, db, course_dir):
        with open("nbgrader_config.py", "a") as fh:
            fh.write("""c.NbGrader.db_assignments = [dict(name='ps1', duedate='2015-02-02 14:58:23.948203 PST')]\n""")
            fh.write("""c.NbGrader.db_students = [dict(id="foo"), dict(id="bar")]""")
        self._copy_file(join("files", "test-v0.ipynb"), join(course_dir, "source", "ps1", "p1.ipynb"))
        # Metadata checks disabled, so assigning the v0 notebook succeeds here.
        run_nbgrader(["assign", "ps1", "--db", db, "--CheckCellMetadata.enabled=False"])
        # autograde should fail on old metadata, too
        self._copy_file(join(course_dir, "release", "ps1", "p1.ipynb"), join(course_dir, "submitted", "foo", "ps1", "p1.ipynb"))
        run_nbgrader(["autograde", "ps1", "--db", db], retcode=1)
        # now update the metadata
        run_nbgrader(["update", join(course_dir, "submitted")])
        # now autograde should succeed
        run_nbgrader(["autograde", "ps1", "--db", db])
| from os.path import join
from .. import run_nbgrader
from .base import BaseTestApp
class TestNbGraderUpdate(BaseTestApp):
def test_help(self):
"""Does the help display without error?"""
run_nbgrader(["update", "--help-all"])
def test_no_args(self):
"""Is there an error if no arguments are given?"""
run_nbgrader(["update"], retcode=1)
def test_update(self, db, course_dir):
with open("nbgrader_config.py", "a") as fh:
fh.write("""c.NbGrader.db_assignments = [dict(name='ps1', duedate='2015-02-02 14:58:23.948203 PST')]\n""")
fh.write("""c.NbGrader.db_students = [dict(id="foo"), dict(id="bar")]""")
self._copy_file(join("files", "test-v0.ipynb"), join(course_dir, "source", "ps1", "p1.ipynb"))
run_nbgrader(["assign", "ps1", "--db", db], retcode=1)
# now update the metadata
run_nbgrader(["update", course_dir])
# now assign should suceed
run_nbgrader(["assign", "ps1", "--db", db])
# autograde should fail on old metadata, too
self._copy_file(join("files", "test-v0.ipynb"), join(course_dir, "submitted", "foo", "ps1", "p1.ipynb"))
run_nbgrader(["autograde", "ps1", "--db", db], retcode=1)
# now update the metadata
run_nbgrader(["update", course_dir])
# now autograde should suceed
run_nbgrader(["autograde", "ps1", "--db", db])
| bsd-3-clause | Python |
cdf6ab90f77583c9a716afd9d751b417396fe976 | add SESSION_COOKIE_NAME setting | djaodjin/djaodjin-deployutils,djaodjin/djaodjin-deployutils,djaodjin/djaodjin-deployutils | deployutils/settings.py | deployutils/settings.py | # Copyright (c) 2014, Djaodjin Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Convenience module for access of deployutils app settings, which enforces
default settings when the main settings module does not contain
the appropriate settings.
In a production environment, the static resources (images, css, js) are served
directly by nginx from RESOURCES_ROOT. Furthermore the CMS pages are
served by one process while the app is served by another process. This requires
to install the templates from the app repo into the CMS template directory
(INSTALLED_TEMPLATES_ROOT) after the TemplateNodes related to the assets
pipeline have been resolved.
"""
from django.conf import settings
# Each setting can be overridden from the site settings module; the second
# getattr argument is the fallback default.
DRY_RUN = getattr(settings, 'DEPLOYUTILS_DRY_RUN', False)
DEPLOYED_WEBAPP_ROOT = getattr(settings, 'DEPLOYUTILS_DEPLOYED_WEBAPP_ROOT',
    '/var/www/%s' % settings.ALLOWED_HOSTS[0])
DEPLOYED_SERVERS = getattr(settings, 'DEPLOYUTILS_DEPLOYED_SERVERS',
    (settings.ALLOWED_HOSTS[0], ))
INSTALLED_APPS = getattr(settings, 'DEPLOYUTILS_INSTALLED_APPS',
    settings.INSTALLED_APPS)
RESOURCES_ROOT = getattr(settings, 'DEPLOYUTILS_RESOURCES_ROOT',
    settings.APP_ROOT + '/htdocs/')
# Static resources are served directly from this directory (see module
# docstring); normalize to a trailing slash for path concatenation.
if not RESOURCES_ROOT.endswith('/'):
    RESOURCES_ROOT = RESOURCES_ROOT + '/'
INSTALLED_TEMPLATES_ROOT = getattr(settings,
    'DEPLOYUTILS_INSTALLED_TEMPLATES_ROOT',
    settings.TEMPLATE_DIRS[0])
RESOURCES_REMOTE_LOCATION = getattr(settings,
    'DEPLOYUTILS_RESOURCES_REMOTE_LOCATION',
    'git@%s:%s' % (settings.ALLOWED_HOSTS[0], DEPLOYED_WEBAPP_ROOT))
# NOTE(review): hard-coded rather than read via getattr like the others --
# presumably intentional, but confirm it should not be overridable.
SESSION_COOKIE_NAME = 'sessionid'
DJAODJIN_SECRET_KEY = getattr(settings,
    'DJAODJIN_SECRET_KEY', None)
| # Copyright (c) 2014, Djaodjin Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Convenience module for access of deployutils app settings, which enforces
default settings when the main settings module does not contain
the appropriate settings.
In a production environment, the static resources (images, css, js) are served
directly by nginx from RESOURCES_ROOT. Furthermore the CMS pages are
served by one process while the app is served by another process. This requires
to install the templates from the app repo into the CMS template directory
(INSTALLED_TEMPLATES_ROOT) after the TemplateNodes related to the assets
pipeline have been resolved.
"""
from django.conf import settings
DRY_RUN = getattr(settings, 'DEPLOYUTILS_DRY_RUN', False)
DEPLOYED_WEBAPP_ROOT = getattr(settings, 'DEPLOYUTILS_DEPLOYED_WEBAPP_ROOT',
'/var/www/%s' % settings.ALLOWED_HOSTS[0])
DEPLOYED_SERVERS = getattr(settings, 'DEPLOYUTILS_DEPLOYED_SERVERS',
(settings.ALLOWED_HOSTS[0], ))
INSTALLED_APPS = getattr(settings, 'DEPLOYUTILS_INSTALLED_APPS',
settings.INSTALLED_APPS)
RESOURCES_ROOT = getattr(settings, 'DEPLOYUTILS_RESOURCES_ROOT',
settings.APP_ROOT + '/htdocs/')
if not RESOURCES_ROOT.endswith('/'):
RESOURCES_ROOT = RESOURCES_ROOT + '/'
INSTALLED_TEMPLATES_ROOT = getattr(settings,
'DEPLOYUTILS_INSTALLED_TEMPLATES_ROOT',
settings.TEMPLATE_DIRS[0])
RESOURCES_REMOTE_LOCATION = getattr(settings,
'DEPLOYUTILS_RESOURCES_REMOTE_LOCATION',
'git@%s:%s' % (settings.ALLOWED_HOSTS[0], DEPLOYED_WEBAPP_ROOT))
DJAODJIN_SECRET_KEY = getattr(settings,
'DJAODJIN_SECRET_KEY', None)
| bsd-2-clause | Python |
8a5984f9b49c978e296a315ed62b52c92548636c | fix populate wikivoyage | whosken/destinate | destinate/wikivoyage.py | destinate/wikivoyage.py | import mwclient
import mwparserfromhell
HOST = 'en.wikivoyage.org'
def find_city(city_name):
return fetch(city_name)
def fetch(page_name):
page = site.Pages[page_name]
retun {
'guide': build_guide(page.text()),
'images': format_image(page.images())
}
def format_image(image):
return {
'url': image['url'],
'height': image['height'],
'width': image['width'],
}
def build_guide(code):
wikicode = mwparserfromhell.parse(code)
wikicode = filter_sections(wikicode)
return wikicode.strip_code()
should_ignore_sections = lambda h: h.lower() in ('get in', 'get around', 'go next', 'learn')
def filter_sections(code):
try:
invalid_sections = code.get_sections(
levels=[1,2],
matches=should_ignore_sections,
include_lead=True,
include_headings=True
)
for section in invalid_sections:
code.remove(section)
except IndexError:
pass
return code
class LazyClient(object):
_site = False
def __getattr__(self, name):
if not LazyClient._site:
LazyClient._site = mwclient.Site(HOST)
return getattr(LazyClient._site, name)
site = LazyClient()
| import mwclient
def find_city(city_name):
pass
| apache-2.0 | Python |
7758988978e5ea3de9020673cf77890036ca3b99 | allow blank columns | neuropower/neuropower,neuropower/neuropower,neuropower/neuropower,neuropower/neuropower | neuropower/apps/neuropowertoolbox/models.py | neuropower/apps/neuropowertoolbox/models.py | from __future__ import unicode_literals
from django.db import models
from picklefield.fields import PickledObjectField
import os
from django.conf import settings
#import tempfile
#temp_dir = tempfile.gettempdir()
class NeuropowerModel(models.Model):
SID = models.CharField(max_length=300,default="")
step = models.IntegerField(default=0) #0 = nothing, 1 = parameters, 2 = peaktable done, 3 = model fit, 4 = powertable done
map_url = models.URLField(default="")
mask_url = models.URLField(default="")
map_local = models.CharField(max_length=300,default="")
mask_local = models.CharField(max_length=300,default="")
peaktable = PickledObjectField(default="")
location = models.CharField(max_length=300,default="")
spmfile = models.FileField(upload_to=os.path.join(settings.MEDIA_ROOT,"maps"),default="")
masklocation = models.CharField(max_length=300,default="")
maskfile = models.FileField(upload_to=os.path.join(settings.MEDIA_ROOT,"maps"),default="")
nvox = models.CharField(max_length=300,default="")
ZorT_c = (("Z","Z"),("T","T"))
Samples_c = ((1, ("One-sample")),(2, ("Two-sample")))
ZorT = models.CharField(max_length=10,choices=ZorT_c)
Exc = models.FloatField(default=0,blank=True)
ExcZ = models.FloatField(default=0,blank=True)
DoF = models.FloatField(default=0,blank=True)
Subj = models.IntegerField()
alpha = models.FloatField(default=0,blank=True)
Samples = models.IntegerField(choices=Samples_c)
SmoothEst_c = ((1,"Manual"),(2,"Estimate"))
SmoothEst = models.IntegerField(choices=SmoothEst_c,default=1)
Smoothx = models.FloatField(default=0,blank=True)
Smoothy = models.FloatField(default=0,blank=True)
Smoothz = models.FloatField(default=0,blank=True)
Voxx = models.FloatField(default=0,blank=True)
Voxy = models.FloatField(default=0,blank=True)
Voxz = models.FloatField(default=0,blank=True)
data = PickledObjectField(default="")
err = models.CharField(max_length=1000,default="")
pi1 = models.FloatField(default=0,blank=True)
a = models.FloatField(default=0,blank=True)
mu = models.FloatField(default=0,blank=True)
sigma = models.FloatField(default=0,blank=True)
data = PickledObjectField(default="")
reqPow = models.FloatField(default=0,blank=True)
reqSS = models.IntegerField(default=0,null=True, blank=True)
MCP_c = (("RFT", "Random Field Theory"),("BH", "Benjamini-Hochberg"),("BF","Bonferroni"),("UN","Uncorrected"))
MCP = models.CharField(max_length=10,choices=MCP_c)
def __unicode__(self):
return "<PowerModel:%s>" %self.SID
| from __future__ import unicode_literals
from django.db import models
from picklefield.fields import PickledObjectField
import os
from django.conf import settings
#import tempfile
#temp_dir = tempfile.gettempdir()
class NeuropowerModel(models.Model):
SID = models.CharField(max_length=300,default="")
step = models.IntegerField(default=0) #0 = nothing, 1 = parameters, 2 = peaktable done, 3 = model fit, 4 = powertable done
map_url = models.URLField(default="")
mask_url = models.URLField(default="")
map_local = models.CharField(max_length=300,default="")
mask_local = models.CharField(max_length=300,default="")
peaktable = PickledObjectField(default="")
location = models.CharField(max_length=300,default="")
spmfile = models.FileField(upload_to=os.path.join(settings.MEDIA_ROOT,"maps"),default="")
masklocation = models.CharField(max_length=300,default="")
maskfile = models.FileField(upload_to=os.path.join(settings.MEDIA_ROOT,"maps"),default="")
nvox = models.CharField(max_length=300,default="")
ZorT_c = (("Z","Z"),("T","T"))
Samples_c = ((1, ("One-sample")),(2, ("Two-sample")))
ZorT = models.CharField(max_length=10,choices=ZorT_c)
Exc = models.FloatField(default=0)
ExcZ = models.FloatField(default=0)
DoF = models.FloatField(default=0)
Subj = models.IntegerField()
alpha = models.FloatField(default=0)
Samples = models.IntegerField(choices=Samples_c)
SmoothEst_c = ((1,"Manual"),(2,"Estimate"))
SmoothEst = models.IntegerField(choices=SmoothEst_c,default=1)
Smoothx = models.FloatField(default=0)
Smoothy = models.FloatField(default=0)
Smoothz = models.FloatField(default=0)
Voxx = models.FloatField(default=0)
Voxy = models.FloatField(default=0)
Voxz = models.FloatField(default=0)
data = PickledObjectField(default="")
err = models.CharField(max_length=1000,default="")
pi1 = models.FloatField(default=0)
a = models.FloatField(default=0)
mu = models.FloatField(default=0)
sigma = models.FloatField(default=0)
data = PickledObjectField(default="")
reqPow = models.FloatField(default=0)
reqSS = models.IntegerField(default=0,null=True, blank=True)
MCP_c = (("RFT", "Random Field Theory"),("BH", "Benjamini-Hochberg"),("BF","Bonferroni"),("UN","Uncorrected"))
MCP = models.CharField(max_length=10,choices=MCP_c)
def __unicode__(self):
return "<PowerModel:%s>" %self.SID
| mit | Python |
cc3863493d77f60fae78e3d13bc8205a673cdafd | fix import operators for Arriva North East et al | stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,stev-0/bustimes.org.uk,jclgoodwin/bustimes.org.uk | busstops/management/commands/import_operators.py | busstops/management/commands/import_operators.py | """
Usage:
./manage.py import_operators < NOC_db.csv
"""
import sys
import csv
from django.core.management.base import BaseCommand
from busstops.models import Operator, Region
class Command(BaseCommand):
@staticmethod
def row_to_operator(row):
"Given a CSV row (a list), returns an Operator object"
id = row[0].replace('=', '').replace('\'', '')
region_id = row[12].replace('Admin', 'GB').replace('ADMIN', 'GB').replace('SC', 'S').replace('YO', 'Y').replace('WA', 'W').replace('LO', 'L')
region = Region.objects.get(id=region_id)
if row[1] in ('First', 'Arriva', 'Stagecoach') or row[1][:4] == 'inc.':
# uninformative OperatorPublicName
name = row[2] # OperatorReferenceName
else:
name = row[1] # OperatorPublicName
operator = Operator(
id=id,
name=name.strip(),
vehicle_mode=row[13],
parent=row[16],
region=region,
)
return operator
def handle(self, *args, **options):
reader = csv.reader(sys.stdin)
next(reader) # skip past header
for row in reader:
operator = self.row_to_operator(row)
operator.save()
| """
Usage:
./manage.py import_operators < NOC_db.csv
"""
import sys
import csv
from django.core.management.base import BaseCommand
from busstops.models import Operator, Region
class Command(BaseCommand):
@staticmethod
def row_to_operator(row):
"Given a CSV row (a list), returns an Operator object"
id = row[0].replace('=', '').replace('\'', '')
region_id = row[12].replace('Admin', 'GB').replace('ADMIN', 'GB').replace('SC', 'S').replace('YO', 'Y').replace('WA', 'W').replace('LO', 'L')
region = Region.objects.get(id=region_id)
if row[1] in ('First', 'Arriva', 'Stagecoach') or row[:4] == 'inc.':
# uninformative OperatorPublicName
name = row[2] # OperatorReferenceName
else:
name = row[1] # OperatorPublicName
operator = Operator(
id=id,
name=name.strip(),
vehicle_mode=row[13],
parent=row[16],
region=region,
)
return operator
def handle(self, *args, **options):
reader = csv.reader(sys.stdin)
next(reader) # skip past header
for row in reader:
operator = self.row_to_operator(row)
operator.save()
| mpl-2.0 | Python |
f2ef01e183e9775f05f4c897c2c29248f5c6c927 | exclude data no longer used. | UWIT-IAM/uw-restclients,uw-it-cte/uw-restclients,UWIT-IAM/uw-restclients,uw-it-cte/uw-restclients,uw-it-cte/uw-restclients,uw-it-aca/uw-restclients,UWIT-IAM/uw-restclients,uw-it-aca/uw-restclients | restclients/models/iasystem.py | restclients/models/iasystem.py | from django.db import models
class Evaluation(models.Model):
section_sln = models.IntegerField(max_length=5)
eval_open_date = models.DateTimeField()
eval_close_date = models.DateTimeField()
eval_status = models.CharField(max_length=7)
eval_url = models.URLField()
def __init__(self, *args, **kwargs):
super(Evaluation, self).__init__(*args, **kwargs)
self.instructor_ids = []
def __str__(self):
return "{%s: %d, %s: %s, %s: %s, %s: %s}" % (
"sln", self.section_sln,
"eval_open_date", self.eval_open_date,
"eval_close_date", self.eval_close_date,
"eval_url", self.eval_url)
| from django.db import models
class Evaluation(models.Model):
section_sln = models.IntegerField(max_length=5)
eval_open_date = models.DateTimeField()
eval_close_date = models.DateTimeField()
eval_status = models.CharField(max_length=7)
eval_is_online = models.BooleanField(default=False)
eval_url = models.URLField()
def __init__(self, *args, **kwargs):
super(Evaluation, self).__init__(*args, **kwargs)
self.instructor_ids = []
def __str__(self):
return "{sln: %d, eval_is_online: %s, status: %s}" % (
self.section_sln, self.eval_is_online, self.eval_status)
| apache-2.0 | Python |
8372543f62f0fd01b8661f4d6e64503cdd78e5ea | remove RPC | XertroV/opreturn-ninja,XertroV/opreturn-ninja,XertroV/opreturn-ninja | opreturnninja/views.py | opreturnninja/views.py | import json
import random
from pyramid.view import view_config
from .constants import ELECTRUM_SERVERS
from bitcoin.rpc import RawProxy, DEFAULT_USER_AGENT
import socket
@view_config(route_name='api', renderer='json')
def api_view(request):
global rpc
assert hasattr(request, 'json_body')
assert 'method' in request.json_body and 'params' in request.json_body
method = request.json_body['method']
params = request.json_body['params']
assert type(params) == list
if method == 'sendrawtransaction':
assert len(params) == 1
sent = False
while not sent:
try:
s = socket.create_connection(random.choice(list(ELECTRUM_SERVERS.items())))
s.send(b'{"id":"0", "method":"blockchain.transaction.broadcast", "params":["' + params[0].encode() + b'"]}\n')
r = {'result': s.recv(1024)[:-1].decode(), 'error': None, 'id': request.json_body['id']} # the slice is to remove the trailing new line
print(r)
return r
except ConnectionRefusedError as e:
print(e)
except socket.gaierror as e:
print(e)
return {
'result': None,
'error': 'RPC Request Unknown',
'id': request.json_body['id'],
}
@view_config(route_name='index', renderer='templates/index.pt')
def index_view(request):
return {} | import json
import random
from pyramid.view import view_config
from .constants import ELECTRUM_SERVERS
from bitcoin.rpc import RawProxy, DEFAULT_USER_AGENT
import socket
class RPC(RawProxy):
def passJson(self, json_to_dump):
self.__dict__['_RawProxy__conn'].request('POST', self.__dict__['_RawProxy__url'].path, json.dumps(json_to_dump),
{'Host': self.__dict__['_RawProxy__url'].hostname,
'User-Agent': DEFAULT_USER_AGENT,
'Authorization': self.__dict__['_RawProxy__auth_header'],
'Content-type': 'application/json'})
return self._get_response()
def __getattr__(self, name):
if name == '__conn':
return self.__conn
return RawProxy.__getattr__(self, name)
rpc = RPC()
def setupRpc():
global rpc
rpc = RPC()
setupRpc()
@view_config(route_name='api', renderer='json')
def api_view(request):
global rpc
assert hasattr(request, 'json_body')
assert 'method' in request.json_body and 'params' in request.json_body
method = request.json_body['method']
params = request.json_body['params']
assert type(params) == list
if method == 'sendrawtransaction':
assert len(params) == 1
sent = False
while not sent:
try:
s = socket.create_connection(random.choice(list(ELECTRUM_SERVERS.items())))
s.send(b'{"id":"0", "method":"blockchain.transaction.broadcast", "params":["' + params[0].encode() + b'"]}\n')
r = {'result': s.recv(1024)[:-1].decode(), 'error': None, 'id': request.json_body['id']} # the slice is to remove the trailing new line
print(r)
return r
except ConnectionRefusedError as e:
print(e)
except socket.gaierror as e:
print(e)
return {
'result': None,
'error': 'RPC Request Unknown',
'id': request.json_body['id'],
}
@view_config(route_name='index', renderer='templates/index.pt')
def index_view(request):
return {} | mit | Python |
2abb2227fb4f8c6d0cb855724a1f8d1380fc158c | Use open with | hhucn/netsec-uebungssystem,hhucn/netsec-uebungssystem,hhucn/netsec-uebungssystem | netsecus/korrekturtools.py | netsecus/korrekturtools.py | from __future__ import unicode_literals
import os
from . import helper
def readStatus(student):
student = student.lower()
path = helper.getConfigValue("settings", "attachment_path")
if not os.path.exists(path):
return
path = os.path.join(path, student)
if not os.path.exists(path):
return "Student ohne Abgabe"
path = os.path.join(path, "korrekturstatus.txt")
if not os.path.exists(path):
return "Unbearbeitet"
statusfile = open(path, "r")
status = statusfile.read()
statusfile.close()
return status
def writeStatus(student, status):
student = student.lower()
status = status.lower()
path = os.path.join(helper.getConfigValue("settings", "attachment_path"), student)
if not os.path.exists(path):
logging.error("Requested student '%s' hasn't submitted anything yet.")
return
path = os.path.join(path, "korrekturstatus.txt")
with open(path, "w") as statusfile:
statusfile.write(status)
| from __future__ import unicode_literals
import os
from . import helper
def readStatus(student):
student = student.lower()
path = helper.getConfigValue("settings", "attachment_path")
if not os.path.exists(path):
return
path = os.path.join(path, student)
if not os.path.exists(path):
return "Student ohne Abgabe"
path = os.path.join(path, "korrekturstatus.txt")
if not os.path.exists(path):
return "Unbearbeitet"
statusfile = open(path, "r")
status = statusfile.read()
statusfile.close()
return status
def writeStatus(student, status):
student = student.lower()
status = status.lower()
path = os.path.join(helper.getConfigValue("settings", "attachment_path"), student)
if not os.path.exists(path):
logging.error("Requested student '%s' hasn't submitted anything yet.")
return
path = os.path.join(path, "korrekturstatus.txt")
statusfile = open(path, "w")
statusfile.write(status)
statusfile.close()
| mit | Python |
c3878e39e97dd2390c3be3dd0229b3dbb4f0c885 | Add walls to the map. | codypiersall/platformer | structured_mainloop.py | structured_mainloop.py | import pygame
SCREEN_SIZE = (640, 480)
class Player(pygame.sprite.Sprite):
SPEED = 200
def __init__(self, *groups):
super().__init__(*groups)
self.image = pygame.image.load('frog.gif')
self.rect = pygame.rect.Rect((320, 240), self.image.get_size())
def update(self, dt, game):
# last position
last = self.rect.copy()
key = pygame.key.get_pressed()
if key[pygame.K_LEFT]:
self.rect.x -= self.SPEED * dt
if key[pygame.K_RIGHT]:
self.rect.x += self.SPEED * dt
if key[pygame.K_UP]:
self.rect.y -= self.SPEED * dt
if key[pygame.K_DOWN]:
self.rect.y += self.SPEED * dt
for cell in pygame.sprite.spritecollide(self, game.walls, False):
self.rect = last
class Game():
def main(self, screen):
clock = pygame.time.Clock()
background = pygame.image.load('Castle.gif')
background = pygame.transform.scale(background, SCREEN_SIZE)
sprites = pygame.sprite.Group()
self.player = Player(sprites)
self.walls = pygame.sprite.Group()
block = pygame.image.load('Chest.gif')
for x in range(0, 640, 32):
for y in range(0, 480, 32):
if x in (0, 640-32) or y in (0, 480-32):
wall = pygame.sprite.Sprite(self.walls)
wall.image = block
wall.rect = pygame.rect.Rect((x,y), block.get_size())
while True:
dt = clock.tick(30) / 1000
for event in pygame.event.get():
if event.type == pygame.QUIT:
return
if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
return
sprites.update(dt, self)
screen.blit(background, (0,0))
sprites.draw(screen)
pygame.display.flip()
if __name__ == '__main__':
pygame.init()
screen = pygame.display.set_mode(SCREEN_SIZE)
game = Game()
game.main(screen) | import pygame
SCREEN_SIZE = (640, 480)
class Player(pygame.sprite.Sprite):
SPEED = 300
def __init__(self, *groups):
super().__init__(*groups)
self.image = pygame.image.load('frog.gif')
self.rect = pygame.rect.Rect((320, 240), self.image.get_size())
def update(self, dt):
key = pygame.key.get_pressed()
if key[pygame.K_LEFT]:
self.rect.x -= self.SPEED * dt
if key[pygame.K_RIGHT]:
self.rect.x += self.SPEED * dt
if key[pygame.K_UP]:
self.rect.y -= self.SPEED * dt
if key[pygame.K_DOWN]:
self.rect.y += self.SPEED * dt
class Game():
def main(self, screen):
clock = pygame.time.Clock()
background = pygame.image.load('Castle.gif')
background = pygame.transform.scale(background, SCREEN_SIZE)
sprites = pygame.sprite.Group()
self.player = Player(sprites)
while True:
dt = clock.tick(30) / 1000
for event in pygame.event.get():
if event.type == pygame.QUIT:
return
if event.type == pygame.KEYDOWN and event.key == pygame.K_ESCAPE:
return
sprites.update(dt)
screen.blit(background, (0,0))
sprites.draw(screen)
pygame.display.flip()
if __name__ == '__main__':
pygame.init()
screen = pygame.display.set_mode(SCREEN_SIZE)
game = Game()
game.main(screen) | bsd-3-clause | Python |
0edb45d851d7882ee1b9843e04f1d8fd3ff0abe4 | Add Pool to subliminal | Diaoul/subliminal,neo1691/subliminal,getzze/subliminal,SickRage/subliminal,hpsbranco/subliminal,kbkailashbagaria/subliminal,ratoaq2/subliminal,h3llrais3r/subliminal,ravselj/subliminal,fernandog/subliminal,ofir123/subliminal,juanmhidalgo/subliminal,nvbn/subliminal,bogdal/subliminal,Elettronik/subliminal,t4lwh/subliminal,pums974/subliminal,oxan/subliminal,goll/subliminal | subliminal/__init__.py | subliminal/__init__.py | # -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from .api import list_subtitles, download_subtitles
from .async import Pool
from .infos import __version__
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
__all__ = ['list_subtitles', 'download_subtitles', 'Pool']
logging.getLogger(__name__).addHandler(NullHandler())
| # -*- coding: utf-8 -*-
# Copyright 2011-2012 Antoine Bertin <diaoulael@gmail.com>
#
# This file is part of subliminal.
#
# subliminal is free software; you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# subliminal is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with subliminal. If not, see <http://www.gnu.org/licenses/>.
from .api import list_subtitles, download_subtitles
from .infos import __version__
import logging
try:
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
__all__ = ['list_subtitles', 'download_subtitles']
logging.getLogger(__name__).addHandler(NullHandler())
| mit | Python |
f0ad9568c907eac93662357482db346928bddb95 | add verify ssl option to global configuration schema. | polyaxon/polyaxon,polyaxon/polyaxon,polyaxon/polyaxon | polyaxon_cli/schemas/global_configuration.py | polyaxon_cli/schemas/global_configuration.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from marshmallow import fields
from polyaxon_cli.schemas.base import BaseConfig, BaseSchema
class GlobalConfigurationSchema(BaseSchema):
verbose = fields.Bool(allow_none=True)
host = fields.Str(allow_none=True)
http_port = fields.Str(allow_none=True)
ws_port = fields.Str(allow_none=True)
use_https = fields.Bool(allow_none=True)
verify_ssl = fields.Bool(allow_none=True)
@staticmethod
def schema_config():
return GlobalConfigurationConfig
class GlobalConfigurationConfig(BaseConfig):
SCHEMA = GlobalConfigurationSchema
IDENTIFIER = 'global'
def __init__(self,
verbose=False,
host='localhost',
http_port=80,
ws_port=80,
use_https=False,
verify_ssl=True):
self.verbose = verbose
self.host = host
self.http_port = str(http_port)
self.ws_port = str(ws_port)
self.use_https = use_https
self.verify_ssl = verify_ssl
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
from marshmallow import fields
from polyaxon_cli.schemas.base import BaseConfig, BaseSchema
class GlobalConfigurationSchema(BaseSchema):
verbose = fields.Bool(allow_none=True)
host = fields.Str(allow_none=True)
http_port = fields.Str(allow_none=True)
ws_port = fields.Str(allow_none=True)
use_https = fields.Bool(allow_none=True)
@staticmethod
def schema_config():
return GlobalConfigurationConfig
class GlobalConfigurationConfig(BaseConfig):
SCHEMA = GlobalConfigurationSchema
IDENTIFIER = 'global'
def __init__(self,
verbose=False,
host='localhost',
http_port=80,
ws_port=80,
use_https=False):
self.verbose = verbose
self.host = host
self.http_port = str(http_port)
self.ws_port = str(ws_port)
self.use_https = use_https
| apache-2.0 | Python |
40c61b8dce69a54d8e8a019615ba3e26c089d7d7 | use substance-box 1.0 as default box | turbulent/substance,turbulent/substance | substance/constants.py | substance/constants.py |
class Constants(object):
class ConstError(TypeError):
pass
def __init__(self, **kwargs):
for name, value in list(kwargs.items()):
super(Constants, self).__setattr__(name, value)
def __setattr__(self, name, value):
if name in self.__dist__:
raise self.ConstError("Can't rebind const(%s)" % name)
self.__dict__[name] = value
def __delattr__(self, name):
if name in self.__dict__:
raise self.ConstError("Can't unbind const(%s)" % name)
raise NameError(name)
Tables = Constants(
BOXES="boxes"
)
DefaultEngineBox = 'turbulent/substance-box:1.0'
EngineStates = Constants(
RUNNING="running",
STOPPED="stopped",
SUSPENDED="suspended",
UNKNOWN="unknown",
INEXISTENT="inexistent"
)
Syncher = Constants(
UP=">>",
DOWN="<<",
BOTH="<>"
)
|
class Constants(object):
class ConstError(TypeError):
pass
def __init__(self, **kwargs):
for name, value in list(kwargs.items()):
super(Constants, self).__setattr__(name, value)
def __setattr__(self, name, value):
if name in self.__dist__:
raise self.ConstError("Can't rebind const(%s)" % name)
self.__dict__[name] = value
def __delattr__(self, name):
if name in self.__dict__:
raise self.ConstError("Can't unbind const(%s)" % name)
raise NameError(name)
Tables = Constants(
BOXES="boxes"
)
DefaultEngineBox = 'turbulent/substance-box:0.7'
EngineStates = Constants(
RUNNING="running",
STOPPED="stopped",
SUSPENDED="suspended",
UNKNOWN="unknown",
INEXISTENT="inexistent"
)
Syncher = Constants(
UP=">>",
DOWN="<<",
BOTH="<>"
)
| apache-2.0 | Python |
c84877afceed7a0eddf10ac62be86ec0027541ae | Use skipFields method in the expando base logic. | rhyolight/nupic.son,rhyolight/nupic.son,rhyolight/nupic.son | app/soc/logic/models/expando_base.py | app/soc/logic/models/expando_base.py | #!/usr/bin/python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers functions for updating different kinds of Expando models.
"""
__authors__ = [
'"Lennard de Rijk" <ljvderijk@gmail.com>',
]
from soc.logic.models import base
class Logic(base.Logic):
"""Base logic for Expando entity classes.
"""
def __init__(self, model, base_model=None, scope_logic=None,
name=None, skip_properties=None, id_based=False):
"""Defines the name, key_name and model for this entity.
"""
super(Logic, self).__init__(model=model, base_model=base_model,
scope_logic=scope_logic, name=name,
skip_properties=skip_properties,
id_based=id_based)
def updateEntityProperties(self, entity, entity_properties, silent=False,
store=True):
"""Update existing entity using supplied properties.
Overwrites base because of Expando properties.
Args:
entity: a model entity
entity_properties: keyword arguments that correspond to entity
properties and their values
silent: iff True does not call _onUpdate method
store: iff True updated entity is actually stored in the data model
Returns:
The original entity with any supplied properties changed.
"""
if not entity:
raise NoEntityError
if not entity_properties:
raise InvalidArgumentError
for name, value in entity_properties.iteritems():
# if the property is not to be updated, skip it
if self.skipField(name):
continue
if self._updateField(entity, entity_properties, name):
setattr(entity, name, value)
if store:
entity.put()
# call the _onUpdate method
if not silent:
self._onUpdate(entity)
return entity
| #!/usr/bin/python2.5
#
# Copyright 2009 the Melange authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helpers functions for updating different kinds of Expando models.
"""
__authors__ = [
'"Lennard de Rijk" <ljvderijk@gmail.com>',
]
from soc.logic.models import base
class Logic(base.Logic):
"""Base logic for Expando entity classes.
"""
def __init__(self, model, base_model=None, scope_logic=None,
name=None, skip_properties=None, id_based=False):
"""Defines the name, key_name and model for this entity.
"""
super(Logic, self).__init__(model=model, base_model=base_model,
scope_logic=scope_logic, name=name,
skip_properties=skip_properties,
id_based=id_based)
def updateEntityProperties(self, entity, entity_properties, silent=False,
store=True):
"""Update existing entity using supplied properties.
Overwrites base because of Expando properties.
Args:
entity: a model entity
entity_properties: keyword arguments that correspond to entity
properties and their values
silent: iff True does not call _onUpdate method
store: iff True updated entity is actually stored in the data model
Returns:
The original entity with any supplied properties changed.
"""
if not entity:
raise NoEntityError
if not entity_properties:
raise InvalidArgumentError
for name, value in entity_properties.iteritems():
# if the property is not to be updated, skip it
if name in self._skip_properties:
continue
if self._updateField(entity, entity_properties, name):
setattr(entity, name, value)
if store:
entity.put()
# call the _onUpdate method
if not silent:
self._onUpdate(entity)
return entity
| apache-2.0 | Python |
443d435076858cdd63c9e908c606e26e120fa01e | Change celery beat schedule definition. | mozilla/mozillians,mozilla/mozillians,mozilla/mozillians,johngian/mozillians,akatsoulas/mozillians,akatsoulas/mozillians,akatsoulas/mozillians,akatsoulas/mozillians,johngian/mozillians,mozilla/mozillians,johngian/mozillians,johngian/mozillians | mozillians/celery.py | mozillians/celery.py | from __future__ import absolute_import
import os
from celery import Celery as BaseCelery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mozillians.settings')
from django.conf import settings # noqa
RUN_DAILY = 60 * 60 * 24
RUN_HOURLY = 60 * 60
RUN_EVERY_SIX_HOURS = 6 * 60 * 60
class Celery(BaseCelery):
def on_configure(self):
from raven.contrib.celery import register_signal, register_logger_signal
from raven.contrib.django.raven_compat.models import client as raven_client
register_logger_signal(raven_client)
register_signal(raven_client)
app = Celery('mozillians')
app.add_defaults({
'worker_hijack_root_logger': False,
})
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
app.conf.beat_schedule = {
'celery-healthcheck': {
'task': 'mozillians.common.tasks.celery_healthcheck',
'schedule': RUN_HOURLY,
'args': ()
},
'invalidate-group-membership': {
'task': 'mozillians.groups.tasks.invalidate_group_membership',
'schedule': RUN_DAILY,
'args': ()
},
'notify-membership-renewal': {
'task': 'mozillians.groups.tasks.notify_membership_renewal',
'schedule': RUN_DAILY,
'args': ()
},
'delete-reported-spam-accounts': {
'task': 'mozillians.users.tasks.delete_reported_spam_accounts',
'schedule': RUN_DAILY,
'args': ()
},
'periodically-send_cis_data': {
'task': 'mozillians.users.tasks.periodically_send_cis_data',
'schedule': RUN_EVERY_SIX_HOURS,
'args': ()
},
'remove-incomplete-accounts': {
'task': 'mozillians.users.tasks.remove_incomplete_accounts',
'schedule': RUN_HOURLY,
'args': ()
}
}
| from __future__ import absolute_import
import os
from celery import Celery as BaseCelery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mozillians.settings')
from django.conf import settings # noqa
RUN_DAILY = 60 * 60 * 24
RUN_HOURLY = 60 * 60
RUN_EVERY_SIX_HOURS = 6 * 60 * 60
class Celery(BaseCelery):
def on_configure(self):
from raven.contrib.celery import register_signal, register_logger_signal
from raven.contrib.django.raven_compat.models import client as raven_client
register_logger_signal(raven_client)
register_signal(raven_client)
app = Celery('mozillians')
app.add_defaults({
'worker_hijack_root_logger': False,
})
app.config_from_object('django.conf:settings', namespace='CELERY')
app.autodiscover_tasks()
@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
from mozillians.groups.tasks import invalidate_group_membership, notify_membership_renewal
from mozillians.users.tasks import (delete_reported_spam_accounts, periodically_send_cis_data,
remove_incomplete_accounts)
from mozillians.common.tasks import celery_healthcheck
sender.add_periodic_task(RUN_DAILY, invalidate_group_membership.s(),
name='invalidate-group-membership')
sender.add_periodic_task(RUN_DAILY, notify_membership_renewal.s(),
name='notify-membership-renewal')
sender.add_periodic_task(RUN_DAILY, delete_reported_spam_accounts.s(),
name='delete-reported-spam-accounts')
sender.add_periodic_task(RUN_HOURLY, celery_healthcheck.s(),
name='celery-healthcheck')
sender.add_periodic_task(RUN_EVERY_SIX_HOURS, periodically_send_cis_data.s(),
name='periodically-send-cis-data')
sender.add_periodic_task(RUN_HOURLY, remove_incomplete_accounts.s(),
name='remove-incomplete-accounts')
| bsd-3-clause | Python |
23b76d2aace3b64487c4715cbc2371c5eb7b80f4 | Fix royalroadl.com chapter dates | kemayo/leech,Zomega/leech | sites/royalroad.py | sites/royalroad.py | #!/usr/bin/python
import http.client
import logging
import datetime
import re
import urllib
from . import register, Site, Section, Chapter
logger = logging.getLogger(__name__)
@register
class RoyalRoad(Site):
"""Royal Road: a place where people write novels, mostly seeming to be light-novel in tone."""
@staticmethod
def matches(url):
# e.g. https://royalroadl.com/fiction/6752/lament-of-the-fallen
match = re.match(r'^(https?://(?:www\.)?royalroadl\.com/fiction/\d+)/?.*', url)
if match:
return match.group(1) + '/'
def extract(self, url):
workid = re.match(r'^https?://(?:www\.)?royalroadl\.com/fiction/(\d+)/?.*', url).group(1)
soup = self._soup('https://www.royalroadl.com/fiction/{}'.format(workid))
# should have gotten redirected, for a valid title
original_maxheaders = http.client._MAXHEADERS
http.client._MAXHEADERS = 1000
story = Section(
title=soup.find('h1', property='name').string.strip(),
author=soup.find('meta', property='books:author').get('content').strip(),
url=soup.find('meta', property='og:url').get('content').strip()
)
for chapter in soup.select('#chapters tbody tr[data-url]'):
chapter_url = str(urllib.parse.urljoin(story.url, str(chapter.get('data-url'))))
# Have to get exact publishing time from the chapter page
chapter_soup = self._soup(chapter_url)
updated = datetime.datetime.fromtimestamp(
int(chapter_soup.find(class_="profile-info").find('time').get('unixtime')),
)
story.add(Chapter(title=chapter.find('a', href=True).string.strip(), contents=self._chapter(chapter_url), date=updated))
http.client._MAXHEADERS = original_maxheaders
return story
def _chapter(self, url):
logger.info("Extracting chapter @ %s", url)
soup = self._soup(url)
content = soup.find('div', class_='chapter-content')
# TODO: this could be more robust, and I don't know if there's post-chapter notes anywhere as well.
author_note = soup.find('div', class_='author-note-portlet')
return (author_note and (author_note.prettify() + '<hr/>') or '') + content.prettify()
| #!/usr/bin/python
import http.client
import logging
import datetime
import re
import urllib
from . import register, Site, Section, Chapter
logger = logging.getLogger(__name__)
@register
class RoyalRoad(Site):
"""Royal Road: a place where people write novels, mostly seeming to be light-novel in tone."""
@staticmethod
def matches(url):
# e.g. https://royalroadl.com/fiction/6752/lament-of-the-fallen
match = re.match(r'^(https?://(?:www\.)?royalroadl\.com/fiction/\d+)/?.*', url)
if match:
return match.group(1) + '/'
def extract(self, url):
workid = re.match(r'^https?://(?:www\.)?royalroadl\.com/fiction/(\d+)/?.*', url).group(1)
soup = self._soup('https://www.royalroadl.com/fiction/{}'.format(workid))
# should have gotten redirected, for a valid title
original_maxheaders = http.client._MAXHEADERS
http.client._MAXHEADERS = 1000
story = Section(
title=soup.find('h1', property='name').string.strip(),
author=soup.find('meta', property='books:author').get('content').strip(),
url=soup.find('meta', property='og:url').get('content').strip()
)
for chapter in soup.select('#chapters tbody tr[data-url]'):
chapter_url = str(urllib.parse.urljoin(story.url, str(chapter.get('data-url'))))
updated = datetime.datetime.fromtimestamp(
int(chapter.find('time').get('unixtime')),
)
story.add(Chapter(title=chapter.find('a', href=True).string.strip(), contents=self._chapter(chapter_url), date=updated))
http.client._MAXHEADERS = original_maxheaders
return story
def _chapter(self, url):
logger.info("Extracting chapter @ %s", url)
soup = self._soup(url)
content = soup.find('div', class_='chapter-content')
# TODO: this could be more robust, and I don't know if there's post-chapter notes anywhere as well.
author_note = soup.find('div', class_='author-note-portlet')
return (author_note and (author_note.prettify() + '<hr/>') or '') + content.prettify()
| mit | Python |
7a59903500bd766cd51aca28f679c846bc59389b | read url parameter per request | EndPointCorp/appctl,EndPointCorp/appctl | catkin/src/portal_config/scripts/serve_config.py | catkin/src/portal_config/scripts/serve_config.py | #!/usr/bin/env python
import rospy
import urllib2
from portal_config.srv import *
NODE_NAME = 'portal_config'
SRV_QUERY = '/'.join(('', NODE_NAME, 'query'))
PARAM_URL = '~url'
DEFAULT_URL = 'http://lg-head/portal/config.json'
# XXX TODO: return an error if the config file isn't valid JSON
class ConfigRequestHandler():
def get_url(self):
return rospy.get_param(PARAM_URL, DEFAULT_URL)
def get_config(self):
url = self.get_url()
response = urllib2.urlopen(url)
return response.read()
def handle_request(self, request):
config = self.get_config()
return PortalConfigResponse(config)
def main():
rospy.init_node(NODE_NAME)
#if not rospy.has_param(PARAM_URL):
# rospy.set_param(PARAM_URL, DEFAULT_URL)
handler = ConfigRequestHandler()
s = rospy.Service(
SRV_QUERY,
PortalConfig,
handler.handle_request
)
rospy.spin()
if __name__ == '__main__':
main()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 smartindent
| #!/usr/bin/env python
import rospy
import urllib2
from portal_config.srv import *
# XXX TODO: return an error if the config file isn't valid JSON
class ConfigRequestHandler():
def __init__(self, url):
self.url = url
def get_config(self):
response = urllib2.urlopen(self.url)
return response.read()
def handle_request(self, request):
config = self.get_config()
return PortalConfigResponse(config)
def main():
rospy.init_node('portal_config')
url = rospy.get_param('~url', 'http://lg-head/portal/config.json')
handler = ConfigRequestHandler(url)
s = rospy.Service(
'/portal_config/query',
PortalConfig,
handler.handle_request
)
rospy.spin()
if __name__ == '__main__':
main()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4 smartindent
| apache-2.0 | Python |
0406cc1352cbdec15c8a885e3c875ea94e3677a5 | add post_message method to client | llimllib/slackrtm | slackrtm/client.py | slackrtm/client.py | #!/usr/bin/python
# mostly a proxy object to abstract how some of this works
import json
from .server import Server
class SlackClient(object):
def __init__(self, token):
self.token = token
self.server = Server(self.token, False)
def rtm_connect(self):
self.server.rtm_connect()
def api_call(self, method, **kwargs):
return self.server.api_call(method, **kwargs)
def rtm_read(self):
# in the future, this should handle some events internally i.e. channel
# creation
if self.server:
json_data = self.server.websocket_safe_read()
data = []
if json_data != '':
for d in json_data.split('\n'):
data.append(json.loads(d))
for item in data:
self.process_changes(item)
return data
else:
raise SlackNotConnected
def rtm_send_message(self, channel_id, message):
return self.server.channels[channel_id].send_message(message)
def post_message(self, channel_id, message, **kwargs):
params = {
"post_data": {
"text": message,
"channel": channel_id,
}
}
params["post_data"].update(kwargs)
self.server.api_call("chat.postMessage", params)
def process_changes(self, data):
if "type" in data.keys():
if data["type"] in ['channel_created', 'im_created']:
channel = data["channel"]
self.server.attach_channel(channel.get("name", ""), channel["id"], [])
class SlackNotConnected(Exception):
pass
| #!/usr/bin/python
# mostly a proxy object to abstract how some of this works
import json
from .server import Server
class SlackClient(object):
def __init__(self, token):
self.token = token
self.server = Server(self.token, False)
def rtm_connect(self):
self.server.rtm_connect()
def api_call(self, method, **kwargs):
return self.server.api_call(method, **kwargs)
def rtm_read(self):
# in the future, this should handle some events internally i.e. channel
# creation
if self.server:
json_data = self.server.websocket_safe_read()
data = []
if json_data != '':
for d in json_data.split('\n'):
data.append(json.loads(d))
for item in data:
self.process_changes(item)
return data
else:
raise SlackNotConnected
def rtm_send_message(self, channel_id, message):
return self.server.channels[channel_id].send_message(message)
def process_changes(self, data):
if "type" in data.keys():
if data["type"] in ['channel_created', 'im_created']:
channel = data["channel"]
self.server.attach_channel(channel.get("name", ""), channel["id"], [])
class SlackNotConnected(Exception):
pass
| mit | Python |
ba77b9a4fc56fe979cd709c584f055370db5d153 | Update __init__.py to reflect renaming. | emrob/sleuth | sleuth/__init__.py | sleuth/__init__.py | """
Sleuth: A debugging and diagnostic tool for Python.
------
"""
import sys
if sys.version_info[:2] < (3, 0):
raise ImportError("Sleuth requires Python 3.")
del sys
__version__ = '0.2.0d'
from .__main__ import main
from .error import *
from .inject import *
from .wrap import *
| """
Sleuth: A debugging and diagnostic tool for Python.
------
"""
import sys
if sys.version_info[:2] < (3, 0):
raise ImportError("Sleuth requires Python 3.")
del sys
__version__ = '0.2.0d'
from .__main__ import main
from .error import *
from .inject import *
from .sleuth import *
| mit | Python |
2ba9eaba0bcb229055db09147f1cb654190badbf | Use a list for the style cycle so that subsequent calls to the plotting functions don't mix up the line styles. | maxalbert/paper-supplement-nanoparticle-sensing | notebooks/style_helpers.py | notebooks/style_helpers.py | import brewer2mpl
from cycler import cycler
N = 5
cmap = brewer2mpl.get_map('Set1', 'Qualitative', N, reverse=False)
color_cycle = cycler('color', cmap.hex_colors)
marker_cycle = cycler('marker', ['s', '^', 'o', 'D', 'v'])
markersize_cycle = cycler('markersize', [10, 12, 11, 10, 12])
style_cycle = list(color_cycle + marker_cycle + markersize_cycle)[:N]
cmap = brewer2mpl.get_map('Set1', 'Qualitative', 3, reverse=False)
color_cycle = cycler('color', ['black', '#88CCDD', '#c73027'])
marker_cycle = cycler('marker', [' ', ' ', ' '])
markersize_cycle = cycler('markersize', [8, 8, 8])
fillstyle_cycle = cycler('fillstyle', ['full', 'full', 'full'])
linestyle_cycle = cycler('linestyle', ['dashed', 'solid', 'solid'])
linewidth_cycle = cycler('linewidth', [2, 2.25, 2])
style_cycle_fig7 = list(color_cycle + marker_cycle + markersize_cycle + fillstyle_cycle + linestyle_cycle + linewidth_cycle)[:N]
| import brewer2mpl
import itertools
from cycler import cycler
cmap = brewer2mpl.get_map('Set1', 'Qualitative', 5, reverse=False)
color_cycle = cycler('color', cmap.hex_colors)
marker_cycle = cycler('marker', ['s', '^', 'o', 'D', 'v'])
markersize_cycle = cycler('markersize', [10, 12, 11, 10, 12])
style_cycle = itertools.cycle(color_cycle + marker_cycle + markersize_cycle)
cmap = brewer2mpl.get_map('Set1', 'Qualitative', 3, reverse=False)
color_cycle = cycler('color', ['black', '#88CCDD', '#c73027'])
marker_cycle = cycler('marker', [' ', ' ', ' '])
markersize_cycle = cycler('markersize', [8, 8, 8])
fillstyle_cycle = cycler('fillstyle', ['full', 'full', 'full'])
linestyle_cycle = cycler('linestyle', ['dashed', 'solid', 'solid'])
linewidth_cycle = cycler('linewidth', [2, 2.25, 2])
style_cycle_fig7 = (color_cycle + marker_cycle + markersize_cycle + fillstyle_cycle + linestyle_cycle + linewidth_cycle)
| mit | Python |
1c5d204096947658feef7fa19479961727a4af9a | Fix #519 | hzy/raven-python,jbarbuto/raven-python,arthurlogilab/raven-python,akheron/raven-python,percipient/raven-python,arthurlogilab/raven-python,akheron/raven-python,getsentry/raven-python,nikolas/raven-python,getsentry/raven-python,recht/raven-python,lepture/raven-python,jmagnusson/raven-python,danriti/raven-python,dbravender/raven-python,ronaldevers/raven-python,ewdurbin/raven-python,someonehan/raven-python,Photonomie/raven-python,danriti/raven-python,johansteffner/raven-python,arthurlogilab/raven-python,johansteffner/raven-python,jmagnusson/raven-python,ronaldevers/raven-python,danriti/raven-python,smarkets/raven-python,smarkets/raven-python,percipient/raven-python,smarkets/raven-python,someonehan/raven-python,jbarbuto/raven-python,akalipetis/raven-python,nikolas/raven-python,recht/raven-python,ewdurbin/raven-python,akheron/raven-python,akalipetis/raven-python,Photonomie/raven-python,jbarbuto/raven-python,jmp0xf/raven-python,someonehan/raven-python,jmagnusson/raven-python,lepture/raven-python,getsentry/raven-python,arthurlogilab/raven-python,smarkets/raven-python,jmp0xf/raven-python,percipient/raven-python,recht/raven-python,hzy/raven-python,dbravender/raven-python,nikolas/raven-python,nikolas/raven-python,jmp0xf/raven-python,johansteffner/raven-python,hzy/raven-python,jbarbuto/raven-python,lepture/raven-python,Photonomie/raven-python,ewdurbin/raven-python,ronaldevers/raven-python,akalipetis/raven-python,dbravender/raven-python | raven/contrib/celery/__init__.py | raven/contrib/celery/__init__.py | """
raven.contrib.celery
~~~~~~~~~~~~~~~~~~~~
>>> class CeleryClient(CeleryMixin, Client):
>>> def send_encoded(self, *args, **kwargs):
>>> "Errors through celery"
>>> self.send_raw.delay(*args, **kwargs)
>>> @task(routing_key='sentry')
>>> def send_raw(*args, **kwargs):
>>> return super(client, self).send_encoded(*args, **kwargs)
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import logging
from celery.signals import after_setup_logger, task_failure
from raven.handlers.logging import SentryHandler
class CeleryFilter(logging.Filter):
def filter(self, record):
# Context is fixed in Celery 3.x so use internal flag instead
extra_data = getattr(record, 'data', {})
if not isinstance(extra_data, dict):
return record.funcName != '_log_error'
# Fallback to funcName for Celery 2.5
return extra_data.get('internal', record.funcName != '_log_error')
def register_signal(client):
def process_failure_signal(sender, task_id, args, kwargs, **kw):
# This signal is fired inside the stack so let raven do its magic
client.captureException(
extra={
'task_id': task_id,
'task': sender,
'args': args,
'kwargs': kwargs,
})
task_failure.connect(process_failure_signal, weak=False)
def register_logger_signal(client, logger=None):
filter_ = CeleryFilter()
if logger is None:
logger = logging.getLogger()
handler = SentryHandler(client)
handler.setLevel(logging.ERROR)
handler.addFilter(filter_)
def process_logger_event(sender, logger, loglevel, logfile, format,
colorize, **kw):
# Attempt to find an existing SentryHandler, and if it exists ensure
# that the CeleryFilter is installed.
# If one is found, we do not attempt to install another one.
for h in logger.handlers:
if type(h) == SentryHandler:
h.addFilter(filter_)
return False
logger.addHandler(handler)
after_setup_logger.connect(process_logger_event, weak=False)
| """
raven.contrib.celery
~~~~~~~~~~~~~~~~~~~~
>>> class CeleryClient(CeleryMixin, Client):
>>> def send_encoded(self, *args, **kwargs):
>>> "Errors through celery"
>>> self.send_raw.delay(*args, **kwargs)
>>> @task(routing_key='sentry')
>>> def send_raw(*args, **kwargs):
>>> return super(client, self).send_encoded(*args, **kwargs)
:copyright: (c) 2010-2012 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import
import logging
from celery.signals import after_setup_logger, task_failure
from raven.handlers.logging import SentryHandler
class CeleryFilter(logging.Filter):
def filter(self, record):
# Context is fixed in Celery 3.x so use internal flag instead
extra_data = getattr(record, 'data', {})
if not isinstance(extra_data, dict):
return record.funcName != '_log_error'
# Fallback to funcName for Celery 2.5
return extra_data.get('internal', record.funcName != '_log_error')
def register_signal(client):
def process_failure_signal(sender, task_id, args, kwargs, **kw):
# This signal is fired inside the stack so let raven do its magic
client.captureException(
extra={
'task_id': task_id,
'task': sender,
'args': args,
'kwargs': kwargs,
})
task_failure.connect(process_failure_signal, weak=False)
def register_logger_signal(client, logger=None):
filter_ = CeleryFilter()
if logger is None:
logger = logging.getLogger()
handler = SentryHandler(client)
handler.setLevel(logging.ERROR)
handler.addFilter(filter_)
def process_logger_event(sender, logger, loglevel, logfile, format,
colorize, **kw):
# Attempt to find an existing SentryHandler, and if it exists ensure
# that the CeleryFilter is installed.
# If one is found, we do not attempt to install another one.
for h in logger.handlers:
if type(h) == SentryHandler:
h.addFilter(filter_)
return False
logger.addHandler(handler)
after_setup_logger.connect(process_logger_event, weak=False)
| bsd-3-clause | Python |
1c51fc78e072eee4bd62964ff71629f13e200975 | fix Python ModuleLinkTest | TresAmigosSD/SMV,TresAmigosSD/SMV,TresAmigosSD/SMV,TresAmigosSD/SMV | src/test/python/testModuleLink.py | src/test/python/testModuleLink.py | #
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from smvbasetest import SmvBaseTest
from smv import smvPy
from fixture.stage2.links import L,B
import unittest
class ModuleLinkTest(SmvBaseTest):
PublishDir = 'testpub'
@classmethod
def smvAppInitArgs(cls):
return ['--smv-props', 'smv.stages=fixture.stage1:fixture.stage2',
'-m', 'output.A', '--publish', cls.PublishDir]
@classmethod
def tearDownClass(cls):
import shutil
import os
shutil.rmtree(os.path.join(cls.DataDir, 'publish', cls.PublishDir), ignore_errors=True)
def setUp(self):
super(ModuleLinkTest, self).setUp()
def test_module_link_can_be_resolved(self):
self.smvPy.j_smvApp.run()
l = self.smvPy.runModule(L.urn())
lExpected = self.createDF("k:String;v:Integer", "a,;b,2")
self.should_be_same(lExpected, l) # link resolution
b = self.smvPy.runModule(B.urn())
bExpected = self.createDF("k:String;v:Integer;v2:Integer", "a,,;b,2,3")
self.should_be_same(bExpected, b) # link as dependency
| #
# This file is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from smvbasetest import SmvBaseTest
from smv import smvPy
import unittest
class ModuleLinkTest(SmvBaseTest):
PublishDir = 'testpub'
@classmethod
def smvAppInitArgs(cls):
return ['--smv-props', 'smv.stages=fixture.stage1:fixture.stage2',
'-m', 'output.A', '--publish', cls.PublishDir]
@classmethod
def tearDownClass(cls):
import shutil
import os
shutil.rmtree(os.path.join(cls.DataDir, 'publish', cls.PublishDir), ignore_errors=True)
def setUp(self):
super(ModuleLinkTest, self).setUp()
@unittest.skip("temporarily ignore due to datasethash problem")
def test_module_link_can_be_resolved(self):
self.smvPy.j_smvApp.run()
l = self.smvPy.runModule('mod:fixture.stage2.links.L')
lExpected = self.createDF("k:String;v:Integer", "a,;b,2")
self.should_be_same(lExpected, l) # link resolution
b = self.smvPy.runModule('mod:fixture.stage2.links.B')
bExpected = self.createDF("k:String;v:Integer;v2:Integer", "a,,;b,2,3")
self.should_be_same(bExpected, b) # link as dependency
| apache-2.0 | Python |
0f4290101e300c179de532d0cf7ae5133602b7d9 | Update the default CLANG_USER_VISIBLE_VERSION to 6.0.0 | practicalswift/swift,ahoppen/swift,jopamer/swift,shahmishal/swift,glessard/swift,aschwaighofer/swift,shahmishal/swift,danielmartin/swift,amraboelela/swift,rudkx/swift,amraboelela/swift,natecook1000/swift,apple/swift,gregomni/swift,tkremenek/swift,airspeedswift/swift,parkera/swift,jckarter/swift,apple/swift,karwa/swift,stephentyrone/swift,hooman/swift,CodaFi/swift,alblue/swift,atrick/swift,amraboelela/swift,sschiau/swift,rudkx/swift,huonw/swift,jmgc/swift,rudkx/swift,devincoughlin/swift,shahmishal/swift,allevato/swift,stephentyrone/swift,aschwaighofer/swift,apple/swift,benlangmuir/swift,xwu/swift,jopamer/swift,atrick/swift,gregomni/swift,jmgc/swift,harlanhaskins/swift,CodaFi/swift,devincoughlin/swift,CodaFi/swift,brentdax/swift,apple/swift,practicalswift/swift,jopamer/swift,harlanhaskins/swift,apple/swift,practicalswift/swift,CodaFi/swift,glessard/swift,parkera/swift,allevato/swift,roambotics/swift,gribozavr/swift,danielmartin/swift,airspeedswift/swift,huonw/swift,practicalswift/swift,alblue/swift,xwu/swift,huonw/swift,shajrawi/swift,shajrawi/swift,devincoughlin/swift,alblue/swift,jmgc/swift,amraboelela/swift,stephentyrone/swift,austinzheng/swift,benlangmuir/swift,ahoppen/swift,devincoughlin/swift,practicalswift/swift,brentdax/swift,nathawes/swift,ahoppen/swift,JGiola/swift,karwa/swift,gribozavr/swift,gribozavr/swift,gregomni/swift,parkera/swift,glessard/swift,shajrawi/swift,jckarter/swift,stephentyrone/swift,JGiola/swift,lorentey/swift,jopamer/swift,tkremenek/swift,atrick/swift,nathawes/swift,brentdax/swift,alblue/swift,danielmartin/swift,gribozavr/swift,jopamer/swift,jmgc/swift,apple/swift,danielmartin/swift,rudkx/swift,xedin/swift,brentdax/swift,nathawes/swift,roambotics/swift,nathawes/swift,gribozavr/swift,allevato/swift,CodaFi/swift,danielmartin/swift,karwa/swift,hooman/swift,stephentyrone/swift,hooman/swift,karwa/swift,atrick/swift,gregomni/swift,brentdax/swift,s
hahmishal/swift,aschwaighofer/swift,benlangmuir/swift,roambotics/swift,JGiola/swift,huonw/swift,danielmartin/swift,shahmishal/swift,austinzheng/swift,gribozavr/swift,danielmartin/swift,sschiau/swift,aschwaighofer/swift,rudkx/swift,glessard/swift,shajrawi/swift,xwu/swift,tkremenek/swift,harlanhaskins/swift,glessard/swift,gregomni/swift,jopamer/swift,xedin/swift,sschiau/swift,JGiola/swift,airspeedswift/swift,allevato/swift,CodaFi/swift,practicalswift/swift,benlangmuir/swift,xwu/swift,jmgc/swift,parkera/swift,sschiau/swift,ahoppen/swift,atrick/swift,gregomni/swift,shajrawi/swift,xwu/swift,roambotics/swift,aschwaighofer/swift,karwa/swift,JGiola/swift,shahmishal/swift,tkremenek/swift,shajrawi/swift,roambotics/swift,tkremenek/swift,tkremenek/swift,jopamer/swift,natecook1000/swift,natecook1000/swift,benlangmuir/swift,xedin/swift,sschiau/swift,devincoughlin/swift,shahmishal/swift,alblue/swift,karwa/swift,airspeedswift/swift,airspeedswift/swift,austinzheng/swift,rudkx/swift,brentdax/swift,benlangmuir/swift,xedin/swift,airspeedswift/swift,ahoppen/swift,stephentyrone/swift,sschiau/swift,nathawes/swift,austinzheng/swift,lorentey/swift,hooman/swift,xedin/swift,jckarter/swift,hooman/swift,amraboelela/swift,gribozavr/swift,huonw/swift,huonw/swift,xwu/swift,karwa/swift,CodaFi/swift,ahoppen/swift,jmgc/swift,shajrawi/swift,shajrawi/swift,alblue/swift,lorentey/swift,devincoughlin/swift,devincoughlin/swift,amraboelela/swift,austinzheng/swift,jmgc/swift,alblue/swift,harlanhaskins/swift,practicalswift/swift,karwa/swift,natecook1000/swift,allevato/swift,hooman/swift,sschiau/swift,harlanhaskins/swift,natecook1000/swift,jckarter/swift,amraboelela/swift,austinzheng/swift,natecook1000/swift,aschwaighofer/swift,allevato/swift,gribozavr/swift,xedin/swift,nathawes/swift,devincoughlin/swift,lorentey/swift,parkera/swift,harlanhaskins/swift,austinzheng/swift,huonw/swift,allevato/swift,sschiau/swift,xedin/swift,shahmishal/swift,airspeedswift/swift,jckarter/swift,lorentey/swift,xedin/swift,hooman/swi
ft,nathawes/swift,tkremenek/swift,glessard/swift,JGiola/swift,lorentey/swift,aschwaighofer/swift,brentdax/swift,jckarter/swift,lorentey/swift,jckarter/swift,atrick/swift,lorentey/swift,parkera/swift,practicalswift/swift,stephentyrone/swift,xwu/swift,roambotics/swift,natecook1000/swift,parkera/swift,harlanhaskins/swift,parkera/swift | utils/build_swift/defaults.py | utils/build_swift/defaults.py | # This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
"""
Default option value definitions.
"""
from .argparse import CompilerVersion
__all__ = [
# Command line configuarable
'BUILD_VARIANT',
'CMAKE_GENERATOR',
'COMPILER_VENDOR',
'SWIFT_USER_VISIBLE_VERSION',
'CLANG_USER_VISIBLE_VERSION',
'SWIFT_ANALYZE_CODE_COVERAGE',
'DARWIN_XCRUN_TOOLCHAIN',
'DARWIN_DEPLOYMENT_VERSION_OSX',
'DARWIN_DEPLOYMENT_VERSION_IOS',
'DARWIN_DEPLOYMENT_VERSION_TVOS',
'DARWIN_DEPLOYMENT_VERSION_WATCHOS',
'UNIX_INSTALL_PREFIX',
'DARWIN_INSTALL_PREFIX',
# Constants
]
# Options that can be "configured" by command line options
BUILD_VARIANT = 'Debug'
CMAKE_GENERATOR = 'Ninja'
COMPILER_VENDOR = 'none'
SWIFT_USER_VISIBLE_VERSION = CompilerVersion('4.2')
CLANG_USER_VISIBLE_VERSION = CompilerVersion('6.0.0')
SWIFT_ANALYZE_CODE_COVERAGE = 'false'
DARWIN_XCRUN_TOOLCHAIN = 'default'
DARWIN_DEPLOYMENT_VERSION_OSX = '10.9'
DARWIN_DEPLOYMENT_VERSION_IOS = '7.0'
DARWIN_DEPLOYMENT_VERSION_TVOS = '9.0'
DARWIN_DEPLOYMENT_VERSION_WATCHOS = '2.0'
UNIX_INSTALL_PREFIX = '/usr'
DARWIN_INSTALL_PREFIX = ('/Applications/Xcode.app/Contents/Developer/'
'Toolchains/XcodeDefault.xctoolchain/usr')
# Options that can only be "configured" by editing this file.
#
# These options are not exposed as command line options on purpose. If you
# need to change any of these, you should do so on trunk or in a branch.
| # This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
"""
Default option value definitions.
"""
from .argparse import CompilerVersion
__all__ = [
# Command line configuarable
'BUILD_VARIANT',
'CMAKE_GENERATOR',
'COMPILER_VENDOR',
'SWIFT_USER_VISIBLE_VERSION',
'CLANG_USER_VISIBLE_VERSION',
'SWIFT_ANALYZE_CODE_COVERAGE',
'DARWIN_XCRUN_TOOLCHAIN',
'DARWIN_DEPLOYMENT_VERSION_OSX',
'DARWIN_DEPLOYMENT_VERSION_IOS',
'DARWIN_DEPLOYMENT_VERSION_TVOS',
'DARWIN_DEPLOYMENT_VERSION_WATCHOS',
'UNIX_INSTALL_PREFIX',
'DARWIN_INSTALL_PREFIX',
# Constants
]
# Options that can be "configured" by command line options
BUILD_VARIANT = 'Debug'
CMAKE_GENERATOR = 'Ninja'
COMPILER_VENDOR = 'none'
SWIFT_USER_VISIBLE_VERSION = CompilerVersion('4.2')
CLANG_USER_VISIBLE_VERSION = CompilerVersion('5.0.0')
SWIFT_ANALYZE_CODE_COVERAGE = 'false'
DARWIN_XCRUN_TOOLCHAIN = 'default'
DARWIN_DEPLOYMENT_VERSION_OSX = '10.9'
DARWIN_DEPLOYMENT_VERSION_IOS = '7.0'
DARWIN_DEPLOYMENT_VERSION_TVOS = '9.0'
DARWIN_DEPLOYMENT_VERSION_WATCHOS = '2.0'
UNIX_INSTALL_PREFIX = '/usr'
DARWIN_INSTALL_PREFIX = ('/Applications/Xcode.app/Contents/Developer/'
'Toolchains/XcodeDefault.xctoolchain/usr')
# Options that can only be "configured" by editing this file.
#
# These options are not exposed as command line options on purpose. If you
# need to change any of these, you should do so on trunk or in a branch.
| apache-2.0 | Python |
08bcc2b60ead91d9c0e2492338e2b360edd88def | update jsonp middleware for 1.3 cache framework | sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,propublica/Capitol-Words,propublica/Capitol-Words,propublica/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,propublica/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words,sunlightlabs/Capitol-Words | cwod_site/cwod_api/middleware/jsonp.py | cwod_site/cwod_api/middleware/jsonp.py | import re
import urllib
class JSONPMiddleware(object):
'''
Middleware to handle jsonp requests on projects while still providing for
caching of content. What happens here is:
1. Some page makes a jquery $.getJSON request to foo.json?callback=? on projects
2. jquery replaces callback=? with callback=foo, where foo is random
3. Projects middleware detects that a .json request occurred with a callback,
strips out and saves the callback name from the GET variables
4. URL, view, and caching logic proceed as normal without the callback variable,
returning an HttpResponse with pure json content
5. Projects middleware wraps foo(...) around the json per the callback variable
and returns to the user
'''
def process_request(self, request):
if not request.path.endswith('.json') or 'callback' not in request.GET:
return None
# Store on request object
request.jsonp_callback = request.GET['callback']
# Remove from GET vars
mutable = request.GET._mutable
request.GET._mutable = True
del request.GET['callback']
try: # jquery puts this in too
del request.GET['_']
except KeyError:
pass
request.GET._mutable = mutable
# Update request.META with our new querystring. Cache keys built with
# HTTPRequest.get_full_path() use request.META['QUERYSTRING'] rather than GET
request.META['QUERYSTRING'] = urllib.urlencode(request.GET)
return None
def process_response(self, request, response):
try:
callback = request.jsonp_callback
except AttributeError:
return response
else:
response.content = '%s(%s)' % (re.sub(r'[^\w_]', '', callback), response.content.decode('utf-8'))
return response
| import re
class JSONPMiddleware(object):
'''
Middleware to handle jsonp requests on projects while still providing for
caching of content. What happens here is:
1. Some page makes a jquery $.getJSON request to foo.json?callback=? on projects
2. jquery replaces callback=? with callback=foo, where foo is random
3. Projects middleware detects that a .json request occurred with a callback,
strips out and saves the callback name from the GET variables
4. URL, view, and caching logic proceed as normal without the callback variable,
returning an HttpResponse with pure json content
5. Projects middleware wraps foo(...) around the json per the callback variable
and returns to the user
'''
def process_request(self, request):
if not request.path.endswith('.json') or 'callback' not in request.GET:
return None
# Store on request object
request.jsonp_callback = request.GET['callback']
# key path before mutating the GET dict
print request.get_full_path()
# Remove from GET vars
mutable = request.GET._mutable
request.GET._mutable = True
del request.GET['callback']
try: # jquery puts this in too
del request.GET['_']
except KeyError:
pass
request.GET._mutable = mutable
# key path after mutating the GET dict
print request.get_full_path()
return None
def process_response(self, request, response):
try:
callback = request.jsonp_callback
except AttributeError:
return response
else:
response.content = '%s(%s)' % (re.sub(r'[^\w_]', '', callback), response.content.decode('utf-8'))
return response
| bsd-3-clause | Python |
a68df85c1124331003d69c6da7fcdfe2cbd279e5 | Fix versioning | PhonologicalCorpusTools/PolyglotDB,PhonologicalCorpusTools/PyAnnotationGraph,PhonologicalCorpusTools/PolyglotDB,PhonologicalCorpusTools/PyAnnotationGraph,MontrealCorpusTools/PolyglotDB,MontrealCorpusTools/PolyglotDB | polyglotdb/__init__.py | polyglotdb/__init__.py | __ver_major__ = 1
__ver_minor__ = 2
__ver_patch__ = '0a'
__version__ = f"{__ver_major__}.{__ver_minor__}.{__ver_patch__}"
__all__ = ['query', 'io', 'corpus', 'config', 'exceptions', 'CorpusContext', 'CorpusConfig']
import polyglotdb.query.annotations as graph
import polyglotdb.io as io
import polyglotdb.corpus as corpus
import polyglotdb.exceptions as exceptions
import polyglotdb.config as config
CorpusConfig = config.CorpusConfig
CorpusContext = corpus.CorpusContext
| __ver_major__ = 1
__ver_minor__ = 1
__ver_patch__ = '3a'
__version__ = f"{__ver_major__}.{__ver_minor__}.{__ver_patch__}"
__all__ = ['query', 'io', 'corpus', 'config', 'exceptions', 'CorpusContext', 'CorpusConfig']
import polyglotdb.query.annotations as graph
import polyglotdb.io as io
import polyglotdb.corpus as corpus
import polyglotdb.exceptions as exceptions
import polyglotdb.config as config
CorpusConfig = config.CorpusConfig
CorpusContext = corpus.CorpusContext
| mit | Python |
1449131d87e3def4bd23835dd37d9f4744f2f62d | revert hostname calculation due to changed upgrade process | Mirantis/octane,Mirantis/octane,stackforge/fuel-octane,stackforge/fuel-octane | octane/commands/cleanup.py | octane/commands/cleanup.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
from cliff import command as cmd
from fuelclient import objects
from octane import magic_consts
from octane.util import env as env_util
from octane.util import ssh
LOG = logging.getLogger(__name__)
def cleanup_environment(env_id):
env = objects.Environment(env_id)
controller = env_util.get_one_controller(env)
sftp = ssh.sftp(controller)
admin_pass = env_util.get_admin_password(env, controller)
script_filename = 'clean_env.py'
with ssh.tempdir(controller) as tempdir:
script_src_filename = os.path.join(
magic_consts.CWD, "helpers", script_filename)
script_dst_filename = os.path.join(tempdir, script_filename)
sftp.put(script_src_filename, script_dst_filename)
command = [
'sh', '-c', '. /root/openrc; export OS_PASSWORD={0}; python {1}'
.format(admin_pass, script_dst_filename),
]
with ssh.popen(command, node=controller, stdin=ssh.PIPE) as proc:
roles = ["controller", "compute"]
for node in env_util.get_nodes(env, roles):
proc.stdin.write(node.data['hostname'].split('.')[0] + "\n")
class CleanupCommand(cmd.Command):
"""Cleanup upgraded environment"""
def get_parser(self, prog_name):
parser = super(CleanupCommand, self).get_parser(prog_name)
parser.add_argument(
'env', type=int, metavar='ENV_ID',
help="ID of environment to cleanup")
return parser
def take_action(self, parsed_args):
cleanup_environment(parsed_args.env)
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
from cliff import command as cmd
from fuelclient import objects
from octane import magic_consts
from octane.util import env as env_util
from octane.util import ssh
LOG = logging.getLogger(__name__)
def cleanup_environment(env_id):
env = objects.Environment(env_id)
controller = env_util.get_one_controller(env)
sftp = ssh.sftp(controller)
admin_pass = env_util.get_admin_password(env, controller)
script_filename = 'clean_env.py'
with ssh.tempdir(controller) as tempdir:
script_src_filename = os.path.join(
magic_consts.CWD, "helpers", script_filename)
script_dst_filename = os.path.join(tempdir, script_filename)
sftp.put(script_src_filename, script_dst_filename)
command = [
'sh', '-c', '. /root/openrc; export OS_PASSWORD={0}; python {1}'
.format(admin_pass, script_dst_filename),
]
data = ""
for node in env_util.get_controllers(env):
data = data + node.data['fqdn'] + "\n"
for node in env_util.get_nodes(env, ['compute']):
data = data + node.data['hostname'] + "\n"
with ssh.popen(command, node=controller, stdin=ssh.PIPE) as proc:
proc.stdin.write(data)
class CleanupCommand(cmd.Command):
"""Cleanup upgraded environment"""
def get_parser(self, prog_name):
parser = super(CleanupCommand, self).get_parser(prog_name)
parser.add_argument(
'env', type=int, metavar='ENV_ID',
help="ID of environment to cleanup")
return parser
def take_action(self, parsed_args):
cleanup_environment(parsed_args.env)
| apache-2.0 | Python |
829c051ee4cc25b7dffc60147090f095207ebe96 | update TestProgressMonitor | alphatwirl/alphatwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl,TaiSakuma/AlphaTwirl,alphatwirl/alphatwirl | tests/unit/ProgressBar/test_ProgressMonitor.py | tests/unit/ProgressBar/test_ProgressMonitor.py | from AlphaTwirl.ProgressBar import ProgressReporter, Queue, ProgressMonitor
import unittest
##__________________________________________________________________||
class MockPresentation(object):
def __init__(self): self.reports = [ ]
def present(self, report): self.reports.append(report)
##__________________________________________________________________||
class MockReport(object): pass
##__________________________________________________________________||
class TestQueue(unittest.TestCase):
def test_put(self):
presentation = MockPresentation()
queue = Queue(presentation)
report = MockReport()
queue.put(report)
self.assertEqual([report, ], presentation.reports)
##__________________________________________________________________||
class TestProgressMonitor(unittest.TestCase):
def test_begin_end(self):
presentation = MockPresentation()
monitor = ProgressMonitor(presentation)
monitor.begin()
monitor.end()
def test_createReporter(self):
presentation = MockPresentation()
monitor = ProgressMonitor(presentation)
self.assertIsInstance(monitor.createReporter(), ProgressReporter)
##__________________________________________________________________||
| from AlphaTwirl.ProgressBar import ProgressReporter, Queue, ProgressMonitor
import unittest
##__________________________________________________________________||
class MockPresentation(object):
def __init__(self): self.reports = [ ]
def present(self, report): self.reports.append(report)
##__________________________________________________________________||
class MockReport(object): pass
##__________________________________________________________________||
class TestQueue(unittest.TestCase):
def test_put(self):
presentation = MockPresentation()
queue = Queue(presentation)
report = MockReport()
queue.put(report)
self.assertEqual([report, ], presentation.reports)
##__________________________________________________________________||
class TestProgressMonitor(unittest.TestCase):
def test_monitor(self):
presentation = MockPresentation()
monitor = ProgressMonitor(presentation)
monitor.monitor()
def test_createReporter(self):
presentation = MockPresentation()
monitor = ProgressMonitor(presentation)
self.assertIsInstance(monitor.createReporter(), ProgressReporter)
##__________________________________________________________________||
| bsd-3-clause | Python |
e3f9250acbb4db849e97158780ace3dc9b37873c | add missing import | gentoo/identity.gentoo.org,gentoo/identity.gentoo.org,dastergon/identity.gentoo.org,dastergon/identity.gentoo.org | okupy/common/encryption.py | okupy/common/encryption.py | from Crypto.Cipher import AES
from django.conf import settings
from random import choice
import base64
import hashlib
import os
import string
def sha1_password(password):
'''
Create a SHA1 salted hash
'''
salt = os.urandom(4)
h = hashlib.sha1(password)
h.update(salt)
return "{SSHA}" + base64.encodestring(h.digest() + salt)[:-1]
def check_password(challenge_password, password,):
challenge_bytes = decode(challenge_password[6:])
digest = challenge_bytes[:20]
salt = challenge_bytes[20:]
hr = hashlib.sha1(password)
hr.update(salt)
return digest == hr.digest()
def encrypt_password(password):
'''
Encrypt the password in AES encryption, using the secret key
specified in the settings file
Taken from
http://www.codekoala.com/blog/2009/aes-encryption-python-using-pycrypto/
'''
BLOCK_SIZE = 32
PADDING = '{'
pad = lambda s: s + (BLOCK_SIZE - len(s) % BLOCK_SIZE) * PADDING
EncodeAES = lambda c, s: base64.b64encode(c.encrypt(pad(s)))
cipher = AES.new(settings.SECRET_KEY[:BLOCK_SIZE])
return EncodeAES(cipher, password)
def decrypt_password(password):
'''
Decrypt the password in AES encryption, using the secret key
specified in the settings file
Taken from
http://www.codekoala.com/blog/2009/aes-encryption-python-using-pycrypto/
'''
BLOCK_SIZE = 32
PADDING = '{'
DecodeAES = lambda c, e: c.decrypt(base64.b64decode(e)).rstrip(PADDING)
cipher = AES.new(settings.SECRET_KEY[:BLOCK_SIZE])
return DecodeAES(cipher, password)
def random_string(length):
'''
Returns a random string for temporary URLs
'''
return ''.join([choice(string.letters + string.digits) for i in range(length)])
| from Crypto.Cipher import AES
from django.conf import settings
import base64
import hashlib
import os
import string
def sha1_password(password):
'''
Create a SHA1 salted hash
'''
salt = os.urandom(4)
h = hashlib.sha1(password)
h.update(salt)
return "{SSHA}" + base64.encodestring(h.digest() + salt)[:-1]
def check_password(challenge_password, password,):
challenge_bytes = decode(challenge_password[6:])
digest = challenge_bytes[:20]
salt = challenge_bytes[20:]
hr = hashlib.sha1(password)
hr.update(salt)
return digest == hr.digest()
def encrypt_password(password):
'''
Encrypt the password in AES encryption, using the secret key
specified in the settings file
Taken from
http://www.codekoala.com/blog/2009/aes-encryption-python-using-pycrypto/
'''
BLOCK_SIZE = 32
PADDING = '{'
pad = lambda s: s + (BLOCK_SIZE - len(s) % BLOCK_SIZE) * PADDING
EncodeAES = lambda c, s: base64.b64encode(c.encrypt(pad(s)))
cipher = AES.new(settings.SECRET_KEY[:BLOCK_SIZE])
return EncodeAES(cipher, password)
def decrypt_password(password):
'''
Decrypt the password in AES encryption, using the secret key
specified in the settings file
Taken from
http://www.codekoala.com/blog/2009/aes-encryption-python-using-pycrypto/
'''
BLOCK_SIZE = 32
PADDING = '{'
DecodeAES = lambda c, e: c.decrypt(base64.b64decode(e)).rstrip(PADDING)
cipher = AES.new(settings.SECRET_KEY[:BLOCK_SIZE])
return DecodeAES(cipher, password)
def random_string(length):
'''
Returns a random string for temporary URLs
'''
return ''.join([choice(string.letters + string.digits) for i in range(length)])
| agpl-3.0 | Python |
6a6c5d9c12308cc6638aa5139bf6e7eeb84256df | Bump version after release | OpenSCAP/openscap-daemon,OpenSCAP/openscap-daemon | openscap_daemon/version.py | openscap_daemon/version.py | # Copyright 2015 Red Hat Inc., Durham, North Carolina.
# All Rights Reserved.
#
# openscap-daemon is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2.1 of the License, or
# (at your option) any later version.
#
# openscap-daemon is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with openscap-daemon. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Martin Preisler <mpreisle@redhat.com>
VERSION_MAJOR = 0
VERSION_MINOR = 1
VERSION_PATCH = 8
VERSION_STRING = "%i.%i.%i" % (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
__all__ = ["VERSION_MAJOR", "VERSION_MINOR", "VERSION_PATCH", "VERSION_STRING"]
| # Copyright 2015 Red Hat Inc., Durham, North Carolina.
# All Rights Reserved.
#
# openscap-daemon is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 2.1 of the License, or
# (at your option) any later version.
#
# openscap-daemon is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with openscap-daemon. If not, see <http://www.gnu.org/licenses/>.
#
# Authors:
# Martin Preisler <mpreisle@redhat.com>
VERSION_MAJOR = 0
VERSION_MINOR = 1
VERSION_PATCH = 7
VERSION_STRING = "%i.%i.%i" % (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
__all__ = ["VERSION_MAJOR", "VERSION_MINOR", "VERSION_PATCH", "VERSION_STRING"]
| lgpl-2.1 | Python |
f38d930b0bcc7ca27bc06e9818d68f88089ac33f | Update suffix | davidgasquez/kaggle-airbnb | scripts/generate_submission.py | scripts/generate_submission.py | #!/usr/bin/env python
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from xgboost.sklearn import XGBClassifier
from utils.io import generate_submission
def main():
path = '../data/processed/'
prefix = 'processed_'
suffix = '4'
train_users = pd.read_csv(path + prefix + 'train_users.csv' + suffix)
test_users = pd.read_csv(path + prefix + 'test_users.csv' + suffix)
y_train = train_users['country_destination']
train_users.drop(['country_destination', 'id'], axis=1, inplace=True)
train_users = train_users.fillna(-1)
x_train = train_users.values
label_encoder = LabelEncoder()
encoded_y_train = label_encoder.fit_transform(y_train)
test_users_ids = test_users['id']
test_users.drop('id', axis=1, inplace=True)
test_users = test_users.fillna(-1)
x_test = test_users.values
clf = XGBClassifier(
max_depth=7,
learning_rate=0.18,
n_estimators=80,
objective="multi:softprob",
gamma=0,
min_child_weight=1,
max_delta_step=0,
subsample=1,
colsample_bytree=1,
colsample_bylevel=1,
reg_alpha=0,
reg_lambda=1,
scale_pos_weight=1,
base_score=0.5,
missing=None,
silent=True,
nthread=-1,
seed=42
)
clf.fit(x_train, encoded_y_train)
y_pred = clf.predict_proba(x_test)
generate_submission(y_pred, test_users_ids, label_encoder, name='gb')
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import pandas as pd
from sklearn.preprocessing import LabelEncoder
from xgboost.sklearn import XGBClassifier
from utils.io import generate_submission
def main():
path = '../data/processed/'
prefix = 'processed_'
suffix = '1'
train_users = pd.read_csv(path + prefix + 'train_users.csv' + suffix)
test_users = pd.read_csv(path + prefix + 'test_users.csv' + suffix)
y_train = train_users['country_destination']
train_users.drop(['country_destination', 'id'], axis=1, inplace=True)
train_users = train_users.fillna(-1)
x_train = train_users.values
label_encoder = LabelEncoder()
encoded_y_train = label_encoder.fit_transform(y_train)
test_users_ids = test_users['id']
test_users.drop('id', axis=1, inplace=True)
test_users = test_users.fillna(-1)
x_test = test_users.values
clf = XGBClassifier(
max_depth=7,
learning_rate=0.18,
n_estimators=80,
objective="multi:softprob",
gamma=0,
min_child_weight=1,
max_delta_step=0,
subsample=1,
colsample_bytree=1,
colsample_bylevel=1,
reg_alpha=0,
reg_lambda=1,
scale_pos_weight=1,
base_score=0.5,
missing=None,
silent=True,
nthread=-1,
seed=42
)
clf.fit(x_train, encoded_y_train)
y_pred = clf.predict_proba(x_test)
generate_submission(y_pred, test_users_ids, label_encoder, name='gb')
if __name__ == '__main__':
main()
| mit | Python |
8336b7090dcefe19116a63f22e9799d7f5b926e9 | Disable building the 2.0 profile in mono-basic. | BansheeMediaPlayer/bockbuild,BansheeMediaPlayer/bockbuild,mono/bockbuild,BansheeMediaPlayer/bockbuild,mono/bockbuild | packages/mono-basic.py | packages/mono-basic.py | GitHubTarballPackage ('mono', 'mono-basic', '3.0', 'bd316e914e1a230c29b5d637239334df41a79c7f',
configure = './configure --prefix="%{prefix}" --with-profile2=no',
override_properties = { 'make': 'make' }
)
| GitHubTarballPackage ('mono', 'mono-basic', '3.0', 'a74642af7f72d1012c87d82d7a12ac04a17858d5',
configure = './configure --prefix="%{prefix}"',
override_properties = { 'make': 'make' }
)
| mit | Python |
74ae937ba4d13acf49f9df0fa33579f2b5d07177 | Bump version | Phylliade/ikpy | src/ikpy/_version.py | src/ikpy/_version.py | __version__ = '3.1dev'
| __version__ = '3.0.1'
| apache-2.0 | Python |
3910675ef42fb7978c2bcbaa8943eebbfaf4e1af | add test for MemoryRequestIdStore | evernym/plenum,evernym/zeno | plenum/test/client/test_request_id_store.py | plenum/test/client/test_request_id_store.py | import pytest
from plenum.client.request_id_store import *
import os
from plenum.test.conftest import tdir
import random
def check_request_id_store(store: RequestIdStore):
for signerIndex in range(3):
signerId = "signer-id-{}".format(signerIndex)
assert store.currentId(signerId) is None
for requestIndex in range(3):
reqId = store.nextId(str(signerId))
assert reqId == requestIndex + 1
assert store.currentId(signerId) == reqId
def test_file_request_id_store(tdir):
# creating tem file
os.mkdir(tdir)
storeFileName = "test_file_request_id_store_{}".format(random.random())
storeFilePath = os.path.join(tdir, storeFileName)
with FileRequestIdStore(storeFilePath) as store:
# since random empty file created for this test loaded storage should be empty
assert len(store._storage) == 0
check_request_id_store(store)
# check that store does contain the data
assert os.path.getsize(storeFilePath) == 42
os.remove(storeFilePath)
def test_memory_request_id_store():
store = MemoryRequestIdStore()
check_request_id_store(store) | import pytest
from plenum.client.request_id_store import FileRequestIdStore
import os
from plenum.test.conftest import tdir
import random
def test_file_request_id_store(tdir):
# creating tem file
os.mkdir(tdir)
storeFileName = "test_file_request_id_store_{}".format(random.random())
storeFilePath = os.path.join(tdir, storeFileName)
with FileRequestIdStore(storeFilePath) as store:
# since random empty file created for this test loaded storage should be empty
assert len(store._storage) == 0
for signerIndex in range(3):
signerId = "signer-id-{}".format(signerIndex)
assert store.currentId(signerId) is None
for requestIndex in range(3):
reqId = store.nextId(str(signerId))
assert reqId == requestIndex + 1
assert store.currentId(signerId) == reqId
# check that store does contain the data
assert os.path.getsize(storeFilePath) == 42
os.remove(storeFilePath) | apache-2.0 | Python |
f8afe612d39b8c39c031a2aa0ccafe38bb85ef83 | Update test cases. | supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer,supersaiyanmode/HomePiServer | test/core/services/test_service_meta.py | test/core/services/test_service_meta.py | from threading import Event, Thread
import pytest
from app.core.messaging import Receiver, Sender, SchemaValidationFailed
from app.core.services import EventDrivenService, BaseService
from app.services.messaging import MessageService
CONFIG = {
"redis_config": {
"USE_FAKE_REDIS": True
},
"queues": {}
}
class Service(EventDrivenService, BaseService):
def on_service_start(self, *args, **kwargs):
super().on_service_start(*args, **kwargs)
self.value_event = Event()
params = {
"arg1": {"type": "string"}
}
self.express_capability("test", "testdesc", params, self.handle)
def get_component_name(self):
return "test"
def handle(self, arg1):
self.value = arg1
self.value_event.set()
def get_value(self):
self.value_event.wait()
return self.value
class TestEventDrivenService(object):
@classmethod
def setup_class(cls):
event = Event()
cls.message_service = MessageService(CONFIG)
cls.message_service.notify_start = lambda: event.set()
cls.message_service_thread = Thread(
target=cls.message_service.on_service_start)
cls.message_service_thread.start()
event.wait()
cls.service = Service()
cls.service.on_service_start()
@classmethod
def teardown_class(cls):
cls.message_service.on_service_stop()
cls.message_service_thread.join()
cls.service.on_service_stop()
def test_express_simple_capability_with_bad_schema(self):
receiver = Receiver("/services/test/capabilities")
receiver.start()
obj = receiver.receive()
assert len(obj.task) == 1
value = next(iter(obj.task.values()))
value.pop("id")
queue = value.pop("queue")
assert value == {
"name": "test",
"description": "testdesc",
"params": {"arg1": {"type": "string"}},
}
sender = Sender(queue)
sender.start()
with pytest.raises(SchemaValidationFailed):
sender.send({"arg2": "new-value"})
def test_express_simple_capability_with_correct_schema(self):
receiver = Receiver("/services/test/capabilities")
receiver.start()
obj = receiver.receive()
assert len(obj.task) == 1
value = next(iter(obj.task.values()))
value.pop("id")
queue = value.pop("queue")
assert value == {
"name": "test",
"description": "testdesc",
"params": {"arg1": {"type": "string"}},
}
sender = Sender(queue)
sender.start()
sender.send({"arg1": "new-value"})
assert self.service.get_value() == "new-value"
| from threading import Event, Thread
from app.core.messaging import Receiver, Sender
from app.core.services import EventDrivenService, Capability
from app.core.services import BaseService, BackgroundThreadServiceStart
from app.services.messaging import MessageService
CONFIG = {
"redis_config": {
"USE_FAKE_REDIS": True
},
"queues": {}
}
class Service(EventDrivenService, BaseService):
def on_service_start(self, *args, **kwargs):
super().on_service_start(*args, **kwargs)
params = {
"arg1": {"type": "string"}
}
capability = Capability("test", "test", params)
self.express_capability(capability, self.handle)
def get_component_name(self):
return "test"
def handle(self, arg1):
self.value = arg1
class TestEventDrivenService(object):
@classmethod
def setup_class(cls):
event = Event()
cls.message_service = MessageService(CONFIG)
cls.message_service.notify_start = lambda: event.set()
cls.message_service_thread = Thread(
target=cls.message_service.on_service_start)
cls.message_service_thread.start()
event.wait()
@classmethod
def teardown_class(cls):
cls.message_service.on_service_stop()
cls.message_service_thread.join()
def setup_method(self):
self.service = Service()
self.service.on_service_start()
def teardown_method(self):
self.service.on_service_stop()
def test_express_simple_capability_with_bad_schema(self):
receiver = Receiver("/services/test/capabilities")
receiver.start()
obj = receiver.receive()
assert len(obj.task) == 1
value = next(iter(obj.task.values()))
qid = value.pop("id")
assert value == {
"name": "test",
"description": "test",
"params": {"arg1": {"type": "string"}},
}
| mit | Python |
5758912f86f73a019e7df309aea5bc1bcd7516e5 | Make some broadcast admin fields editable. | bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv,bryanveloso/avalonstar-tv | avalonstar/components/broadcasts/admin.py | avalonstar/components/broadcasts/admin.py | # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Broadcast, Series
class BroadcastAdmin(admin.ModelAdmin):
list_display = ['number', 'airdate', 'status', 'series']
list_editable = ['airdate', 'status', 'series']
list_display_links = ['number', 'airdate']
raw_id_fields = ['games', 'series']
autocomplete_lookup_fields = {
'fk': ['series'],
'm2m': ['games']
}
admin.site.register(Broadcast, BroadcastAdmin)
class SeriesAdmin(admin.ModelAdmin):
pass
admin.site.register(Series, SeriesAdmin)
| # -*- coding: utf-8 -*-
from django.contrib import admin
from .models import Broadcast, Series
class BroadcastAdmin(admin.ModelAdmin):
list_display = ['number', 'airdate', 'status', 'series']
list_display_links = ['number', 'airdate']
raw_id_fields = ['games', 'series']
autocomplete_lookup_fields = {
'fk': ['series'],
'm2m': ['games']
}
admin.site.register(Broadcast, BroadcastAdmin)
class SeriesAdmin(admin.ModelAdmin):
pass
admin.site.register(Series, SeriesAdmin)
| apache-2.0 | Python |
39b8cb70ffd6be60c6d757ecd4703a3a0ca2a415 | Improve logs and change delete pos | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | dbaas/workflow/steps/build_database.py | dbaas/workflow/steps/build_database.py | # -*- coding: utf-8 -*-
import logging
from base import BaseStep
from logical.models import Database
import datetime
LOG = logging.getLogger(__name__)
class BuildDatabase(BaseStep):
def __unicode__(self):
return "Creating logical database..."
def do(self, workflow_dict):
try:
if not workflow_dict['team'] or not workflow_dict['description'] or not workflow_dict['databaseinfra']:
return False
LOG.info("Creating Database...")
database = Database.provision(name= workflow_dict['name'], databaseinfra= workflow_dict['databaseinfra'])
LOG.info("Database %s created!" % database)
workflow_dict['database'] = database
LOG.info("Updating database team")
database.team = workflow_dict['team']
if 'project' in workflow_dict:
LOG.info("Updating database project")
database.project = workflow_dict['project']
LOG.info("Updating database description")
database.description = workflow_dict['description']
database.save()
return True
except Exception, e:
print e
return False
def undo(self, workflow_dict):
try:
if not 'database' in workflow_dict:
return False
LOG.info("Destroying the database....")
if not workflow_dict['database'].is_in_quarantine:
LOG.info("Putting Database in quarentine...")
database = workflow_dict['database']
database.is_in_quarantine= True
database.quarantine_dt = datetime.datetime.now().date()
database.save()
database.delete()
return True
except Exception, e:
print e
return False
| # -*- coding: utf-8 -*-
import logging
from base import BaseStep
from logical.models import Database
LOG = logging.getLogger(__name__)
class BuildDatabase(BaseStep):
def __unicode__(self):
return "Creating logical database..."
def do(self, workflow_dict):
try:
if not workflow_dict['team'] or not workflow_dict['description'] or not workflow_dict['databaseinfra']:
return False
LOG.info("Creating Database...")
database = Database.provision(name= workflow_dict['name'], databaseinfra= workflow_dict['databaseinfra'])
workflow_dict['database'] = database
database.team = workflow_dict['team']
if 'project' in workflow_dict:
database.project = workflow_dict['project']
database.description = workflow_dict['description']
database.save()
return True
except Exception, e:
print e
return False
def undo(self, workflow_dict):
try:
LOG.info("Destroying the database....")
workflow_dict['database'].delete()
return True
except Exception, e:
print e
return False
| bsd-3-clause | Python |
0de277c18b1f0a99f515430f45031071af8e7ea3 | improve test | zlorb/mitmproxy,mhils/mitmproxy,Kriechi/mitmproxy,StevenVanAcker/mitmproxy,mosajjal/mitmproxy,MatthewShao/mitmproxy,mhils/mitmproxy,mosajjal/mitmproxy,laurmurclar/mitmproxy,zlorb/mitmproxy,mitmproxy/mitmproxy,ujjwal96/mitmproxy,mhils/mitmproxy,xaxa89/mitmproxy,StevenVanAcker/mitmproxy,xaxa89/mitmproxy,mhils/mitmproxy,mosajjal/mitmproxy,ujjwal96/mitmproxy,vhaupert/mitmproxy,MatthewShao/mitmproxy,ujjwal96/mitmproxy,vhaupert/mitmproxy,cortesi/mitmproxy,mosajjal/mitmproxy,mitmproxy/mitmproxy,MatthewShao/mitmproxy,MatthewShao/mitmproxy,mitmproxy/mitmproxy,ddworken/mitmproxy,xaxa89/mitmproxy,cortesi/mitmproxy,zlorb/mitmproxy,Kriechi/mitmproxy,ujjwal96/mitmproxy,zlorb/mitmproxy,mitmproxy/mitmproxy,cortesi/mitmproxy,mitmproxy/mitmproxy,ddworken/mitmproxy,cortesi/mitmproxy,laurmurclar/mitmproxy,ddworken/mitmproxy,ddworken/mitmproxy,Kriechi/mitmproxy,StevenVanAcker/mitmproxy,mhils/mitmproxy,xaxa89/mitmproxy,vhaupert/mitmproxy,laurmurclar/mitmproxy,vhaupert/mitmproxy,StevenVanAcker/mitmproxy,laurmurclar/mitmproxy,Kriechi/mitmproxy | test/mitmproxy/console/test_flowlist.py | test/mitmproxy/console/test_flowlist.py | from unittest import mock
import mitmproxy.tools.console.flowlist as flowlist
from mitmproxy.tools import console
from mitmproxy import proxy
from mitmproxy import options
class TestFlowlist:
def mkmaster(self, **opts):
if "verbosity" not in opts:
opts["verbosity"] = 1
o = options.Options(**opts)
return console.master.ConsoleMaster(o, proxy.DummyServer())
def test_new_request(self):
m = self.mkmaster()
x = flowlist.FlowListBox(m)
with mock.patch('mitmproxy.tools.console.signals.status_message.send') as mock_thing:
x.new_request("nonexistent url", "GET")
mock_thing.assert_called_once_with(message="Invalid URL: No hostname given")
| import mitmproxy.tools.console.flowlist as flowlist
from mitmproxy.tools import console
from mitmproxy import proxy
from mitmproxy import options
from .. import tservers
from unittest import mock
class TestFlowlist(tservers.MasterTest):
def mkmaster(self, **opts):
if "verbosity" not in opts:
opts["verbosity"] = 1
o = options.Options(**opts)
return console.master.ConsoleMaster(o, proxy.DummyServer())
def test_new_request(self):
m = self.mkmaster()
x = flowlist.FlowListBox(m)
with mock.patch('mitmproxy.tools.console.signals.status_message.send') as mock_thing:
x.new_request("nonexistent url", "GET")
mock_thing.assert_called_once_with(message="Invalid URL: No hostname given")
| mit | Python |
47a3f587b2530f9c46b5b7db5df52fb3999b8f2d | Change Model | Javid-Izadfar/TaOonja,Javid-Izadfar/TaOonja,Javid-Izadfar/TaOonja | taOonja/game/models.py | taOonja/game/models.py | import os
from django.db import models
class Location(models.Model):
name = models.CharField(max_length=250)
local_name = models.CharField(max_length=250)
visited = models.BooleanField(default=False)
coordinates = models.CharField(max_length=250)
detail = models.CharField(max_length=500)
img = models.ImageField(upload_to = "media/", blank=True, null=True)
def __str__(self):
return self.name
| import os
from django.db import models
#def get_image_path(filename):
# return os.path.join('media')
class Location(models.Model):
name = models.CharField(max_length=250)
local_name = models.CharField(max_length=250)
visited = models.BooleanField(default=False)
def __str__(self):
return self.name
class Detail(models.Model):
coordinates = models.CharField(max_length=250)
detail = models.CharField(max_length=500)
img = models.ImageField(upload_to = "media/", blank=True, null=True)
location = models.OneToOneField(Location, on_delete=models.CASCADE, primary_key=True)
def __str__(self):
return self.detail
| mit | Python |
c7aaee482440608453ba9f6472f9600ff7b55653 | Add module documentation and extra functions | markshroyer/pointfree,markshroyer/pointfree | fpkit.py | fpkit.py | """
Functional programming toolkit for Python
Implements easy function composition and currying via operator overloads
and some trickery using decorators. This makes it possible to do things
like:
@curryable
def add(a, b):
return a + b
@curryable
def mult(a, b):
return a * b
# Function currying
x = add(3)(5)
f = mult(3)
y = f(5)
print (x, y) # prints (8, 15)
# Currying and forward composition with the >> operator
g = add(1) >> mult(9) >> add(6)
# Regular function composition with the * operator
h = printfn * g
h(3) # prints 4
This syntax also works with generators, so that you can set up generator
pipelines with the >> operator. See examples.py, distributed with this
module, for more examples.
https://github.com/markshroyer/fpkit
"""
__author__ = "Mark Shroyer"
__email__ = "code@markshroyer.com"
__version__ = 0.1
import inspect
class Comp:
"""@composable function decorator
Converts a regular Python function into one which can be composed with
other Python functions using the * and >> operators.
"""
def __init__(self, f):
self.f = f
def __mul__(self, g):
return self.__class__(lambda *a: self.f(g(*a)))
def __rshift__(self, g):
return self.__class__(lambda *a: g(self.f(*a)))
def __call__(self, *a):
return self.f(*a)
def compv(val):
"""Turn a non-callable value into a composable function
Makes a composable function that returns the given value when called.
"""
return Comp(lambda *a: val)
def curr(f):
"""@curryable function decorator
Converts a regular Python function into one supporting a form of
partial application. Supports positional arguments only. Functions
with this decorator are automatically composable.
"""
def thunk(f, n, acum):
if n <= 0:
return f(*acum)
else:
return Comp(lambda *a: thunk(f, n-len(a), acum+list(a)))
return Comp(thunk(f, len(inspect.getargspec(f)[0]), []))
# Verbose form for function decorators
composable = Comp
curryable = curr
@composable
def ignore(iterator):
for x in iterator: pass
@composable
def printf(output):
print output,
@composable
def printfn(output):
print output
| """
fpkit
Functional programming toolkit for Python.
"""
__author__ = "Mark Shroyer"
__email__ = "code@markshroyer.com"
__version__ = 0.1
import inspect
class Comp:
"""@composable function decorator
Converts a regular Python function into one which can be composed with
other Python functions using the * and >> operators.
"""
def __init__(self, f):
self.f = f
def __mul__(self, g):
return self.__class__(lambda *a: self.f(g(*a)))
def __rshift__(self, g):
return self.__class__(lambda *a: g(self.f(*a)))
def __call__(self, *a):
return self.f(*a)
def compval(val):
"""Turn a non-callable value into a composable function
Makes a composable function that returns the given value when called.
"""
return Comp(lambda *a: val)
def curr(f):
"""@curryable function decorator
Converts a regular Python function into one supporting a form of
partial application. Supports positional arguments only. Functions
with this decorator are automatically composable.
"""
def thunk(f, n, acum):
if n <= 0:
return f(*acum)
else:
return Comp(lambda *a: thunk(f, n-len(a), acum+list(a)))
return Comp(thunk(f, len(inspect.getargspec(f)[0]), []))
# Verbose form for function decorators
composable = Comp
curryable = curr
| apache-2.0 | Python |
5e5f3a27c8a9a4e657f1b4ab4fbeae87ff201c8b | allow edit ids for individuals in mme | macarthur-lab/seqr,ssadedin/seqr,ssadedin/seqr,macarthur-lab/xbrowse,ssadedin/seqr,macarthur-lab/seqr,ssadedin/seqr,macarthur-lab/xbrowse,macarthur-lab/xbrowse,macarthur-lab/seqr,macarthur-lab/xbrowse,macarthur-lab/seqr,macarthur-lab/seqr,macarthur-lab/xbrowse,macarthur-lab/xbrowse,ssadedin/seqr | seqr/utils/model_sync_utils.py | seqr/utils/model_sync_utils.py | from bs4 import BeautifulSoup
from seqr.models import Individual
from seqr.model_utils import find_matching_xbrowse_model
def convert_html_to_plain_text(html_string, remove_line_breaks=False):
"""Returns string after removing all HTML markup.
Args:
html_string (str): string with HTML markup
remove_line_breaks (bool): whether to also remove line breaks and extra white space from string
"""
if not html_string:
return ''
text = BeautifulSoup(html_string, "html.parser").get_text()
# remove empty lines as well leading and trailing space on non-empty lines
if remove_line_breaks:
text = ' '.join(line.strip() for line in text.splitlines() if line.strip())
return text
def can_edit_family_id(family):
_can_edit_entity_id(family.project, 'family_id')
def can_edit_individual_id(individual):
_can_edit_entity_id(individual.family.project, 'individual_id')
def _can_edit_entity_id(project, entity_id_key):
base_project = find_matching_xbrowse_model(project)
if base_project.vcffile_set.count() and not base_project.has_elasticsearch_index():
raise ValueError('Editing {} is disabled for projects which still use the mongo datastore'.format(entity_id_key))
| from bs4 import BeautifulSoup
from seqr.models import Individual
from seqr.model_utils import find_matching_xbrowse_model
def convert_html_to_plain_text(html_string, remove_line_breaks=False):
"""Returns string after removing all HTML markup.
Args:
html_string (str): string with HTML markup
remove_line_breaks (bool): whether to also remove line breaks and extra white space from string
"""
if not html_string:
return ''
text = BeautifulSoup(html_string, "html.parser").get_text()
# remove empty lines as well leading and trailing space on non-empty lines
if remove_line_breaks:
text = ' '.join(line.strip() for line in text.splitlines() if line.strip())
return text
def can_edit_family_id(family):
_can_edit_entity_id(family.project, 'family_id', family.family_id)
def can_edit_individual_id(individual):
_can_edit_entity_id(individual.family.project, 'individual_id', individual.individual_id)
def _can_edit_entity_id(project, entity_id_key, entity_id):
base_project = find_matching_xbrowse_model(project)
if base_project.vcffile_set.count() and not base_project.has_elasticsearch_index():
raise ValueError('Editing {} is disabled for projects which still use the mongo datastore'.format(entity_id_key))
if project.is_mme_enabled:
filter_key = 'family__family_id' if entity_id_key == 'family_id' else 'individual_id'
individual_filter = {'family__project': project, filter_key: entity_id}
if any(indiv for indiv in Individual.objects.filter(**individual_filter) if indiv.mme_submitted_date and not indiv.mme_deleted_date):
raise ValueError('Editing {} is disabled for {} because it has matchmaker submissions'.format(
entity_id_key, entity_id))
| agpl-3.0 | Python |
1cbe86ed9aa8123cf91fb06b0199a9ac797c6419 | fix building manual | rr-/dotfiles,rr-/dotfiles,rr-/dotfiles | cfg/mutt/install.py | cfg/mutt/install.py | from dotinstall import packages
from dotinstall import util
def run():
packages.try_install('neomutt')
packages.try_install('w3m')
packages.try_install('lynx')
packages.try_install('docbook-xsl')
util.create_file('~/.mutt/certificates')
util.create_dir('~/.mutt/cache/bodies')
util.create_dir('~/.mutt/cache/headers')
util.create_symlink('./file_email', '~/.mutt/')
util.create_symlink('./colors.muttrc', '~/.mutt/')
util.create_symlink('./muttrc', '~/.mutt/')
util.create_symlink('./mailcap', '~/.mailcap')
| from dotinstall import packages
from dotinstall import util
def run():
packages.try_install('neomutt')
packages.try_install('w3m')
util.create_file('~/.mutt/certificates')
util.create_dir('~/.mutt/cache/bodies')
util.create_dir('~/.mutt/cache/headers')
util.create_symlink('./file_email', '~/.mutt/')
util.create_symlink('./colors.muttrc', '~/.mutt/')
util.create_symlink('./muttrc', '~/.mutt/')
util.create_symlink('./mailcap', '~/.mailcap')
| mit | Python |
b6a60b0c62547b5b6f3f1d6e89d0f5bfb798fbb9 | make atlassian errors ignorable | uhjish/link | link/wrappers/__init__.py | link/wrappers/__init__.py | """
I don't exactly love that you have to do this. I will look for a new design
"""
from apiwrappers import *
from dbwrappers import *
from nosqlwrappers import *
from consolewrappers import *
from alexawrappers import *
from hivewrappers import *
from elasticsearchwrappers import *
from liverailwrappers import *
import logging
try:
from atlassianwrappers import *
except:
logging.warning("missing dependencies for atlassianwrappers, ignoring...")
pass
| """
I don't exactly love that you have to do this. I will look for a new design
"""
from apiwrappers import *
from dbwrappers import *
from nosqlwrappers import *
from consolewrappers import *
from atlassianwrappers import *
from alexawrappers import *
from hivewrappers import *
from elasticsearchwrappers import *
from liverailwrappers import *
| apache-2.0 | Python |
802d03af3f1fe1719ff0843690b51b3200c20044 | Remove unused import. | markstory/lint-review,markstory/lint-review,markstory/lint-review | lintreview/tools/black.py | lintreview/tools/black.py | from __future__ import absolute_import
import os
import logging
import lintreview.docker as docker
from lintreview.review import IssueComment
from lintreview.tools import Tool
log = logging.getLogger(__name__)
class Black(Tool):
name = 'black'
def check_dependencies(self):
"""See if the python3 image exists
"""
return docker.image_exists('python3')
def match_file(self, filename):
base = os.path.basename(filename)
name, ext = os.path.splitext(base)
return ext == '.py'
def process_files(self, files):
"""
Run code checks with pep8.
Only a single process is made for all files
to save resources.
"""
log.debug('Processing %s files with %s', files, self.name)
command = self.create_command()
command.append('--check')
command += files
output = docker.run('python3', command, source_dir=self.base_path)
if not output:
return False
output = output.split("\n")
effected_files = [
'* ' + docker.strip_base(line.replace('would reformat ', ''))
for line in output
if line.startswith('would reformat')
]
if len(effected_files):
msg = (
'The following files do not match the `black` styleguide:'
'\n\n'
)
msg += "\n".join(effected_files)
self.problems.add(IssueComment(msg))
def has_fixer(self):
"""
black has a fixer that can be enabled through configuration.
"""
return bool(self.options.get('fixer', False))
def process_fixer(self, files):
"""Run black, in fixer mode.
"""
command = self.create_fixer_command(files)
docker.run('python3', command, source_dir=self.base_path)
def create_command(self):
command = ['black']
if 'safe' in self.options:
command.append('--safe')
if 'config' in self.options:
command.extend(['--config',
docker.apply_base(self.options['config'])])
return command
def create_fixer_command(self, files):
command = self.create_command()
command += files
return command
| from __future__ import absolute_import
import os
import logging
import lintreview.docker as docker
from lintreview.review import IssueComment
from lintreview.tools import Tool, python_image
log = logging.getLogger(__name__)
class Black(Tool):
name = 'black'
def check_dependencies(self):
"""See if the python3 image exists
"""
return docker.image_exists('python3')
def match_file(self, filename):
base = os.path.basename(filename)
name, ext = os.path.splitext(base)
return ext == '.py'
def process_files(self, files):
"""
Run code checks with pep8.
Only a single process is made for all files
to save resources.
"""
log.debug('Processing %s files with %s', files, self.name)
command = self.create_command()
command.append('--check')
command += files
output = docker.run('python3', command, source_dir=self.base_path)
if not output:
return False
output = output.split("\n")
effected_files = [
'* ' + docker.strip_base(line.replace('would reformat ', ''))
for line in output
if line.startswith('would reformat')
]
if len(effected_files):
msg = (
'The following files do not match the `black` styleguide:'
'\n\n'
)
msg += "\n".join(effected_files)
self.problems.add(IssueComment(msg))
def has_fixer(self):
"""
black has a fixer that can be enabled through configuration.
"""
return bool(self.options.get('fixer', False))
def process_fixer(self, files):
"""Run black, in fixer mode.
"""
command = self.create_fixer_command(files)
docker.run('python3', command, source_dir=self.base_path)
def create_command(self):
command = ['black']
if 'safe' in self.options:
command.append('--safe')
if 'config' in self.options:
command.extend(['--config',
docker.apply_base(self.options['config'])])
return command
def create_fixer_command(self, files):
command = self.create_command()
command += files
return command
| mit | Python |
5a0114506d71f5c73f2e8eab572dd3922d7233e4 | Add a presubmit check so that no new unit tests in content/ are added to the unit_tests target | Jonekee/chromium.src,junmin-zhu/chromium-rivertrail,keishi/chromium,hgl888/chromium-crosswalk,anirudhSK/chromium,ondra-novak/chromium.src,jaruba/chromium.src,jaruba/chromium.src,dednal/chromium.src,keishi/chromium,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,Jonekee/chromium.src,jaruba/chromium.src,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,keishi/chromium,timopulkkinen/BubbleFish,ltilve/chromium,timopulkkinen/BubbleFish,hgl888/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,anirudhSK/chromium,timopulkkinen/BubbleFish,ChromiumWebApps/chromium,hujiajie/pa-chromium,bright-sparks/chromium-spacewalk,robclark/chromium,pozdnyakov/chromium-crosswalk,markYoungH/chromium.src,ondra-novak/chromium.src,hujiajie/pa-chromium,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,timopulkkinen/BubbleFish,Just-D/chromium-1,zcbenz/cefode-chromium,M4sse/chromium.src,Just-D/chromium-1,robclark/chromium,rogerwang/chromium,dushu1203/chromium.src,dushu1203/chromium.src,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,robclark/chromium,ondra-novak/chromium.src,dushu1203/chromium.src,Chilledheart/chromium,junmin-zhu/chromium-rivertrail,littlstar/chromium.src,pozdnyakov/chromium-crosswalk,littlstar/chromium.src,Chilledheart/chromium,chuan9/chromium-crosswalk,patrickm/chromium.src,zcbenz/cefode-chromium,axinging/chromium-crosswalk,littlstar/chromium.src,rogerwang/chromium,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,krieger-od/nwjs_chromium.src,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,zcbenz/cefode-chro
mium,Just-D/chromium-1,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,nacl-webkit/chrome_deps,robclark/chromium,hgl888/chromium-crosswalk,Chilledheart/chromium,anirudhSK/chromium,jaruba/chromium.src,robclark/chromium,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,littlstar/chromium.src,anirudhSK/chromium,dednal/chromium.src,chuan9/chromium-crosswalk,fujunwei/chromium-crosswalk,krieger-od/nwjs_chromium.src,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,ltilve/chromium,markYoungH/chromium.src,ChromiumWebApps/chromium,Just-D/chromium-1,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,zcbenz/cefode-chromium,dednal/chromium.src,hgl888/chromium-crosswalk-efl,jaruba/chromium.src,ChromiumWebApps/chromium,dednal/chromium.src,nacl-webkit/chrome_deps,keishi/chromium,dednal/chromium.src,ondra-novak/chromium.src,TheTypoMaster/chromium-crosswalk,mogoweb/chromium-crosswalk,jaruba/chromium.src,ChromiumWebApps/chromium,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,Chilledheart/chromium,M4sse/chromium.src,bright-sparks/chromium-spacewalk,mogoweb/chromium-crosswalk,dushu1203/chromium.src,chuan9/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,robclark/chromium,fujunwei/chromium-crosswalk,anirudhSK/chromium,keishi/chromium,hgl888/chromium-crosswalk-efl,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,littlstar/chromium.src,rogerwang/chromium,axinging/chromium-crosswalk,Jonekee/chromium.src,mogoweb/chromium-crosswalk,markYoungH/chromium.src,patrickm/chromium.src,patrickm/chromium.src,hgl888/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,markYoungH/chromium.src,Jonekee/chromium.src,Fireblend/chromium-crosswalk,robclark/chromium,jaruba/chromium.src,dushu1203/chromi
um.src,dednal/chromium.src,anirudhSK/chromium,Fireblend/chromium-crosswalk,patrickm/chromium.src,mogoweb/chromium-crosswalk,hujiajie/pa-chromium,patrickm/chromium.src,Jonekee/chromium.src,ltilve/chromium,Jonekee/chromium.src,krieger-od/nwjs_chromium.src,timopulkkinen/BubbleFish,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,markYoungH/chromium.src,jaruba/chromium.src,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,axinging/chromium-crosswalk,zcbenz/cefode-chromium,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,hujiajie/pa-chromium,ondra-novak/chromium.src,krieger-od/nwjs_chromium.src,patrickm/chromium.src,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,axinging/chromium-crosswalk,Just-D/chromium-1,hujiajie/pa-chromium,ChromiumWebApps/chromium,rogerwang/chromium,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,ltilve/chromium,mogoweb/chromium-crosswalk,Jonekee/chromium.src,rogerwang/chromium,ltilve/chromium,M4sse/chromium.src,junmin-zhu/chromium-rivertrail,timopulkkinen/BubbleFish,pozdnyakov/chromium-crosswalk,ondra-novak/chromium.src,pozdnyakov/chromium-crosswalk,nacl-webkit/chrome_deps,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,rogerwang/chromium,Fireblend/chromium-crosswalk,Chilledheart/chromium,Fireblend/chromium-crosswalk,dednal/chromium.src,Pluto-tv/chromium-crosswalk,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,Chilledheart/chromium,bright-sparks/chromium-spacewalk,mohamed--abdel-maksoud/chromium.src,zcbenz/cefode-chromium,zcbenz/cefode-chromium,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,littlstar/chromium.src,mogoweb/chromium-crosswalk,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,keishi/chromium,PeterWangIntel/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dushu1203/chromium.src,P
luto-tv/chromium-crosswalk,patrickm/chromium.src,M4sse/chromium.src,rogerwang/chromium,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,axinging/chromium-crosswalk,patrickm/chromium.src,hgl888/chromium-crosswalk,ltilve/chromium,bright-sparks/chromium-spacewalk,junmin-zhu/chromium-rivertrail,krieger-od/nwjs_chromium.src,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,M4sse/chromium.src,M4sse/chromium.src,nacl-webkit/chrome_deps,nacl-webkit/chrome_deps,Fireblend/chromium-crosswalk,timopulkkinen/BubbleFish,pozdnyakov/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,hujiajie/pa-chromium,anirudhSK/chromium,zcbenz/cefode-chromium,nacl-webkit/chrome_deps,fujunwei/chromium-crosswalk,hujiajie/pa-chromium,timopulkkinen/BubbleFish,Fireblend/chromium-crosswalk,ChromiumWebApps/chromium,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,hujiajie/pa-chromium,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,robclark/chromium,hgl888/chromium-crosswalk,ondra-novak/chromium.src,dednal/chromium.src,crosswalk-project/chromium-crosswalk-efl,ChromiumWebApps/chromium,keishi/chromium,dednal/chromium.src,axinging/chromium-crosswalk,anirudhSK/chromium,dushu1203/chromium.src,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,rogerwang/chromium,bright-sparks/chromium-spacewalk,jaruba/chromium.src,timopulkkinen/BubbleFish,markYoungH/chromium.src,dednal/chromium.src,M4sse/chromium.src,hgl888/chromium-crosswalk-efl,junmin-zhu/chromium-rivertrail,mohamed--abdel-maksoud/chromium.src,jaruba/chromium.src,nacl-webkit/chrome_deps,dushu1203/chromium.src,timopulkkinen/BubbleFish,dushu1203/chromium.src,M4sse/chromium.src,markYoungH/chromium.src,crosswalk-project/chromium-crosswal
k-efl,Pluto-tv/chromium-crosswalk,krieger-od/nwjs_chromium.src,junmin-zhu/chromium-rivertrail,junmin-zhu/chromium-rivertrail,Pluto-tv/chromium-crosswalk,keishi/chromium,bright-sparks/chromium-spacewalk,junmin-zhu/chromium-rivertrail,Chilledheart/chromium,M4sse/chromium.src,jaruba/chromium.src,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,ltilve/chromium,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,zcbenz/cefode-chromium,anirudhSK/chromium,keishi/chromium,nacl-webkit/chrome_deps,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,rogerwang/chromium,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,ChromiumWebApps/chromium,crosswalk-project/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,ondra-novak/chromium.src,Chilledheart/chromium,hgl888/chromium-crosswalk,ltilve/chromium,robclark/chromium,zcbenz/cefode-chromium,Pluto-tv/chromium-crosswalk,junmin-zhu/chromium-rivertrail,timopulkkinen/BubbleFish,nacl-webkit/chrome_deps,robclark/chromium,keishi/chromium,ChromiumWebApps/chromium,axinging/chromium-crosswalk,rogerwang/chromium,PeterWangIntel/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,mogoweb/chromium-crosswalk,zcbenz/cefode-chromium,littlstar/chromium.src,pozdnyakov/chromium-crosswalk,dednal/chromium.src,hujiajie/pa-chromium,keishi/chromium,Jonekee/chromium.src | chrome/PRESUBMIT.py | chrome/PRESUBMIT.py | # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for changes affecting chrome/
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into gcl.
"""
import re
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Objective C confuses everything.
r'.*cocoa.*',
r'.*_mac\.(cc|h)$',
r'.*_mac_.*',
# All the messages files do weird multiple include trickery
r'.*_messages_internal\.h$',
r'render_messages.h$',
# Autogenerated window resources files are off limits
r'.*resource.h$',
# GTK macros in C-ish header code cause false positives
r'gtk_.*\.h$',
# Header trickery
r'.*-inl\.h$',
# Templates
r'sigslotrepeater\.h$',
# GCC attribute trickery
r'sel_main\.cc$',
# Mozilla code
r'mork_reader\.h$',
r'mork_reader\.cc$',
r'nss_decryptor_linux\.cc$',
# Has safe printf usage that cpplint complains about
r'safe_browsing_util\.cc$',
# Too much math on one line?
r'bloom_filter\.cc$',
# Bogus ifdef tricks
r'renderer_webkitplatformsupport_impl\.cc$',
# Lines > 100 chars
r'gcapi\.cc$',
)
def _CheckChangeLintsClean(input_api, output_api):
"""Makes sure that the chrome/ code is cpplint clean."""
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
return input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources)
def _CheckNoContentUnitTestsInChrome(input_api, output_api):
"""Makes sure that no unit tests from content/ are included in unit_tests."""
problems = []
for f in input_api.AffectedFiles():
if not f.LocalPath().endswith('chrome_tests.gypi'):
continue
for line_num, line in f.ChangedContents():
m = re.search(r"'(.*\/content\/.*unittest.*)'", line)
if m:
problems.append(m.group(1))
if not problems:
return []
return [output_api.PresubmitPromptWarning(
'Unit tests located in content/ should be added to the ' +
'content_tests.gypi:content_unittests target.',
items=problems)]
def _CommonChecks(input_api, output_api):
"""Checks common to both upload and commit."""
results = []
results.extend(_CheckNoContentUnitTestsInChrome(input_api, output_api))
return results
def CheckChangeOnUpload(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
results.extend(_CheckChangeLintsClean(input_api, output_api))
return results
def CheckChangeOnCommit(input_api, output_api):
results = []
results.extend(_CommonChecks(input_api, output_api))
return results
| # Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Makes sure that the chrome/ code is cpplint clean."""
INCLUDE_CPP_FILES_ONLY = (
r'.*\.cc$', r'.*\.h$'
)
EXCLUDE = (
# Objective C confuses everything.
r'.*cocoa.*',
r'.*_mac\.(cc|h)$',
r'.*_mac_.*',
# All the messages files do weird multiple include trickery
r'.*_messages_internal\.h$',
r'render_messages.h$',
# Autogenerated window resources files are off limits
r'.*resource.h$',
# GTK macros in C-ish header code cause false positives
r'gtk_.*\.h$',
# Header trickery
r'.*-inl\.h$',
# Templates
r'sigslotrepeater\.h$',
# GCC attribute trickery
r'sel_main\.cc$',
# Mozilla code
r'mork_reader\.h$',
r'mork_reader\.cc$',
r'nss_decryptor_linux\.cc$',
# Has safe printf usage that cpplint complains about
r'safe_browsing_util\.cc$',
# Too much math on one line?
r'bloom_filter\.cc$',
# Bogus ifdef tricks
r'renderer_webkitplatformsupport_impl\.cc$',
# Lines > 100 chars
r'gcapi\.cc$',
)
def CheckChangeOnUpload(input_api, output_api):
results = []
black_list = input_api.DEFAULT_BLACK_LIST + EXCLUDE
sources = lambda x: input_api.FilterSourceFile(
x, white_list=INCLUDE_CPP_FILES_ONLY, black_list=black_list)
results.extend(input_api.canned_checks.CheckChangeLintsClean(
input_api, output_api, sources))
return results
| bsd-3-clause | Python |
3b6a0a7c97d164b4270b8f247fa68b8b5d6ce1dd | fix reply | gonicus/gosa,gonicus/gosa,gonicus/gosa,gonicus/gosa | backend/src/gosa/backend/routes/system.py | backend/src/gosa/backend/routes/system.py | from gosa.common.hsts_request_handler import HSTSRequestHandler
class State:
system_state = "initializing"
class SystemStateReporter(HSTSRequestHandler):
"""
Return the current system state
"""
_xsrf = None
# disable xsrf feature
def check_xsrf_cookie(self):
pass
def get(self):
self.write(State.system_state)
| from gosa.common.hsts_request_handler import HSTSRequestHandler
class State:
system_state = "initializing"
class SystemStateReporter(HSTSRequestHandler):
"""
Return the current system state
"""
_xsrf = None
# disable xsrf feature
def check_xsrf_cookie(self):
pass
def get(self, path):
return State.system_state
| lgpl-2.1 | Python |
664ed1cf663010d36b979892d8d37e94a9a3ca98 | Add a title to the graph with the date in it. | peterbmarks/froniusLogger,peterbmarks/froniusLogger | graph.py | graph.py | """
Simple code to draw a graph of a day of power.
Requires matplotlib
On Fedora Linux: sudo dnf install python3-matplotlib
Usage: python3 graph.py [csv file name]
If you don't give the file name it will use today's
"""
import csv
import time
import datetime
import sys
import os
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
def main():
date_base = time.strftime("%Y_%m_%d")
if len(sys.argv) > 1:
date_base = os.path.splitext(sys.argv[1])[0]
in_file_name = date_base + ".csv"
data_reader = csv.reader(open(in_file_name,'r'), delimiter='\t')
y = []
x = []
max_watts = 0.0
for row in data_reader:
if len(row) > 1:
timeval = row[0]
dt = datetime.datetime.strptime(timeval, "%H:%M:%S")
x.append(mdates.date2num(dt))
watts = float(row[1])
y.append(watts)
if watts > max_watts:
max_watts = watts
fig, ax = plt.subplots()
ax.plot(x,y)
ax.xaxis_date()
my_fmt = mdates.DateFormatter('%H:%M')
ax.xaxis.set_major_formatter(my_fmt)
title_date = time.strftime("%d-%b-%Y")
plt.title("Solar generation on %s" % title_date)
plt.ylabel("Watts")
plt.xlabel("Time")
text_x = datetime.datetime.strptime("06:00", "%H:%M")
text_y = max_watts
plt.text(text_x, text_y, "Max: %dW" % max_watts)
plt.show()
print("writing: %s" % date_base)
plt.savefig('%s_graph' % date_base)
if __name__ == "__main__":
main()
| """
Simple code to draw a graph of a day of power.
Requires matplotlib
On Fedora Linux: sudo dnf install python3-matplotlib
Usage: python3 graph.py [csv file name]
If you don't give the file name it will use today's
"""
import csv
import time
import datetime
import sys
import os
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
def main():
date_base = time.strftime("%Y_%m_%d")
if len(sys.argv) > 1:
date_base = os.path.splitext(sys.argv[1])[0]
in_file_name = date_base + ".csv"
data_reader = csv.reader(open(in_file_name,'r'), delimiter='\t')
y = []
x = []
max_watts = 0.0
for row in data_reader:
if len(row) > 1:
timeval = row[0]
dt = datetime.datetime.strptime(timeval, "%H:%M:%S")
x.append(mdates.date2num(dt))
watts = float(row[1])
y.append(watts)
if watts > max_watts:
max_watts = watts
fig, ax = plt.subplots()
ax.plot(x,y)
ax.xaxis_date()
my_fmt = mdates.DateFormatter('%H:%M')
ax.xaxis.set_major_formatter(my_fmt)
plt.ylabel("Watts")
plt.xlabel("Time")
text_x = datetime.datetime.strptime("06:00", "%H:%M")
text_y = max_watts
plt.text(text_x, text_y, "Max: %dW" % max_watts)
plt.show()
print("writing: %s" % date_base)
plt.savefig('%s_graph' % date_base)
if __name__ == "__main__":
main()
| apache-2.0 | Python |
17c5585dcc4dae8fcec96cced8a765804bf1cf13 | update google.py | ceaniket/lyricsar | lyricsar/plugin/google.py | lyricsar/plugin/google.py |
searchpage=urllib2.urlopen(request)
print searchpage
lyrcweb=gogle.get_link(searchpage)
print lyrcweb
|
import string
import sys
import urllib2
import simplejson
import httplib
from bs4 import BeautifulSoup
from setting import GOOGLE_API_URL
from lyricsar.errorlist import errorlist
class google:
response=""
""" this plugin provide link by traverse results of google.com with extra keyword"""
def __init__(self,title):
self.search_url=GOOGLE_API_URL
self.title=title
def get_search(self):
"""get_search function make a url from title and url"""
string.replace(self.title," ","%20")
url=self.search_url+self.title
return str(url)
def get_link(self,searchpage):
"""get_link fetch first link from google results """
try:
results = simplejson.load(searchpage)
print results
return results
except:
sys.exit(errorlist['0e02'])
#================================ tesing =======================================
if __name__ =="__main__":
gogle=google("meherbaan+lyrics")
url=gogle.get_search()
request=urllib2.Request(url,None,{'Referer':'www.lyricsmint.com'})
print url
proxy = urllib2.ProxyHandler({'http': 'http://mh514uvamp1-16:1434@10.0.0.5:8080'})
auth = urllib2.HTTPBasicAuthHandler()
opener = urllib2.build_opener(proxy, auth, urllib2.HTTPHandler)
urllib2.install_opener(opener)
searchpage=urllib2.urlopen(request)
print searchpage
lyrcweb=gogle.get_link(searchpage)
print lyrcweb
| mit | Python |
0ea683383edefc79fbeb54e97ff42c7acd391561 | add users model | ez4u/ijust_backend,ez4u/ijust_backend | project/models/user.py | project/models/user.py | # -*- coding: utf-8 -*-
# python imports
from passlib.apps import custom_app_context as pwd_context
# project imports
from project.extensions import db
class User(db.Model):
    """ORM model for application accounts, stored in the ``users`` table."""
    __tablename__ = 'users'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(32), unique=True)
    email = db.Column(db.String(32), unique=True)
    password = db.Column(db.String(128))
    firstname = db.Column(db.String(32))
    lastname = db.Column(db.String(32))

    def hash_password(self, password):
        # Persist only the salted passlib hash, never the plain text.
        self.password = pwd_context.encrypt(password.encode('utf-8'))

    def verify_password(self, password):
        # Check the candidate password against the stored hash.
        candidate = password.encode('utf-8')
        return pwd_context.verify(candidate, self.password)

    def to_json(self):
        # Public profile view: username and credentials are not exposed.
        return {
            'email': self.email,
            'firstname': self.firstname,
            'lastname': self.lastname,
        }
| # -*- coding: utf-8 -*-
| apache-2.0 | Python |
bccb40657174e2b02da96712b053ca81f143ba12 | Update version to 9.2.27.dev0 [ci skip] | angr/claripy | claripy/__init__.py | claripy/__init__.py | #!/usr/bin/env python
# pylint: disable=F0401,W0401,W0603,
__version__ = "9.2.27.dev0"
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
import os
import sys
import socket
import logging
l = logging.getLogger("claripy")
l.addHandler(logging.NullHandler())
from .errors import *
from . import operations
from . import ops as _all_operations
# This is here for later, because we'll fuck the namespace in a few lines
from . import backends as _backends_module
from .backends import Backend
from .backend_object import BackendObject
#
# backend objects
#
from . import bv
from . import fp
from . import vsa
from .fp import FSORT_DOUBLE, FSORT_FLOAT
from .annotation import *
#
# Operations
#
from .ast.base import *
from .ast.bv import *
from .ast.fp import *
from .ast.bool import *
from .ast.strings import *
from . import ast
del BV
del Bool
del FP
del Base
ast._import()
def BV(name, size, explicit_name=None):  # pylint:disable=function-redefined
    """Deprecated alias for :func:`BVS`; warns loudly, then delegates."""
    warning = "DEPRECATION WARNING: claripy.BV is deprecated and will soon be removed. Please use claripy.BVS, instead."
    l.critical(warning)
    print(warning)
    return BVS(name, size, explicit_name=explicit_name)
#
# Initialize the backends
#
from . import backend_manager as _backend_manager
_backend_manager.backends._register_backend(_backends_module.BackendConcrete(), 'concrete', True, True)
_backend_manager.backends._register_backend(_backends_module.BackendVSA(), 'vsa', False, False)
if not os.environ.get('WORKER', False) and os.environ.get('REMOTE', False):
try:
_backend_z3 = _backends_module.backendremote.BackendRemote()
except socket.error:
raise ImportError("can't connect to backend")
else:
_backend_z3 = _backends_module.BackendZ3()
_backend_manager.backends._register_backend(_backend_z3, 'z3', False, False)
backends = _backend_manager.backends
def downsize():
    """
    Clear all temporary data associated with any backend
    """
    # Delegates to the module-level backend manager; each registered
    # backend drops its cached/temporary state.
    backends.downsize()
#
# Frontends
#
from .frontend import Frontend as _Frontend
from . import frontends
from . import frontend_mixins
from .solvers import *
#
# Convenient button
#
def reset():
    """
    Attempt to refresh any caching state associated with the module
    """
    # First ask every backend to drop its temporary state...
    downsize()
    # ...then clear the BVV construction cache.  The import is done lazily
    # here to avoid a circular import at module load time.
    from .ast import bv  # pylint:disable=redefined-outer-name
    bv._bvv_cache.clear()
from .debug import set_debug
| #!/usr/bin/env python
# pylint: disable=F0401,W0401,W0603,
__version__ = "9.2.26.dev0"
if bytes is str:
raise Exception("This module is designed for python 3 only. Please install an older version to use python 2.")
import os
import sys
import socket
import logging
l = logging.getLogger("claripy")
l.addHandler(logging.NullHandler())
from .errors import *
from . import operations
from . import ops as _all_operations
# This is here for later, because we'll fuck the namespace in a few lines
from . import backends as _backends_module
from .backends import Backend
from .backend_object import BackendObject
#
# backend objects
#
from . import bv
from . import fp
from . import vsa
from .fp import FSORT_DOUBLE, FSORT_FLOAT
from .annotation import *
#
# Operations
#
from .ast.base import *
from .ast.bv import *
from .ast.fp import *
from .ast.bool import *
from .ast.strings import *
from . import ast
del BV
del Bool
del FP
del Base
ast._import()
def BV(name, size, explicit_name=None): #pylint:disable=function-redefined
l.critical("DEPRECATION WARNING: claripy.BV is deprecated and will soon be removed. Please use claripy.BVS, instead.")
print("DEPRECATION WARNING: claripy.BV is deprecated and will soon be removed. Please use claripy.BVS, instead.")
return BVS(name, size, explicit_name=explicit_name)
#
# Initialize the backends
#
from . import backend_manager as _backend_manager
_backend_manager.backends._register_backend(_backends_module.BackendConcrete(), 'concrete', True, True)
_backend_manager.backends._register_backend(_backends_module.BackendVSA(), 'vsa', False, False)
if not os.environ.get('WORKER', False) and os.environ.get('REMOTE', False):
try:
_backend_z3 = _backends_module.backendremote.BackendRemote()
except socket.error:
raise ImportError("can't connect to backend")
else:
_backend_z3 = _backends_module.BackendZ3()
_backend_manager.backends._register_backend(_backend_z3, 'z3', False, False)
backends = _backend_manager.backends
def downsize():
"""
Clear all temporary data associated with any backend
"""
backends.downsize()
#
# Frontends
#
from .frontend import Frontend as _Frontend
from . import frontends
from . import frontend_mixins
from .solvers import *
#
# Convenient button
#
def reset():
"""
Attempt to refresh any caching state associated with the module
"""
downsize()
from .ast import bv # pylint:disable=redefined-outer-name
bv._bvv_cache.clear()
from .debug import set_debug
| bsd-2-clause | Python |
ce86e48c2d996af9d747927d6f1206569d604500 | use view to provide active confs | dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq | corehq/apps/userreports/data_source_providers.py | corehq/apps/userreports/data_source_providers.py | from __future__ import absolute_import
from abc import ABCMeta, abstractmethod
from corehq.apps.userreports.models import DataSourceConfiguration, StaticDataSourceConfiguration
import six
class DataSourceProvider(six.with_metaclass(ABCMeta, object)):
    """Abstract interface: subclasses supply UCR data source configurations."""

    @abstractmethod
    def get_data_sources(self):
        # Must return an iterable of data source configuration objects.
        pass
class DynamicDataSourceProvider(DataSourceProvider):
    """Provides every active data source configuration stored in the database."""

    def get_data_sources(self):
        # Ask the couch view for active configs, then wrap each raw doc.
        rows = DataSourceConfiguration.get_db().view(
            'userreports/active_data_sources', reduce=False, include_docs=True)
        return [DataSourceConfiguration.wrap(row['doc']) for row in rows]
class StaticDataSourceProvider(DataSourceProvider):
    """Provides the data sources defined in static (code-shipped) config."""

    def get_data_sources(self):
        return StaticDataSourceConfiguration.all()
class MockDataSourceProvider(DataSourceProvider):
    """Stub provider that always reports no data sources."""
    # for testing only
    def get_data_sources(self):
        return []
| from __future__ import absolute_import
from abc import ABCMeta, abstractmethod
from corehq.apps.userreports.models import DataSourceConfiguration, StaticDataSourceConfiguration
import six
class DataSourceProvider(six.with_metaclass(ABCMeta, object)):
@abstractmethod
def get_data_sources(self):
pass
class DynamicDataSourceProvider(DataSourceProvider):
def get_data_sources(self):
return [config for config in DataSourceConfiguration.all() if not config.is_deactivated]
class StaticDataSourceProvider(DataSourceProvider):
def get_data_sources(self):
return StaticDataSourceConfiguration.all()
class MockDataSourceProvider(DataSourceProvider):
# for testing only
def get_data_sources(self):
return []
| bsd-3-clause | Python |
720f0926249f8f9e1d255a6d206e3f5a42bd17a4 | Connect straight to the IP | remedi/pixel-knights,remedi/pixel-knights,remedi/pixel-knights | match_making/mm_server.py | match_making/mm_server.py | #!/usr/bin/env python
import socket
#Create which contains all the servers and maps they are running. This string is then send to clients.
def createServerList(serverList):
    """Serialize ``[addr_string, map_id]`` pairs into one string for clients.

    Each entry becomes ``"<addr> <map>"`` terminated by the sentinel byte
    ``'\200'`` (0x80).  Replaces the original quadratic ``+=`` concatenation
    loop with a single ``str.join``.
    """
    return "".join(server[0] + " " + server[1] + "\200" for server in serverList)
def getOwnAddr():
    """Discover this host's outward-facing IPv4 address (Python 2).

    Opens a throwaway TCP connection to a public DNS server and reads back
    the local address the OS picked for it; falls back to 'localhost' when
    the connection times out.
    """
    #Connect to google server
    try:
        # 5-second timeout; 8.8.8.8:53 is Google's public DNS resolver.
        ownAddr = socket.create_connection(('8.8.8.8', 53), 5)
        #Retrieve own IP
        my_IP = ownAddr.getsockname()[0]
        ownAddr.close()
        print "Retrieved own IP: ", my_IP
    except socket.timeout:
        # NOTE(review): other socket errors (e.g. network unreachable) are
        # not caught here and would propagate -- confirm that is intended.
        print "No connection, creating localserver"
        my_IP = 'localhost'
    return my_IP
def main():
    """Match-making server loop (Python 2).

    Map servers announce themselves with a message starting with 'S' and
    are appended to the in-memory server list; game clients send 'H' and
    receive the serialized list back.  Runs forever over plain TCP.
    """
    my_IP = getOwnAddr()
    port = 3500
    clientSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    #All servers that have announced themselves are added to this list:
    serverList = []
    #Look for open port: probe upward from 3500 until bind succeeds.
    while True:
        try:
            clientSocket.bind((my_IP, port))
            break
        except socket.error:
            port += 1
    print "Listening for connection on: ", clientSocket.getsockname()
    #print "Redirection message: ", serverAddr
    clientSocket.listen(10)
    while 1:
        (client, addr) = clientSocket.accept()
        recv = client.recv(1024)
        if recv[0] == 'H':
            # Client handshake: reply with "L<count-in-hex> " + server list.
            print "Client connected from: ", addr
            print "Responding with serverlist of %d servers" % len(serverList)
            sendMe = createServerList(serverList)
            sendMe = "L%x " % (len(serverList)) + sendMe
            #sendMe = "L{:02X} ".format(len(serverList)) + sendMe
            client.send(sendMe);
        elif recv[0] == 'S':
            # Map-server announcement: ack with "O" and register it.
            print "Map server connected: ", addr
            client.send("O");
            # Close this connection as soon as possible:
            client.close()
            addrString = addr[0] + " " + str(addr[1])
            # recv[1] arrives as a raw byte; +48 turns it into its ASCII digit.
            serverList.append([addrString, chr(ord(recv[1])+48)])
            print "Added: ", serverList[-1]
            print "Serverlist is now %d long" % len(serverList)
        else:
            print "Got unexpected message from client: %s msg: %s" % (addr, recv)
        client.close()
if __name__ == "__main__":
main()
| #!/usr/bin/env python
import socket
#Create which contains all the servers and maps they are running. This string is then send to clients.
def createServerList(serverList):
ser_string = ""
for server in serverList:
ser_string = ser_string + server[0] + " " + server[1] + "\200"
return ser_string
def getOwnAddr():
#We're creating connection to google server, to find out our own IP
googleAddr = socket.getaddrinfo('8.8.8.8', 53)
#googleAddr = socket.getaddrinfo('www.google.fi', 80)
#Choose last tuple from the received list of addresses to get an IPv4 connection
googleAddr = googleAddr[-1][-1]
#print "Connecting to google server:", googleAddr
#Connect to google server
try:
ownAddr = socket.create_connection(googleAddr, 5)
#Retrieve own IP
my_IP = ownAddr.getsockname()[0]
ownAddr.close()
print "Retrieved own IP: ", my_IP
except socket.timeout:
print "No connection, creating localserver"
my_IP = 'localhost'
return my_IP
def main():
my_IP = getOwnAddr()
port = 3500
#serverAddr = "M4 127.0.0.1 4375"
clientSocket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#All servers that have announced themselves are added to this list:
serverList = []
#Look for open port:
while True:
try:
clientSocket.bind((my_IP, port))
break
except socket.error:
port += 1
print "Listening for connection on: ", clientSocket.getsockname()
#print "Redirection message: ", serverAddr
clientSocket.listen(10)
while 1:
(client, addr) = clientSocket.accept()
recv = client.recv(1024)
if recv[0] == 'H':
print "Client connected from: ", addr
print "Responding with serverlist of %d servers" % len(serverList)
sendMe = createServerList(serverList)
sendMe = "L%x " % (len(serverList)) + sendMe
#sendMe = "L{:02X} ".format(len(serverList)) + sendMe
client.send(sendMe);
elif recv[0] == 'S':
print "Map server connected: ", addr
client.send("O");
# Close this connection as soon as possible:
client.close()
addrString = addr[0] + " " + str(addr[1])
serverList.append([addrString, chr(ord(recv[1])+48)])
print "Added: ", serverList[-1]
print "Serverlist is now %d long" % len(serverList)
else:
print "Got unexpected message from client: %s msg: %s" % (addr, recv)
client.close()
if __name__ == "__main__":
main()
| mit | Python |
2695690b345f9835f990a631ccf0797956653904 | Bump version to 6.1.4a1 | platformio/platformio-core,platformio/platformio-core | platformio/__init__.py | platformio/__init__.py | # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
VERSION = (6, 1, "4a1")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
)
__url__ = "https://platformio.org"
__author__ = "PlatformIO Labs"
__email__ = "contact@piolabs.com"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO Labs"
__accounts_api__ = "https://api.accounts.platformio.org"
__registry_mirror_hosts__ = [
"registry.platformio.org",
"registry.nm1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__default_requests_timeout__ = (10, None) # (connect, read)
__core_packages__ = {
"contrib-piohome": "~3.4.2",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-scons": "~4.40300.0",
"tool-cppcheck": "~1.270.0",
"tool-clangtidy": "~1.120001.0",
"tool-pvs-studio": "~7.18.0",
}
__check_internet_hosts__ = [
"185.199.110.153", # Github.com
"88.198.170.159", # platformio.org
"github.com",
] + __registry_mirror_hosts__
| # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
VERSION = (6, 1, 3)
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
)
__url__ = "https://platformio.org"
__author__ = "PlatformIO Labs"
__email__ = "contact@piolabs.com"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO Labs"
__accounts_api__ = "https://api.accounts.platformio.org"
__registry_mirror_hosts__ = [
"registry.platformio.org",
"registry.nm1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__default_requests_timeout__ = (10, None) # (connect, read)
__core_packages__ = {
"contrib-piohome": "~3.4.2",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-scons": "~4.40300.0",
"tool-cppcheck": "~1.270.0",
"tool-clangtidy": "~1.120001.0",
"tool-pvs-studio": "~7.18.0",
}
__check_internet_hosts__ = [
"185.199.110.153", # Github.com
"88.198.170.159", # platformio.org
"github.com",
] + __registry_mirror_hosts__
| apache-2.0 | Python |
96957203434f5e27f0f1bf0895371d380c3cf12a | Bump version to 5.0.1a1 | platformio/platformio,platformio/platformio-core,platformio/platformio-core | platformio/__init__.py | platformio/__init__.py | # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
VERSION = (5, 0, "1a1")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
)
__url__ = "https://platformio.org"
__author__ = "PlatformIO"
__email__ = "contact@platformio.org"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__accounts_api__ = "https://api.accounts.platformio.org"
__registry_api__ = [
"https://api.registry.platformio.org",
"https://api.registry.ns1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__default_requests_timeout__ = (10, None) # (connect, read)
__core_packages__ = {
"contrib-piohome": "~3.3.0",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-unity": "~1.20500.0",
"tool-scons": "~2.20501.7" if sys.version_info.major == 2 else "~4.40001.0",
"tool-cppcheck": "~1.210.0",
"tool-clangtidy": "~1.100000.0",
"tool-pvs-studio": "~7.9.0",
}
__check_internet_hosts__ = [
"140.82.118.3", # Github.com
"35.231.145.151", # Gitlab.com
"88.198.170.159", # platformio.org
"github.com",
"platformio.org",
]
| # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
VERSION = (5, 0, 0)
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = (
"A professional collaborative platform for embedded development. "
"Cross-platform IDE and Unified Debugger. "
"Static Code Analyzer and Remote Unit Testing. "
"Multi-platform and Multi-architecture Build System. "
"Firmware File Explorer and Memory Inspection. "
"IoT, Arduino, CMSIS, ESP-IDF, FreeRTOS, libOpenCM3, mbedOS, Pulp OS, SPL, "
"STM32Cube, Zephyr RTOS, ARM, AVR, Espressif (ESP8266/ESP32), FPGA, "
"MCS-51 (8051), MSP430, Nordic (nRF51/nRF52), NXP i.MX RT, PIC32, RISC-V, "
"STMicroelectronics (STM8/STM32), Teensy"
)
__url__ = "https://platformio.org"
__author__ = "PlatformIO"
__email__ = "contact@platformio.org"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__accounts_api__ = "https://api.accounts.platformio.org"
__registry_api__ = [
"https://api.registry.platformio.org",
"https://api.registry.ns1.platformio.org",
]
__pioremote_endpoint__ = "ssl:host=remote.platformio.org:port=4413"
__default_requests_timeout__ = (10, None) # (connect, read)
__core_packages__ = {
"contrib-piohome": "~3.3.0",
"contrib-pysite": "~2.%d%d.0" % (sys.version_info.major, sys.version_info.minor),
"tool-unity": "~1.20500.0",
"tool-scons": "~2.20501.7" if sys.version_info.major == 2 else "~4.40001.0",
"tool-cppcheck": "~1.210.0",
"tool-clangtidy": "~1.100000.0",
"tool-pvs-studio": "~7.9.0",
}
__check_internet_hosts__ = [
"140.82.118.3", # Github.com
"35.231.145.151", # Gitlab.com
"88.198.170.159", # platformio.org
"github.com",
"platformio.org",
]
| apache-2.0 | Python |
d38aed748d6fa68ae5f3418f4dcfdb6ea6f3930d | Update uniprot_proteome_table.py | malvikasharan/APRICOT,malvikasharan/APRICOT | apricotlib/uniprot_proteome_table.py | apricotlib/uniprot_proteome_table.py | #!/usr/bin/env python
import sys
try:
import urllib.request
from urllib.request import urlopen
except ImportError:
print('Python package urllib is missing. Please install/update.\n')
sys.exit(0)
__description__ = "Downloads the complete proteome data from UniProt."
__author__ = "Malvika Sharan <malvika.sharan@uni-wuerzburg.de>"
__email__ = "malvika.sharan@uni-wuerzburg.de"
def format_uniprot_table(proteome_table, uniprot_link):
    """Download the protein information table from UniProt and write it,
    one tab-separated row per line, to the open writable file object
    ``proteome_table``.

    Bug fix: the bare ``except:`` is narrowed to ``Exception`` so that
    SystemExit/KeyboardInterrupt still propagate.
    """
    try:
        response = urlopen(uniprot_link)
        # The payload arrives as bytes; str() renders it as "b'...'" with
        # literal escape sequences, hence the splits on '\\n' and '\\t'.
        for entry in str(response.read()).split('\\n'):
            if not entry == "'" and not entry == '"':
                if not entry.startswith("b'Entry") and not entry.startswith('b"Entry'):
                    proteome_table.write("%s\n" % '\t'.join(list(entry.split('\\t'))))
        print('"\nDownloaded protein information using UniProt link: %s\n"' % (
            uniprot_link))
    except Exception:
        # Deleted/renamed entries and network errors are reported but do not
        # kill the caller, preserving the original best-effort behavior.
        print(
            "UniProt entry is apparently deleted, please check: %s"
            % uniprot_link)
if __name__ == '__main__':
    # sys.argv[1] is the output path, sys.argv[2] a UniProt URL or query id.
    # Bug fixes: the original tested the undefined name ``uniprot_link``
    # (always a NameError), and passed the output *path* string where
    # format_uniprot_table expects an open, writable file object.
    uniprot_query = sys.argv[2]
    with open(sys.argv[1], 'w') as proteome_table:
        if "http://www.uniprot.org/" in uniprot_query:
            format_uniprot_table(proteome_table, uniprot_query)
        else:
            try:
                format_uniprot_table(proteome_table, uniprot_query)
            except KeyError:
                print("Please provide query UniProt ids or a taxonomy id for UniProt table retrieval.")
| #!/usr/bin/env python
import sys
try:
import urllib.request
from urllib.request import urlopen
except ImportError:
print('Python package urllib is missing. Please install/update.'
'Please ignore if you are only testing the example provided by APRICOT')
sys.exit(0)
__description__ = "Downloads the complete proteome data from UniProt."
__author__ = "Malvika Sharan <malvika.sharan@uni-wuerzburg.de>"
__email__ = "malvika.sharan@uni-wuerzburg.de"
def format_uniprot_table(proteome_table, uniprot_link):
'''Downloads protein information
table from UniProt database for
the selected taxonomy id'''
try:
response = urlopen(uniprot_link)
for entry in str(response.read()).split('\\n'):
if not entry == "'" and not entry == '"':
if not entry.startswith("b'Entry") and not entry.startswith('b"Entry'):
proteome_table.write("%s\n" % '\t'.join(list(entry.split('\\t'))))
print('"\nDownloaded protein information using UniProt link: %s\n"' % (
uniprot_link))
except:
print(
"UniProt entry is apparently deleted, please check: %s"
% uniprot_link)
if __name__ == '__main__':
proteome_table = sys.argv[1]
uniprot_query = sys.argv[2]
if "http://www.uniprot.org/" in uniprot_link:
format_uniprot_table(proteome_table, uniprot_query)
else:
try:
format_uniprot_table(proteome_table, uniprot_query)
except KeyError:
print("Please provide query UniProt ids or a taxonomy id for UniProt table retrieval.")
| isc | Python |
6ea3662df84c3c204fd6c023d413883f59bb3109 | tag 1.0.2 | tony/tmuxp | tmuxp/__about__.py | tmuxp/__about__.py | __title__ = 'tmuxp'
__package_name__ = 'tmuxp'
__version__ = '1.0.2'
__description__ = 'tmux session manager'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2016 Tony Narlock'
| __title__ = 'tmuxp'
__package_name__ = 'tmuxp'
__version__ = '1.0.1'
__description__ = 'tmux session manager'
__email__ = 'tony@git-pull.com'
__author__ = 'Tony Narlock'
__license__ = 'BSD'
__copyright__ = 'Copyright 2013-2016 Tony Narlock'
| bsd-3-clause | Python |
29ad11103d922ca1866bd0f1ef66b321c366718a | delete node if exists test works | miracode/data-structures | simple_graph.py | simple_graph.py |
class Node(object):
    """A graph vertex holding an arbitrary payload in ``value``."""
    def __init__(self, value=None):
        # None default allows placeholder nodes with no payload.
        self.value = value
class Edge(object):
    """A connection between two nodes, stored as endpoints ``n1`` and ``n2``."""
    def __init__(self, n1=None, n2=None):
        self.n1 = n1
        self.n2 = n2
class Graph(object):
    """Simple graph backed by flat node and edge lists."""

    def __init__(self):
        self.nodes_list = []
        self.edges_list = []

    def nodes(self):
        """Return list of all nodes in graph"""
        return self.nodes_list

    def edges(self):
        """Return list of all edges in graph"""
        return self.edges_list

    def add_node(self, node):
        """Add new node to the graph"""
        self.nodes_list.append(node)

    def add_edge(self, n1, n2):
        """Add a new edge to the graph, registering any endpoint not yet known."""
        self.edges_list.append(Edge(n1, n2))
        # If the endpoints do not exist in the list of nodes, add them.
        if n1 not in self.nodes():
            self.add_node(n1)
        if n2 not in self.nodes():
            self.add_node(n2)

    def del_node(self, n):
        """Delete node ``n`` and every edge touching it.

        Bug fix: the original removed items from ``edges_list`` and
        ``nodes_list`` while iterating over those same lists, which
        silently skips the element following each removal (so an edge or
        duplicate node adjacent to a removed one survived).  Rebuilding
        the lists avoids that.

        Raises:
            IndexError: if ``n`` is not in the graph.
        """
        if n not in self.nodes_list:
            raise IndexError("Node does not exist in graph.")
        self.edges_list = [edge for edge in self.edges_list
                           if edge.n1 != n and edge.n2 != n]
        self.nodes_list = [node for node in self.nodes_list if node != n]
|
class Node(object):
def __init__(self, value=None):
self.value = value
class Edge(object):
def __init__(self, n1=None, n2=None):
self.n1 = n1
self.n2 = n2
class Graph(object):
def __init__(self):
self.nodes_list = []
self.edges_list = []
def nodes(self):
"""Return list of all nodes in graph"""
return self.nodes_list
def edges(self):
"""Return list of all edges in graph"""
return self.edges_list
def add_node(self, node):
"""Add new node to the graph"""
self.nodes_list.append(node)
def add_edge(self, n1, n2):
"""Add new edge to graph"""
new_edge = Edge(n1, n2)
self.edges_list.append(new_edge)
# If new nodes do not exists in list of nodes, add them
if n1 not in self.nodes():
self.add_node(n1)
if n2 not in self.nodes():
self.add_node(n2)
| mit | Python |
9dd8f98e736dc70ae4f1a78f13ee643a63aa2e47 | Update unit test for static metaobjet to work with new optimizations | gbaty/pyside2,M4rtinK/pyside-android,enthought/pyside,M4rtinK/pyside-android,pankajp/pyside,PySide/PySide,RobinD42/pyside,qtproject/pyside-pyside,M4rtinK/pyside-bb10,pankajp/pyside,IronManMark20/pyside2,PySide/PySide,pankajp/pyside,BadSingleton/pyside2,enthought/pyside,enthought/pyside,pankajp/pyside,IronManMark20/pyside2,BadSingleton/pyside2,RobinD42/pyside,RobinD42/pyside,PySide/PySide,RobinD42/pyside,PySide/PySide,enthought/pyside,BadSingleton/pyside2,IronManMark20/pyside2,M4rtinK/pyside-bb10,qtproject/pyside-pyside,M4rtinK/pyside-bb10,gbaty/pyside2,qtproject/pyside-pyside,RobinD42/pyside,M4rtinK/pyside-android,qtproject/pyside-pyside,enthought/pyside,PySide/PySide,IronManMark20/pyside2,enthought/pyside,M4rtinK/pyside-bb10,M4rtinK/pyside-android,gbaty/pyside2,BadSingleton/pyside2,qtproject/pyside-pyside,IronManMark20/pyside2,RobinD42/pyside,BadSingleton/pyside2,M4rtinK/pyside-bb10,pankajp/pyside,RobinD42/pyside,M4rtinK/pyside-android,M4rtinK/pyside-android,gbaty/pyside2,enthought/pyside,gbaty/pyside2,M4rtinK/pyside-bb10 | tests/signals/static_metaobject_test.py | tests/signals/static_metaobject_test.py | #!/usr/bin/env python
"""Tests covering signal emission and receiving to python slots"""
import unittest
from PySide.QtCore import QObject, SIGNAL
from helper import UsesQCoreApplication
class MyObject(QObject):
    """QObject that counts how many times its slot has been invoked."""
    def __init__(self, parent=None):
        QObject.__init__(self, parent)
        self._slotCalledCount = 0

    def mySlot(self):
        # Incremented once per signal delivery; inspected by the tests below.
        self._slotCalledCount = self._slotCalledCount + 1
class StaticMetaObjectTest(UsesQCoreApplication):
    """Checks that dynamically created signals are registered per-instance
    in the Qt metaobject and do not leak to other instances or outlive
    their owners."""

    def testSignalPropagation(self):
        o = MyObject()
        o2 = MyObject()
        # SIGNAL foo not created yet
        self.assertEqual(o.metaObject().indexOfSignal("foo()"), -1)
        o.connect(SIGNAL("foo()"), o2.mySlot)
        # SIGNAL foo create after connect
        self.assert_(o.metaObject().indexOfSignal("foo()") > 0)
        # SIGNAL does not propagate to others objects of the same type
        self.assertEqual(o2.metaObject().indexOfSignal("foo()"), -1)
        del o
        del o2
        o = MyObject()
        # The SIGNAL was destroyed with old objects
        self.assertEqual(o.metaObject().indexOfSignal("foo()"), -1)

    def testSharedSignalEmission(self):
        # Two senders share the same dynamic signal name; deleting one
        # sender must not break delivery from the other.
        o = QObject()
        m = MyObject()
        o.connect(SIGNAL("foo2()"), m.mySlot)
        m.connect(SIGNAL("foo2()"), m.mySlot)
        o.emit(SIGNAL("foo2()"))
        self.assertEqual(m._slotCalledCount, 1)
        del o
        m.emit(SIGNAL("foo2()"))
        self.assertEqual(m._slotCalledCount, 2)
if __name__ == '__main__':
unittest.main()
| #!/usr/bin/env python
"""Tests covering signal emission and receiving to python slots"""
import sys
import unittest
import functools
from PySide.QtCore import *
from helper import BasicPySlotCase, UsesQCoreApplication
class MyObject(QObject):
def __init__(self, parent=None):
QObject.__init__(self, parent)
self._slotCalledCount = 0
def mySlot(self):
self._slotCalledCount = self._slotCalledCount + 1
class StaticMetaObjectTest(UsesQCoreApplication):
def testSignalPropagation(self):
o = MyObject()
o2 = MyObject()
m = o.metaObject()
# SIGNAL foo not created yet
self.assertEqual(m.indexOfSignal("foo()"), -1)
o.connect(SIGNAL("foo()"), o2.mySlot)
# SIGNAL foo create after connect
self.assert_(m.indexOfSignal("foo()") > 0)
m = o2.metaObject()
# SIGNAL propagate to others objects of the same type
self.assert_(m.indexOfSignal("foo()") > 0)
del o
# SIGNAL foo continues registered after deletion of original object
self.assert_(m.indexOfSignal("foo()") > 0)
del o2
o = MyObject()
m = o.metaObject()
# new objects still have the SIGNAL foo registered
self.assert_(m.indexOfSignal("foo()") > 0)
def testSharedSignalEmission(self):
o = QObject()
m = MyObject()
o.connect(SIGNAL("foo2()"), m.mySlot)
m.connect(SIGNAL("foo2()"), m.mySlot)
o.emit(SIGNAL("foo2()"))
self.assertEqual(m._slotCalledCount, 1)
del o
m.emit(SIGNAL("foo2()"))
self.assertEqual(m._slotCalledCount, 2)
if __name__ == '__main__':
unittest.main()
| lgpl-2.1 | Python |
38fd8ad23dcd125c872e332cdf51d3e465924c42 | update to fix standalone testing | davisd50/sparc.common | sparc/common/tests/test_cli.py | sparc/common/tests/test_cli.py | import os
import zope.testrunner
from sparc.testing.fixture import test_suite_mixin
class test_suite(test_suite_mixin):
    # The mixin discovers and builds the tests for the named module;
    # this class only points it at sparc.common.cli.
    package = 'sparc.common'
    module = 'cli'
if __name__ == '__main__':
zope.testrunner.run([
'--path', os.path.dirname(__file__),
'--tests-pattern', os.path.splitext(
os.path.basename(__file__))[0]
]) | import unittest
from sparc.testing.fixture import test_suite_mixin
class test_suite(test_suite_mixin):
package = 'sparc.common'
module = 'cli'
if __name__ == '__main__':
unittest.main(defaultTest='test_suite') | mit | Python |
cdd4a34559a7f1281399288d943f1158857f2829 | Add vpc_network to create_routers | yunify/qingcloud-cli | qingcloud/cli/iaas_client/actions/router/create_routers.py | qingcloud/cli/iaas_client/actions/router/create_routers.py | # =========================================================================
# Copyright 2012-present Yunify, Inc.
# -------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from qingcloud.cli.iaas_client.actions.base import BaseAction
class CreateRoutersAction(BaseAction):
    """CLI action wrapping the QingCloud ``CreateRouters`` API call."""

    action = 'CreateRouters'
    command = 'create-routers'
    usage = '%(prog)s [-c <count>] [-n <router_name>] [-f <conf_file>]'

    @classmethod
    def add_ext_arguments(cls, parser):
        """Register this action's extra command-line options on *parser*."""
        parser.add_argument('-c', '--count', dest='count',
                action='store', type=int, default=1,
                help='the number of routers to create.')
        parser.add_argument('-N', '--router_name', dest='router_name',
                action='store', type=str, default='',
                help='the short name of routers')
        parser.add_argument('-s', '--security_group', dest='security_group',
                action='store', type=str, default='',
                help='ID of the security group you want to apply to router, use default security group if not specified')
        parser.add_argument('-n', '--vpc_network', dest='vpc_network',
                action='store', type=str, default=None,
                help='VPC IP addresses range, currently support "192.168.0.0/16" or "172.16.0.0/16", required in zone pek3a')

    @classmethod
    def build_directive(cls, options):
        """Validate *options* and build the API request dict, or None on error.

        ``router_name`` defaults to '' and the empty string is rejected below,
        so it is effectively a required option.
        """
        required_params = {
            'router_name': options.router_name,
        }
        for param in required_params:
            if required_params[param] is None or required_params[param] == '':
                print('error: [%s] should be specified' % param)
                return None
        return {
            'count' : options.count,
            'router_name' : options.router_name,
            'security_group': options.security_group,
            'vpc_network': options.vpc_network,
        }
| # =========================================================================
# Copyright 2012-present Yunify, Inc.
# -------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from qingcloud.cli.iaas_client.actions.base import BaseAction
class CreateRoutersAction(BaseAction):
action = 'CreateRouters'
command = 'create-routers'
usage = '%(prog)s [-c <count>] [-n <router_name>] [-f <conf_file>]'
@classmethod
def add_ext_arguments(cls, parser):
parser.add_argument('-c', '--count', dest='count',
action='store', type=int, default=1,
help='the number of routers to create.')
parser.add_argument('-N', '--router_name', dest='router_name',
action='store', type=str, default='',
help='the short name of routers')
parser.add_argument('-s', '--security_group', dest='security_group',
action='store', type=str, default='',
help='ID of the security group you want to apply to router, use default security group if not specified')
@classmethod
def build_directive(cls, options):
required_params = {
'router_name': options.router_name,
}
for param in required_params:
if required_params[param] is None or required_params[param] == '':
print('error: [%s] should be specified' % param)
return None
return {
'count' : options.count,
'router_name' : options.router_name,
'security_group': options.security_group,
}
| apache-2.0 | Python |
0e45ad2d7f459ecf6a7068b9309b27a9aa4208e3 | remove invalid homepage (#28269) | LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack,LLNL/spack | var/spack/repos/builtin/packages/r-illuminahumanmethylationepicmanifest/package.py | var/spack/repos/builtin/packages/r-illuminahumanmethylationepicmanifest/package.py | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RIlluminahumanmethylationepicmanifest(RPackage):
    """Manifest for Illumina's EPIC methylation arrays"""

    # Bioconductor annotation-data package, fetched as a source tarball.
    url = "https://bioconductor.org/packages/release/data/annotation/src/contrib/IlluminaHumanMethylationEPICmanifest_0.3.0.tar.gz"
    bioc = "IlluminaHumanMethylationEPICmanifest"

    version('0.3.0', sha256='e39a69d98486cec981e97c56f45bbe47d2ccb5bbb66a1b16fa0685575493902a')

    # R dependencies needed both at build and at run time.
    depends_on('r@3.2.0:', type=('build', 'run'))
    depends_on('r-minfi', type=('build', 'run'))
| # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RIlluminahumanmethylationepicmanifest(RPackage):
"""Manifest for Illumina's EPIC methylation arrays"""
homepage = "https://bitbucket.com/kasperdanielhansen/Illumina_EPIC"
url = "https://bioconductor.org/packages/release/data/annotation/src/contrib/IlluminaHumanMethylationEPICmanifest_0.3.0.tar.gz"
bioc = "IlluminaHumanMethylationEPICmanifest"
version('0.3.0', sha256='e39a69d98486cec981e97c56f45bbe47d2ccb5bbb66a1b16fa0685575493902a')
depends_on('r@3.2.0:', type=('build', 'run'))
depends_on('r-minfi', type=('build', 'run'))
| lgpl-2.1 | Python |
4324c02773aa4bcfc916552ab35dd99a3187b154 | Add tokens parsing. | thewizardplusplus/micro,thewizardplusplus/micro,thewizardplusplus/micro | micro.py | micro.py | #!/usr/bin/env python
from sys import argv
def get_code():
    """Return the program source passed as the first command-line argument."""
    return argv[1]


def get_tokens(code):
    """Split the source string into tokens on single-space boundaries."""
    tokens = code.split(' ')
    return tokens


if __name__ == '__main__':
    # Read the source from argv, tokenize it, and show the token list.
    print(get_tokens(get_code()))
| #!/usr/bin/env python
from sys import argv
def get_code():
return argv[1]
if __name__ == '__main__':
code = get_code()
print(code)
| mit | Python |
d2150bbf65c190d0c92c99b1206222df867c6cb1 | bump version to 0.1.8 | Maplecroft/django-cms-saq,Maplecroft/django-cms-saq,Maplecroft/django-cms-saq | cms_saq/__init__.py | cms_saq/__init__.py | __version__ = "0.1.8"
| __version__ = "0.1.7"
| mit | Python |
d3c941ad4a955043fa41bfb800f6310e6f91a7bc | Modify return tag search data with tag name | NA5G/coco-server-was,NA5G/coco-server-was,NA5G/coco-server-was | coco/users/views.py | coco/users/views.py | # -*- coding: utf-8 -*-
from django.shortcuts import render
from django.contrib import messages
from django.contrib.auth import login as auth_login
from django.contrib.auth import logout as auth_logout
from django.shortcuts import redirect
# from django.contrib.auth.decorators import login_required
from forms import LoginForm, SignUpForm
def anonymous_required(view_function, redirect_to=None):
    """Decorator: only serve the view to anonymous users.

    Authenticated users are redirected to *redirect_to* instead; the
    actual logic lives in the ``AnonymousRequired`` wrapper class.
    """
    return AnonymousRequired(view_function, redirect_to)
class AnonymousRequired(object):
    """Callable view wrapper that redirects authenticated users away.

    Anonymous visitors fall through to the wrapped view; logged-in users
    are redirected to *redirect_to*, defaulting to
    ``settings.LOGIN_REDIRECT_URL``.
    """

    def __init__(self, view_function, redirect_to):
        if redirect_to is None:
            # Imported lazily so Django settings are only touched when needed.
            from django.conf import settings
            redirect_to = settings.LOGIN_REDIRECT_URL
        self.view_function = view_function
        self.redirect_to = redirect_to

    def __call__(self, request, *args, **kwargs):
        # Bounce authenticated users to the configured redirect target.
        if request.user is not None and request.user.is_authenticated():
            return redirect(self.redirect_to)
        return self.view_function(request, *args, **kwargs)
@anonymous_required
def sign_up(request):
    """Render the sign-up form; create the account on a valid POST."""
    form = SignUpForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        # NOTE(review): `user` is assigned but never used afterwards.
        user = form.sign_up(request)
        # User-facing Korean message: "Your account has been created."
        messages.success(request, '아이디가 생성됐습니다.')
        return redirect('/login')
    return render(request, 'users/sign_up.html', {
        'form': form,
    })
@anonymous_required
def login(request):
    """Render the login form; authenticate and start a session on valid POST."""
    form = LoginForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        user = form.login(request)
        if user:
            # Bind the authenticated user to the request's session.
            auth_login(request, user)
            return redirect('/')
    return render(request, 'users/login.html', {
        'form': form,
    })
def logout(request):
    """Log the current user out (a no-op for anonymous users), then go home."""
    if request.user is not None and request.user.is_authenticated():
        auth_logout(request)
    return redirect('/')
| # -*- coding: utf-8 -*-
from django.shortcuts import render
from django.contrib import messages
from django.contrib.auth import login as auth_login
from django.contrib.auth import logout as auth_logout
from django.shortcuts import redirect
# from django.contrib.auth.decorators import login_required
from forms import LoginForm, SignUpForm
def anonymous_required(view_function, redirect_to=None):
return AnonymousRequired(view_function, redirect_to)
class AnonymousRequired(object):
def __init__(self, view_function, redirect_to):
if redirect_to is None:
from django.conf import settings
redirect_to = settings.LOGIN_REDIRECT_URL
self.view_function = view_function
self.redirect_to = redirect_to
def __call__(self, request, *args, **kwargs):
if request.user is not None and request.user.is_authenticated():
return redirect(self.redirect_to)
return self.view_function(request, *args, **kwargs)
@anonymous_required
def sign_up(request):
form = SignUpForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
user = form.sign_up(request)
messages.success(request, '아이디가 생성됐습니다.')
return redirect('/login')
return render(request, 'users/sign_up.html', {
'form': form,
})
@anonymous_required
def login(request):
form = LoginForm(request.POST or None)
if request.method == 'POST' and form.is_valid():
user = form.login(request)
if user:
auth_login(request, user)
return redirect('/')
return render(request, 'users/login.html', {
'form': form,
})
def logout(request):
if request.user is not None and request.user.is_authenticated():
auth_logout(request)
return redirect('/')
| mit | Python |
146ffb3afa24a7dddc9d19a26f6b52b0260f3f2c | Remove unused function | DirkHoffmann/indico,DirkHoffmann/indico,OmeGak/indico,indico/indico,DirkHoffmann/indico,ThiefMaster/indico,OmeGak/indico,pferreir/indico,mvidalgarcia/indico,mic4ael/indico,ThiefMaster/indico,mvidalgarcia/indico,pferreir/indico,pferreir/indico,OmeGak/indico,mvidalgarcia/indico,mic4ael/indico,OmeGak/indico,indico/indico,mic4ael/indico,ThiefMaster/indico,indico/indico,mic4ael/indico,mvidalgarcia/indico,ThiefMaster/indico,indico/indico,pferreir/indico,DirkHoffmann/indico | indico/modules/rb/models/utils.py | indico/modules/rb/models/utils.py | # -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2013 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
from functools import wraps
from indico.core.errors import IndicoError
def unimplemented(exceptions=(Exception,), message='Unimplemented'):
    """Decorator factory: translate the given *exceptions* raised by the
    wrapped callable into an ``IndicoError`` carrying *message*."""
    def decorator(func):
        @wraps(func)
        def wrapped(*args, **kwargs):
            try:
                result = func(*args, **kwargs)
            except exceptions:
                raise IndicoError(str(message))
            return result
        return wrapped
    return decorator
def proxy_to_reservation_if_single_occurrence(f):
    """Forwards a method call to `self.reservation` if there is only one occurrence."""
    @wraps(f)
    def wrapper(self, *args, **kwargs):
        # `propagate=False` forces the occurrence-level implementation.
        if not kwargs.pop('propagate', True):
            return f(self, *args, **kwargs)
        # Reservation-level method with the same name as the wrapped one.
        resv_func = getattr(self.reservation, f.__name__)
        if not self.reservation.is_repeating:
            return resv_func(*args, **kwargs)
        # limit(2) is enough to distinguish "exactly one" from "more".
        valid_occurrences = self.reservation.occurrences.filter_by(is_valid=True).limit(2).all()
        if len(valid_occurrences) == 1 and valid_occurrences[0] == self:
            # If we ever use this outside ReservationOccurrence we can probably get rid of the ==self check
            return resv_func(*args, **kwargs)
        return f(self, *args, **kwargs)
    return wrapper
| # -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2013 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
from datetime import datetime
from functools import wraps
from dateutil.relativedelta import MO, TU, WE, TH, FR
from dateutil.rrule import rrule, DAILY
from indico.core.errors import IndicoError
def unimplemented(exceptions=(Exception,), message='Unimplemented'):
def _unimplemented(func):
@wraps(func)
def _wrapper(*args, **kw):
try:
return func(*args, **kw)
except exceptions:
raise IndicoError(str(message))
return _wrapper
return _unimplemented
def next_work_day(dtstart=None, neglect_time=True):
if not dtstart:
dtstart = datetime.utcnow()
if neglect_time:
dtstart = datetime.combine(dtstart.date(), datetime.min.time())
return list(rrule(DAILY, count=1, byweekday=(MO, TU, WE, TH, FR), dtstart=dtstart))[0]
def proxy_to_reservation_if_single_occurrence(f):
"""Forwards a method call to `self.reservation` if there is only one occurrence."""
@wraps(f)
def wrapper(self, *args, **kwargs):
if not kwargs.pop('propagate', True):
return f(self, *args, **kwargs)
resv_func = getattr(self.reservation, f.__name__)
if not self.reservation.is_repeating:
return resv_func(*args, **kwargs)
valid_occurrences = self.reservation.occurrences.filter_by(is_valid=True).limit(2).all()
if len(valid_occurrences) == 1 and valid_occurrences[0] == self:
# If we ever use this outside ReservationOccurrence we can probably get rid of the ==self check
return resv_func(*args, **kwargs)
return f(self, *args, **kwargs)
return wrapper
| mit | Python |
86f3647531c65dbc2c940aa617625b2adc195273 | Make temporary test account respect new password rules | jsharkey13/isaac-selenium-testing,jsharkey13/isaac-selenium-testing | isaactest/utils/initialisation.py | isaactest/utils/initialisation.py | import selenium.webdriver
import time
from .log import log, INFO
from .i_selenium import new_tab, NoWebDriverException
from .isaac import TestUsers, User
from ..emails.guerrillamail import GuerrillaInbox, set_guerrilla_mail_address
def define_users():
    """Set up the TestUser object and add the temporary email address to it."""
    Users = TestUsers.load()
    # Temporary GuerrillaMail-backed account used by the signup/password tests.
    Guerrilla = User("isaactest@sharklasers.com", "Temp",
                     "Test", "testing")
    Users.Guerrilla = Guerrilla
    # Secondary address and replacement password for change-details tests.
    Users.Guerrilla.new_email = "isaactesttwo@sharklasers.com"
    Users.Guerrilla.new_password = "testing123"
    return Users
def start_selenium(Users, ISAAC_WEB, GUERRILLAMAIL, WAIT_DUR, PATH_TO_DRIVER):
    """Start the Selenium WebDriver of choice.

    Start a Selenium WebDriver then return it and a GuerrillaInbox object.
    The browser is chosen from the driver binary's name: a path containing
    'chrome' starts Chrome, one containing 'gecko' starts Firefox, anything
    else raises NoWebDriverException.

     - 'Users' should be the TestUser object returned by
       'isaactest.utils.initialisation.define_users()'.
     - 'ISAAC_WEB' is the string URL of the Isaac website to be tested.
     - 'GUERRILLAMAIL' is the string URL of GuerrillaMail.
     - 'WAIT_DUR' is the time in seconds to wait for JavaScript to run/load.
     - 'PATH_TO_DRIVER' is the path to the WebDriver executable
       (ChromeDriver or GeckoDriver).
    """
    # Selenium Start-up: pick the browser from the driver binary's name.
    if "chrome" in PATH_TO_DRIVER:
        driver = selenium.webdriver.Chrome(executable_path=PATH_TO_DRIVER)
    elif "gecko" in PATH_TO_DRIVER:
        driver = selenium.webdriver.Firefox(executable_path=PATH_TO_DRIVER)
    else:
        raise NoWebDriverException
    driver.set_window_size(1920, 1080)
    driver.maximize_window()
    log(INFO, "Opened Selenium Driver for '%s'." % driver.name.title())
    time.sleep(WAIT_DUR)
    # Navigate to the Isaac site under test:
    driver.get(ISAAC_WEB)
    log(INFO, "Got: %s" % ISAAC_WEB)
    time.sleep(WAIT_DUR)
    # Open GuerrillaMail in a second tab:
    new_tab(driver)
    time.sleep(WAIT_DUR)
    driver.get(GUERRILLAMAIL)
    log(INFO, "Got: %s" % GUERRILLAMAIL)
    # Set the Guerrilla Mail email address (site may normalise it):
    time.sleep(WAIT_DUR)
    Users.Guerrilla.email = set_guerrilla_mail_address(driver, Users.Guerrilla.email)
    time.sleep(WAIT_DUR)
    inbox = GuerrillaInbox(driver)
    time.sleep(WAIT_DUR)
    # Delete the GuerrillaMail welcome message so the inbox starts empty:
    inbox.delete_emails()
    time.sleep(WAIT_DUR)
    return driver, inbox
| import selenium.webdriver
import time
from .log import log, INFO
from .i_selenium import new_tab, NoWebDriverException
from .isaac import TestUsers, User
from ..emails.guerrillamail import GuerrillaInbox, set_guerrilla_mail_address
def define_users():
"""Set up the TestUser object and add the temporary email address to it."""
Users = TestUsers.load()
Guerrilla = User("isaactest@sharklasers.com", "Temp",
"Test", "test")
Users.Guerrilla = Guerrilla
Users.Guerrilla.new_email = "isaactesttwo@sharklasers.com"
Users.Guerrilla.new_password = "testing123"
return Users
def start_selenium(Users, ISAAC_WEB, GUERRILLAMAIL, WAIT_DUR, PATH_TO_DRIVER):
"""Start the Selenium WebDriver of choice.
Start a Selenium WebDriver then return it and a GuerrillaInbox object. If
a 'PATH_TO_CHROMEDRIVER' is specified, the WebDriver will be Chrome, otherwise
Firefox will be used.
- 'Users' should be the TestUser object returned by
'isaactest.utils.initialisation.define_users()'.
- 'ISAAC_WEB' is the string URL of the Isaac website to be tested.
- 'GUERRILLAMAIL' is the string URL of GuerrillaMail.
- 'WAIT_DUR' is the time in seconds to wait for JavaScript to run/load.
- 'PATH_TO_CHROMEDRIVER' is an optional argument telling Python where to
look for the ChromeDriver executable. If not specified, Firefox will
be used.
"""
# Selenium Start-up:
if "chrome" in PATH_TO_DRIVER:
driver = selenium.webdriver.Chrome(executable_path=PATH_TO_DRIVER)
elif "gecko" in PATH_TO_DRIVER:
driver = selenium.webdriver.Firefox(executable_path=PATH_TO_DRIVER)
else:
raise NoWebDriverException
driver.set_window_size(1920, 1080)
driver.maximize_window()
log(INFO, "Opened Selenium Driver for '%s'." % driver.name.title())
time.sleep(WAIT_DUR)
# Navigate to Staging:
driver.get(ISAAC_WEB)
log(INFO, "Got: %s" % ISAAC_WEB)
time.sleep(WAIT_DUR)
# Open GuerrillaMail:
new_tab(driver)
time.sleep(WAIT_DUR)
driver.get(GUERRILLAMAIL)
log(INFO, "Got: %s" % GUERRILLAMAIL)
# Set Guerrilla Mail email address:
time.sleep(WAIT_DUR)
Users.Guerrilla.email = set_guerrilla_mail_address(driver, Users.Guerrilla.email)
time.sleep(WAIT_DUR)
inbox = GuerrillaInbox(driver)
time.sleep(WAIT_DUR)
# Delete GuerrillaMail welcome and clear inbox:
inbox.delete_emails()
time.sleep(WAIT_DUR)
return driver, inbox
| mit | Python |
30103ef19635343b91ed590811639312d6f69795 | Add license text to a report test file | fastmonkeys/kuulemma,fastmonkeys/kuulemma,fastmonkeys/kuulemma,City-of-Helsinki/kuulemma,City-of-Helsinki/kuulemma,City-of-Helsinki/kuulemma | tests/views/hearing/text_xlsx_report.py | tests/views/hearing/text_xlsx_report.py | # -*- coding: utf-8 -*-
# Kuulemma
# Copyright (C) 2014, Fast Monkeys Oy
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import pytest
from flask import url_for
from tests.factories import HearingFactory
@pytest.mark.usefixtures('request_ctx')
def test_xlsx_report_url():
    # The XLSX report route should resolve to /<slug>/raportti.xlsx.
    assert (
        url_for('hearing.report_as_xlsx', slug='some-slug') ==
        '/some-slug/raportti.xlsx'
    )
@pytest.mark.usefixtures('database', 'request_ctx')
class TestHearingCsvReportOnSuccess(object):
    """Fetching the report of a published hearing succeeds.

    NOTE(review): the class name says "Csv" but the endpoint under test
    is the XLSX report.
    """

    @pytest.fixture
    def hearing(self):
        # The report is only reachable for published hearings.
        return HearingFactory(published=True)

    @pytest.fixture
    def response(self, client, hearing):
        return client.get(
            url_for('hearing.report_as_xlsx', slug=hearing.slug)
        )

    def test_returns_200(self, response):
        assert response.status_code == 200
@pytest.mark.usefixtures('database', 'request_ctx')
class TestHearingCsvReportOnError(object):
    """Report requests for missing or unpublished hearings return 404."""

    def test_returns_404_for_non_existent_hearing(self, client):
        response = client.get(
            url_for('hearing.report_as_xlsx', slug='abcde')
        )
        assert response.status_code == 404

    def test_returns_404_for_unpublished_hearing(self, client):
        hearing = HearingFactory(published=False)
        # NOTE(review): this hits 'hearing.show' rather than the report route.
        response = client.get(
            url_for('hearing.show', hearing_id=hearing.id, slug=hearing.slug)
        )
        assert response.status_code == 404
| import pytest
from flask import url_for
from tests.factories import HearingFactory
@pytest.mark.usefixtures('request_ctx')
def test_xlsx_report_url():
assert (
url_for('hearing.report_as_xlsx', slug='some-slug') ==
'/some-slug/raportti.xlsx'
)
@pytest.mark.usefixtures('database', 'request_ctx')
class TestHearingCsvReportOnSuccess(object):
@pytest.fixture
def hearing(self):
return HearingFactory(published=True)
@pytest.fixture
def response(self, client, hearing):
return client.get(
url_for('hearing.report_as_xlsx', slug=hearing.slug)
)
def test_returns_200(self, response):
assert response.status_code == 200
@pytest.mark.usefixtures('database', 'request_ctx')
class TestHearingCsvReportOnError(object):
def test_returns_404_for_non_existent_hearing(self, client):
response = client.get(
url_for('hearing.report_as_xlsx', slug='abcde')
)
assert response.status_code == 404
def test_returns_404_for_unpublished_hearing(self, client):
hearing = HearingFactory(published=False)
response = client.get(
url_for('hearing.show', hearing_id=hearing.id, slug=hearing.slug)
)
assert response.status_code == 404
| agpl-3.0 | Python |
32c8f4e21d27d2c1aa470c16bd97d4e7f9efc135 | Add --dc and --open options | sknorr/suse-doc-style-checker,sknorr/suse-doc-style-checker,sknorr/suse-doc-style-checker | source/check.py | source/check.py | #!/usr/bin/python3
# -*- coding: UTF-8 -*-
#
# This might check style & grammar one day. I'm hopeful. Kinda.
import sys, os, subprocess, shutil
from lxml import etree
# Command-line state: whether to open the report, whether the input is a DC
# file, where results are written, and the (mutated) argument list.
openfile = False
dcfile = False
resultpath = "/tmp/"
arguments = sys.argv


def showusage():
    """Exit with a short usage hint (called when no FILE argument is given)."""
    sys.exit("""No arguments provided.
Usage: %s [OPTIONS] FILE
To see all options, do %s --help""" % (arguments[0], arguments[0]))


def _strip_option(args, *names):
    """Return *args* with every occurrence of the given option names removed."""
    return [a for a in args if a not in names]


if len(arguments) < 2:
    showusage()

if ("--help" in arguments) or ("-h" in arguments):
    # Typo fix in the help text: "as input", not "is input".
    sys.exit("""Usage: %s [OPTIONS] FILE

Options:
 --dc       Use a DC file as input, this will invoke DAPS to create a bigfile
 --open     Open final report in $BROWSER
 --version  Print version number
 --help     Show this screen""" % arguments[0])

if ("--version" in arguments) or ("-v" in arguments):
    sys.exit("Style Checker 0.1.0")

if ("--open" in arguments) or ("-o" in arguments):
    openfile = True
    arguments = _strip_option(arguments, '--open', '-o')

if ("--dc" in arguments) or ("-d" in arguments):
    dcfile = True
    resultpath = "build/.tmp/"
    arguments = _strip_option(arguments, '--dc', '-d')

# Re-check: the options stripped above may have been the only arguments.
if len(arguments) < 2:
    showusage()

if not os.path.exists(arguments[-1]):
    sys.exit("File %s does not exist.\n" % arguments[-1])

inputfile = arguments[-1]

if dcfile:
    # Let DAPS flatten the DC file into a single "bigfile"; its stdout is the
    # path of the generated file (with the trailing newline removed).
    inputfile = subprocess.check_output(['daps', '-d', arguments[-1], 'bigfile'])
    inputfile = inputfile.decode('UTF-8').replace('\n', '')

parser = etree.XMLParser(ns_clean=True,
                         remove_pis=False,
                         dtd_validation=False)
inputfile = etree.parse(inputfile)
transform = etree.XSLT(etree.parse('check.xsl', parser))
result = transform(inputfile)

# BUG FIX: `resultpath` was assigned above (including "build/.tmp/" for --dc)
# but the output paths were hard-coded to /tmp/ and never used it. Also
# removed the unused `root = result.getroot()`.
os.makedirs(resultpath, exist_ok=True)
resultfile = os.path.join(resultpath, 'checkresult.xml')
result.write(resultfile,
             xml_declaration=True,
             encoding="UTF-8",
             pretty_print=True)
shutil.copyfile('check.css', os.path.join(resultpath, 'checkresult.css'))

if openfile:
    # BUG FIX: a missing BROWSER variable used to raise a bare KeyError.
    browser = os.environ.get('BROWSER')
    if browser is None:
        sys.exit('The --open option requires the BROWSER environment variable.')
    subprocess.call([browser, resultfile])
# -*- coding: UTF-8 -*-
#
# This might check style & grammar one day. I'm hopeful. Kinda.
import sys, os, subprocess, shutil
from lxml import etree
import webbrowser
if len( sys.argv ) < 2:
sys.exit( "No file provided.\nUsage: %s FILE" % sys.argv[0] )
if sys.argv[1] == "--help" or sys.argv[1] == "-h":
sys.exit( "Usage: %s FILE" % sys.argv[0] )
if sys.argv[1] == "--version" or sys.argv[1] == "-v":
sys.exit( "Style Checker 0.1.0" )
if not os.path.exists( sys.argv[1] ):
sys.exit( "File %s provided does not exist.\n" % sys.argv[1] )
parser = etree.XMLParser(ns_clean=True,
remove_pis=False,
#resolve_entities=False,
dtd_validation=False)
inputfile = etree.parse( sys.argv[1] )
transform = etree.XSLT( etree.parse( "check.xsl", parser ) )
result = transform( inputfile )
#estring = etree.tounicode( result, pretty_print=True )
#resultfile = open( '/tmp/checkresult.xml', 'w' )
#resultfile.write( estring )
#resultfile.close()
root = result.getroot()
print("root:", root.getchildren() )
result.write( '/tmp/checkresult.xml', xml_declaration=True, encoding="UTF-8", pretty_print=True)
shutil.copyfile( 'check.css', '/tmp/checkresult.css' )
firefox = webbrowser.get("firefox")
firefox.open_new_tab(url="/tmp/checkresult.xml")
# subprocess.call(['firefox', '/tmp/checkresult.xml'])
# EOF
| lgpl-2.1 | Python |
44a5d71858cbe3f1dd399af6991e9438a520a197 | refactor location permission resource | unicef/rhizome,unicef/rhizome,unicef/rhizome,unicef/rhizome | rhizome/api/resources/location_permission.py | rhizome/api/resources/location_permission.py | from rhizome.api.resources.base_model import BaseModelResource
from rhizome.models import LocationPermission
class LocationPermissionResource(BaseModelResource):
    '''
    **GET Request** This endpoint tells which locations a user is responsible for.

    - *Required Parameters:*
        'user_id'

    - *Errors*
        API returns a 500 error if a required parameter is not supplied

    **POST Request**

    - *Required Parameters:*
        'user_id'
        'location_id'

    - *Errors*
        API returns a 500 error if a required parameter is not supplied
    '''

    class Meta(BaseModelResource.Meta):
        # Resource wiring: expose LocationPermission rows under
        # /location_responsibility, requiring these fields on POST.
        object_class = LocationPermission
        resource_name = 'location_responsibility'
        required_fields_for_post = ['user_id', 'top_lvl_location_id']

    def apply_filters(self, request, applicable_filters):
        """
        This is not the locations that the logged in user can see,
        these are the locations that appear when you look at a particular
        users page... otherwise we would say u_id = request.user.id
        """
        # Restrict results to the user named in the GET query string.
        applicable_filters['user_id'] = request.GET['user_id']
        return self.get_object_list(request).filter(**applicable_filters)
| from rhizome.api.resources.base_model import BaseModelResource
from rhizome.models import LocationPermission
class LocationPermissionResource(BaseModelResource):
'''
**GET Request** This endpoint tells which locations a user is responsible for.
- *Required Parameters:*
'user_id'
- *Errors*
API returns a 500 error if a required parameter is not supplied
**POST Request**
- *Required Parameters:*
'user_id'
'location_id'
- *Errors*
API returns a 500 error if a required parameter is not supplied
'''
class Meta(BaseModelResource.Meta):
object_class = LocationPermission
resource_name = 'location_responsibility'
required_fields_for_post = ['user_id', 'top_lvl_location_id']
def get_object_list(self, request):
'''
In this resoruce we only override the get_object_list method so that
we return only the top level location data that the user can see.
'''
return LocationPermission.objects\
.filter(user_id=request.GET['user_id']).values()
| agpl-3.0 | Python |
fe3430fdaddf6d715fca2fe6ae6d3b45e0445cc6 | make sure dtype matches what labview sends | tomwphillips/polarimeter | polarimeter/labview.py | polarimeter/labview.py | """
labview.py is a module to interface with a Windows PC running a LabVIEW
VI that acquires data with an NI-DAQ.
Using TCP/IP the VI listens for incoming connections. The requesting
computer sends a message requesting a capture time. The Windows PC
acquires the data, writes it to file, then uses the WinSCP utility to
copy the file to /home/dcb/polarimeter/data/YYYY-MM-DD-HHMMSS.csv.
The requesting computer then loads the file, which contais the time
and signals of the two channels.
"""
import socket
import time
import numpy as np
socket.setdefaulttimeout(60) # seconds
def acquire(capture_time, IP='155.198.231.92', port=5020):
    """Acquires data for capture_time (seconds) via the acquisition PC.

    Returns time, and the signals of channel A and channel B, as the
    unpacked columns of the CSV the LabVIEW VI copies to data/signals/.
    """
    if capture_time <= 0:
        raise ValueError('capture_time must be greater than zero.')
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    sock.connect((IP, port))
    # Ask the LabVIEW VI to capture for the requested duration.
    sock.send('%.2f\r' % capture_time)
    # NOTE(review): asserts are stripped under `python -O`; the protocol
    # handshake relies on them here.
    assert sock.recv(1024) == 'acquiring'
    # Wait out the acquisition plus a one-second margin on the remote side.
    time.sleep(capture_time + 1)
    filename = sock.recv(1024)
    assert filename[-3:] == 'csv'
    sock.close()
    # Columns: time, channel A, channel B (doubles), unpacked per column.
    return np.loadtxt('data/signals/' + filename, delimiter=',',
                      dtype=np.dtype('d'), unpack=True)
| """
labview.py is a module to interface with a Windows PC running a LabVIEW
VI that acquires data with an NI-DAQ.
Using TCP/IP the VI listens for incoming connections. The requesting
computer sends a message requesting a capture time. The Windows PC
acquires the data, writes it to file, then uses the WinSCP utility to
copy the file to /home/dcb/polarimeter/data/YYYY-MM-DD-HHMMSS.csv.
The requesting computer then loads the file, which contais the time
and signals of the two channels.
"""
import socket
import time
import numpy as np
socket.setdefaulttimeout(60) # seconds
def acquire(capture_time, IP='155.198.231.92', port=5020):
"""Aquires data for capture_time (seconds) via acquisition PC.
Returns time, and signals of hannel A and channel B.
"""
if capture_time <= 0:
raise ValueError('capture_time must be greater than zero.')
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.connect((IP, port))
sock.send('%.2f\r' % capture_time)
assert sock.recv(1024) == 'acquiring'
time.sleep(capture_time + 1)
filename = sock.recv(1024)
assert filename[-3:] == 'csv'
sock.close()
data = np.loadtxt('data/signals/' + filename, delimiter=',')
return (data[:,0], data[:,1], data[:,2])
| mit | Python |
70b490fe3a04f9e50734abab4e22153533ae9096 | Update analogInOut.py | KrempelEv/krempelair,KrempelEv/krempelair,bittracker/krempelair,bittracker/krempelair,bittracker/krempelair,KrempelEv/krempelair | krempelair/lib/bus/analogInOut.py | krempelair/lib/bus/analogInOut.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import smbus
import logging as log
class analogInOut():
    """Thin wrapper around an analog I/O board reached over I2C (smbus)."""

    def __init__(self):
        # I2C bus 1 is the user-accessible bus on a Raspberry Pi.
        self._bus = smbus.SMBus(1)
        LOG_FILENAME = 'krempelair.log'
        log.basicConfig(filename=LOG_FILENAME, level=log.DEBUG)

    def getValue(self, address, chanel):
        """Read and return the analog value of *chanel* at I2C *address*."""
        # Read an 11-byte block from the board.
        var = self._bus.read_i2c_block_data(address, chanel, 11)
        # Combine the low/high bytes into the 16-bit reading.
        val = var[2]*256+var[1]
        # Log address, channel and value (message text is German).
        log.debug("Analogwert von Adresse "+str(address)+ " mit Kanal " +str(chanel) +" mit Wert "+ str(val))
        return val

    def setValue(self, address, chanel, value):
        """Write the 16-bit *value* to *chanel* at I2C *address*."""
        a = int(value)
        # Split into high/low bytes; the block is sent low byte first.
        HBy = int(a/256)
        LBy = int(a-HBy*256)
        field = [LBy, HBy]
        self._bus.write_i2c_block_data(address, chanel, field)
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import smbus
import logging as log
class analogInOut():
    """Thin wrapper around an analog I/O board reached over I2C (smbus)."""

    def __init__(self):
        # I2C bus 1 is the user-accessible bus on a Raspberry Pi.
        self._bus = smbus.SMBus(1)
        LOG_FILENAME = 'krempelair.log'
        # BUG FIX: the module is imported as `log`, so the original
        # `level=logging.DEBUG` raised a NameError; use the alias.
        log.basicConfig(filename=LOG_FILENAME, level=log.DEBUG)

    def getValue(self, address, chanel):
        """Read and return the analog value of *chanel* at I2C *address*."""
        # Read an 11-byte block from the board.
        var = self._bus.read_i2c_block_data(address, chanel, 11)
        # Combine the low/high bytes into the 16-bit reading.
        val = var[2]*256 + var[1]
        # Log address, channel and value (message text is German).
        log.debug("Analogwert von Adresse "+str(address)+ " mit Kanal " +str(chanel) +" mit Wert "+ str(val))
        return val

    def setValue(self, address, chanel, value):
        """Write the 16-bit *value* to *chanel* at I2C *address*."""
        a = int(value)
        # Split into high/low bytes; the block is sent low byte first.
        HBy = int(a/256)
        LBy = int(a - HBy*256)
        field = [LBy, HBy]
        self._bus.write_i2c_block_data(address, chanel, field)
| agpl-3.0 | Python |
db2a796e73a9963c58f601a0aa08b2aa4954a3c3 | Revise first/last to left/right | bowen0701/algorithms_data_structures | lc0034_find_first_and_last_position_of_element_in_sorted_array.py | lc0034_find_first_and_last_position_of_element_in_sorted_array.py | """Leetcode 34. Find left and right Position of Element in Sorted Array
Medium
URL: https://leetcode.com/problems/find-left-and-right-position-of-element-in-sorted-array
Given an array of integers nums sorted in ascending order,
find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
"""
class SolutionBinarySearchTwice(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
Time complexity: O(logn), where n is the length of nums.
Space complexity: O(1).
"""
# Apply to 2 binary searches to update result [-1, -1].
res = [-1, -1]
if not nums:
return res
# Apply the 1st binary search to search target's left position.
left, right = 0, len(nums) - 1
while left < right:
mid = left + (right - left) // 2
if nums[mid] < target:
left = mid + 1
else:
right = mid
if nums[left] != target:
return res
else:
res[0] = left
# Apply the 2nd binary search to search target's right position.
right = len(nums) - 1
while left < right:
# Make mid biased to the right.
mid = left + (right - left) // 2 + 1
if nums[mid] > target:
right = mid - 1
else:
left = mid
res[1] = right
return res
def main():
# Ans: [3,4]
nums = [5,7,7,8,8,10]
target = 8
print SolutionBinarySearchTwice().searchRange(nums, target)
# Ans: [-1,-1]
nums = [5,7,7,8,8,10]
target = 6
print SolutionBinarySearchTwice().searchRange(nums, target)
if __name__ == '__main__':
main()
| """Leetcode 34. Find First and Last Position of Element in Sorted Array
Medium
URL: https://leetcode.com/problems/find-first-and-last-position-of-element-in-sorted-array
Given an array of integers nums sorted in ascending order,
find the starting and ending position of a given target value.
Your algorithm's runtime complexity must be in the order of O(log n).
If the target is not found in the array, return [-1, -1].
Example 1:
Input: nums = [5,7,7,8,8,10], target = 8
Output: [3,4]
Example 2:
Input: nums = [5,7,7,8,8,10], target = 6
Output: [-1,-1]
"""
class Solution(object):
def searchRange(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
Time complexity: O(logn), where n is the length of nums.
Space complexity: O(1).
"""
# Apply to 2 binary searches to update result [-1, -1].
res = [-1, -1]
if not nums:
return res
# Apply the 1st binary search to search target's left position.
first, last = 0, len(nums) - 1
while first < last:
mid = first + (last - first) // 2
if nums[mid] < target:
first = mid + 1
else:
last = mid
if nums[first] != target:
return res
else:
res[0] = first
# Apply the 2nd binary search to search target's right position.
last = len(nums) - 1
while first < last:
# Make mid biased to the right.
mid = first + (last - first) // 2 + 1
if nums[mid] > target:
last = mid - 1
else:
first = mid
res[1] = last
return res
def main():
# Ans: [3,4]
nums = [5,7,7,8,8,10]
target = 8
print Solution().searchRange(nums, target)
# Ans: [-1,-1]
nums = [5,7,7,8,8,10]
target = 6
print Solution().searchRange(nums, target)
if __name__ == '__main__':
main()
| bsd-2-clause | Python |
b25b35e4396638388ad4b05b61b54c64e5e60008 | Update PedidoListar.py | AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb,AEDA-Solutions/matweb | backend/Models/Curso/PedidoListar.py | backend/Models/Curso/PedidoListar.py | from Framework.Pedido import Pedido
from Framework.ErroNoHTTP import ErroNoHTTP
class PedidoEditar(Pedido):
def __init__(self,variaveis_do_ambiente):
super(PedidoEditar, self).__init__(variaveis_do_ambiente)
try:
self.nome = self.corpo['nome']
self.codigo = self.corpo['codigo']
self.id_grau = self.corpo['id_grau']
self.id_campus = self.corpo['id_campus']
self.permanencia_minima = self.corpo['permanencia_minima']
self.permanencia_maxima = self.corpo['permanencia_maxima']
self.creditos_formatura = self.corpo['creditos_formatura']
self.creditos_optativos_conexa = self.corpo['creditos_optativos_conexa']
self.creditos_optativos_concentracao = self.corpo['creditos_optativos_concentracao']
self.creditos_livres_maximo = self.corpo['creditos_livres_maximo']
except:
raise ErroNoHTTP(400)
def getNome(self):
return self.nome
def getCodigo(self):
return self.codigo
def getId_grau(self):
return self.id_grau
def getId_campus(self):
return self.id_campus
def getPermanencia_minima(self):
return self.permanencia_minima
def getPermanencia_maxima(self):
return self.permanencia_maxima
def getCreditos_formatura(self):
return self.creditos_formatura
def getCreditos_optativos_conexa(self)
return self.creditos_optativos_conexa
def getCreditos_optativos_concentracao(self)
return self.creditos_optativos_concentracao
def getCreditos_livres_maximo(self)
return self.creditos_livres_maximo
| from Framework.Pedido import Pedido
from Framework.ErroNoHTTP import ErroNoHTTP
class PedidoEditar(Pedido):
def __init__(self,variaveis_do_ambiente):
super(PedidoEditar, self).__init__(variaveis_do_ambiente)
try:
self.nome = self.corpo['nome']
self.codigo = self.corpo['codigo']
self.id_grau = self.corpo['id_grau']
self.id_campus = self.corpo['id_campus']
self.permanencia_minima = self.corpo['permanencia_minima']
self.permanencia_maxima = self.corpo['permanencia_maxima']
self.creditos_formatura = self.corpo['creditos_formatura']
self.creditos_optativos_conexa = self.corpo['creditos_optativos_conexa']
self.creditos_optativos_concentracao = self.corpo['creditos_optativos_concentracao']
self.creditos_livres_maximo = self.corpo['creditos_livres_maximo']
except:
raise ErroNoHTTP(400)
def getNome(self):
return self.nome
def getCodigo(self):
return self.codigo
def getId_grau(self):
return self.id_grau
def getId_campus(self):
return self.id_campus
def getPermanencia_minima(self):
return self.permanencia_minima
def getPermanencia_maxima(self):
return self.permanencia_maxima
def getCreditos_formatura(self):
return self.creditos_formatura
def getCreditos_optativos_conexa(self)
return self.creditos_optativos_conexa
def getCreditos_optativos_concentracao(self)
return self.creditos_optativos_concentracao
def getCreditos_livres_maximo(self)
return self.creditos_livres_maximo
| mit | Python |
f51b77717ea9d75fa1c5c47bc6361f1a63e95c5c | Fix lint | svenstaro/python-web-boilerplate,svenstaro/python-web-boilerplate | boilerplateapp/helpers/decorators.py | boilerplateapp/helpers/decorators.py | """Contains a bunch of helpful decorators."""
import functools
def login_exempt(f):
"""All routes decorated with this will be exempt from authorization.
This means that using this decorator a function can be marked for anonymous access.
Example:
@api.route('/login', methods=['POST'])
@login_exempt
def login():
pass
"""
@functools.wraps(f)
def decorated_function(*args, **kwargs):
return f(*args, **kwargs)
decorated_function.login_exempt = True
return decorated_function
| """Contains a bunch of helpful decorators."""
import functools
def login_exempt(f):
"""All routes decorated with this will be exempt from authorization.
This means that using this decorator a function can be marked for anonymous access.
Example:
@api.route('/login', methods=['POST'])
@login_exempt
def login():
pass
"""
@functools.wraps(f)
def decorated_function(*args, **kwargs):
return f(*args, **kwargs)
decorated_function.login_exempt = True
return decorated_function
| mit | Python |
d977f9747e4309bc98e5f3d3c175293fa54544d8 | clear urls | danrg/RGT-tool,danrg/RGT-tool,danrg/RGT-tool | src/RGT/urls.py | src/RGT/urls.py | from django.conf.urls.defaults import patterns, include, url
from django.contrib import admin
from authentication.views import *
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'RGT.views.home'),
#url(r'^$', 'RGT.applicationForm.views.showAppForm'),
#url(r'^registerCloseBeta/$', 'RGT.applicationForm.views.registerUser'),
#url(r'^thx/$', 'RGT.applicationForm.views.showThankYouPage'),
url(r'^home/', 'RGT.views.home'),
url(r'^auth/register/', RegistrationView.as_view()),
url(r'^auth/login/', LoginView.as_view()),
url(r'^auth/logout/', LogoutRedirectView.as_view()),
url(r'^auth/verify/(?P<verifyEmailCode>[A-Za-z0-9]+)/$', 'RGT.authentication.registration.views.verify'),
url(r'^accounts/forgot/$', ForgotPasswordView.as_view()),
url(r'^accounts/recover/$', 'RGT.authentication.recoverPassword.views.recoverPass'),
url(r'^accounts/recover/(?P<passRecoverCode>[A-Za-z0-9]+)/$', 'RGT.authentication.recoverPassword.views.recoverPass'),
url(r'^accounts/change/', ChangePasswordView.as_view()),
url(r'^profile/', 'RGT.userProfile.views.displayUserProfile'),
url(r'^contact/', 'RGT.contact.views.contact'),
url(r'^grids/', include('RGT.gridMng.urls')),
url(r'^sessions/', include('RGT.gridMng.session.urls')),
)
| from django.conf.urls.defaults import patterns, include, url
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
from authentication.views import *
admin.autodiscover()
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'RGT.views.home', name='home'),
# url(r'^RGT/', include('RGT.foo.urls')),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# Uncomment the next line to enable the admin:
url(r'^admin/', include(admin.site.urls)),
url(r'^$', 'RGT.views.home'),
#url(r'^$', 'RGT.applicationForm.views.showAppForm'),
#url(r'^registerCloseBeta/$', 'RGT.applicationForm.views.registerUser'),
#url(r'^thx/$', 'RGT.applicationForm.views.showThankYouPage'),
url(r'^home/', 'RGT.views.home'),
url(r'^auth/register/', RegistrationView.as_view()),
url(r'^auth/login/', LoginView.as_view()),
url(r'^auth/logout/', LogoutRedirectView.as_view()),
url(r'^auth/verify/(?P<verifyEmailCode>[A-Za-z0-9]+)/$', 'RGT.authentication.registration.views.verify'),
#url(r'^accounts/forgot/$', 'RGT.authentication.forgotPassword.views.forgotPass'),
url(r'^accounts/forgot/$', ForgotPasswordView.as_view()),
url(r'^accounts/recover/$', 'RGT.authentication.recoverPassword.views.recoverPass'),
url(r'^accounts/recover/(?P<passRecoverCode>[A-Za-z0-9]+)/$', 'RGT.authentication.recoverPassword.views.recoverPass'),
url(r'^accounts/change/', ChangePasswordView.as_view()),
url(r'^profile/', 'RGT.userProfile.views.displayUserProfile'),
url(r'^contact/', 'RGT.contact.views.contact'),
url(r'^grids/', include('RGT.gridMng.urls')),
url(r'^sessions/', include('RGT.gridMng.session.urls')),
)
| mit | Python |
8a8cac343fb36cf3c99a1455c208b1b4667e3e99 | Rename a function | thombashi/pathvalidate | pathvalidate/_common.py | pathvalidate/_common.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
from .error import NullNameError
def _validate_null_string(text, error_msg="null name"):
if is_not_null_string(text):
return
if is_null_string(text):
raise NullNameError(error_msg)
raise TypeError("text must be a string: actual={}".format(type(text)))
def _preprocess(name):
return name.strip()
def is_null_string(value):
if value is None:
return True
try:
return len(value.strip()) == 0
except AttributeError:
return False
def is_not_null_string(value):
try:
return len(value.strip()) > 0
except AttributeError:
return False
def get_unprintable_ascii_char_list():
import six
import string
return [six.unichr(c) for c in range(256) if chr(c) not in string.printable]
unprintable_char_list = get_unprintable_ascii_char_list()
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import, unicode_literals
from .error import NullNameError
def _validate_null_string(text, error_msg="null name"):
if is_not_null_string(text):
return
if is_null_string(text):
raise NullNameError(error_msg)
raise TypeError("text must be a string: actual={}".format(type(text)))
def _preprocess(name):
return name.strip()
def is_null_string(value):
if value is None:
return True
try:
return len(value.strip()) == 0
except AttributeError:
return False
def is_not_null_string(value):
try:
return len(value.strip()) > 0
except AttributeError:
return False
def get_unprintable_char_list():
import six
import string
return [six.unichr(c) for c in range(256) if chr(c) not in string.printable]
unprintable_char_list = get_unprintable_char_list()
| mit | Python |
c4a8af10c2b6b8d7367cb4a22609ac29de4fb5f1 | Allow listing multiple subjects by ID | zooniverse/panoptes-cli | panoptes_cli/commands/subject.py | panoptes_cli/commands/subject.py | import click
from panoptes_cli.scripts.panoptes import cli
from panoptes_client import Subject
@cli.group()
def subject():
pass
@subject.command()
@click.option('--subject-set-id', type=int, required=False)
@click.option(
'--quiet',
'-q',
is_flag=True,
help='Only print subject IDs',
)
@click.argument('subject-ids', type=int, required=False, nargs=-1)
def ls(subject_set_id, quiet, subject_ids):
if subject_ids:
for subject_id in subject_ids:
subject = Subject.find(subject_id)
if quiet:
click.echo(subject.id)
else:
echo_subject(subject)
return
subjects = Subject.where(subject_set_id=subject_set_id)
if quiet:
click.echo(" ".join([s.id for s in subjects]))
else:
for subject in subjects:
echo_subject(subject)
def echo_subject(subject):
click.echo(
u'{} {}'.format(
subject.id,
' '.join(map(lambda l: list(l.values())[0], subject.locations))
)
)
| import click
from panoptes_cli.scripts.panoptes import cli
from panoptes_client import Subject
@cli.group()
def subject():
pass
@subject.command()
@click.option('--subject-set-id', type=int, required=False)
@click.option(
'--quiet',
'-q',
is_flag=True,
help='Only print subject IDs',
)
@click.argument('subject-id', type=int, required=False)
def ls(subject_set_id, quiet, subject_id):
if subject_id:
subject = Subject.find(subject_id)
if quiet:
click.echo(subject.id)
else:
echo_subject(subject)
return
subjects = Subject.where(subject_set_id=subject_set_id)
if quiet:
click.echo(" ".join([s.id for s in subjects]))
else:
for subject in subjects:
echo_subject(subject)
def echo_subject(subject):
click.echo(
u'{} {}'.format(
subject.id,
' '.join(map(lambda l: l.values()[0], subject.locations))
)
)
| apache-2.0 | Python |
099ae768056a4ab160179be89c8750a2bfc06b2c | Implement expr2bdd function and unique table | GtTmy/pyeda,sschnug/pyeda,sschnug/pyeda,karissa/pyeda,sschnug/pyeda,GtTmy/pyeda,karissa/pyeda,pombredanne/pyeda,pombredanne/pyeda,GtTmy/pyeda,cjdrake/pyeda,pombredanne/pyeda,cjdrake/pyeda,karissa/pyeda,cjdrake/pyeda | pyeda/test/test_bdd.py | pyeda/test/test_bdd.py | """
Test binary decision diagrams
"""
from pyeda.bdd import expr2bdd
from pyeda.expr import var
a, b, c = map(var, 'abc')
def test_expr2bdd():
f = a * b + a * c + b * c
bdd_f = expr2bdd(f)
assert bdd_f.root == a.var
assert bdd_f.low.root == b.var
assert bdd_f.high.root == b.var
assert bdd_f.low.low.root == 0
assert bdd_f.low.high.root == c.var
assert bdd_f.high.low.root == c.var
assert bdd_f.high.high.root == 1
assert bdd_f.low.high.low.root == 0
assert bdd_f.high.low.high.root == 1
| """
Test binary decision diagrams
"""
| bsd-2-clause | Python |
a7f8a2812c5e3728caf98a79451dccc40a97f8ab | change module name | bartscheers/tkp,transientskp/tkp,transientskp/tkp,mkuiack/tkp,mkuiack/tkp,bartscheers/tkp | tkp/classification/features/catalogs.py | tkp/classification/features/catalogs.py | """
Module that checks the database for source associations
If the database is not available or the database module cannot be
imported, functions will silently return None.
"""
from tkp.database.database import DataBase
from tkp.database.general import match_nearests_in_catalogs
def match_catalogs(transient):
"""Match transient source with nearest catalog source
Iterate through available catalogs, and return the nearest source
for each catalog. Each such source is a dictionary with keys
catsrcid, catsrcname, catid, catname, ra, decl, ra_err, decl_err,
dist_arcsec, assoc_r. See
:py:func:`~tkp.database.general.match_nearests_in_catalogs` for details.
The returned dictionary contains the catalog name as its key, and
a source as the corresponding value.
"""
# Hardcode the catalogs for now
#catalogs = {3: 'NVSS', 4: 'VLSS', 5: 'WENSS', 6: 'WENSS'}
# We check for all catalogs in the db (VLSS, WENSSm, WENSSp, NVSS, EXO)
database = DataBase()
results = {}
#for key, value in catalogs.iteritems():
# results[value] = match_nearests_in_catalogs(
# database.connection, transient.srcid,
# radius=1, catalogid=key, assoc_r=.1)
# if len(results[value]) > 0:
# results[value] = results[value][0]
# else:
# results[value] = {}
results = match_nearests_in_catalogs(transient.runcatid,radius=0.5,
deRuiter_r=3.717)
if len(results) > 0:
results = results[0]
else:
results = {}
return results
| """
Module that checks the database for source associations
If the database is not available or the database module cannot be
imported, functions will silently return None.
"""
from tkp.database.database import DataBase
from tkp.database.utils import match_nearests_in_catalogs
def match_catalogs(transient):
"""Match transient source with nearest catalog source
Iterate through available catalogs, and return the nearest source
for each catalog. Each such source is a dictionary with keys
catsrcid, catsrcname, catid, catname, ra, decl, ra_err, decl_err,
dist_arcsec, assoc_r. See
:py:func:`~tkp.database.utils.match_nearests_in_catalogs` for details.
The returned dictionary contains the catalog name as its key, and
a source as the corresponding value.
"""
# Hardcode the catalogs for now
#catalogs = {3: 'NVSS', 4: 'VLSS', 5: 'WENSS', 6: 'WENSS'}
# We check for all catalogs in the db (VLSS, WENSSm, WENSSp, NVSS, EXO)
database = DataBase()
results = {}
#for key, value in catalogs.iteritems():
# results[value] = match_nearests_in_catalogs(
# database.connection, transient.srcid,
# radius=1, catalogid=key, assoc_r=.1)
# if len(results[value]) > 0:
# results[value] = results[value][0]
# else:
# results[value] = {}
results = match_nearests_in_catalogs(transient.runcatid,radius=0.5,
deRuiter_r=3.717)
if len(results) > 0:
results = results[0]
else:
results = {}
return results
| bsd-2-clause | Python |
44a39603d8af2e8574d8ce4b667ed7d6931bf56a | Add DEBIAN_FRONTEND=noninteractive env var to apt | Fizzadar/pyinfra,Fizzadar/pyinfra | pyinfra/modules/apt.py | pyinfra/modules/apt.py | # pyinfra
# File: pyinfra/modules/apt.py
# Desc: manage apt packages & repositories
from pyinfra.api import operation, operation_env, server
@operation
def repo(name, present=True):
pass
@operation
@operation_env(DEBIAN_FRONTEND='noninteractive') # surpresses interactive prompts
def packages(packages, present=True, update=False, upgrade=False):
commands = []
if update:
commands.append('apt-get update')
if upgrade:
commands.append('apt-get upgrade')
current_packages = server.fact('DebPackages')
packages = [
package for package in packages
if package not in current_packages
]
if packages:
commands.append('apt-get install -y {}'.format(' '.join(packages)))
return commands
| # pyinfra
# File: pyinfra/modules/apt.py
# Desc: manage apt packages & repositories
from pyinfra.api import operation, server
@operation
def repo(name, present=True):
return []
@operation
def packages(packages, present=True, update=False, upgrade=False):
commands = []
if update:
commands.append('apt-get update')
if upgrade:
commands.append('apt-get upgrade')
current_packages = server.fact('DebPackages')
packages = [
package for package in packages
if package not in current_packages
]
if packages:
commands.append('apt-get install -y {}'.format(' '.join(packages)))
return commands
| mit | Python |
fe52f04aa2b15d656909c139dce6abc5acfedd01 | Remove sort function in Switch Server List Panel #5 | KunihikoKido/sublime-elasticsearch-client,KunihikoKido/sublime-elasticsearch-client | panel/switch_server_list_panel.py | panel/switch_server_list_panel.py |
class SwitchServerListPanel(object):
def __init__(self, window, servers):
self.window = window
self.servers = servers
self.choices = []
def on_done(self, index):
if index == -1:
return
self.callback(index)
def show(self, callback):
self.callback = callback
for server in self.servers:
self.choices.append([
"{index}/{doc_type}".format(**server),
"{base_url}".format(**server)
])
self.window.show_quick_panel(self.choices, self.on_done)
|
class SwitchServerListPanel(object):
def __init__(self, window, servers):
self.window = window
self.servers = servers
self.choices = []
def on_done(self, index):
if index == -1:
return
self.callback(index)
def show(self, callback):
self.callback = callback
for server in self.servers:
self.choices.append([
"{index}/{doc_type}".format(**server),
"{base_url}".format(**server)
])
self.choices.sort()
self.window.show_quick_panel(self.choices, self.on_done)
| mit | Python |
9b513c620b2c079c12370969929599f2a77a3b98 | Bump version number to 3.3 | zonca/petsc4py,zonca/petsc4py,zonca/petsc4py,zonca/petsc4py | src/__init__.py | src/__init__.py | # Author: Lisandro Dalcin
# Contact: dalcinl@gmail.com
# --------------------------------------------------------------------
"""
PETSc for Python
================
This package is an interface to PETSc libraries.
PETSc_ (the Portable, Extensible Toolkit for Scientific Computation)
is a suite of data structures and routines for the scalable (parallel)
solution of scientific applications modeled by partial differential
equations. It employs the MPI_ standard for all message-passing
communication.
.. _PETSc: http://www.mcs.anl.gov/petsc
.. _MPI: http://www.mpi-forum.org
"""
__author__ = 'Lisandro Dalcin'
__version__ = '3.3'
__credits__ = "PETSc Team <petsc-maint@mcs.anl.gov>"
# --------------------------------------------------------------------
def init(args=None, arch=None, comm=None):
"""
Initialize PETSc.
:Parameters:
- `args`: command-line arguments, usually the 'sys.argv' list.
- `arch`: specific configuration to use.
- `comm`: MPI commmunicator
.. note:: This function should be called only once, typically at
the very beginning of the bootstrap script of an application.
"""
import petsc4py.lib
PETSc = petsc4py.lib.ImportPETSc(arch)
args = petsc4py.lib.getInitArgs(args)
PETSc._initialize(args, comm)
# --------------------------------------------------------------------
def get_include():
"""
Return the directory in the package that contains header files.
Extension modules that need to compile against petsc4py should use
this function to locate the appropriate include directory. Using
Python distutils (or perhaps NumPy distutils)::
import petsc4py
Extension('extension_name', ...
include_dirs=[..., petsc4py.get_include()])
"""
from os.path import dirname, join
return join(dirname(__file__), 'include')
# --------------------------------------------------------------------
| # Author: Lisandro Dalcin
# Contact: dalcinl@gmail.com
# --------------------------------------------------------------------
"""
PETSc for Python
================
This package is an interface to PETSc libraries.
PETSc_ (the Portable, Extensible Toolkit for Scientific Computation)
is a suite of data structures and routines for the scalable (parallel)
solution of scientific applications modeled by partial differential
equations. It employs the MPI_ standard for all message-passing
communication.
.. _PETSc: http://www.mcs.anl.gov/petsc
.. _MPI: http://www.mpi-forum.org
"""
__author__ = 'Lisandro Dalcin'
__version__ = '1.2'
__credits__ = "PETSc Team <petsc-maint@mcs.anl.gov>"
# --------------------------------------------------------------------
def init(args=None, arch=None, comm=None):
"""
Initialize PETSc.
:Parameters:
- `args`: command-line arguments, usually the 'sys.argv' list.
- `arch`: specific configuration to use.
- `comm`: MPI commmunicator
.. note:: This function should be called only once, typically at
the very beginning of the bootstrap script of an application.
"""
import petsc4py.lib
PETSc = petsc4py.lib.ImportPETSc(arch)
args = petsc4py.lib.getInitArgs(args)
PETSc._initialize(args, comm)
# --------------------------------------------------------------------
def get_include():
"""
Return the directory in the package that contains header files.
Extension modules that need to compile against petsc4py should use
this function to locate the appropriate include directory. Using
Python distutils (or perhaps NumPy distutils)::
import petsc4py
Extension('extension_name', ...
include_dirs=[..., petsc4py.get_include()])
"""
from os.path import dirname, join
return join(dirname(__file__), 'include')
# --------------------------------------------------------------------
| bsd-2-clause | Python |
861055a8391734c76083f31cba6eb912c5b58337 | Update fetch-dependees.py to work with opam 2 | ocsigen/lwt,c-cube/lwt,ocsigen/lwt,ocsigen/lwt,c-cube/lwt,c-cube/lwt | src/util/fetch-dependees.py | src/util/fetch-dependees.py | #! /usr/bin/env python
# Retrieves source code of OPAM packages recursively depending on Lwt into a
# subdirectory ./dependees/, so you can grep through the code.
import os.path
import subprocess
DEPENDEES = "dependees"
def main():
packages = subprocess.check_output([
"opam", "list", "--all", "--depends-on=lwt", "--depopts", "--dev",
"--recursive", "--short", "--with-test", "--with-doc"])
packages = packages.strip().split("\n")
subprocess.check_call(["opam", "update"])
subprocess.check_call(["rm", "-rf", DEPENDEES])
for package in packages:
directory = os.path.join(DEPENDEES, package)
remove_command = ["rm", "-rf", directory]
source_command = ["opam", "source", "--dir=" + directory]
subprocess.check_call(remove_command)
try:
subprocess.check_call(source_command + ["--dev-repo", package])
except subprocess.CalledProcessError as e:
subprocess.check_call(remove_command)
try:
subprocess.check_call(source_command + [package])
except subprocess.CalledProcessError as e:
pass
if __name__ == "__main__":
main()
| #! /usr/bin/env python
# Retrieves source code of OPAM packages recursively depending on Lwt into a
# subdirectory ./dependees/, so you can grep through the code.
import os.path
import subprocess
DEPENDEES = "dependees"
def main():
packages = subprocess.check_output([
"opam", "list", "--unavailable", "--depends-on=lwt", "--depopts",
"--recursive", "--short"])
packages = packages.strip().split("\n")
subprocess.check_call(["opam", "update"])
subprocess.check_call(["rm", "-rf", DEPENDEES])
for package in packages:
directory = os.path.join(DEPENDEES, package)
remove_command = ["rm", "-rf", directory]
source_command = ["opam", "source", "--dir=" + directory]
subprocess.check_call(remove_command)
try:
subprocess.check_call(source_command + ["--dev-repo", package])
except subprocess.CalledProcessError as e:
subprocess.check_call(remove_command)
try:
subprocess.check_call(source_command + [package])
except subprocess.CalledProcessError as e:
pass
if __name__ == "__main__":
main()
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.