commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
9e041ef89f32f85919540c9be38f499807f7a18e | add list of lists | techytux/oertube,techytux/oertube,techytux/oertube | oertube/controllers.py | oertube/controllers.py | import os
from flask import Flask, request, Response
from flask import render_template, url_for, redirect, send_from_directory
from flask import send_file, make_response, abort
from oertube import app
# routing for API endpoints, generated from the models designated as API_MODELS
from oertube.core import api_manager
from oertube.models import *
for model_name in app.config['API_MODELS']:
model_class = app.config['API_MODELS'][model_name]
api_manager.create_api(model_class, methods=['GET', 'POST'])
session = api_manager.session
# routing for basic pages (pass routing onto the Angular app)
@app.route('/')
@app.route('/about')
@app.route('/blog')
def basic_pages(**kwargs):
return make_response(open('oertube/templates/index.html').read())
# routing for CRUD-style endpoints
# passes routing onto the angular frontend if the requested resource exists
from sqlalchemy.sql import exists
crud_url_models = app.config['CRUD_URL_MODELS']
@app.route('/<model_name>/')
@app.route('/<model_name>/<item_id>')
def rest_pages(model_name, item_id=None):
if model_name in crud_url_models:
model_class = crud_url_models[model_name]
if item_id is None or session.query(exists().where(
model_class.id == item_id)).scalar():
return make_response(open(
'oertube/templates/index.html').read())
abort(404)
@app.route('/lists')
def lists(item_id=None):
return redirect("http://editorial.mixd.tv/puls-highlights")
# special file handlers and error handlers
@app.route('/favicon.ico')
def favicon():
return send_from_directory(os.path.join(app.root_path, 'static'),
'img/favicon.ico')
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
| import os
from flask import Flask, request, Response
from flask import render_template, url_for, redirect, send_from_directory
from flask import send_file, make_response, abort
from oertube import app
# routing for API endpoints, generated from the models designated as API_MODELS
from oertube.core import api_manager
from oertube.models import *
for model_name in app.config['API_MODELS']:
model_class = app.config['API_MODELS'][model_name]
api_manager.create_api(model_class, methods=['GET', 'POST'])
session = api_manager.session
# routing for basic pages (pass routing onto the Angular app)
@app.route('/')
@app.route('/about')
@app.route('/blog')
def basic_pages(**kwargs):
return make_response(open('oertube/templates/index.html').read())
# routing for CRUD-style endpoints
# passes routing onto the angular frontend if the requested resource exists
from sqlalchemy.sql import exists
crud_url_models = app.config['CRUD_URL_MODELS']
@app.route('/<model_name>/')
@app.route('/<model_name>/<item_id>')
def rest_pages(model_name, item_id=None):
if model_name in crud_url_models:
model_class = crud_url_models[model_name]
if item_id is None or session.query(exists().where(
model_class.id == item_id)).scalar():
return make_response(open(
'oertube/templates/index.html').read())
abort(404)
# special file handlers and error handlers
@app.route('/favicon.ico')
def favicon():
return send_from_directory(os.path.join(app.root_path, 'static'),
'img/favicon.ico')
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
| mit | Python |
24680b61df921496a5035006628c8e9b3ef07ad9 | Implement funtion to get intraday data | RomelTorres/alpha_vantage | alpha-vantage/alphavantage.py | alpha-vantage/alphavantage.py | try:
# Python 3 import
from urllib.request import urlopen
except ImportError:
# Python 2.* import
from urllib2 import urlopen
from simplejson import loads
class AlphaVantage:
"""
This class is in charge of creating a python interface between the Alpha
Vantage restful API and your python application
"""
_ALPHA_VANTAGE_API_URL = "http://www.alphavantage.co/query?"
def __init__(self, key=None):
self.key = key
def _data_request(self, url):
""" Request data from the given url and return it as a json
object. It raises URLError
Keyword arguments:
url -- The url of the service
"""
response = urlopen(url)
url_response = response.read()
json_response = loads(url_response)
return json_response
def get_intraday(self, symbol, interval='15min', outputsize='compact'):
""" Return intraday time series in two json objects as data and
meta_data. It raises ValueError when problem arise
Keyword arguments:
symbol -- the symbol for the equity we want to get its data
interval -- time interval between two conscutive values,
supported values are '1min', '5min', '15min', '30min', '60min'
(default '15min')
outputsize -- The size of the call, supported values are
'compact' and 'full; the first returns the last 100 points in the
data series, and 'full' returns the full-length intraday times
series, commonly above 1MB (default 'compact')
"""
_INTRADAY = "TIME_SERIES_INTRADAY"
url = "{}function={}&symbol={}&interval={}&outputsize={}&apikey={}\
".format(AlphaVantage._ALPHA_VANTAGE_API_URL, _INTRADAY, symbol,
interval, outputsize, self.key)
json_response = self._data_request(url)
if 'Error Message' in json_response:
raise ValueError('ERROR getting data form api',
json_response['Error Message'])
data = json_response['Time Series ({})'.format(interval)]
meta_data = json_response['Meta Data']
return data, meta_data
if __name__ == '__main__':
av = AlphaVantage(key='486U')
data, meta_data = av.get_intraday()
print(data)
| try:
# Python 3 import
from urllib.request import urlopen
except ImportError:
# Python 2.* import
from urllib2 import urlopen
from simplejson import loads
class AlphaVantage:
"""
This class is in charge of creating a python interface between the Alpha
Vantage restful API and your python application
"""
def __init__(self, key=None):
self.key = key
def _data_request(self):
url = "http://www.alphavantage.co/query?function=TIME_SERIES_INTRADAY&symbol=MSFT&interval=1min&apikey={}".format(self.key)
response = urlopen(url)
json_response = loads(response.read())
print(json_response)
if __name__ == '__main__':
av = AlphaVantage(key='486U')
av._data_request()
| mit | Python |
19dd810c5acb35ce5d7565ee57a55ae725194bd1 | Add finalize method to Integration. | danbradham/mvp | mvp/integration.py | mvp/integration.py | # -*- coding: utf-8 -*-
class Integration(object):
name = None
description = None
icon = None
banner = None
requires_confirmation = False
enabled_by_default = False
columns = 1
def __init__(self):
self.set_enabled(self.enabled_by_default)
def fields(self):
'''Return a list of fields.
Example:
return [
{
'name': 'StringField',
'type': 'str',
'default': None,
'options': [...],
'required': False,
},
...
]
'''
return NotImplemented
def on_filename_changed(self, form, value):
return NotImplemented
def set_enabled(self, value):
'''Returns True if the integration was successfully enabled'''
if value:
return self._on_enable()
else:
return self._on_disable()
def _on_enable(self):
self.enabled = self.on_enable()
return self.enabled
def on_enable(self):
'''Return True to enable integration and False to disable'''
return True
def _on_disable(self):
self.enabled = not self.on_disable()
return self.enabled
def on_disable(self):
'''Return True to disable integration and False to enable'''
return True
def before_playblast(self, form, data):
'''Runs before playblasting.'''
return NotImplemented
def after_playblast(self, form, data):
'''Runs after playblasting.'''
return NotImplemented
def finalize(self, form, data):
'''Runs after entire playblast process is finished.
Unlike after_playblast, this method will only run ONCE after all
playblasting is finished. So, when playblasting multiple render layers
you can use this to execute after all of those render layers have
completed rendering.
Arguments:
form: The Form object including render options
data: List of renders that were output
'''
return NotImplemented
| # -*- coding: utf-8 -*-
class Integration(object):
name = None
description = None
icon = None
banner = None
requires_confirmation = False
enabled_by_default = False
columns = 1
def __init__(self):
self.set_enabled(self.enabled_by_default)
def fields(self):
'''Return a list of fields.
Example:
return [
{
'name': 'StringField',
'type': 'str',
'default': None,
'options': [...],
'required': False,
},
...
]
'''
return NotImplemented
def on_filename_changed(self, form, value):
return NotImplemented
def set_enabled(self, value):
'''Returns True if the integration was successfully enabled'''
if value:
return self._on_enable()
else:
return self._on_disable()
def _on_enable(self):
self.enabled = self.on_enable()
return self.enabled
def on_enable(self):
'''Return True to enable integration and False to disable'''
return True
def _on_disable(self):
self.enabled = not self.on_disable()
return self.enabled
def on_disable(self):
'''Return True to disable integration and False to enable'''
return True
def before_playblast(self, data):
return NotImplemented
def after_playblast(self, data):
return NotImplemented
| mit | Python |
b83a4bf53bfb893ca953c5301d9beee1f3dee320 | change wording for podcast privacy settings | gpodder/mygpo,gpodder/mygpo,gpodder/mygpo,gpodder/mygpo | mygpo/web/forms.py | mygpo/web/forms.py | from django import forms
from django.utils.translation import ugettext as _
from mygpo.api.models import Device, DEVICE_TYPES, SyncGroup
from mygpo.log import log
import re
class UserAccountForm(forms.Form):
email = forms.EmailField(label=_('Your Email Address'))
public = forms.BooleanField(required=False, label=_('May we use your subscriptions for the toplist and suggestions?'))
class DeviceForm(forms.Form):
name = forms.CharField(max_length=100, label=_('Name of this device'))
type = forms.ChoiceField(choices=DEVICE_TYPES, label=_('What kind of device is this?'))
uid = forms.CharField(max_length=50, label=_('What UID is configured on the physical device?'))
class PrivacyForm(forms.Form):
public = forms.BooleanField(required=False, label=_('May we include your subscription to this podcast in our (anonymous) statistics?'))
class SyncForm(forms.Form):
targets = forms.CharField()
def set_targets(self, sync_targets, label=''):
targets = self.sync_target_choices(sync_targets)
self.fields['targets'] = forms.ChoiceField(choices=targets, label=label)
def sync_target_choices(self, targets):
"""
returns a list of tuples that can be used as choices for a ChoiceField.
the first item in each tuple is a letter identifying the type of the
sync-target - either d for a Device, or g for a SyncGroup. This letter
is followed by the id of the target.
The second item in each tuple is the string-representation of the #
target.
"""
return [('%s%s' % ('d' if isinstance(t, Device) else 'g', t.id), t) for t in targets]
def get_target(self):
if not self.is_valid():
log('no target given in SyncForm')
raise ValueError(_('No device selected'))
target = self.cleaned_data['targets']
m = re.match('^([dg])(\d+)$', target)
if m == None:
log('invalid target %s given in SyncForm' % target)
raise ValueError(_('Invalid device selected: %s') % target)
if m.group(1) == 'd':
return Device.objects.get(pk=m.group(2))
else:
return SyncGroup.objects.get(pk=m.group(2))
| from django import forms
from django.utils.translation import ugettext as _
from mygpo.api.models import Device, DEVICE_TYPES, SyncGroup
from mygpo.log import log
import re
class UserAccountForm(forms.Form):
email = forms.EmailField(label=_('Your Email Address'))
public = forms.BooleanField(required=False, label=_('May we use your subscriptions for the toplist and suggestions?'))
class DeviceForm(forms.Form):
name = forms.CharField(max_length=100, label=_('Name of this device'))
type = forms.ChoiceField(choices=DEVICE_TYPES, label=_('What kind of device is this?'))
uid = forms.CharField(max_length=50, label=_('What UID is configured on the physical device?'))
class PrivacyForm(forms.Form):
public = forms.BooleanField(required=False, label=_('Should this be a public podcast?'))
class SyncForm(forms.Form):
targets = forms.CharField()
def set_targets(self, sync_targets, label=''):
targets = self.sync_target_choices(sync_targets)
self.fields['targets'] = forms.ChoiceField(choices=targets, label=label)
def sync_target_choices(self, targets):
"""
returns a list of tuples that can be used as choices for a ChoiceField.
the first item in each tuple is a letter identifying the type of the
sync-target - either d for a Device, or g for a SyncGroup. This letter
is followed by the id of the target.
The second item in each tuple is the string-representation of the #
target.
"""
return [('%s%s' % ('d' if isinstance(t, Device) else 'g', t.id), t) for t in targets]
def get_target(self):
if not self.is_valid():
log('no target given in SyncForm')
raise ValueError(_('No device selected'))
target = self.cleaned_data['targets']
m = re.match('^([dg])(\d+)$', target)
if m == None:
log('invalid target %s given in SyncForm' % target)
raise ValueError(_('Invalid device selected: %s') % target)
if m.group(1) == 'd':
return Device.objects.get(pk=m.group(2))
else:
return SyncGroup.objects.get(pk=m.group(2))
| agpl-3.0 | Python |
5225857a4a3e481871e64910cae7c579974869d6 | Use argparse for the output | datto/zfs-tests,stevenburgess/zfs-tests | MultiReceive.py | MultiReceive.py | import time
import datetime
import subprocess
import multiprocessing
import argparse
import TestConfig
import Configs
import ZfsApi
import Pid
import Common
import MonitorThread
import ReceiveThread
import Results
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--verbose', action="store_true",
help="The script will periodically print stats about TXGs and "
" receive speed")
args = parser.parse_args()
# Use TestConfig to ensure this computer is set up properly
TestConfig.check_all()
# This test case will use the test send file, check that it will work
TestConfig.check_testfile()
Pid.create_pid_file()
# Establish where this test will be writing its output
current_min = time.strftime("%Y%m%d%H%M%S")
zfs_receive_path = Configs.test_filesystem_path + '/runs/' + current_min
results_collector = Results.ResultsCollector(zfs_receive_path)
results_collector.gather_start_results()
if args.verbose:
monitor_thread = MonitorThread.MonitorThread(zfs_receive_path)
monitor_thread.start()
# Create the base FS that each thread will be receiveing into sub filesystem
ZfsApi.create_filesystem(zfs_receive_path)
start_time = time.time()
def receive_file(zfs_filesystem):
print("receiving on " + zfs_filesystem)
ZfsApi.zfs_recv(Configs.test_file_full_path, zfs_filesystem)
try:
zfs_filesystem_list = [zfs_receive_path + "/1", zfs_receive_path + "/2", zfs_receive_path + "/3", zfs_receive_path + "/4"]
workerPool = multiprocessing.Pool(processes = 4)
workerPool.map(receive_file, zfs_filesystem_list)
workerPool.close()
workerPool.join()
except KeyboardInterrupt:
pass
end_time = time.time()
results_collector.gather_end_results()
time_elapsed = end_time - start_time
print("that took " + str(datetime.timedelta(seconds=time_elapsed)))
property_dictionary = ZfsApi.get_filesystem_properties(zfs_receive_path, ['used'])
used_in_bytes = property_dictionary["used"]
used_in_mebibytes = Common.bytes_to_mebibyte(used_in_bytes)
print("received " + str(used_in_bytes))
bytes_per_second = used_in_mebibytes / time_elapsed
print("Speed: " + str(bytes_per_second) + " MiB/s")
# Clean up the PID file to allow other runs
Pid.destroy_pid_file()
| import time
import datetime
import subprocess
import multiprocessing
import TestConfig
import Configs
import ZfsApi
import Pid
import Common
import MonitorThread
import ReceiveThread
import Results
# Use TestConfig to ensure this computer is set up properly
TestConfig.check_all()
# This test case will use the test send file, check that it will work
TestConfig.check_testfile()
Pid.create_pid_file()
# Establish where this test will be writing its output
current_min = time.strftime("%Y%m%d%H%M%S")
zfs_receive_path = Configs.test_filesystem_path + '/runs/' + current_min
results_collector = Results.ResultsCollector(zfs_receive_path)
results_collector.gather_start_results()
monitor_thread = MonitorThread.MonitorThread(zfs_receive_path)
monitor_thread.start()
# Create the base FS that each thread will be receiveing into sub filesystem
ZfsApi.create_filesystem(zfs_receive_path)
start_time = time.time()
def receive_file(zfs_filesystem):
print("receiving on " + zfs_filesystem)
ZfsApi.zfs_recv(Configs.test_file_full_path, zfs_filesystem)
try:
zfs_filesystem_list = [zfs_receive_path + "/1", zfs_receive_path + "/2", zfs_receive_path + "/3", zfs_receive_path + "/4"]
workerPool = multiprocessing.Pool(processes = 4)
workerPool.map(receive_file, zfs_filesystem_list)
workerPool.close()
workerPool.join()
except KeyboardInterrupt:
pass
end_time = time.time()
results_collector.gather_end_results()
time_elapsed = end_time - start_time
print("that took " + str(datetime.timedelta(seconds=time_elapsed)))
property_dictionary = ZfsApi.get_filesystem_properties(zfs_receive_path, ['used'])
used_in_bytes = property_dictionary["used"]
used_in_mebibytes = Common.bytes_to_mebibyte(used_in_bytes)
print("received " + str(used_in_bytes))
bytes_per_second = used_in_mebibytes / time_elapsed
print("Speed: " + str(bytes_per_second) + " MiB/s")
# Clean up the PID file to allow other runs
Pid.destroy_pid_file()
| mit | Python |
82a7a79264e4667124d73c8eda00a6c44d69bb8a | Update the admin table to allow the insert of course cert settings LEARNER-1787 | mitocw/edx-platform,msegado/edx-platform,eduNEXT/edx-platform,pepeportela/edx-platform,arbrandes/edx-platform,stvstnfrd/edx-platform,Stanford-Online/edx-platform,stvstnfrd/edx-platform,ahmedaljazzar/edx-platform,stvstnfrd/edx-platform,TeachAtTUM/edx-platform,BehavioralInsightsTeam/edx-platform,BehavioralInsightsTeam/edx-platform,eduNEXT/edx-platform,eduNEXT/edx-platform,lduarte1991/edx-platform,edx-solutions/edx-platform,pepeportela/edx-platform,angelapper/edx-platform,ahmedaljazzar/edx-platform,kmoocdev2/edx-platform,lduarte1991/edx-platform,pabloborrego93/edx-platform,a-parhom/edx-platform,gymnasium/edx-platform,Stanford-Online/edx-platform,procangroup/edx-platform,gymnasium/edx-platform,proversity-org/edx-platform,TeachAtTUM/edx-platform,pepeportela/edx-platform,proversity-org/edx-platform,procangroup/edx-platform,lduarte1991/edx-platform,appsembler/edx-platform,ESOedX/edx-platform,Lektorium-LLC/edx-platform,pabloborrego93/edx-platform,Lektorium-LLC/edx-platform,CredoReference/edx-platform,appsembler/edx-platform,jolyonb/edx-platform,hastexo/edx-platform,mitocw/edx-platform,EDUlib/edx-platform,ahmedaljazzar/edx-platform,teltek/edx-platform,gsehub/edx-platform,angelapper/edx-platform,procangroup/edx-platform,cpennington/edx-platform,procangroup/edx-platform,mitocw/edx-platform,Lektorium-LLC/edx-platform,gsehub/edx-platform,BehavioralInsightsTeam/edx-platform,EDUlib/edx-platform,angelapper/edx-platform,kmoocdev2/edx-platform,jolyonb/edx-platform,Edraak/edraak-platform,gsehub/edx-platform,ESOedX/edx-platform,edx-solutions/edx-platform,hastexo/edx-platform,edx-solutions/edx-platform,arbrandes/edx-platform,pabloborrego93/edx-platform,eduNEXT/edunext-platform,edx/edx-platform,kmoocdev2/edx-platform,kmoocdev2/edx-platform,msegado/edx-platform,TeachAtTUM/edx-platform,teltek/edx-platform,philanthropy-u/edx-platform,edx-solutions/edx-platform,teltek/
edx-platform,arbrandes/edx-platform,appsembler/edx-platform,gsehub/edx-platform,appsembler/edx-platform,jolyonb/edx-platform,Edraak/edraak-platform,Lektorium-LLC/edx-platform,eduNEXT/edunext-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform,TeachAtTUM/edx-platform,eduNEXT/edx-platform,EDUlib/edx-platform,jolyonb/edx-platform,teltek/edx-platform,kmoocdev2/edx-platform,stvstnfrd/edx-platform,ahmedaljazzar/edx-platform,angelapper/edx-platform,edx/edx-platform,a-parhom/edx-platform,cpennington/edx-platform,ESOedX/edx-platform,Stanford-Online/edx-platform,a-parhom/edx-platform,CredoReference/edx-platform,eduNEXT/edunext-platform,CredoReference/edx-platform,msegado/edx-platform,msegado/edx-platform,ESOedX/edx-platform,philanthropy-u/edx-platform,msegado/edx-platform,gymnasium/edx-platform,proversity-org/edx-platform,lduarte1991/edx-platform,mitocw/edx-platform,eduNEXT/edunext-platform,hastexo/edx-platform,edx/edx-platform,EDUlib/edx-platform,a-parhom/edx-platform,cpennington/edx-platform,Edraak/edraak-platform,BehavioralInsightsTeam/edx-platform,Edraak/edraak-platform,CredoReference/edx-platform,pabloborrego93/edx-platform,gymnasium/edx-platform,Stanford-Online/edx-platform,hastexo/edx-platform,edx/edx-platform,arbrandes/edx-platform,proversity-org/edx-platform,pepeportela/edx-platform,cpennington/edx-platform | lms/djangoapps/certificates/admin.py | lms/djangoapps/certificates/admin.py | """
django admin pages for certificates models
"""
from config_models.admin import ConfigurationModelAdmin
from django import forms
from django.contrib import admin
from certificates.models import (
CertificateGenerationConfiguration,
CertificateGenerationCourseSetting,
CertificateHtmlViewConfiguration,
CertificateTemplate,
CertificateTemplateAsset,
GeneratedCertificate
)
from util.organizations_helpers import get_organizations
class CertificateTemplateForm(forms.ModelForm):
"""
Django admin form for CertificateTemplate model
"""
def __init__(self, *args, **kwargs):
super(CertificateTemplateForm, self).__init__(*args, **kwargs)
organizations = get_organizations()
org_choices = [(org["id"], org["name"]) for org in organizations]
org_choices.insert(0, ('', 'None'))
self.fields['organization_id'] = forms.TypedChoiceField(
choices=org_choices, required=False, coerce=int, empty_value=None
)
class Meta(object):
model = CertificateTemplate
fields = '__all__'
class CertificateTemplateAdmin(admin.ModelAdmin):
"""
Django admin customizations for CertificateTemplate model
"""
list_display = ('name', 'description', 'organization_id', 'course_key', 'mode', 'is_active')
form = CertificateTemplateForm
class CertificateTemplateAssetAdmin(admin.ModelAdmin):
"""
Django admin customizations for CertificateTemplateAsset model
"""
list_display = ('description', 'asset_slug',)
prepopulated_fields = {"asset_slug": ("description",)}
class GeneratedCertificateAdmin(admin.ModelAdmin):
"""
Django admin customizations for GeneratedCertificate model
"""
raw_id_fields = ('user',)
show_full_result_count = False
search_fields = ('course_id', 'user__username')
list_display = ('id', 'course_id', 'mode', 'user')
class CertificateGenerationCourseSettingAdmin(admin.ModelAdmin):
"""
Django admin customizations for CertificateGenerationCourseSetting model
"""
list_display = ('course_key', 'enabled')
search_fields = ('course_key',)
show_full_result_count = False
admin.site.register(CertificateGenerationConfiguration)
admin.site.register(CertificateGenerationCourseSetting, CertificateGenerationCourseSettingAdmin)
admin.site.register(CertificateHtmlViewConfiguration, ConfigurationModelAdmin)
admin.site.register(CertificateTemplate, CertificateTemplateAdmin)
admin.site.register(CertificateTemplateAsset, CertificateTemplateAssetAdmin)
admin.site.register(GeneratedCertificate, GeneratedCertificateAdmin)
| """
django admin pages for certificates models
"""
from config_models.admin import ConfigurationModelAdmin
from django import forms
from django.contrib import admin
from certificates.models import (
CertificateGenerationConfiguration,
CertificateGenerationCourseSetting,
CertificateHtmlViewConfiguration,
CertificateTemplate,
CertificateTemplateAsset,
GeneratedCertificate
)
from util.organizations_helpers import get_organizations
class CertificateTemplateForm(forms.ModelForm):
"""
Django admin form for CertificateTemplate model
"""
def __init__(self, *args, **kwargs):
super(CertificateTemplateForm, self).__init__(*args, **kwargs)
organizations = get_organizations()
org_choices = [(org["id"], org["name"]) for org in organizations]
org_choices.insert(0, ('', 'None'))
self.fields['organization_id'] = forms.TypedChoiceField(
choices=org_choices, required=False, coerce=int, empty_value=None
)
class Meta(object):
model = CertificateTemplate
fields = '__all__'
class CertificateTemplateAdmin(admin.ModelAdmin):
"""
Django admin customizations for CertificateTemplate model
"""
list_display = ('name', 'description', 'organization_id', 'course_key', 'mode', 'is_active')
form = CertificateTemplateForm
class CertificateTemplateAssetAdmin(admin.ModelAdmin):
"""
Django admin customizations for CertificateTemplateAsset model
"""
list_display = ('description', 'asset_slug',)
prepopulated_fields = {"asset_slug": ("description",)}
class GeneratedCertificateAdmin(admin.ModelAdmin):
"""
Django admin customizations for GeneratedCertificate model
"""
raw_id_fields = ('user',)
show_full_result_count = False
search_fields = ('course_id', 'user__username')
list_display = ('id', 'course_id', 'mode', 'user')
class CertificateGenerationCourseSettingAdmin(admin.ModelAdmin):
"""
Django admin customizations for CertificateGenerationCourseSetting model
"""
list_display = ('course_key',)
readonly_fields = ('course_key',)
search_fields = ('course_key',)
show_full_result_count = False
admin.site.register(CertificateGenerationConfiguration)
admin.site.register(CertificateGenerationCourseSetting, CertificateGenerationCourseSettingAdmin)
admin.site.register(CertificateHtmlViewConfiguration, ConfigurationModelAdmin)
admin.site.register(CertificateTemplate, CertificateTemplateAdmin)
admin.site.register(CertificateTemplateAsset, CertificateTemplateAssetAdmin)
admin.site.register(GeneratedCertificate, GeneratedCertificateAdmin)
| agpl-3.0 | Python |
2916048b8e0bb170685cbfa931cfa9120d0344de | Add project_fields option to output parameters | firedrakeproject/gusto,firedrakeproject/dcore | dcore/configuration.py | dcore/configuration.py | """
Some simple tools for making model configuration nicer.
"""
class Configuration(object):
def __init__(self, **kwargs):
for name, value in kwargs.iteritems():
self.__setattr__(name, value)
def __setattr__(self, name, value):
"""Cause setting an unknown attribute to be an error"""
if not hasattr(self, name):
raise AttributeError("'%s' object has no attribute '%s'" % (type(self).__name__, name))
object.__setattr__(self, name, value)
class TimesteppingParameters(Configuration):
"""
Timestepping parameters for dcore
"""
dt = None
alpha = 0.5
maxk = 2
maxi = 2
class OutputParameters(Configuration):
"""
Output parameters for dcore
"""
Verbose = False
dumpfreq = 10
dumplist = None
dirname = None
#: Should the output fields be interpolated or projected to
#: a linear space? Default is interpolation.
project_fields = False
class CompressibleParameters(Configuration):
"""
Physical parameters for 3d Compressible Euler
"""
g = 9.81
N = 0.01 # Brunt-Vaisala frequency (1/s)
cp = 1004.5 # SHC of dry air at const. pressure (J/kg/K)
R_d = 287. # Gas constant for dry air (J/kg/K)
kappa = 2.0/7.0 # R_d/c_p
p_0 = 1000.0*100.0 # reference pressure (Pa, not hPa)
k = None # vertical direction
Omega = None # rotation vector
class ShallowWaterParameters(Configuration):
"""
Physical parameters for 3d Compressible Euler
"""
g = 9.806
Omega = 7.292e-5 # rotation rate
| """
Some simple tools for making model configuration nicer.
"""
class Configuration(object):
def __init__(self, **kwargs):
for name, value in kwargs.iteritems():
self.__setattr__(name, value)
def __setattr__(self, name, value):
"""Cause setting an unknown attribute to be an error"""
if not hasattr(self, name):
raise AttributeError("'%s' object has no attribute '%s'" % (type(self).__name__, name))
object.__setattr__(self, name, value)
class TimesteppingParameters(Configuration):
"""
Timestepping parameters for dcore
"""
dt = None
alpha = 0.5
maxk = 2
maxi = 2
class OutputParameters(Configuration):
"""
Output parameters for dcore
"""
Verbose = False
dumpfreq = 10
dumplist = None
dirname = None
class CompressibleParameters(Configuration):
"""
Physical parameters for 3d Compressible Euler
"""
g = 9.81
N = 0.01 # Brunt-Vaisala frequency (1/s)
cp = 1004.5 # SHC of dry air at const. pressure (J/kg/K)
R_d = 287. # Gas constant for dry air (J/kg/K)
kappa = 2.0/7.0 # R_d/c_p
p_0 = 1000.0*100.0 # reference pressure (Pa, not hPa)
k = None # vertical direction
Omega = None # rotation vector
class ShallowWaterParameters(Configuration):
"""
Physical parameters for 3d Compressible Euler
"""
g = 9.806
Omega = 7.292e-5 # rotation rate
| mit | Python |
1f63f84fde3a557077c60a6466c4399fdfc3711d | Update version to 2.0b2-dev | jwinzer/OpenSlides,rolandgeider/OpenSlides,boehlke/OpenSlides,ostcar/OpenSlides,rolandgeider/OpenSlides,jwinzer/OpenSlides,FinnStutzenstein/OpenSlides,emanuelschuetze/OpenSlides,jwinzer/OpenSlides,normanjaeckel/OpenSlides,CatoTH/OpenSlides,tsiegleauq/OpenSlides,emanuelschuetze/OpenSlides,ostcar/OpenSlides,boehlke/OpenSlides,boehlke/OpenSlides,FinnStutzenstein/OpenSlides,CatoTH/OpenSlides,boehlke/OpenSlides,emanuelschuetze/OpenSlides,jwinzer/OpenSlides,emanuelschuetze/OpenSlides,ostcar/OpenSlides,CatoTH/OpenSlides,normanjaeckel/OpenSlides,FinnStutzenstein/OpenSlides,OpenSlides/OpenSlides,rolandgeider/OpenSlides,OpenSlides/OpenSlides,CatoTH/OpenSlides,jwinzer/OpenSlides,normanjaeckel/OpenSlides,normanjaeckel/OpenSlides,FinnStutzenstein/OpenSlides,tsiegleauq/OpenSlides,tsiegleauq/OpenSlides | openslides/__init__.py | openslides/__init__.py | __author__ = 'OpenSlides Team <support@openslides.org>'
__description__ = 'Presentation and assembly system'
__version__ = '2.0b2-dev'
| __author__ = 'OpenSlides Team <support@openslides.org>'
__description__ = 'Presentation and assembly system'
__version__ = '2.0b1'
| mit | Python |
80e94bd64ca23d78e57fe0937a9924e4df08072a | Fix #445 -- UnicodeDecodeError in SQLSelectForm. | pevzi/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,ivelum/django-debug-toolbar,barseghyanartur/django-debug-toolbar,spookylukey/django-debug-toolbar,Endika/django-debug-toolbar,stored/django-debug-toolbar,jazzband/django-debug-toolbar,ivelum/django-debug-toolbar,stored/django-debug-toolbar,peap/django-debug-toolbar,stored/django-debug-toolbar,pevzi/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,sidja/django-debug-toolbar,guilhermetavares/django-debug-toolbar,tim-schilling/django-debug-toolbar,megcunningham/django-debug-toolbar,jazzband/django-debug-toolbar,seperman/django-debug-toolbar,jazzband/django-debug-toolbar,ivelum/django-debug-toolbar,pevzi/django-debug-toolbar,sidja/django-debug-toolbar,barseghyanartur/django-debug-toolbar,spookylukey/django-debug-toolbar,seperman/django-debug-toolbar,guilhermetavares/django-debug-toolbar,tim-schilling/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,megcunningham/django-debug-toolbar,megcunningham/django-debug-toolbar,guilhermetavares/django-debug-toolbar,peap/django-debug-toolbar,sidja/django-debug-toolbar,spookylukey/django-debug-toolbar,calvinpy/django-debug-toolbar,seperman/django-debug-toolbar,Endika/django-debug-toolbar,barseghyanartur/django-debug-toolbar,calvinpy/django-debug-toolbar,django-debug-toolbar/django-debug-toolbar,Endika/django-debug-toolbar,tim-schilling/django-debug-toolbar,peap/django-debug-toolbar,ChristosChristofidis/django-debug-toolbar,calvinpy/django-debug-toolbar | debug_toolbar/forms.py | debug_toolbar/forms.py | from __future__ import unicode_literals
import json
import hashlib
from django import forms
from django.conf import settings
from django.db import connections
from django.utils.encoding import force_text
from django.utils.functional import cached_property
from django.core.exceptions import ValidationError
from debug_toolbar.utils.sql import reformat_sql
class SQLSelectForm(forms.Form):
    """
    Validate params

        sql: The sql statement with interpolated params
        raw_sql: The sql statement with placeholders
        params: JSON encoded parameter values
        duration: time for SQL to execute passed in from toolbar just for redisplay
        hash: the hash of (secret + sql + params) for tamper checking
    """
    sql = forms.CharField()
    raw_sql = forms.CharField()
    params = forms.CharField()
    # Database alias to run the query against; defaults to 'default'.
    alias = forms.CharField(required=False, initial='default')
    duration = forms.FloatField()
    hash = forms.CharField()

    def __init__(self, *args, **kwargs):
        # Pre-compute the tamper-detection hash when initial data is given,
        # so the rendered hidden form carries it back on submit.
        initial = kwargs.get('initial', None)
        if initial is not None:
            initial['hash'] = self.make_hash(initial)
        super(SQLSelectForm, self).__init__(*args, **kwargs)
        # All fields are round-tripped invisibly through the toolbar panel.
        for name in self.fields:
            self.fields[name].widget = forms.HiddenInput()

    def clean_raw_sql(self):
        # Safety gate: refuse anything that is not a SELECT statement.
        value = self.cleaned_data['raw_sql']
        if not value.lower().strip().startswith('select'):
            raise ValidationError("Only 'select' queries are allowed.")
        return value

    def clean_params(self):
        # Decode the JSON-encoded parameter list submitted by the toolbar.
        value = self.cleaned_data['params']
        try:
            return json.loads(value)
        except ValueError:
            raise ValidationError('Is not valid JSON')

    def clean_alias(self):
        value = self.cleaned_data['alias']
        if value not in connections:
            raise ValidationError("Database alias '%s' not found" % value)
        return value

    def clean_hash(self):
        # Recompute the hash from the submitted data; a mismatch means the
        # sql/params were modified client-side.
        hash = self.cleaned_data['hash']
        if hash != self.make_hash(self.data):
            raise ValidationError('Tamper alert')
        return hash

    def reformat_sql(self):
        return reformat_sql(self.cleaned_data['sql'])

    def make_hash(self, data):
        # force_text on every component: sql/params may be bytestrings with
        # non-ASCII content, and mixing them with the text SECRET_KEY would
        # raise UnicodeDecodeError (see #445).
        params = (force_text(settings.SECRET_KEY) +
                  force_text(data['sql']) + force_text(data['params']))
        return hashlib.sha1(params.encode('utf-8')).hexdigest()

    @property
    def connection(self):
        # Connection for the validated alias; requires is_valid() first.
        return connections[self.cleaned_data['alias']]

    @cached_property
    def cursor(self):
        return self.connection.cursor()
| from __future__ import unicode_literals
import json
import hashlib
from django import forms
from django.conf import settings
from django.db import connections
from django.utils.encoding import force_text
from django.utils.functional import cached_property
from django.core.exceptions import ValidationError
from debug_toolbar.utils.sql import reformat_sql
class SQLSelectForm(forms.Form):
    """
    Validate params

        sql: The sql statement with interpolated params
        raw_sql: The sql statement with placeholders
        params: JSON encoded parameter values
        duration: time for SQL to execute passed in from toolbar just for redisplay
        hash: the hash of (secret + sql + params) for tamper checking
    """
    sql = forms.CharField()
    raw_sql = forms.CharField()
    params = forms.CharField()
    alias = forms.CharField(required=False, initial='default')
    duration = forms.FloatField()
    hash = forms.CharField()

    def __init__(self, *args, **kwargs):
        # Pre-compute the tamper-detection hash when initial data is given.
        initial = kwargs.get('initial', None)
        if initial is not None:
            initial['hash'] = self.make_hash(initial)
        super(SQLSelectForm, self).__init__(*args, **kwargs)
        # All fields are round-tripped invisibly through the toolbar panel.
        for name in self.fields:
            self.fields[name].widget = forms.HiddenInput()

    def clean_raw_sql(self):
        # Safety gate: refuse anything that is not a SELECT statement.
        value = self.cleaned_data['raw_sql']
        if not value.lower().strip().startswith('select'):
            raise ValidationError("Only 'select' queries are allowed.")
        return value

    def clean_params(self):
        # Decode the JSON-encoded parameter list submitted by the toolbar.
        value = self.cleaned_data['params']
        try:
            return json.loads(value)
        except ValueError:
            raise ValidationError('Is not valid JSON')

    def clean_alias(self):
        value = self.cleaned_data['alias']
        if value not in connections:
            raise ValidationError("Database alias '%s' not found" % value)
        return value

    def clean_hash(self):
        # Recompute the hash from the submitted data; a mismatch means the
        # sql/params were modified client-side.
        hash = self.cleaned_data['hash']
        if hash != self.make_hash(self.data):
            raise ValidationError('Tamper alert')
        return hash

    def reformat_sql(self):
        return reformat_sql(self.cleaned_data['sql'])

    def make_hash(self, data):
        # BUG FIX: sql/params may be bytestrings containing non-ASCII data;
        # implicit concatenation with the text SECRET_KEY raised
        # UnicodeDecodeError. Coerce every component to text first.
        params = (force_text(settings.SECRET_KEY) +
                  force_text(data['sql']) + force_text(data['params']))
        return hashlib.sha1(params.encode('utf-8')).hexdigest()

    @property
    def connection(self):
        # Connection for the validated alias; requires is_valid() first.
        return connections[self.cleaned_data['alias']]

    @cached_property
    def cursor(self):
        return self.connection.cursor()
| bsd-3-clause | Python |
bb0fdba3db129d7eed38622e01c4881ec42ba6a3 | Fix indentation | lyuboraykov/pizza-delivery,lyuboraykov/pizza-delivery,lyuboraykov/pizza-delivery,lyuboraykov/pizza-delivery,lyuboraykov/pizza-delivery | core/lib/pizzas_repository.py | core/lib/pizzas_repository.py | import sys
sys.path.append('../gen-py')
from pizza_delivery import ttypes
from lib.db import db
class PizzasRepository(object):
    """Read-only access to the pizzas stored under /pizzas in the db."""

    @classmethod
    def get_available_pizzas(cls):
        """Return a dict mapping pizza id (utf-8 encoded) -> ttypes.Pizza.

        Empty placeholder entries in the database are skipped.
        """
        raw_entries = db.get('/pizzas', None)
        available = {}
        for key, entry in raw_entries.iteritems():
            if entry:
                available[key.encode('utf-8')] = cls._get_pizza_from_dict(entry)
        return available

    @classmethod
    def _get_pizza_from_dict(cls, pizza_dict):
        """Build the Thrift Pizza struct from its raw dict representation."""
        products = [p.encode('utf-8') for p in pizza_dict['products']]
        return ttypes.Pizza(int(pizza_dict['id']),
                            pizza_dict['imageUrl'],
                            products)
| import sys
sys.path.append('../gen-py')
from pizza_delivery import ttypes
from lib.db import db
class PizzasRepository(object):
    # Read-only access to the pizzas stored under /pizzas in the backing db.

    @classmethod
    def get_available_pizzas(cls):
        # Returns {pizza_id (utf-8 encoded) -> ttypes.Pizza}, skipping empty
        # placeholder entries. NOTE: iteritems() is the Python 2 dict API.
        pizzas_dict = db.get('/pizzas', None)
        pizzas = {}
        for pizza_id, pizza_dict in pizzas_dict.iteritems():
            if not pizza_dict:
                continue
            pizzas[pizza_id.encode('utf-8')] = cls._get_pizza_from_dict(pizza_dict)
        return pizzas

    @classmethod
    def _get_pizza_from_dict(cls, pizza_dict):
        # Builds the Thrift Pizza struct from its raw dict representation.
        pizza = ttypes.Pizza(int(pizza_dict['id']),
                             pizza_dict['imageUrl'],
                             [p.encode('utf-8') for p in pizza_dict['products']])
        return pizza
| mit | Python |
1396f52bb0aae7f6b50c3c41b66ac56c1afa0006 | Fix tweeton bug | joshcvt/natinal | notifiers/TwitterNotifier.py | notifiers/TwitterNotifier.py | from NotifierClass import Notifier
import twitter
from datetime import datetime, timedelta
import time
import threading
class TwitterNotifier(Notifier):
    # Posts game results to Twitter. All config options except the four
    # OAuth credentials are optional and fall back to defaults.

    def __init__(self,cfgParser,insec):
        """Read the [insec] section of the config; missing optional keys
        fall back to their defaults via the bare try/excepts below."""
        self.header = insec
        try:
            self.screenname = cfgParser.get(insec,"username").strip()
        except:
            self.screenname = ''
        # OAuth credentials are required; a missing key raises here.
        self.conskey = cfgParser.get(insec,"conskey").strip()
        self.conssecret = cfgParser.get(insec,"conssecret").strip()
        self.acctokenkey = cfgParser.get(insec,"acctokenkey").strip()
        self.acctokensecret = cfgParser.get(insec,"acctokensecret").strip()
        try:
            # Comma-separated list of result kinds that trigger a tweet.
            self.tweeton = cfgParser.get(insec,"tweeton").strip().split(",")
        except:
            self.tweeton = ["win","loss","tie","neutral"]
        try:
            self.wintext = cfgParser.get(insec,"wintext").strip()
        except:
            self.wintext = ""
        try:
            self.losstext = cfgParser.get(insec,"losstext").strip()
        except:
            self.losstext = ""
        try:
            self.gamelink = cfgParser.getboolean(insec,"link")
        except:
            self.gamelink = False
        try:
            # Whether the score text goes before or after the result text.
            self.scoretext = cfgParser.get(insec,"scoretext").strip().lower()
            if not (self.scoretext in ["before","after"]):
                raise Exception("scoretext not before or after")
        except:
            self.scoretext = "before"

    def pushResults(self,newres):
        # the only thing this cares about is the final, and *that* only matters once x minutes have passed.
        if "finals" in newres:
            for finalDict in newres["finals"]:
                if "result" in finalDict:
                    if (finalDict["result"] == "win" and "win" in self.tweeton):
                        restext = self.wintext
                    elif (finalDict["result"] == "loss" and "loss" in self.tweeton):
                        restext = self.losstext
                    else:
                        # Any other result (tie/neutral, or filtered-out
                        # win/loss) gets the generic text; the tweeton check
                        # below decides whether it is actually posted.
                        restext = "TIE GAME"
                    if (self.scoretext == "before"):
                        msg = finalDict["final"] + ". " + restext
                        if self.gamelink:
                            # NOTE(review): self.gamelink is a bool from
                            # getboolean(); concatenating it to a string looks
                            # suspect — confirm the intended link text.
                            msg += " " + self.gamelink
                    else:
                        msg = restext
                        if self.gamelink:
                            msg += self.gamelink + " "
                        msg += finalDict["final"]
                    # Only tweet result kinds the config asked for.
                    if (finalDict["result"] in self.tweeton):
                        self._tweet(msg)

    def _tweet(self,message):
        # Posts a single status update using the configured credentials.
        api = twitter.Api(consumer_key=self.conskey, consumer_secret=self.conssecret,
                          access_token_key=self.acctokenkey, access_token_secret=self.acctokensecret)
        api.PostUpdate(message)
| from NotifierClass import Notifier
import twitter
from datetime import datetime, timedelta
import time
import threading
class TwitterNotifier(Notifier):
    # Posts game results to Twitter. All config options except the four
    # OAuth credentials are optional and fall back to defaults.

    def __init__(self,cfgParser,insec):
        """Read the [insec] section of the config; missing optional keys
        fall back to their defaults via the bare try/excepts below."""
        self.header = insec
        try:
            self.screenname = cfgParser.get(insec,"username").strip()
        except:
            self.screenname = ''
        # OAuth credentials are required; a missing key raises here.
        self.conskey = cfgParser.get(insec,"conskey").strip()
        self.conssecret = cfgParser.get(insec,"conssecret").strip()
        self.acctokenkey = cfgParser.get(insec,"acctokenkey").strip()
        self.acctokensecret = cfgParser.get(insec,"acctokensecret").strip()
        try:
            # Comma-separated list of result kinds that trigger a tweet.
            self.tweeton = cfgParser.get(insec,"tweeton").strip().split(",")
        except:
            self.tweeton = ["win","loss","tie","neutral"]
        try:
            self.wintext = cfgParser.get(insec,"wintext").strip()
        except:
            self.wintext = ""
        try:
            self.losstext = cfgParser.get(insec,"losstext").strip()
        except:
            self.losstext = ""
        try:
            self.gamelink = cfgParser.getboolean(insec,"link")
        except:
            self.gamelink = False
        try:
            # Whether the score text goes before or after the result text.
            self.scoretext = cfgParser.get(insec,"scoretext").strip().lower()
            if not (self.scoretext in ["before","after"]):
                raise Exception("scoretext not before or after")
        except:
            self.scoretext = "before"

    def pushResults(self,newres):
        # the only thing this cares about is the final, and *that* only matters once x minutes have passed.
        if "finals" in newres:
            for finalDict in newres["finals"]:
                if "result" in finalDict:
                    if (finalDict["result"] == "win" and "win" in self.tweeton):
                        restext = self.wintext
                    elif (finalDict["result"] == "loss" and "loss" in self.tweeton):
                        restext = self.losstext
                    else:
                        # BUG FIX: previously restext was only bound in the
                        # win/loss branches, so a tie/neutral result (or a
                        # win/loss filtered out by tweeton) raised
                        # UnboundLocalError on the msg lines below.
                        restext = "TIE GAME"
                    if (self.scoretext == "before"):
                        msg = finalDict["final"] + ". " + restext
                        if self.gamelink:
                            msg += " " + self.gamelink
                    else:
                        msg = restext
                        if self.gamelink:
                            msg += self.gamelink + " "
                        msg += finalDict["final"]
                    # BUG FIX: only tweet result kinds the config asked for;
                    # previously every final was tweeted unconditionally.
                    if (finalDict["result"] in self.tweeton):
                        self._tweet(msg)

    def _tweet(self,message):
        # Posts a single status update using the configured credentials.
        api = twitter.Api(consumer_key=self.conskey, consumer_secret=self.conssecret,
                          access_token_key=self.acctokenkey, access_token_secret=self.acctokensecret)
        api.PostUpdate(message)
| mit | Python |
4718ee6ecd3fffea2f314d2929fc18dea86fc370 | add cython_anchor | CharlesShang/FastMaskRCNN,CharlesShang/FastMaskRCNN,CharlesShang/FastMaskRCNN,CharlesShang/FastMaskRCNN | libs/boxes/__init__.py | libs/boxes/__init__.py | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from . import cython_nms
from . import cython_bbox
import nms
import timer
from .anchor import anchors
from .anchor import anchors_plane
from .roi import roi_cropping
from .roi import roi_cropping
from . import cython_anchor | # --------------------------------------------------------
# Fast R-CNN
# Copyright (c) 2015 Microsoft
# Licensed under The MIT License [see LICENSE for details]
# Written by Ross Girshick
# --------------------------------------------------------
from . import cython_nms
from . import cython_bbox
import nms
import timer
from .anchor import anchors
from .anchor import anchors_plane
from .roi import roi_cropping
from .roi import roi_cropping | apache-2.0 | Python |
69358c5dce13f4a031b51d254824b6b3d4355847 | Move some code from typesystem.__init__ | ssarangi/numba,numba/numba,numba/numba,jriehl/numba,stefanseefeld/numba,shiquanwang/numba,IntelLabs/numba,sklam/numba,pombredanne/numba,cpcloud/numba,stonebig/numba,gmarkall/numba,shiquanwang/numba,seibert/numba,gmarkall/numba,cpcloud/numba,jriehl/numba,gdementen/numba,pombredanne/numba,GaZ3ll3/numba,stonebig/numba,numba/numba,ssarangi/numba,ssarangi/numba,numba/numba,pitrou/numba,GaZ3ll3/numba,sklam/numba,gdementen/numba,cpcloud/numba,pombredanne/numba,IntelLabs/numba,seibert/numba,GaZ3ll3/numba,stefanseefeld/numba,stuartarchibald/numba,stuartarchibald/numba,IntelLabs/numba,stonebig/numba,stonebig/numba,jriehl/numba,jriehl/numba,shiquanwang/numba,sklam/numba,pombredanne/numba,gdementen/numba,pitrou/numba,gdementen/numba,GaZ3ll3/numba,stefanseefeld/numba,stuartarchibald/numba,seibert/numba,stefanseefeld/numba,GaZ3ll3/numba,stefanseefeld/numba,sklam/numba,seibert/numba,pitrou/numba,stonebig/numba,gmarkall/numba,IntelLabs/numba,numba/numba,stuartarchibald/numba,IntelLabs/numba,pitrou/numba,gmarkall/numba,jriehl/numba,sklam/numba,pombredanne/numba,pitrou/numba,gmarkall/numba,ssarangi/numba,cpcloud/numba,cpcloud/numba,gdementen/numba,ssarangi/numba,seibert/numba,stuartarchibald/numba | numba/typesystem/__init__.py | numba/typesystem/__init__.py | from basetypes import *
from exttypes import *
from closuretypes import *
from ssatypes import *
from templatetypes import *
from typemapper import *
from shorthands import *
__all__ = minitypes.__all__ + [
'O', 'b1', 'i1', 'i2', 'i4', 'i8', 'u1', 'u2', 'u4', 'u8',
'f4', 'f8', 'f16', 'c8', 'c16', 'c32', 'template',
]
| from basetypes import *
from exttypes import *
from closuretypes import *
from ssatypes import *
from templatetypes import *
from typemapper import *
__all__ = minitypes.__all__ + [
'O', 'b1', 'i1', 'i2', 'i4', 'i8', 'u1', 'u2', 'u4', 'u8',
'f4', 'f8', 'f16', 'c8', 'c16', 'c32', 'template',
]
#------------------------------------------------------------------------
# Utilities
#------------------------------------------------------------------------
def is_obj(type):
    # True for types handled as generic Python objects (plain object types
    # and array types). NB: the parameter deliberately shadows the builtin
    # `type`, matching the surrounding codebase's convention.
    return type.is_object or type.is_array
# Map (itemsize, signedness) -> the corresponding native integral type.
native_type_dict = dict(
    ((t.itemsize, t.signed), t) for t in minitypes.native_integral
)


def promote_to_native(int_type):
    """Return the native integral type with int_type's size and signedness."""
    return native_type_dict[(int_type.itemsize, int_type.signed)]
def promote_closest(context, int_type, candidates):
    """
    promote_closest(Py_ssize_t, [int_, long_, longlong]) -> longlong
    """
    # Pick the first candidate that int_type promotes to without changing
    # size or signedness; fall back to the widest (last) candidate.
    for cand in candidates:
        result = context.promote_types(int_type, cand)
        if result.itemsize == cand.itemsize and result.signed == cand.signed:
            return cand
    return candidates[-1]
#------------------------------------------------------------------------
# Type shorthands
#------------------------------------------------------------------------
# Short dtype-style aliases for the types imported above
# (NumPy-like codes: letter = kind, number = itemsize in bytes).
O = object_
b1 = bool_
# signed integers
i1 = int8
i2 = int16
i4 = int32
i8 = int64
# unsigned integers
u1 = uint8
u2 = uint16
u4 = uint32
u8 = uint64
# floating point
f4 = float32
f8 = float64
f16 = float128
# complex
c8 = complex64
c16 = complex128
c32 = complex256
| bsd-2-clause | Python |
3ac9c53ffc6feb926ebde8dfe930f9235b2b6817 | Use explicit status | avanov/Rhetoric,avanov/Rhetoric | rhetoric/config/rendering.py | rhetoric/config/rendering.py | from django.http import HttpResponse
from django.core.serializers.json import DjangoJSONEncoder
from django.shortcuts import render
json_encode = DjangoJSONEncoder().encode
class JsonRendererFactory(object):
    """Renderer factory producing JSON responses from view return values."""

    def __init__(self, name):
        self.name = name

    def __call__(self, request, view_response):
        resp = request.response
        resp.content_type = 'application/json; charset=utf-8'
        resp.content = json_encode(view_response)
        # Build a fresh HttpResponse carrying the content, content type and
        # status of the per-request response object.
        return HttpResponse(resp.content,
                            content_type=resp.content_type,
                            status=resp.status_code)
class StringRendererFactory(object):
    """Renderer factory returning the view's value verbatim as plain text."""

    def __init__(self, name):
        self.name = name

    def __call__(self, request, view_response):
        resp = request.response
        resp.content_type = 'text/plain; charset=utf-8'
        resp.content = view_response
        return resp
class DjangoTemplateRendererFactory(object):
    """Renderer factory rendering a context dict through the Django
    template named by ``name``."""

    def __init__(self, name):
        self.name = name

    def __call__(self, request, context_dict):
        resp = request.response
        # Carry the content type and status already set on the response
        # through to the rendered HttpResponse.
        kwargs = {
            'content_type': resp['Content-Type'],
            'status': resp.status_code,
        }
        return render(request, self.name, context_dict, **kwargs)
BUILTIN_RENDERERS = {
'json': JsonRendererFactory,
'string': StringRendererFactory,
'.html': DjangoTemplateRendererFactory,
}
class RenderingConfiguratorMixin(object):
    """Registry of renderer factories, keyed by renderer name or template
    suffix. The concrete configurator provides ``self.renderers``."""

    def add_renderer(self, name, factory):
        self.renderers[name] = factory

    def get_renderer(self, name):
        """Instantiate the renderer registered for ``name``.

        Template names dispatch on their suffix (e.g. "index.html" ->
        ".html"); names without a dot are looked up verbatim.
        """
        if '.' in name:
            renderer_name = name[name.rindex('.'):]
        else:
            renderer_name = name
        try:
            return self.renderers[renderer_name](name)
        except KeyError:
            raise ValueError('No such renderer factory {}'.format(renderer_name))
| from django.http import HttpResponse
from django.core.serializers.json import DjangoJSONEncoder
from django.shortcuts import render
json_encode = DjangoJSONEncoder().encode
class JsonRendererFactory(object):
    """Renderer factory producing JSON responses from view return values."""

    def __init__(self, name):
        self.name = name

    def __call__(self, request, view_response):
        response = request.response
        response.content_type = 'application/json; charset=utf-8'
        response.content = json_encode(view_response)
        # BUG FIX: pass the status explicitly. HttpResponse defaults to 200,
        # which silently discarded any non-200 status set on request.response.
        return HttpResponse(response.content,
                            content_type=response.content_type,
                            status=response.status_code)
class StringRendererFactory(object):
    # Renders the view's return value verbatim as a plain-text response.

    def __init__(self, name):
        self.name = name

    def __call__(self, request, view_response):
        response = request.response
        response.content_type = 'text/plain; charset=utf-8'
        response.content = view_response
        return response
class DjangoTemplateRendererFactory(object):
    # Renders a context dict through the Django template named by `name`.

    def __init__(self, name):
        self.name = name

    def __call__(self, request, context_dict):
        response = request.response
        # Preserve the content type and status already set on the response.
        httpresponse_kwargs = {
            'content_type': response['Content-Type'],
            'status': response.status_code
        }
        return render(request, self.name, context_dict, **httpresponse_kwargs)
BUILTIN_RENDERERS = {
'json': JsonRendererFactory,
'string': StringRendererFactory,
'.html': DjangoTemplateRendererFactory,
}
class RenderingConfiguratorMixin(object):
    # Registry of renderer factories keyed by renderer name or template
    # suffix. The concrete configurator provides `self.renderers`.

    def add_renderer(self, name, factory):
        self.renderers[name] = factory

    def get_renderer(self, name):
        # Template names dispatch on their suffix (e.g. "index.html" ->
        # ".html"); names without a dot are looked up verbatim.
        try:
            template_suffix = name.rindex(".")
        except ValueError:
            # period is not found
            renderer_name = name
        else:
            renderer_name = name[template_suffix:]
        try:
            return self.renderers[renderer_name](name)
        except KeyError:
            raise ValueError('No such renderer factory {}'.format(renderer_name))
| mit | Python |
8b90343deb066df5173e860796f6eb399f3a9ffb | Update tests to version 1.1.0 | jmluy/xpython,exercism/python,behrtam/xpython,N-Parsons/exercism-python,pheanex/xpython,pheanex/xpython,exercism/xpython,exercism/xpython,exercism/python,behrtam/xpython,smalley/python,jmluy/xpython,N-Parsons/exercism-python,smalley/python | exercises/rotational-cipher/rotational_cipher_test.py | exercises/rotational-cipher/rotational_cipher_test.py | import unittest
import rotational_cipher
# Tests adapted from `problem-specifications//canonical-data.json` @ v1.1.0
class RotationalCipher(unittest.TestCase):
    """Exercism rotational-cipher tests (canonical data v1.1.0).

    Each case checks rotational_cipher.rotate(text, key): letters rotate
    within their case; digits, spaces and punctuation pass through.
    """

    def test_rotate_a_by_0(self):
        self.assertEqual(rotational_cipher.rotate('a', 0), 'a')

    def test_rotate_a_by_1(self):
        self.assertEqual(rotational_cipher.rotate('a', 1), 'b')

    def test_rotate_a_by_26(self):
        # A full-alphabet rotation is the identity.
        self.assertEqual(rotational_cipher.rotate('a', 26), 'a')

    def test_rotate_m_by_13(self):
        self.assertEqual(rotational_cipher.rotate('m', 13), 'z')

    def test_rotate_n_by_13_with_wrap_around_alphabet(self):
        self.assertEqual(rotational_cipher.rotate('n', 13), 'a')

    def test_rotate_capital_letters(self):
        self.assertEqual(rotational_cipher.rotate('OMG', 5), 'TRL')

    def test_rotate_spaces(self):
        self.assertEqual(rotational_cipher.rotate('O M G', 5), 'T R L')

    def test_rotate_numbers(self):
        self.assertEqual(
            rotational_cipher.rotate('Testing 1 2 3 testing', 4),
            'Xiwxmrk 1 2 3 xiwxmrk')

    def test_rotate_punctuation(self):
        self.assertEqual(
            rotational_cipher.rotate("Let's eat, Grandma!", 21),
            "Gzo'n zvo, Bmviyhv!")

    def test_rotate_all_letters(self):
        # ROT13 of the full pangram.
        self.assertEqual(
            rotational_cipher.rotate("The quick brown fox jumps"
                                     " over the lazy dog.", 13),
            "Gur dhvpx oebja sbk whzcf bire gur ynml qbt.")
if __name__ == '__main__':
unittest.main()
| import unittest
import rotational_cipher
# test cases adapted from `x-common//canonical-data.json` @ version: 1.0.0
class RotationalCipher(unittest.TestCase):
    """Exercism rotational-cipher tests (canonical data v1.0.0).

    Each case checks rotational_cipher.rotate(text, key): letters rotate
    within their case; digits, spaces and punctuation pass through.
    """

    def test_rotate_a_by_1(self):
        self.assertEqual(rotational_cipher.rotate('a', 1), 'b')

    def test_rotate_a_by_26(self):
        # A full-alphabet rotation is the identity.
        self.assertEqual(rotational_cipher.rotate('a', 26), 'a')

    def test_rotate_a_by_0(self):
        self.assertEqual(rotational_cipher.rotate('a', 0), 'a')

    def test_rotate_m_by_13(self):
        self.assertEqual(rotational_cipher.rotate('m', 13), 'z')

    def test_rotate_n_by_13_with_wrap_around_alphabet(self):
        self.assertEqual(rotational_cipher.rotate('n', 13), 'a')

    def test_rotate_capital_letters(self):
        self.assertEqual(rotational_cipher.rotate('OMG', 5), 'TRL')

    def test_rotate_spaces(self):
        self.assertEqual(rotational_cipher.rotate('O M G', 5), 'T R L')

    def test_rotate_numbers(self):
        self.assertEqual(
            rotational_cipher.rotate('Testing 1 2 3 testing', 4),
            'Xiwxmrk 1 2 3 xiwxmrk')

    def test_rotate_punctuation(self):
        self.assertEqual(
            rotational_cipher.rotate("Let's eat, Grandma!", 21),
            "Gzo'n zvo, Bmviyhv!")

    def test_rotate_all_letters(self):
        # ROT13 of the full pangram.
        self.assertEqual(
            rotational_cipher.rotate("The quick brown fox jumps"
                                     " over the lazy dog.", 13),
            "Gur dhvpx oebja sbk whzcf bire gur ynml qbt.")
if __name__ == '__main__':
unittest.main()
| mit | Python |
6bdcd8df3ca419cc9dcadc9d234e33133f489778 | Add test for find-qca-path. | bgyori/bioagents,sorgerlab/bioagents | tests/qca_test.py | tests/qca_test.py | import json
import unittest
import requests
from ndex.beta.path_scoring import PathScoring
from bioagents.qca import QCA
from kqml.kqml_list import KQMLList
from tests.util import ekb_from_text
from bioagents.qca.qca_module import QCA_Module
# BELOW ARE OLD QCA TESTS
def test_improved_path_ranking():
    """Smoke test: QCA finds at least one causal path E2F1 -> PTEN."""
    engine = QCA()
    paths = engine.find_causal_path(["PTEN"], ["E2F1"])
    print(paths)
    assert len(paths) > 0
def test_scratch():
    """Exercise the NDEx directed-path REST endpoint and path scoring."""
    source_names = ["AKT1", "AKT2", "AKT3"]
    target_names = ["CCND1"]
    results_list = []
    directed_path_query_url = \
        'http://general.bigmech.ndexbio.org/directedpath/query'

    # Assemble REST url
    uuid_prior = "84f321c6-dade-11e6-86b1-0ac135e8bacf"
    target = ",".join(target_names)
    source = ",".join(source_names)
    max_number_of_paths = 200
    url = '%s?source=%s&target=%s&uuid=%s&server=%s&pathnum=%s' % (
        directed_path_query_url,
        source,
        target,
        uuid_prior,
        'www.ndexbio.org',
        str(max_number_of_paths)
    )

    r = requests.post(url)
    result_json = json.loads(r.content)

    edge_results = result_json.get("data").get("forward_english")
    path_scoring = PathScoring()

    A_all_scores = []

    for i, edge in enumerate(edge_results):
        # BUG FIX: was the Python 2 statement `print len(edge)`, a syntax
        # error under Python 3 and inconsistent with print() calls below.
        print(len(edge))
        # Keep the best (lowest-scored) ranked edge for this path.
        top_edge = None
        for ranked_edges in path_scoring.cx_edges_to_tuples(edge, "A"):
            if top_edge is None:
                top_edge = ranked_edges
            else:
                if ranked_edges[1] < top_edge[1]:
                    top_edge = ranked_edges

        A_all_scores.append(("A" + str(i), top_edge[1]))

    print(A_all_scores)
    race_results = path_scoring.calculate_average_position(A_all_scores, [])
    print(race_results)
    print(results_list)
def test_find_qca_path():
    """FIND-QCA-PATH should succeed and return at least one path."""
    content = KQMLList('FIND-QCA-PATH')
    content.sets('target', ekb_from_text('MAP2K1'))
    content.sets('source', ekb_from_text('BRAF'))
    qca_mod = QCA_Module(testing=True)
    resp = qca_mod.respond_find_qca_path(content)
    assert resp is not None, "No response received."
    # BUG FIX: compare by equality; `is` against a string literal relies on
    # CPython interning and is not a reliable comparison.
    assert resp.head() == "SUCCESS", \
        "QCA failed task for reason: %s" % resp.gets('reason')
    assert resp.get('paths') is not None, "Did not find paths."
    return
if __name__ == '__main__':
unittest.main()
| import json
import unittest
import requests
from ndex.beta.path_scoring import PathScoring
from bioagents.qca import QCA
# BELOW ARE OLD QCA TESTS
def test_improved_path_ranking():
    # Smoke test: QCA finds at least one causal path E2F1 -> PTEN.
    qca = QCA()
    sources = ["E2F1"]
    targets = ["PTEN"]
    qca_results2 = qca.find_causal_path(targets, sources)
    print(qca_results2)
    assert len(qca_results2) > 0
def test_scratch():
    """Exercise the NDEx directed-path REST endpoint and path scoring."""
    source_names = ["AKT1", "AKT2", "AKT3"]
    target_names = ["CCND1"]
    results_list = []
    directed_path_query_url = \
        'http://general.bigmech.ndexbio.org/directedpath/query'

    # Assemble REST url
    uuid_prior = "84f321c6-dade-11e6-86b1-0ac135e8bacf"
    target = ",".join(target_names)
    source = ",".join(source_names)
    max_number_of_paths = 200
    url = '%s?source=%s&target=%s&uuid=%s&server=%s&pathnum=%s' % (
        directed_path_query_url,
        source,
        target,
        uuid_prior,
        'www.ndexbio.org',
        str(max_number_of_paths)
    )

    r = requests.post(url)
    result_json = json.loads(r.content)

    edge_results = result_json.get("data").get("forward_english")
    path_scoring = PathScoring()

    A_all_scores = []

    for i, edge in enumerate(edge_results):
        # BUG FIX: was the Python 2 statement `print len(edge)`, a syntax
        # error under Python 3 and inconsistent with print() calls below.
        print(len(edge))
        # Keep the best (lowest-scored) ranked edge for this path.
        top_edge = None
        for ranked_edges in path_scoring.cx_edges_to_tuples(edge, "A"):
            if top_edge is None:
                top_edge = ranked_edges
            else:
                if ranked_edges[1] < top_edge[1]:
                    top_edge = ranked_edges

        A_all_scores.append(("A" + str(i), top_edge[1]))

    print(A_all_scores)
    race_results = path_scoring.calculate_average_position(A_all_scores, [])
    print(race_results)
    print(results_list)
if __name__ == '__main__':
unittest.main()
| bsd-2-clause | Python |
3fb07d4d9d2adfcee4ed9550750b7c474e5b8ccc | fix sql migrations not being committed | pajlada/tyggbot,pajlada/pajbot,pajlada/tyggbot,pajlada/pajbot,pajlada/tyggbot,pajlada/tyggbot,pajlada/pajbot,pajlada/pajbot | pajbot/migration/db.py | pajbot/migration/db.py | from contextlib import contextmanager
class DatabaseMigratable:
    """Schema-migration adapter over a DB-API connection, tracking the
    current revision in a single-row ``schema_version`` table."""

    def __init__(self, conn):
        self.conn = conn

    @contextmanager
    def create_resource(self):
        """Yield a cursor with schema_version ensured; COMMIT on success,
        ROLLBACK on any error."""
        with self.conn.cursor() as cur:
            try:
                cur.execute("CREATE TABLE IF NOT EXISTS schema_version(revision_id INT NOT NULL)")
                yield cur
                cur.execute("COMMIT")
            except:
                # Bare except on purpose: BaseException (e.g. GeneratorExit)
                # must also trigger the rollback before re-raising.
                cur.execute("ROLLBACK")
                raise

    def get_current_revision(self, cursor):
        """Return the stored revision id, or None if none is recorded."""
        cursor.execute("SELECT revision_id FROM schema_version")
        row = cursor.fetchone()
        return None if row is None else row[0]

    def set_revision(self, cursor, id):
        """Replace the stored revision with ``id``."""
        cursor.execute("DELETE FROM schema_version")
        cursor.execute("INSERT INTO schema_version(revision_id) VALUES (%s)", (id,))
| from contextlib import contextmanager
class DatabaseMigratable:
    """Schema-migration adapter over a DB-API connection, tracking the
    current revision in a single-row ``schema_version`` table."""

    def __init__(self, conn):
        self.conn = conn

    @contextmanager
    def create_resource(self):
        """Yield a cursor with schema_version ensured; COMMIT on success,
        ROLLBACK on any error."""
        with self.conn.cursor() as cursor:
            # The driver begins a transaction implicitly on the first
            # statement. BUG FIX: it must be committed explicitly, otherwise
            # every change made through this cursor is silently discarded
            # when the connection closes (migrations were never persisted).
            try:
                cursor.execute("CREATE TABLE IF NOT EXISTS schema_version(revision_id INT NOT NULL)")
                yield cursor
                cursor.execute("COMMIT")
            except:
                cursor.execute("ROLLBACK")
                raise

    def get_current_revision(self, cursor):
        """Return the stored revision id, or None if none is recorded."""
        cursor.execute("SELECT revision_id FROM schema_version")
        row = cursor.fetchone()
        if row is not None:
            return row[0]
        else:
            return None

    def set_revision(self, cursor, id):
        """Replace the stored revision with ``id``."""
        cursor.execute("DELETE FROM schema_version")
        cursor.execute("INSERT INTO schema_version(revision_id) VALUES (%s)", (id,))
| mit | Python |
293101edfd5226a4804c9b01360acb5c6dca4342 | Bump version | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | radar/__init__.py | radar/__init__.py | __version__ = '2.45.3'
| __version__ = '2.45.2'
| agpl-3.0 | Python |
704b10469628c9432c030ada3e8126594f8cf774 | Bump version | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | radar/__init__.py | radar/__init__.py | __version__ = '2.45.1'
| __version__ = '2.45.0'
| agpl-3.0 | Python |
a6ed415e887930322e821ba646e43755e15ac157 | add attribute accuracy to semantic_segmentation_loss | pfnet/chainercv,chainer/chainercv,yuyu2172/chainercv,yuyu2172/chainercv,chainer/chainercv | chainercv/links/loss/semantic_segmentation_loss.py | chainercv/links/loss/semantic_segmentation_loss.py | import chainer
from chainer import cuda
import chainer.functions as F
from chainer import reporter
import numpy as np
from chainercv.evaluations import eval_semantic_segmentation
class PixelwiseSoftmaxClassifier(chainer.Chain):
    """Wraps a semantic-segmentation predictor with a per-pixel softmax
    cross-entropy loss and (optionally) segmentation accuracy reporting."""

    def __init__(self, model, ignore_label=-1, class_weight=None,
                 compute_accuracy=True):
        super(PixelwiseSoftmaxClassifier, self).__init__(predictor=model)
        self.n_class = model.n_class
        self.ignore_label = ignore_label
        # Optional per-class loss weights, stored as float32 for chainer.
        if class_weight is not None:
            self.class_weight = np.asarray(class_weight, dtype=np.float32)
        else:
            self.class_weight = class_weight
        self.compute_accuracy = compute_accuracy

    def to_cpu(self):
        # Keep the class-weight array on the same device as the chain.
        super(PixelwiseSoftmaxClassifier, self).to_cpu()
        if self.class_weight is not None:
            self.class_weight = cuda.to_cpu(self.class_weight)

    def to_gpu(self):
        super(PixelwiseSoftmaxClassifier, self).to_gpu()
        if self.class_weight is not None:
            self.class_weight = cuda.to_gpu(self.class_weight)

    def __call__(self, x, t):
        # x: input batch for the predictor; t: integer label map
        # (shapes are whatever self.predictor expects — not checked here).
        self.y = self.predictor(x)
        self.loss = F.softmax_cross_entropy(
            self.y, t, class_weight=self.class_weight,
            ignore_label=self.ignore_label)
        reporter.report({'loss': self.loss}, self)
        self.accuracy = None
        if self.compute_accuracy:
            label = self.xp.argmax(self.y.data, axis=1)
            # accuracy is the 4-tuple returned by eval_semantic_segmentation:
            # (pixel acc, mean pixel acc, mean IoU, freq-weighted IoU).
            self.accuracy = eval_semantic_segmentation(
                label, t.data, self.n_class)
            reporter.report({
                'pixel_accuracy': self.xp.mean(self.accuracy[0]),
                'mean_pixel_accuracy': self.xp.mean(self.accuracy[1]),
                'mean_iou': self.xp.mean(self.accuracy[2]),
                'frequency_weighted_iou': self.xp.mean(self.accuracy[3])
            }, self)
        return self.loss
| import chainer
from chainer import cuda
import chainer.functions as F
from chainer import reporter
import numpy as np
from chainercv.evaluations import eval_semantic_segmentation
class PixelwiseSoftmaxClassifier(chainer.Chain):
    """Wraps a semantic-segmentation predictor with a per-pixel softmax
    cross-entropy loss and (optionally) segmentation accuracy reporting."""

    def __init__(self, model, ignore_label=-1, class_weight=None,
                 compute_accuracy=True):
        super(PixelwiseSoftmaxClassifier, self).__init__(predictor=model)
        self.n_class = model.n_class
        self.ignore_label = ignore_label
        # Optional per-class loss weights, stored as float32 for chainer.
        if class_weight is not None:
            self.class_weight = np.asarray(class_weight, dtype=np.float32)
        else:
            self.class_weight = class_weight
        self.compute_accuracy = compute_accuracy

    def to_cpu(self):
        # Keep the class-weight array on the same device as the chain.
        super(PixelwiseSoftmaxClassifier, self).to_cpu()
        if self.class_weight is not None:
            self.class_weight = cuda.to_cpu(self.class_weight)

    def to_gpu(self):
        super(PixelwiseSoftmaxClassifier, self).to_gpu()
        if self.class_weight is not None:
            self.class_weight = cuda.to_gpu(self.class_weight)

    def __call__(self, x, t):
        # x: input batch for the predictor; t: integer label map
        # (shapes are whatever self.predictor expects — not checked here).
        self.y = self.predictor(x)
        self.loss = F.softmax_cross_entropy(
            self.y, t, class_weight=self.class_weight,
            ignore_label=self.ignore_label)
        reporter.report({'loss': self.loss}, self)
        if self.compute_accuracy:
            label = self.xp.argmax(self.y.data, axis=1)
            # eval_semantic_segmentation returns (pixel acc, mean pixel acc,
            # mean IoU, frequency-weighted IoU).
            pas, mpas, mious, fwious =\
                eval_semantic_segmentation(label, t.data, self.n_class)
            reporter.report({
                'pixel_accuracy': self.xp.mean(pas),
                'mean_pixel_accuracy': self.xp.mean(mpas),
                'mean_iou': self.xp.mean(mious),
                'frequency_weighted_iou': self.xp.mean(fwious)
            }, self)
        return self.loss
| mit | Python |
ad04cf539a689af9609d8c7eea925e2fcbc0fa70 | Add limit and where options to rebuild_text_index | NUKnightLab/cityhallmonitor,NUKnightLab/cityhallmonitor,NUKnightLab/cityhallmonitor,NUKnightLab/cityhallmonitor | cityhallmonitor/management/commands/rebuild_text_index.py | cityhallmonitor/management/commands/rebuild_text_index.py | import logging
from django.core.management.base import BaseCommand
from cityhallmonitor.models import Document
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    help = 'For each document, force an update of its related fields and its postgres text index'

    def add_arguments(self, parser):
        parser.add_argument('--limit', type=int,
            help='Process up to LIMIT documents')
        parser.add_argument('--where',
            help='WHERE condition to filter documents')

    def handle(self, *args, **options):
        """Re-save every matching Document to rebuild its text index."""
        logger.info(
            'Rebuilding text index, limit=%(limit)s, where="%(where)s"' \
            % options)
        if options['where']:
            # Raw SQL filter supplied by the operator.
            qs = Document.objects.extra(where=[options['where']])
        else:
            qs = Document.objects.all()
        if options['limit']:
            qs = qs[:options['limit']]
        # BUG FIX: initialize the counter so the summary log below does not
        # raise NameError when the queryset is empty.
        count = 0
        for count, d in enumerate(qs, start=1):
            d._set_dependent_fields()
            d.save(update_text=True)
            if count % 1000 == 0:
                logger.debug("Processed %i documents" % count)
        logger.info('Done, processed %d documents\n' % count)
| import logging
from django.core.management.base import BaseCommand
from cityhallmonitor.models import Document
logger = logging.getLogger(__name__)
class Command(BaseCommand):
    help = 'For each document, force an update of its related fields and its postgres text index'

    def handle(self, *args, **options):
        """Re-save every Document to rebuild its text index."""
        # BUG FIX: this log call previously sat in the class body, so it ran
        # once at import time rather than when the command executes.
        logger.info('Rebuilding text index')
        for i, d in enumerate(Document.objects.all()):
            d._set_dependent_fields()
            d.save(update_text=True)
            if i % 1000 == 0:
                logger.debug("Processed %i documents" % i)
        logger.info('Done\n')
| mit | Python |
dca4357f0ef91125378f5a576fb18b4ef2202b8e | update cloudcare urls | qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq | corehq/apps/cloudcare/urls.py | corehq/apps/cloudcare/urls.py | from django.conf.urls import patterns, url, include
from corehq.apps.cloudcare.views import (
EditCloudcareUserPermissionsView,
CloudcareMain,
form_context, get_cases, filter_cases, get_apps_api, get_app_api,
get_fixtures, get_sessions, get_session_context, get_ledgers, render_form,
sync_db_api, default,
)
# URL patterns for the per-app Cloud Care pages (mounted under /apps/ below).
app_urls = patterns('corehq.apps.cloudcare.views',
    # JSON context for a single form within a module of an app.
    url(r'^view/(?P<app_id>[\w-]+)/modules-(?P<module_id>[\w-]+)/forms-(?P<form_id>[\w-]+)/context/$',
        form_context, name='cloudcare_form_context'),
    # Catch-all: the Cloud Care single-page app handles the remaining path.
    url(r'^(?P<urlPath>.*)$', CloudcareMain.as_view(), name='cloudcare_main'),
)

# JSON API endpoints backing the Cloud Care frontend (mounted under /api/ below).
api_urls = patterns('corehq.apps.cloudcare.views',
    url(r'^cases/$', get_cases, name='cloudcare_get_cases'),
    # Case lists filtered by a module's case filter, optionally scoped to a
    # parent case.
    url(r'^cases/module/(?P<app_id>[\w-]+)/modules-(?P<module_id>[\w-]+)/$',
        filter_cases, name='cloudcare_filter_cases'),
    url(r'^cases/module/(?P<app_id>[\w-]+)/modules-(?P<module_id>[\w-]+)/parent/(?P<parent_id>[\w-]+)/$',
        filter_cases, name='cloudcare_filter_cases_with_parent'),
    url(r'^apps/$', get_apps_api, name='cloudcare_get_apps'),
    url(r'^apps/(?P<app_id>[\w-]*)/$', get_app_api, name='cloudcare_get_app'),
    # Both fixture routes share one url name; the second narrows to a
    # specific fixture id.
    url(r'^fixtures/(?P<user_id>[\w-]*)/$', get_fixtures, name='cloudcare_get_fixtures'),
    url(r'^fixtures/(?P<user_id>[\w-]*)/(?P<fixture_id>[:\w-]*)$', get_fixtures,
        name='cloudcare_get_fixtures'),
    url(r'^sessions/$', get_sessions, name='cloudcare_get_sessions'),
    url(r'^sessions/(?P<session_id>[\w-]*)/$', get_session_context, name='cloudcare_get_session_context'),
    url(r'^ledgers/$', get_ledgers, name='cloudcare_get_ledgers'),
    url(r'^render_form/$', render_form, name='cloudcare_render_form'),
    url(r'^sync_db/$', sync_db_api, name='cloudcare_sync_db'),
)

# used in settings urls
settings_urls = patterns('corehq.apps.cloudcare.views',
    url(r'^app/', EditCloudcareUserPermissionsView.as_view(), name=EditCloudcareUserPermissionsView.urlname),
)

# Top-level routing for the cloudcare app.
urlpatterns = patterns('corehq.apps.cloudcare.views',
    url(r'^$', default, name='cloudcare_default'),
    url(r'^apps/', include(app_urls)),
    url(r'^api/', include(api_urls)),
)
| from django.conf.urls import patterns, url, include
from corehq.apps.cloudcare.views import (
EditCloudcareUserPermissionsView,
CloudcareMain,
)
app_urls = patterns('corehq.apps.cloudcare.views',
url(r'^view/(?P<app_id>[\w-]+)/modules-(?P<module_id>[\w-]+)/forms-(?P<form_id>[\w-]+)/context/$',
'form_context', name='cloudcare_form_context'),
url(r'^(?P<urlPath>.*)$', CloudcareMain.as_view(), name='cloudcare_main'),
)
api_urls = patterns('corehq.apps.cloudcare.views',
url(r'^cases/$', 'get_cases', name='cloudcare_get_cases'),
url(r'^cases/module/(?P<app_id>[\w-]+)/modules-(?P<module_id>[\w-]+)/$',
'filter_cases', name='cloudcare_filter_cases'),
url(r'^cases/module/(?P<app_id>[\w-]+)/modules-(?P<module_id>[\w-]+)/parent/(?P<parent_id>[\w-]+)/$',
'filter_cases', name='cloudcare_filter_cases_with_parent'),
url(r'^apps/$', 'get_apps_api', name='cloudcare_get_apps'),
url(r'^apps/(?P<app_id>[\w-]*)/$', 'get_app_api', name='cloudcare_get_app'),
url(r'^fixtures/(?P<user_id>[\w-]*)/$', 'get_fixtures', name='cloudcare_get_fixtures'),
url(r'^fixtures/(?P<user_id>[\w-]*)/(?P<fixture_id>[:\w-]*)$', 'get_fixtures',
name='cloudcare_get_fixtures'),
url(r'^sessions/$', 'get_sessions', name='cloudcare_get_sessions'),
url(r'^sessions/(?P<session_id>[\w-]*)/$', 'get_session_context', name='cloudcare_get_session_context'),
url(r'^ledgers/$', 'get_ledgers', name='cloudcare_get_ledgers'),
url(r'^render_form/$', 'render_form', name='cloudcare_render_form'),
url(r'^sync_db/$', 'sync_db_api', name='cloudcare_sync_db'),
)
# used in settings urls
settings_urls = patterns('corehq.apps.cloudcare.views',
url(r'^app/', EditCloudcareUserPermissionsView.as_view(), name=EditCloudcareUserPermissionsView.urlname),
)
urlpatterns = patterns('corehq.apps.cloudcare.views',
url(r'^$', 'default', name='cloudcare_default'),
url(r'^apps/', include(app_urls)),
url(r'^api/', include(api_urls)),
)
| bsd-3-clause | Python |
0480867b7ba9ae519d7ef7ab37bfb9c9a1553287 | Bump version | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | radar/__init__.py | radar/__init__.py | __version__ = '2.5.1'
| __version__ = '2.5.0'
| agpl-3.0 | Python |
66795b04e27a6de4f0d0ed4d52064cd3ba6e76b5 | Bump version | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | radar/__init__.py | radar/__init__.py | __version__ = '2.11.3'
| __version__ = '2.11.2'
| agpl-3.0 | Python |
c5cec60bc6b4c3e785bd616bcb5d5cff5770e408 | Add PPV tests along all LOS; correct lwidth comparisons | e-koch/TurbuStat,Astroua/TurbuStat | turbustat/simulator/tests/test_ppv.py | turbustat/simulator/tests/test_ppv.py |
from ..make_cube import make_ppv
from ..gen_field import make_3dfield
import pytest
import numpy as np
import numpy.testing as npt
import astropy.units as u
import astropy.constants as c
try:
from spectral_cube import SpectralCube
SPECCUBE_INSTALL = True
except ImportError:
SPECCUBE_INSTALL = False
@pytest.mark.skipif("not SPECCUBE_INSTALL")
@pytest.mark.parametrize('axis', [0, 1, 2])
# @pytest.mark.parametrize('axis', [0])
def test_ppv(axis):
    '''
    Ensure the column density matches the expected value in the output cube
    '''

    # Number of samples to take along each non-projected dimension
    size = 2

    # Slices selecting a small spatial patch; the 3D variant keeps the full
    # line-of-sight `axis` and restricts the other two dimensions.
    twod_slice = [slice(0, size), slice(0, size)]

    threed_slice = [slice(0, size), slice(0, size)]
    threed_slice.insert(axis, slice(None))

    # Need a large enough field to have good statistics
    velocity = make_3dfield(128, powerlaw=3.5, amp=5.e3) * u.m / u.s

    # Uniform density so the expected column density is trivial to compute.
    density = np.ones_like(velocity.value) * u.cm**-3

    cube_hdu = make_ppv(velocity[threed_slice], density[threed_slice],
                        los_axis=axis,
                        vel_disp=np.std(velocity, axis=axis)[twod_slice].max(),
                        T=100 * u.K,
                        return_hdu=True,
                        chan_width=500 * u.m / u.s)

    cube = SpectralCube.read(cube_hdu)

    # 1 cm^-3 over a 1 pc deep line of sight.
    NHI_exp = (1 * u.cm**-3) * (1 * u.pc).to(u.cm)

    # Moment 0 in K km/s
    mom0 = cube.moment0().to(u.K * u.km / u.s)

    # Convert to cm^-2
    NHI_cube = mom0 * 1.823e18 * u.cm**-2 / (u.K * u.km / u.s)

    assert NHI_exp.unit == NHI_cube.unit

    # Expected is 3.0854e18. Check if it is within 1e16
    npt.assert_allclose(NHI_exp.value, NHI_cube.value, rtol=1e-4)

    # Rough comparison of line width to the velocity field std.
    # Very few samples, so this is only a rough check
    v_therm = np.sqrt(c.k_B * 100 * u.K / (1.4 * c.m_p)).to(u.km / u.s)

    # Correct the measured line widths for thermal broadening
    lwidth = np.sqrt(cube.linewidth_sigma().to(u.km / u.s)**2 - v_therm**2)

    # NOTE(review): std is taken along axis 0 regardless of the parametrized
    # `axis` -- presumably this should be axis=axis; confirm.
    vel_std = np.std(velocity, axis=0)[twod_slice].to(u.km / u.s)

    npt.assert_allclose(vel_std.value, lwidth.value, atol=v_therm.value)

    # Compare centroids
    # Density-weighted mean velocity along the LOS, computed directly from
    # the input fields.
    raw_centroid = ((velocity[threed_slice] * density[threed_slice]).sum(axis) /
                    (density[threed_slice]).sum(axis)).to(u.km / u.s)

    mom1 = cube.moment1().to(u.km / u.s)

    npt.assert_allclose(raw_centroid.value, mom1.value, atol=v_therm.value)
|
from ..make_cube import make_ppv
from ..gen_field import make_3dfield
import pytest
import numpy as np
import numpy.testing as npt
import astropy.units as u
try:
from spectral_cube import SpectralCube
SPECCUBE_INSTALL = True
except ImportError:
SPECCUBE_INSTALL = False
@pytest.mark.skipif("not SPECCUBE_INSTALL")
def test_ppv():
'''
Ensure the column density matches the expected value in the output cube
'''
# Need a large enough field to have good statistics
velocity = make_3dfield(128, powerlaw=3.5, amp=5.e3) * u.m / u.s
density = np.ones_like(velocity.value) * u.cm**-3
cube_hdu = make_ppv(velocity[:, :2, :2], density[:, :2, :2],
vel_disp=np.std(velocity), T=100 * u.K,
return_hdu=True,
chan_width=500 * u.m / u.s)
cube = SpectralCube.read(cube_hdu)
NHI_exp = (1 * u.cm**-3) * (1 * u.pc).to(u.cm)
# Moment 0 in K km/s
mom0 = cube.moment0().to(u.K * u.km / u.s)
# Convert to cm^-2
NHI_cube = mom0 * 1.823e18 * u.cm**-2 / (u.K * u.km / u.s)
assert NHI_exp.unit == NHI_cube.unit
npt.assert_allclose(NHI_exp.value, NHI_cube.value, rtol=1e-4)
# Rough comparison of line width to the velocity field std.
# Very few samples, so this is only a rough check
lwidth = cube.linewidth_sigma().to(u.km / u.s)
vel_std = np.std(velocity).to(u.km / u.s)
npt.assert_allclose(vel_std.value, lwidth.value, rtol=0.2)
# Compare centroids
raw_centroid = ((velocity[:, :2, :2] * density[:, :2, :2]).sum(0) /
(density[:, :2, :2]).sum(0)).to(u.km / u.s)
mom1 = cube.moment1().to(u.km / u.s)
npt.assert_allclose(raw_centroid.value, mom1.value, rtol=1e-3)
| mit | Python |
61c1c5eb2bf62fe41dde2643356731cbf6b1b435 | Add TODO to file_verification() | ronrest/convenience_py,ronrest/convenience_py | convenience/file_convenience/file_verification.py | convenience/file_convenience/file_verification.py | import hashlib
def file_verification(file, v_type, v_val):
    """Verify a file's content hash against an expected value.

    :param file: path of the file to check.
    :param v_type: hash algorithm name; one of "md5", "sha1", "sha256",
        "sha512".
    :param v_val: expected hex digest string.
    :returns: True when the file's digest matches ``v_val``.
    :raises ValueError: if ``v_type`` is not a supported algorithm
        (addresses the old "dummy proofing" TODO).
    """
    v_types = {"md5": hashlib.md5,
               "sha1": hashlib.sha1,
               "sha256": hashlib.sha256,
               "sha512": hashlib.sha512
               }
    if v_type not in v_types:
        raise ValueError("Unsupported verification type %r; expected one of %s"
                         % (v_type, sorted(v_types)))
    hasher = v_types[v_type]()
    # Hash in fixed-size chunks so large files are not read into memory
    # all at once.
    with open(file, 'rb') as fileObj:
        for chunk in iter(lambda: fileObj.read(65536), b''):
            hasher.update(chunk)
    # TODO: filesize verification.
    # Check filesize
    # statinfo = os.stat(filename)
    # statinfo.st_size == expected_bytes
    return hasher.hexdigest() == v_val
| import hashlib
def file_verification(file, v_type, v_val):
v_types = {"md5": hashlib.md5,
"sha1": hashlib.sha1,
"sha256": hashlib.sha256,
"sha512": hashlib.sha512
}
with open(file, 'rb') as fileObj:
content = fileObj.read()
hash = v_types[v_type](content).hexdigest()
# TODO: filesize verification.
# Check filesize
# statinfo = os.stat(filename)
# statinfo.st_size == expected_bytes
return hash == v_val
| apache-2.0 | Python |
bb46136c70f63f6a34ac36b418f4984b62dae0c2 | Bring in json_safe_payload method (#3643) | facebookresearch/ParlAI,facebookresearch/ParlAI,facebookresearch/ParlAI,facebookresearch/ParlAI,facebookresearch/ParlAI | parlai/core/message.py | parlai/core/message.py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
File for Message object and associated functions.
The Message object's key function is to prevent users from editing fields in an action
or observation dict unintentionally.
"""
from __future__ import annotations
from typing import Any, Dict
# Fields stripped out before a Message is serialized for external clients.
UNSAFE_FIELDS = {'metrics'}


class Message(dict):
    """
    Class for observations and actions in ParlAI.

    Behaves like a regular dict, except that assigning to a key that is
    already present raises a RuntimeError; deliberate overwrites must go
    through ``force_set``.
    """

    def __setitem__(self, key, val):
        if key not in self:
            super().__setitem__(key, val)
            return
        msg = (
            'Message already contains key `{}`. If this was intentional, '
            'please use the function `force_set(key, value)`.'
        ).format(key)
        raise RuntimeError(msg)

    def force_set(self, key, val):
        """Set ``key`` unconditionally, bypassing the overwrite guard."""
        super().__setitem__(key, val)

    def copy(self):
        """Return a shallow copy that is itself a Message."""
        return type(self)(self)

    @classmethod
    def padding_example(cls) -> Message:
        """
        Create a Message for batch padding.
        """
        return cls(batch_padding=True, episode_done=True)

    def is_padding(self) -> bool:
        """
        Determine if a message is a padding example or not.
        """
        return True if self.get('batch_padding') else False

    def json_safe_payload(self) -> Dict[str, Any]:
        """
        Prepare a Message for delivery to a client via json.

        Useful for chat-services, external libraries, and mephisto delivery.

        Works by stripping known unsafe fields from the message, and
        converting the object to a plain dict.
        """
        safe = {}
        for field, value in self.items():
            if field not in UNSAFE_FIELDS:
                safe[field] = value
        return safe
| #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
File for Message object and associated functions.
The Message object's key function is to prevent users from editing fields in an action
or observation dict unintentionally.
"""
from __future__ import annotations
class Message(dict):
"""
Class for observations and actions in ParlAI.
Functions like a dict, but triggers a RuntimeError when calling __setitem__ for a
key that already exists in the dict.
"""
def __setitem__(self, key, val):
if key in self:
raise RuntimeError(
'Message already contains key `{}`. If this was intentional, '
'please use the function `force_set(key, value)`.'.format(key)
)
super().__setitem__(key, val)
def force_set(self, key, val):
super().__setitem__(key, val)
def copy(self):
return type(self)(self)
@classmethod
def padding_example(cls) -> Message:
"""
Create a Message for batch padding.
"""
return cls({'batch_padding': True, 'episode_done': True})
def is_padding(self) -> bool:
"""
Determine if a message is a padding example or not.
"""
return bool(self.get('batch_padding'))
| mit | Python |
c970cab38d846c4774aee52e52c23ed2452af96a | Remove unused and buggy import | openfisca/openfisca-france-data,openfisca/openfisca-france-data,openfisca/openfisca-france-data | openfisca_france_data/tests/base.py | openfisca_france_data/tests/base.py | # -*- coding: utf-8 -*-
from openfisca_core.tools import assert_near
from openfisca_france.tests.base import get_cached_composed_reform, get_cached_reform
from .. import france_data_tax_benefit_system
__all__ = [
'assert_near',
'france_data_tax_benefit_system',
'get_cached_composed_reform',
'get_cached_reform',
]
| # -*- coding: utf-8 -*-
from openfisca_core.tools import assert_near
from openfisca_france.tests.base import get_cached_composed_reform, get_cached_reform
from .. import france_data_tax_benefit_system, FranceDataTaxBenefitSystem
__all__ = [
'assert_near',
'france_data_tax_benefit_system',
'FranceDataTaxBenefitSystem',
'get_cached_composed_reform',
'get_cached_reform',
]
| agpl-3.0 | Python |
78df33ee8ecb167f3d88ef326be1b5f501b8eb00 | fix doc | chainer/chainercv,pfnet/chainercv,chainer/chainercv,yuyu2172/chainercv,yuyu2172/chainercv | chainercv/utils/testing/generate_random_bbox.py | chainercv/utils/testing/generate_random_bbox.py | import numpy as np
def generate_random_bbox(n, img_size, min_length, max_length):
    """Generate valid bounding boxes with random position and shape.

    Args:
        n (int): The number of bounding boxes.
        img_size (tuple): ``(width, height)`` of the image on which the
            bounding boxes are placed.
        min_length (float): The minimum edge length of a bounding box.
        max_length (float): The maximum edge length of a bounding box.

    Return:
        numpy.ndarray:
        Coordinates of bounding boxes with shape :math:`(R, 4)`, where
        :math:`R` equals :obj:`n`. The second axis holds
        :math:`x_{min}, y_{min}, x_{max}, y_{max}`, with each edge length
        drawn from :math:`[min\\_length, max\\_length)`.

    """
    width, height = img_size
    # Draw the top-left corners so that even a maximal box stays inside
    # the image, then draw each box's width and height independently.
    xs = np.random.uniform(0, width - max_length, size=n)
    ys = np.random.uniform(0, height - max_length, size=n)
    box_widths = np.random.uniform(min_length, max_length, size=n)
    box_heights = np.random.uniform(min_length, max_length, size=n)
    corners = (xs, ys, xs + box_widths, ys + box_heights)
    return np.stack(corners, axis=1).astype(np.float32)
| import numpy as np
def generate_random_bbox(n, img_size, min_length, max_length):
"""Generate valid bounding boxes with random position and shape.
Args:
n (int): The number of bounding boxes.
img_size (tuple): A tuple of length 2. The width and the height
of the image on which bounding boxes locate.
min_length (int): The minimum length of edges of bounding boxes.
max_length (int): The maximum length of edges of bounding boxes.
Return:
numpy.ndarray:
Coordinates of bounding boxes. Its shape is :math:`(R, 4)`. \
Here, :math:`R` equals :obj:`n`.
The second axis contains :obj:`x_min, y_min, x_max, y_max`,
where :obj:`x_min + min_length <= x_max < x_min + max_length` and
:obj:`y_min + min_length <= y_max < y_min + max_length`.
"""
W, H = img_size
x_min = np.random.uniform(0, W - max_length, size=(n,))
y_min = np.random.uniform(0, H - max_length, size=(n,))
x_max = x_min + np.random.uniform(min_length, max_length, size=(n,))
y_max = y_min + np.random.uniform(min_length, max_length, size=(n,))
bbox = np.stack((x_min, y_min, x_max, y_max), axis=1).astype(np.float32)
return bbox
| mit | Python |
fd3f5334f8614fdc7ea581fffd7626aa32e974f7 | Bump version to 0.7.1 | jsvine/pdfplumber | pdfplumber/_version.py | pdfplumber/_version.py | version_info = (0, 7, 1)
__version__ = ".".join(map(str, version_info))
| version_info = (0, 7, 0)
__version__ = ".".join(map(str, version_info))
| mit | Python |
02edaec8eae6a67cb24a1948d2de3b40d3922b50 | Fix syntax error | Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki,Majavapaja/Mursushakki | deployment/cosmosdb.py | deployment/cosmosdb.py | import logging
from azure.cosmos.cosmos_client import CosmosClient
from azure.cosmos.errors import HTTPFailure
from config import Mongo
log = logging.getLogger(__name__)
def configure_database_with_shared_throughput(db_name, master_key, url_connection):
    """Create (or fetch) the database and pin its shared throughput offer
    to ``Mongo.throughput``."""
    client = CosmosClient(url_connection=url_connection, auth={"masterKey": master_key})
    db = configure_db(client, db_name, {"offerThroughput": Mongo.throughput})
    set_db_throughput(client, db["_self"], Mongo.throughput)


def configure_collections(db_name, collection_names, master_key, url_connection):
    """Create (or fetch) each named collection with its own per-collection
    throughput offer (``Mongo.collection_throughput``)."""
    client = CosmosClient(url_connection=url_connection, auth={"masterKey": master_key})
    db = configure_db(client, db_name)
    for collection_name in collection_names:
        configure_collection(client, db, collection_name, desired_throughput=Mongo.collection_throughput)
def set_db_throughput(client, db_self, desired_throughput):
    """Ensure the shared-throughput offer for a database matches the target.

    Fixes a SyntaxError in the previous signature (missing comma between
    ``db_self`` and ``desired_throughput``).

    :param client: CosmosClient used to query and replace offers.
    :param db_self: the database resource's ``_self`` link.
    :param desired_throughput: target provisioned throughput (RU/s).
    """
    # db_self is an internal resource link produced by the SDK, not user
    # input, so interpolating it into the offer query is safe here.
    offer = list(client.QueryOffers(f"SELECT * FROM c WHERE c.resource = '{db_self}'"))[0]
    current_throughput = offer["content"]["offerThroughput"]
    if current_throughput == desired_throughput:
        log.info("Shared database throughput is up to date")
    else:
        log.info(f"Updating shared database throughput from {current_throughput} to {desired_throughput}")
        offer["content"]["offerThroughput"] = desired_throughput
        offer = client.ReplaceOffer(offer["_self"], offer)
def configure_db(client, name, options=None):
    """Create the database if missing, otherwise return the existing one.

    A 409 (Conflict) from CreateDatabase means the database already
    exists; any other HTTP failure is re-raised.
    """
    try:
        return client.CreateDatabase({"id": name}, options)
    except HTTPFailure as e:
        if e.status_code != 409:
            raise e
        return client.ReadDatabase(f"dbs/{name}")
def configure_collection(client, db, collection_name, desired_throughput):
    """Create the collection if missing; otherwise sync its throughput offer.

    Returns the collection resource in both cases.
    """
    try:
        collection = client.CreateContainer(db["_self"], {"id": collection_name}, {"offerThroughput": desired_throughput})
        log.info(f"Created collection {collection_name}")
        return collection
    except HTTPFailure as e:
        # 409 (Conflict) means the collection already exists; fall through
        # to the update path below.
        if e.status_code != 409:
            raise e
        log.info(f"Collection {collection_name} already exists")

    collections = list(client.ReadContainers(db["_self"]))
    collection = [c for c in collections if c["id"] == collection_name][0]

    # Look up the throughput offer attached to this collection and update
    # it only when it differs from the desired value.
    offer = list(client.QueryOffers(f"SELECT * FROM c WHERE c.resource = '{collection['_self']}'"))[0]
    current_throughput = offer["content"]["offerThroughput"]
    if current_throughput == desired_throughput:
        log.info("Collection throughput up to date")
    else:
        log.info(f"Updating throughput from {current_throughput} to {desired_throughput}")
        offer["content"]["offerThroughput"] = desired_throughput
        offer = client.ReplaceOffer(offer["_self"], offer)
    return collection
| import logging
from azure.cosmos.cosmos_client import CosmosClient
from azure.cosmos.errors import HTTPFailure
from config import Mongo
log = logging.getLogger(__name__)
def configure_database_with_shared_throughput(db_name, master_key, url_connection):
client = CosmosClient(url_connection=url_connection, auth={"masterKey": master_key}))
db = configure_db(client, db_name, {"offerThroughput": Mongo.throughput})
set_db_throughput(client, db["_self"], Mongo.throughput)
def configure_collections(db_name, collection_names, master_key, url_connection):
client = CosmosClient(url_connection=url_connection, auth={"masterKey": master_key})
db = configure_db(client, db_name)
for collection_name in collection_names:
configure_collection(client, db, collection_name, desired_throughput=Mongo.collection_throughput)
def set_db_throughput(client, db_self desired_throughput):
offer = list(client.QueryOffers(f"SELECT * FROM c WHERE c.resource = '{db_self}'"))[0]
current_throughput = offer["content"]["offerThroughput"]
if current_throughput == desired_throughput:
log.info("Shared database throughput is up to date")
else:
log.info(f"Updating shared database throughput from {current_throughput} to {desired_throughput}")
offer["content"]["offerThroughput"] = desired_throughput
offer = client.ReplaceOffer(offer["_self"], offer)
def configure_db(client, name, options=None):
try:
return client.CreateDatabase({"id": name}, options)
except HTTPFailure as e:
if e.status_code != 409:
raise e
return client.ReadDatabase(f"dbs/{name}")
def configure_collection(client, db, collection_name, desired_throughput):
try:
collection = client.CreateContainer(db["_self"], {"id": collection_name}, {"offerThroughput": desired_throughput})
log.info(f"Created collection {collection_name}")
return collection
except HTTPFailure as e:
if e.status_code != 409:
raise e
log.info(f"Collection {collection_name} already exists")
collections = list(client.ReadContainers(db["_self"]))
collection = [c for c in collections if c["id"] == collection_name][0]
offer = list(client.QueryOffers(f"SELECT * FROM c WHERE c.resource = '{collection['_self']}'"))[0]
current_throughput = offer["content"]["offerThroughput"]
if current_throughput == desired_throughput:
log.info("Collection throughput up to date")
else:
log.info(f"Updating throughput from {current_throughput} to {desired_throughput}")
offer["content"]["offerThroughput"] = desired_throughput
offer = client.ReplaceOffer(offer["_self"], offer)
return collection
| mit | Python |
365e06e98bcaa0b1d664b0c39252dd5605c8ab09 | Update importchannel management command to support local import | rtibbles/kolibri,learningequality/kolibri,jonboiser/kolibri,indirectlylit/kolibri,MingDai/kolibri,aronasorman/kolibri,jtamiace/kolibri,jonboiser/kolibri,jayoshih/kolibri,jonboiser/kolibri,learningequality/kolibri,lyw07/kolibri,christianmemije/kolibri,rtibbles/kolibri,benjaoming/kolibri,indirectlylit/kolibri,mrpau/kolibri,aronasorman/kolibri,DXCanas/kolibri,rtibbles/kolibri,indirectlylit/kolibri,DXCanas/kolibri,jtamiace/kolibri,christianmemije/kolibri,indirectlylit/kolibri,jamalex/kolibri,learningequality/kolibri,jtamiace/kolibri,benjaoming/kolibri,benjaoming/kolibri,DXCanas/kolibri,benjaoming/kolibri,MingDai/kolibri,aronasorman/kolibri,jamalex/kolibri,aronasorman/kolibri,mrpau/kolibri,christianmemije/kolibri,jtamiace/kolibri,jamalex/kolibri,lyw07/kolibri,mrpau/kolibri,DXCanas/kolibri,rtibbles/kolibri,jayoshih/kolibri,jamalex/kolibri,jayoshih/kolibri,lyw07/kolibri,jonboiser/kolibri,jayoshih/kolibri,learningequality/kolibri,mrpau/kolibri,lyw07/kolibri,christianmemije/kolibri,MingDai/kolibri,MingDai/kolibri | kolibri/content/management/commands/importchannel.py | kolibri/content/management/commands/importchannel.py | import logging as logger
from django.core.management.base import CommandError
from kolibri.tasks.management.commands.base import AsyncCommand
from ...utils import annotation, paths, transfer
logging = logger.getLogger(__name__)
# constants to specify the transfer method to be used
DOWNLOAD_METHOD = "download"
COPY_METHOD = "copy"
class Command(AsyncCommand):
    """Import a channel database, either over the network or from a local
    content folder."""

    def add_arguments(self, parser):
        # let's save the parser in case we need to print a help statement
        self._parser = parser

        # see `importcontent` management command for explanation of how we're using subparsers
        subparsers = parser.add_subparsers(dest='command', help="The following subcommands are available.")

        network_subparser = subparsers.add_parser(
            name='network',
            cmd=self,
            help="Download the given channel through the network."
        )
        network_subparser.add_argument('channel_id', type=str)

        local_subparser = subparsers.add_parser(
            name='local',
            cmd=self,
            help='Copy the content from the given folder.'
        )
        local_subparser.add_argument('channel_id', type=str)
        local_subparser.add_argument('directory', type=str)

    def download_channel(self, channel_id):
        """Fetch a channel database over the network."""
        logging.info("Downloading data for channel id {}".format(channel_id))
        self._transfer(DOWNLOAD_METHOD, channel_id)

    def copy_channel(self, channel_id, path):
        """Import a channel database from a local datafolder."""
        logging.info("Copying in data for channel id {}".format(channel_id))
        self._transfer(COPY_METHOD, channel_id, path=path)

    def _transfer(self, method, channel_id, path=None):
        """Stream the channel database to its destination while reporting
        progress, then refresh the channel metadata cache.

        :param method: DOWNLOAD_METHOD or COPY_METHOD.
        :param path: source datafolder (only used with COPY_METHOD).
        """
        dest = paths.get_content_database_file_path(channel_id)

        # determine where we're downloading/copying from, and create appropriate transfer object
        if method == DOWNLOAD_METHOD:
            url = paths.get_content_database_file_url(channel_id)
            logging.debug("URL to fetch: {}".format(url))
            filetransfer = transfer.FileDownload(url, dest)
        elif method == COPY_METHOD:
            srcpath = paths.get_content_database_file_path(channel_id, datafolder=path)
            filetransfer = transfer.FileCopy(srcpath, dest)

        logging.debug("Destination: {}".format(dest))

        progress_extra_data = {
            "channel_id": channel_id,
        }

        with filetransfer:
            # Report byte-level progress as chunks arrive.
            with self.start_progress(total=filetransfer.total_size) as progress_update:
                for chunk in filetransfer:
                    progress_update(len(chunk), progress_extra_data)

        annotation.update_channel_metadata_cache()

    def handle_async(self, *args, **options):
        # Dispatch on the chosen subcommand; anything else prints help and
        # aborts with a CommandError.
        if options['command'] == 'network':
            self.download_channel(options["channel_id"])
        elif options['command'] == 'local':
            self.copy_channel(options["channel_id"], options["directory"])
        else:
            self._parser.print_help()
            raise CommandError("Please give a valid subcommand. You gave: {}".format(options["command"]))
| import logging as logger
from kolibri.tasks.management.commands.base import AsyncCommand
from ...utils import annotation, paths, transfer
logging = logger.getLogger(__name__)
class Command(AsyncCommand):
def add_arguments(self, parser):
parser.add_argument("channel_id", type=str)
def handle_async(self, *args, **options):
channel_id = options["channel_id"]
logging.info("Downloading data for channel id {}".format(channel_id))
url = paths.get_content_database_file_url(channel_id)
dest = paths.get_content_database_file_path(channel_id)
logging.debug("URL to fetch: {}".format(url))
logging.debug("Destination: {}".format(dest))
with transfer.FileDownload(url, dest) as download:
with self.start_progress(total=download.total_size) as progress_update:
for chunk in download:
progress_extra_data = {
"channel_id": channel_id,
}
progress_update(len(chunk), progress_extra_data)
annotation.update_channel_metadata_cache()
| mit | Python |
d8377a8143a0ee20d2e90cefa5d60f48991d0e6b | Fix SpiHardwareLightBank.py | rec/echomesh,rec/echomesh,rec/echomesh,rec/echomesh,rec/echomesh,rec/echomesh | code/python/echomesh/light/SpiHardwareLightBank.py | code/python/echomesh/light/SpiHardwareLightBank.py | from __future__ import absolute_import, division, print_function, unicode_literals
from echomesh.base import Config
from echomesh.light import SetupDebianSpiLights
from echomesh.light.SpiLightBank import SpiLightBank
from echomesh.util import Log
LOGGER = Log.logger(__name__)
_LATCH_BYTE_COUNT = 3
_LATCH = bytearray(0 for i in xrange(_LATCH_BYTE_COUNT))
_INTERNAL_LATCH_BYTE_COUNT = 0
class SpiHardwareLightBank(SpiLightBank):
    """Light bank that writes color frames to an SPI LED strip via
    /dev/spidev0.0."""

    # Channel-order helpers mapping logical (r, g, b) onto the byte order
    # the strip hardware expects.  They are static so that the
    # getattr-on-class lookup in __init__ yields a plain callable; as bare
    # methods, Python 2's unbound-method check made them uncallable.
    @staticmethod
    def RGB(r, g, b):
        return r, g, b

    @staticmethod
    def GRB(r, g, b):
        return g, r, b

    @staticmethod
    def BRG(r, g, b):
        return b, r, g

    def __init__(self, count=None):
        assert SetupDebianSpiLights.lights_enabled(), "Lighting is not enabled."
        super(SpiHardwareLightBank, self).__init__(count=count)
        order = Config.get('light', 'hardware', 'rgb_order')
        self.order = getattr(SpiHardwareLightBank, order.upper(), None)
        if not self.order:
            LOGGER.error("Didn't understand order %s", order)
        self._clear, self._bank = self._light_array(), self._light_array()

    def _light_array(self):
        """Return a zeroed byte buffer sized for every light plus any
        in-band latch bytes."""
        count = Config.get('light', 'count')
        b = bytearray(0 for i in xrange(count + _INTERNAL_LATCH_BYTE_COUNT))
        if False:  # TODO
            for i in xrange(_INTERNAL_LATCH_BYTE_COUNT):
                b[-1 - i] = 0
        return b

    def _write(self, lights):
        """Flush a frame to the device, followed by the latch bytes that
        make the strip display it."""
        self._device.write(lights)
        self._device.flush()
        if _LATCH_BYTE_COUNT:
            self._device.write(_LATCH)
            self._device.flush()

    def clear(self):
        """Blank the strip."""
        with self.lock:
            self._write(self._clear)

    def _before_thread_start(self):
        super(SpiHardwareLightBank, self)._before_thread_start()
        self._device = open('/dev/spidev0.0', 'wb')

    def _after_thread_pause(self):
        super(SpiHardwareLightBank, self)._after_thread_pause()
        self._device.close()
        self._device = None

    def _display_lights(self, lights, brightness):
        """Scale each light by ``brightness``, reorder its channels, and
        write the frame out.  A light of None renders as 0x80 per channel.
        """
        for i, light in enumerate(lights):
            if light is None:
                light = [0x80, 0x80, 0x80]
            else:
                # Clamp each scaled channel to 0xFF and unpack the generator
                # into the order function.  Previously int() was applied to
                # the generator itself (a TypeError) and the result was then
                # star-unpacked; the parenthesization below is the fix.
                light = self.order(*(int(min(0x80 + 0x7F * x * brightness, 0xFF))
                                     for x in light))
            self._bank[3 * i:3 * (i + 1)] = light
        # NOTE(review): this writes self.pattern while the loop above fills
        # self._bank; presumably `pattern` is provided by SpiLightBank --
        # confirm, otherwise this should be self._write(self._bank).
        self._write(self.pattern)
| from __future__ import absolute_import, division, print_function, unicode_literals
from echomesh.base import Config
from echomesh.light import SetupDebianSpiLights
from echomesh.light.SpiLightBank import SpiLightBank
from echomesh.util import Log
LOGGER = Log.logger(__name__)
_LATCH_BYTE_COUNT = 3
_LATCH = bytearray(0 for i in xrange(_LATCH_BYTE_COUNT))
_INTERNAL_LATCH_BYTE_COUNT = 0
class SpiHardwareLightBank(SpiLightBank):
def RGB(r, g, b):
return r, g, b
def GRB(r, g, b):
return g, r, b
def BRG(r, g, b):
return b, r, g
def __init__(self, count=None):
assert SetupDebianSpiLights.lights_enabled(), "Lighting is not enabled."
super(SpiHardwareLightBank, self).__init__(count=count)
order = Config.get('light', 'hardware', 'rgb_order')
self.order = getattr(LightBank, order.upper(), None)
if not self.order:
LOGGER.error("Didn't understand order %s", order)
self._clear, self._bank = self._light_array(), self._light_array()
def _light_array(self):
count = Config.get('light', 'count')
b = bytearray(0 for i in xrange(count + _INTERNAL_LATCH_BYTE_COUNT))
if False: # TODO
for i in xrange(_INTERNAL_LATCH_BYTE_COUNT):
b[-1 - i] = 0
return b
def _write(self, lights):
self._device.write(lights)
self._device.flush()
if _LATCH_BYTE_COUNT:
self._device.write(_LATCH)
self._device.flush()
def clear(self):
with self.lock:
self._write(self._clear)
def _before_thread_start(self):
super(SpiHardwareLightBank, self)._before_thread_start()
self._device = open('/dev/spidev0.0', 'wb')
def _after_thread_pause(self):
super(SpiHardwareLightBank, self)._after_thread_pause()
self._device.close()
self._device = None
def _display_lights(self, lights, brightness):
for i, light in enumerate(lights):
if light is None:
light = [0x80, 0x80, 0x80]
else:
light = self.order(*int(min(0x80 + 0x7F * x * brightness, 0xFF)
for x in light))
self._bank[3 * i:3 * (i + 1)] = light
self._write(self.pattern)
| mit | Python |
1f6db6ae815b3a62a91971f07ddd8eb3890e20bd | remove unused imports | wwf5067/statsmodels,bavardage/statsmodels,jstoxrocky/statsmodels,cbmoore/statsmodels,bzero/statsmodels,YihaoLu/statsmodels,saketkc/statsmodels,cbmoore/statsmodels,adammenges/statsmodels,astocko/statsmodels,waynenilsen/statsmodels,bavardage/statsmodels,huongttlan/statsmodels,hainm/statsmodels,Averroes/statsmodels,saketkc/statsmodels,cbmoore/statsmodels,phobson/statsmodels,edhuckle/statsmodels,adammenges/statsmodels,rgommers/statsmodels,wkfwkf/statsmodels,josef-pkt/statsmodels,pprett/statsmodels,wzbozon/statsmodels,pprett/statsmodels,waynenilsen/statsmodels,wwf5067/statsmodels,musically-ut/statsmodels,nguyentu1602/statsmodels,jstoxrocky/statsmodels,DonBeo/statsmodels,gef756/statsmodels,phobson/statsmodels,hlin117/statsmodels,statsmodels/statsmodels,wdurhamh/statsmodels,Averroes/statsmodels,kiyoto/statsmodels,yarikoptic/pystatsmodels,wesm/statsmodels,YihaoLu/statsmodels,ChadFulton/statsmodels,jstoxrocky/statsmodels,gef756/statsmodels,wzbozon/statsmodels,nguyentu1602/statsmodels,phobson/statsmodels,gef756/statsmodels,YihaoLu/statsmodels,bashtage/statsmodels,bzero/statsmodels,wdurhamh/statsmodels,rgommers/statsmodels,detrout/debian-statsmodels,YihaoLu/statsmodels,nvoron23/statsmodels,phobson/statsmodels,alekz112/statsmodels,statsmodels/statsmodels,huongttlan/statsmodels,josef-pkt/statsmodels,alekz112/statsmodels,hlin117/statsmodels,wzbozon/statsmodels,ChadFulton/statsmodels,adammenges/statsmodels,yl565/statsmodels,yl565/statsmodels,saketkc/statsmodels,saketkc/statsmodels,jseabold/statsmodels,statsmodels/statsmodels,DonBeo/statsmodels,bashtage/statsmodels,cbmoore/statsmodels,bzero/statsmodels,nvoron23/statsmodels,musically-ut/statsmodels,DonBeo/statsmodels,kiyoto/statsmodels,bavardage/statsmodels,jstoxrocky/statsmodels,detrout/debian-statsmodels,bavardage/statsmodels,bavardage/statsmodels,bert9bert/statsmodels,yarikoptic/pystatsmodels,statsmodels/statsmodels,kiyoto/statsmodels,gef756/statsmodels,wzbozon/st
atsmodels,pprett/statsmodels,hainm/statsmodels,ChadFulton/statsmodels,musically-ut/statsmodels,DonBeo/statsmodels,hainm/statsmodels,yarikoptic/pystatsmodels,huongttlan/statsmodels,wkfwkf/statsmodels,statsmodels/statsmodels,alekz112/statsmodels,wesm/statsmodels,josef-pkt/statsmodels,nguyentu1602/statsmodels,saketkc/statsmodels,rgommers/statsmodels,bashtage/statsmodels,rgommers/statsmodels,hlin117/statsmodels,detrout/debian-statsmodels,pprett/statsmodels,phobson/statsmodels,jseabold/statsmodels,astocko/statsmodels,detrout/debian-statsmodels,edhuckle/statsmodels,bsipocz/statsmodels,bzero/statsmodels,musically-ut/statsmodels,hainm/statsmodels,wzbozon/statsmodels,bashtage/statsmodels,yl565/statsmodels,wkfwkf/statsmodels,edhuckle/statsmodels,edhuckle/statsmodels,rgommers/statsmodels,alekz112/statsmodels,bzero/statsmodels,DonBeo/statsmodels,statsmodels/statsmodels,astocko/statsmodels,wdurhamh/statsmodels,Averroes/statsmodels,ChadFulton/statsmodels,josef-pkt/statsmodels,wkfwkf/statsmodels,wkfwkf/statsmodels,huongttlan/statsmodels,ChadFulton/statsmodels,waynenilsen/statsmodels,josef-pkt/statsmodels,yl565/statsmodels,bert9bert/statsmodels,astocko/statsmodels,nvoron23/statsmodels,edhuckle/statsmodels,waynenilsen/statsmodels,bashtage/statsmodels,YihaoLu/statsmodels,gef756/statsmodels,wdurhamh/statsmodels,bsipocz/statsmodels,nguyentu1602/statsmodels,wdurhamh/statsmodels,bert9bert/statsmodels,josef-pkt/statsmodels,bsipocz/statsmodels,nvoron23/statsmodels,wwf5067/statsmodels,wesm/statsmodels,ChadFulton/statsmodels,jseabold/statsmodels,hlin117/statsmodels,yl565/statsmodels,cbmoore/statsmodels,kiyoto/statsmodels,jseabold/statsmodels,nvoron23/statsmodels,adammenges/statsmodels,bert9bert/statsmodels,bsipocz/statsmodels,bert9bert/statsmodels,kiyoto/statsmodels,jseabold/statsmodels,bashtage/statsmodels,wwf5067/statsmodels,Averroes/statsmodels | lib/neuroimaging/algorithms/statistics/regression.py | lib/neuroimaging/algorithms/statistics/regression.py | """
This module provides various regression analysis techniques to model the
relationship between the dependent and independent variables.
"""
import gc
import numpy as N
from neuroimaging import traits
class LinearModelIterator(traits.HasTraits):
iterator = traits.Any()
outputs = traits.List()
def __init__(self, iterator, outputs=[], **keywords):
self.iterator = iter(iterator)
self.outputs = [iter(output) for output in outputs]
traits.HasTraits.__init__(self, **keywords)
def model(self, **keywords):
"""
This method should take the iterator at its current state and
return a LinearModel object.
"""
return None
def fit(self, **keywords):
"""
Go through an iterator, instantiating model and passing data,
going through outputs.
"""
tmp = []
for data in self.iterator:
tmp.append(data)
for data in tmp: #self.iterator:
shape = data.shape[1:]
data = data.reshape(data.shape[0], N.product(shape))
model = self.model()
results = model.fit(data, **keywords)
for output in self.outputs:
out = output.extract(results)
if output.nout > 1:
out.shape = (output.nout,) + shape
else:
out.shape = shape
iter(output)
output.next(data=out)
del(results); gc.collect()
class RegressionOutput(traits.HasTraits):
"""
A generic output for regression. Key feature is that it has
an \'extract\' method which is called on an instance of
Results.
"""
Tmax = traits.Float(100.)
Tmin = traits.Float(-100.)
Fmax = traits.Float(100.)
def __init__(self, iterator, **keywords):
self.iterator = iter(iterator)
traits.HasTraits.__init__(**keywords)
def __iter__(self):
return self
def extract(self, results):
return 0.
| """
This module provides various regression analysis techniques to model the
relationship between the dependent and independent variables.
"""
import gc
import numpy as N
import numpy.linalg as L
from neuroimaging import traits
from scipy.sandbox.models.regression import OLSModel, WLSModel, ARModel, Results
class LinearModelIterator(traits.HasTraits):
iterator = traits.Any()
outputs = traits.List()
def __init__(self, iterator, outputs=[], **keywords):
self.iterator = iter(iterator)
self.outputs = [iter(output) for output in outputs]
traits.HasTraits.__init__(self, **keywords)
def model(self, **keywords):
"""
This method should take the iterator at its current state and
return a LinearModel object.
"""
return None
def fit(self, **keywords):
"""
Go through an iterator, instantiating model and passing data,
going through outputs.
"""
tmp = []
for data in self.iterator:
tmp.append(data)
for data in tmp: #self.iterator:
shape = data.shape[1:]
data = data.reshape(data.shape[0], N.product(shape))
model = self.model()
results = model.fit(data, **keywords)
for output in self.outputs:
out = output.extract(results)
if output.nout > 1:
out.shape = (output.nout,) + shape
else:
out.shape = shape
iter(output)
output.next(data=out)
del(results); gc.collect()
class RegressionOutput(traits.HasTraits):
"""
A generic output for regression. Key feature is that it has
an \'extract\' method which is called on an instance of
Results.
"""
Tmax = traits.Float(100.)
Tmin = traits.Float(-100.)
Fmax = traits.Float(100.)
def __init__(self, iterator, **keywords):
self.iterator = iter(iterator)
traits.HasTraits.__init__(**keywords)
def __iter__(self):
return self
def extract(self, results):
return 0.
| bsd-3-clause | Python |
ef4d5abc8c664d349bfd791bb6763b23cb0b920e | Correct mistakes in androidServer.py | jpedrorl/LightSwitch,jpedrorl/LightSwitch | androidServer.py | androidServer.py |
import socket
import sys
import threading
class Server:
def __init__(self, port, relay):
self.port = port
self.running = False
self.thread = threading.Thread(target = self.__startServer)
self.thread.setDaemon(True) # dies with main thread
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.bind(('', port))
self.relay = relay
def __startServer(self):
self.sock.listen(1)
while self.running:
conn, addr = self.sock.accept()
print "connected to ", addr
isConnected = True
while(isConnected):
try:
buf = conn.recv(8)
if ord(buf[0]) == 1:
self.relay.switch()
except(socket.error, IndexError):
isConnected = False
print "disconnected from ", addr
if(isConnected):
conn.close()
def run(self):
self.running = True
self.thread.start()
def stop(self):
self.running = False
|
import socket
import sys
import threading
class Server:
def __init__(self, port, relay):
self.port = port
self.running = False
self.thread = threading.Thread(target = self.__startServer)
self.thread.setDaemon(True) # dies with main thread
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.bind(('', port))
self.relay = relay
def __startServer(self):
self.sock.listen(1)
while self.running:
conn, addr = self.sock.accept()
print "connected to " + addr
isConnected = True
while(isConnected):
try:
buf = conn.recv(8)
if ord(buf[0]) == 1:
relay.switch()
except(socket.error, IndexError):
isConnected = False
print "disconnected from " + addr
if(isConnected):
conn.close()
def run(self):
self.running = True
self.thread.start()
def stop(self):
self.running = False
| mit | Python |
16bc8c8b27012867224fe472a53cbe23c6824c58 | test `Video.to_json()` with actual data | pculture/vidscraper,pculture/vidscraper | vidscraper/tests/unit/test_video.py | vidscraper/tests/unit/test_video.py | # Copyright 2012 - Participatory Culture Foundation
#
# This file is part of vidscraper.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
import json
from vidscraper.tests.unit.test_youtube import CARAMELL_DANSEN_ATOM_DATA
from vidscraper.videos import Video
class VideoTestCase(unittest.TestCase):
def test_items(self):
video = Video("http://www.youtube.com/watch?v=J_DV9b0x7v4")
# Make sure items can be iterated over and that there's one
# for every field.
for i, item in enumerate(video.items()):
self.assertEqual(item[0], Video._all_fields[i])
def test_items_with_fields(self):
fields = ['title', 'user']
video = Video("http://www.youtube.com/watch?v=J_DV9b0x7v4",
fields=fields)
# Make sure items can be iterated over and that there's one
# for every field.
for i, item in enumerate(video.items()):
self.assertEqual(item[0], fields[i])
def test_to_json(self):
video = Video("http://www.youtube.com/watch?v=J_DV9b0x7v4")
# we load the video data this way to avoid depending on the network
video_data = CARAMELL_DANSEN_ATOM_DATA.copy()
video_data['tags'] = list(video_data['tags'])
video._apply(video_data)
data_json = video.to_json()
# verify that the data we expect is in the JSON output
self.assertTrue(video.title in data_json)
self.assertTrue(video.publish_datetime.isoformat() in data_json)
# Verify that we can load the json back into Python.
data = json.loads(data_json)
# Verify that the data is restored correctly
for field, value in video_data.items():
if field == 'publish_datetime':
self.assertEqual(data[field], value.isoformat())
else:
self.assertEqual(data[field], value)
| # Copyright 2012 - Participatory Culture Foundation
#
# This file is part of vidscraper.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest
import json
from vidscraper.videos import Video
class VideoTestCase(unittest.TestCase):
def test_items(self):
video = Video("http://www.youtube.com/watch?v=J_DV9b0x7v4")
# Make sure items can be iterated over and that there's one
# for every field.
for i, item in enumerate(video.items()):
self.assertEqual(item[0], Video._all_fields[i])
def test_items_with_fields(self):
fields = ['title', 'user']
video = Video("http://www.youtube.com/watch?v=J_DV9b0x7v4",
fields=fields)
# Make sure items can be iterated over and that there's one
# for every field.
for i, item in enumerate(video.items()):
self.assertEqual(item[0], fields[i])
def test_to_json(self):
video = Video("http://www.youtube.com/watch?v=J_DV9b0x7v4")
data_json = video.to_json()
# Verify that we can load the json back into Python.
json.loads(data_json)
| bsd-3-clause | Python |
25fa54fa570d279cc215e1bbde73534349647236 | fix minor error | vincentadam87/gatsby-hackathon-seizure,vincentadam87/gatsby-hackathon-seizure | code/python/seizures/submission/SubmissionFile.py | code/python/seizures/submission/SubmissionFile.py | import os
from pandas import DataFrame, read_csv
from pandas.DataFrame import to_csv
from seizures.features.FeatureExtractBase import FeatureExtractBase
from seizures.prediction.PredictorBase import PredictorBase
class SubmissionFile():
"""
Class to generate submission files
@author Heiko
"""
@staticmethod
def get_filename_frame():
"""
Returns a data-frame with all filenames of the sample submission file
"""
me = os.path.dirname(os.path.realpath(__file__))
data_dir = os.sep.join(me.split(os.sep)[:-4]) + os.sep + "data"
fname = data_dir + os.sep + "sampleSubmission.csv"
return read_csv(fname)["clip"]
def generate_submission(self, predictor_seizure, predictor_early,
feature_extractor, output_fname="output.csv"):
"""
Generates a submission file for a given pair of predictors, which is already
trained (i.e. fit method was called). Loops over all filenames in
Parameters:
predictor_seizure - Instance of PredictorBase, fitted on seizure
predictor_early - Instance of PredictorBase, fitted on early
feature_extractor - Instance of FeatureExtractBase, to extract test features
output_fname - Optional filename for result submission file
"""
# make sure given objects are valid
assert(isinstance(predictor_seizure, PredictorBase))
assert(isinstance(predictor_early, PredictorBase))
assert(isinstance(feature_extractor, FeatureExtractBase))
# load filenames
fnames = SubmissionFile.get_filename_frame()
# predict on test data
result = DataFrame(columns=('clip', 'seizure', 'early'))
for fname in enumerate(fnames):
print "Predicting on " + fname
# extract data and predict
X = feature_extractor.extract_test(fname)
pred_seizure = predictor_seizure.predict(X)
pred_early = predictor_seizure.predictor_early(X)
result.append({'clip':fname, 'seizure':pred_seizure, 'early':pred_early})
to_csv(output_fname, result)
| import os
from pandas import DataFrame, read_csv
from pandas.DataFrame import to_csv
from seizures.features.FeatureExtractBase import FeatureExtractBase
from seizures.prediction.PredictorBase import PredictorBase
class SubmissionFile():
"""
Class to generate submission files
@author Heiko
"""
@staticmethod
def get_filename_frame():
"""
Returns a data-frame with all filenames of the sample submission file
"""
me = os.path.dirname(os.path.realpath(__file__))
data_dir = os.sep.join(me.split(os.sep)[:-4]) + os.sep + "data"
fname = data_dir + os.sep + "sampleSubmission.csv"
return read_csv(fname)["clip"]
def generate_submission(self, predictor_seizure, predictor_early,
feature_extractor, output_fname="output.csv"):
"""
Generates a submission file for a given pair of predictors, which is already
trained (i.e. fit method was called). Loops over all filenames in
Parameters:
predictor_seizure - Instance of PredictorBase, fitted on seizure
predictor_early - Instance of PredictorBase, fitted on early
feature_extractor - Instance of FeatureExtractBase, to extract test features
output_fname - Optional filename for result submission file
"""
# make sure given objects are valid
assert(isinstance(predictor_seizure, PredictorBase))
assert(isinstance(predictor_early, PredictorBase))
assert(isinstance(feature_extractor, FeatureExtractBase))
# load filenames
fnames = SubmissionFile.get_filename_frame()
# predict on test data
result = DataFrame(columns=('clip', 'seizure', 'early'))
for fname in enumerate(fnames):
print "Predicting on " + fname
# extract data and predict
X = feature_extractor.extract_test(fname)
pred_seizure = predictor_seizure.predict(X)
pred_early = predictor_seizure.predictor_early(X)
result.append({'clip':fname, 'seizure':pred_seizure, 'early':pred_early})
to_csv(output_fname, result)
if __name__ == "__main__":
| bsd-2-clause | Python |
3a2003f05ade8316bd2ff7d98f679d66740bf009 | Add API test for node recover action | stackforge/senlin,stackforge/senlin,openstack/senlin,openstack/senlin,openstack/senlin | senlin/tests/tempest/api/nodes/test_node_action.py | senlin/tests/tempest/api/nodes/test_node_action.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib import decorators
from senlin.tests.tempest.api import base
from senlin.tests.tempest.api import utils
class TestNodeActionCheck(base.BaseSenlinTest):
def setUp(self):
super(TestNodeActionCheck, self).setUp()
profile_id = utils.create_a_profile(self)
self.addCleanup(utils.delete_a_profile, self, profile_id)
self.node_id = utils.create_a_node(self, profile_id)
self.addCleanup(utils.delete_a_node, self, self.node_id)
@decorators.idempotent_id('ae124bfe-9fcf-4e87-91b7-319102efbdcc')
def test_node_action_Check(self):
params = {
'check': {
}
}
# Trigger node action
res = self.client.trigger_action('nodes', self.node_id, params=params)
# Verfiy resp code, body and location in headers
self.assertEqual(202, res['status'])
self.assertIn('actions', res['location'])
action_id = res['location'].split('/actions/')[1]
self.wait_for_status('actions', action_id, 'SUCCEEDED')
class TestNodeActionRecover(base.BaseSenlinTest):
def setUp(self):
super(TestNodeActionRecover, self).setUp()
profile_id = utils.create_a_profile(self)
self.addCleanup(utils.delete_a_profile, self, profile_id)
self.node_id = utils.create_a_node(self, profile_id)
self.addCleanup(utils.delete_a_node, self, self.node_id)
@decorators.idempotent_id('217af65a-4029-40ce-a833-74faeac8c1f5')
def test_node_action_recover(self):
params = {
"recover": {
"operation": "REBUILD"
}
}
# Trigger node action
res = self.client.trigger_action('nodes', self.node_id, params=params)
# Verfiy resp code, body and location in headers
self.assertEqual(202, res['status'])
self.assertIn('actions', res['location'])
action_id = res['location'].split('/actions/')[1]
self.wait_for_status('actions', action_id, 'SUCCEEDED')
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.lib import decorators
from senlin.tests.tempest.api import base
from senlin.tests.tempest.api import utils
class TestNodeAction(base.BaseSenlinTest):
def setUp(self):
super(TestNodeAction, self).setUp()
profile_id = utils.create_a_profile(self)
self.addCleanup(utils.delete_a_profile, self, profile_id)
self.node_id = utils.create_a_node(self, profile_id)
self.addCleanup(utils.delete_a_node, self, self.node_id)
@decorators.idempotent_id('ae124bfe-9fcf-4e87-91b7-319102efbdcc')
def test_node_action_trigger(self):
params = {
'check': {
}
}
# Trigger node action
res = self.client.trigger_action('nodes', self.node_id, params=params)
# Verfiy resp code, body and location in headers
self.assertEqual(202, res['status'])
self.assertIn('actions', res['location'])
action_id = res['location'].split('/actions/')[1]
self.wait_for_status('actions', action_id, 'SUCCEEDED')
| apache-2.0 | Python |
1bf98e80b828af508cbd73efabc1eaa6b75b19c5 | Remove unused imports | Duke-GCB/DukeDSHandoverService,Duke-GCB/DukeDSHandoverService,Duke-GCB/DukeDSHandoverService | d4s2_api_v2/tests_serializers.py | d4s2_api_v2/tests_serializers.py | from django.test import TestCase
from d4s2_api_v2.serializers import DDSDeliveryPreviewSerializer
class DeliveryPreviewSerializerTestCase(TestCase):
def setUp(self):
self.data = {
'project_id': 'project-1234',
'from_user_id': 'user-5678',
'to_user_id': 'user-9999',
'user_message': '',
'transfer_id': ''
}
def test_validates(self):
serializer = DDSDeliveryPreviewSerializer(data=self.data)
self.assertTrue(serializer.is_valid())
def test_ignores_delivery_email_text(self):
self.data['delivery_email_text'] = 'Hello world'
serializer = DDSDeliveryPreviewSerializer(data=self.data)
self.assertTrue(serializer.is_valid())
self.assertNotIn('delivery_email_text', serializer.validated_data)
def test_invalid_without_user_message_field(self):
del self.data['user_message']
self.assertNotIn('user_message', self.data)
serializer = DDSDeliveryPreviewSerializer(data=self.data)
self.assertFalse(serializer.is_valid())
def test_invalid_without_transfer_id_field(self):
del self.data['transfer_id']
self.assertNotIn('transfer_id', self.data)
serializer = DDSDeliveryPreviewSerializer(data=self.data)
self.assertFalse(serializer.is_valid())
| from django.test import TestCase
from d4s2_api_v2.models import DDSDeliveryPreview
from d4s2_api_v2.serializers import DDSDeliveryPreviewSerializer
from mock import MagicMock
class DeliveryPreviewSerializerTestCase(TestCase):
def setUp(self):
self.data = {
'project_id': 'project-1234',
'from_user_id': 'user-5678',
'to_user_id': 'user-9999',
'user_message': '',
'transfer_id': ''
}
def test_validates(self):
serializer = DDSDeliveryPreviewSerializer(data=self.data)
self.assertTrue(serializer.is_valid())
def test_ignores_delivery_email_text(self):
self.data['delivery_email_text'] = 'Hello world'
serializer = DDSDeliveryPreviewSerializer(data=self.data)
self.assertTrue(serializer.is_valid())
self.assertNotIn('delivery_email_text', serializer.validated_data)
def test_invalid_without_user_message_field(self):
del self.data['user_message']
self.assertNotIn('user_message', self.data)
serializer = DDSDeliveryPreviewSerializer(data=self.data)
self.assertFalse(serializer.is_valid())
def test_invalid_without_transfer_id_field(self):
del self.data['transfer_id']
self.assertNotIn('transfer_id', self.data)
serializer = DDSDeliveryPreviewSerializer(data=self.data)
self.assertFalse(serializer.is_valid())
| mit | Python |
bd9b3ce8fe22e039554f5e86744aa4828d3c5ed8 | set default auto field | centralniak/cmsplugin_gallery,centralniak/cmsplugin_gallery | tests/settings.py | tests/settings.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
HELPER_SETTINGS = {
'INSTALLED_APPS': [
'easy_thumbnails',
'filer',
'mptt',
],
'ALLOWED_HOSTS': ['localhost'],
'CMS_LANGUAGES': {
1: [{
'code': 'en',
'name': 'English',
}]
},
'LANGUAGE_CODE': 'en',
'DEFAULT_AUTO_FIELD': 'django.db.models.AutoField',
'SECRET_KEY': 'herozyz',
'GALLERY_PLUGIN_MODULE_NAME': 'UI',
'CMSPLUGIN_GALLERY_TEMPLATES': [
('cmsplugin_gallery/gallery.html', 'gallery.html'),
]
}
def run():
from djangocms_helper import runner
runner.cms('cmsplugin_gallery')
if __name__ == '__main__':
run()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
HELPER_SETTINGS = {
'INSTALLED_APPS': [
'easy_thumbnails',
'filer',
'mptt',
],
'ALLOWED_HOSTS': ['localhost'],
'CMS_LANGUAGES': {
1: [{
'code': 'en',
'name': 'English',
}]
},
'LANGUAGE_CODE': 'en',
'SECRET_KEY': 'herozyz',
'GALLERY_PLUGIN_MODULE_NAME': 'UI',
'CMSPLUGIN_GALLERY_TEMPLATES': [
('cmsplugin_gallery/gallery.html', 'gallery.html'),
]
}
def run():
from djangocms_helper import runner
runner.cms('cmsplugin_gallery')
if __name__ == '__main__':
run()
| bsd-2-clause | Python |
e0b9ab8cd745c9f43fb1d7a484a2649b30e4b8e6 | include auth | wq/django-rest-pandas,wq/django-rest-pandas,wq/django-rest-pandas,wq/django-rest-pandas | tests/settings.py | tests/settings.py | SECRET_KEY = '1234'
INSTALLED_APPS = (
'django.contrib.contenttypes',
'django.contrib.auth',
'tests.testapp',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
ROOT_URLCONF = "tests.urls"
| SECRET_KEY = '1234'
INSTALLED_APPS = (
'django.contrib.contenttypes',
'tests.testapp',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
ROOT_URLCONF = "tests.urls"
| mit | Python |
bcda52191b504e7157e1439c6ee1cc3a1ddf7fca | Add __all__ and license in __init__.py | joshua-stone/DerPyBooru | derpibooru/__init__.py | derpibooru/__init__.py | # Copyright (c) 2014, Joshua Stone
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .search import Search
from .lists import Lists
__all__ = [
"__title__",
"__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
"__copyright__"
]
__title__ = "PyDerpibooru"
__summary__ = "Python wrapper for Derpibooru's API"
__uri__ = "https://github.com/joshua-stone"
__version__ = "0.1"
__author__ = "Joshua Stone"
__email__ = "joshua.gage.stone@gmail.com"
__license__ = "Simplified BSD License"
__copyright__ = "Copyright 2014 Joshua Stone"
| from .search import Search
from .lists import Lists
| bsd-2-clause | Python |
34bfc972b783d8a7035d43b3d14d128f01375982 | Add Print filter. | fhirschmann/penchy,fhirschmann/penchy | penchy/jobs/filters.py | penchy/jobs/filters.py | """
This module provides filters.
"""
from penchy.jobs.elements import Filter
class Tamiflex(Filter):
pass
class HProf(Filter):
pass
class Dacapo(Filter):
pass
class Send(Filter):
pass
class Receive(Filter):
pass
class Print(Filter):
"""
Prints everything fed to it on stdout.
"""
def run(self, **kwargs):
print kwargs | """
This module provides filters.
"""
from penchy.jobs.elements import Filter
class Tamiflex(Filter):
pass
class HProf(Filter):
pass
class Dacapo(Filter):
pass
class Send(Filter):
pass
class Receive(Filter):
pass
class Print(Filter):
pass | mit | Python |
f2700febc193353a8c88ce365cbd58b152fe64b2 | Create email_lib.py | aescoda/TFG | email_lib.py | email_lib.py | # -*- coding: utf-8 -*-
from email.mime.text import MIMEText
from smtplib import SMTP
sender_email = os.environ.get('MAIL_USER', None)
sender_pass = os.environ.get('MAIL_PASS', None)
server_email = os.environ.get('MAIL_SERVER', None)
port_email = os.environ.get('MAIL_PORT', None)
SSL_email = os.environ.get('MAIL_SSL', None)
def prueba():
print "INCLUIDO"
| # -*- coding: utf-8 -*-
from email.mime.text import MIMEText
from smtplib import SMTP
sender_email = os.environ.get('MAIL_USER', None)
sender_pass = os.environ.get('MAIL_PASS', None)
server_email = os.environ.get('MAIL_SERVER', None)
port_email = os.environ.get('MAIL_PORT', None)
SSL_email = os.environ.get('MAIL_SSL', None)
def prueba():
print "INCLUIDO"
def email_alert(recipient, iccid, customer):
message = "<br>Hola %s,<br><br> Se ha detectado una alerta de seguridad de cambio de IMEI en la SIM con iccid = %s<br><br> Si este cambio de IMEI ha sido voluntario, por favor, ignore este mensaje. De lo contrario por favor hago acceda al siguiente link para localizar su SIM y desactivarla: <br><br> <center><a href='https://jasper-alert.herokuapp.com/response'> Localizar y desactivar SIM </a><br><br></center> Muchas gracias,<br><br> Equipo de Cisco Jasper<br>" % (customer, iccid)
message = MIMEText(message, "html", "uft-8")
message["From"] = sender_email
message["To"] = recipient
message["Subject"] = "IMEI change alert"
smtp = SMTP(server_email, port_email)
smtp.ehlo()
smtp.starttls()
smtp.ehlo()
print "Conexion exitosa con Gmail"
print "Concectado a Gmail"
#
smtp.login(sender_email, sender_pass)
smtp.sendmail(sender_email, recipient, message)
smtp.quit()
print "fin"
return "Mensaje enviado"
def email_action (recipient, customer, coordenadas, iccid)
message = "<br>Hola %s,<br><br> Se ha detectado una alerta de seguridad de cambio de IMEI en la SIM con iccid = %s<br><br> Si este cambio de IMEI ha sido voluntario, por favor, ignore este mensaje. De lo contrario por favor hago acceda al siguiente link para localizar su SIM y desactivarla: <br><br> <center><a href='https://jasper-alert.herokuapp.com/response'> Localizar y desactivar SIM </a><br><br></center> Muchas gracias,<br><br> Equipo de Cisco Jasper<br>" % (customer, iccid)
message = MIMEText(message, "html", "uft-8")
message["From"] = sender_email
message["To"] = recipient
message["Subject"] = "Alert responsed"
smtp = SMTP(server_email, port_email)
smtp.ehlo()
smtp.starttls()
smtp.ehlo()
print "Conexion exitosa con Gmail"
print "Concectado a Gmail"
#
smtp.login(sender_email, sender_pass)
smtp.sendmail(sender_email, recipient, message)
smtp.quit()
print "fin"
return "Mensaje enviado"
| apache-2.0 | Python |
151599602b9d626ebcfe5ae6960ea216b767fec2 | Update distutils patch to monkeypatch all paths from sys.path to ensure that distutils is never imported except from the same path as setuptools. Assert that 'distutils' is not already in sys.modules. | pypa/setuptools,pypa/setuptools,pypa/setuptools | setuptools/distutils_patch.py | setuptools/distutils_patch.py | """
Ensure that the local copy of distutils is preferred over stdlib.
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
for more motivation.
"""
import sys
import importlib
import contextlib
from os.path import dirname
@contextlib.contextmanager
def patch_sys_path():
orig = sys.path[:]
sys.path[:] = [dirname(dirname(__file__))]
try:
yield
finally:
sys.path[:] = orig
if 'distutils' in sys.path:
raise RuntimeError("Distutils must not be imported before setuptools")
with patch_sys_path():
importlib.import_module('distutils')
| """
Ensure that the local copy of distutils is preferred over stdlib.
See https://github.com/pypa/setuptools/issues/417#issuecomment-392298401
for more motivation.
"""
import sys
import importlib
from os.path import dirname
sys.path.insert(0, dirname(dirname(__file__)))
importlib.import_module('distutils')
sys.path.pop(0)
| mit | Python |
9d1abf3a7d91826f9e499b2fc1490d2656a45d18 | add rest 接口测试 | DXCChina/pms,WX-DongXing/pms,WX-DongXing/pms,DXCChina/pms,DXCChina/pms,WX-DongXing/pms,WX-DongXing/pms,DXCChina/pms | api/test_user.py | api/test_user.py | from hypothesis import given, example
from hypothesis.strategies import tuples, booleans, lists, text, integers
import random
import string
from app import app
from model import user
from flask import json
def test_user_model():
username = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', 6))
email = username + '@' + 'qq.com'
password = username
user.save({'username': username, 'email': email, 'password': password})
account = user.findOneByName(username)
assert account['username'] == username
id = account['id']
password = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', 6))
user.change_password(id, password)
username = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', 6))
email = username + '@' + 'qq.com'
user.update({'username': username, 'email': email, 'id': id})
assert user.findOneById(id)['password'] == password
assert user.findOneByEmail(email)['email'] == email
tester = app.test_client()
def test_get_user():
response = tester.get('/api/user')
assert response.data == b'{\n "msg": "Missing JWT in headers and cookies"\n}\n'
def test_login():
response = tester.post(
'/api/login',
data=json.dumps({
'username': 'aaa@aaa.qaaa',
'password': 'aaa@aaa.qaaa'
}),
content_type='application/json')
data = json.loads(response.data)
assert data['access_token']
| from hypothesis import given, example
from hypothesis.strategies import tuples, booleans, lists, text, integers
import random
import string
from app import app
from model import user
def test_user_model():
username = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', 6))
email = username + '@' + 'qq.com'
password = username
user.save({'username': username, 'email': email, 'password': password})
account = user.findOneByName(username)
assert account['username'] == username
id = account['id']
password = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', 6))
user.change_password(id, password)
username = ''.join(random.sample('abcdefghijklmnopqrstuvwxyz', 6))
email = username + '@' + 'qq.com'
user.update({'username': username, 'email': email, 'id': id})
assert user.findOneById(id)['password'] == password
assert user.findOneByEmail(email)['email'] == email
# def test_get_user():
# tester = app.test_client()
# response = tester.get('/api/user')
# assert response.data == b'{\n "msg": "Missing JWT in headers and cookies"\n}\n'
| mit | Python |
b5d1e9f209d143169195514f80b74e1350799b03 | FIX quickshift nosetest with different xyz color conversion | michaelpacer/scikit-image,SamHames/scikit-image,chintak/scikit-image,ofgulban/scikit-image,michaelaye/scikit-image,keflavich/scikit-image,Midafi/scikit-image,Hiyorimi/scikit-image,bennlich/scikit-image,newville/scikit-image,WarrenWeckesser/scikits-image,keflavich/scikit-image,warmspringwinds/scikit-image,jwiggins/scikit-image,ofgulban/scikit-image,emon10005/scikit-image,Britefury/scikit-image,warmspringwinds/scikit-image,pratapvardhan/scikit-image,dpshelio/scikit-image,paalge/scikit-image,ajaybhat/scikit-image,bsipocz/scikit-image,juliusbierk/scikit-image,SamHames/scikit-image,paalge/scikit-image,ClinicalGraphics/scikit-image,michaelaye/scikit-image,chintak/scikit-image,pratapvardhan/scikit-image,vighneshbirodkar/scikit-image,oew1v07/scikit-image,GaZ3ll3/scikit-image,ClinicalGraphics/scikit-image,SamHames/scikit-image,youprofit/scikit-image,youprofit/scikit-image,Midafi/scikit-image,chriscrosscutler/scikit-image,dpshelio/scikit-image,vighneshbirodkar/scikit-image,almarklein/scikit-image,juliusbierk/scikit-image,jwiggins/scikit-image,bsipocz/scikit-image,rjeli/scikit-image,almarklein/scikit-image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,Hiyorimi/scikit-image,ajaybhat/scikit-image,rjeli/scikit-image,robintw/scikit-image,chintak/scikit-image,paalge/scikit-image,Britefury/scikit-image,michaelpacer/scikit-image,GaZ3ll3/scikit-image,rjeli/scikit-image,almarklein/scikit-image,SamHames/scikit-image,robintw/scikit-image,almarklein/scikit-image,bennlich/scikit-image,oew1v07/scikit-image,newville/scikit-image,chintak/scikit-image,blink1073/scikit-image,blink1073/scikit-image,chriscrosscutler/scikit-image,WarrenWeckesser/scikits-image,emon10005/scikit-image | skimage/segmentation/tests/test_quickshift.py | skimage/segmentation/tests/test_quickshift.py | import numpy as np
from numpy.testing import assert_equal, assert_array_equal
from nose.tools import assert_true
from skimage._shared.testing import assert_greater
from skimage.segmentation import quickshift
def test_grey():
rnd = np.random.RandomState(0)
img = np.zeros((20, 21))
img[:10, 10:] = 0.2
img[10:, :10] = 0.4
img[10:, 10:] = 0.6
img += 0.1 * rnd.normal(size=img.shape)
seg = quickshift(img, kernel_size=2, max_dist=3, random_seed=0,
convert2lab=False, sigma=0)
# we expect 4 segments:
assert_equal(len(np.unique(seg)), 4)
# that mostly respect the 4 regions:
for i in range(4):
hist = np.histogram(img[seg == i], bins=[0, 0.1, 0.3, 0.5, 1])[0]
assert_greater(hist[i], 20)
def test_color():
rnd = np.random.RandomState(0)
img = np.zeros((20, 21, 3))
img[:10, :10, 0] = 1
img[10:, :10, 1] = 1
img[10:, 10:, 2] = 1
img += 0.01 * rnd.normal(size=img.shape)
img[img > 1] = 1
img[img < 0] = 0
seg = quickshift(img, random_seed=0, max_dist=30, kernel_size=10, sigma=0)
# we expect 4 segments:
assert_equal(len(np.unique(seg)), 4)
assert_array_equal(seg[:10, :10], 1)
assert_array_equal(seg[10:, :10], 2)
assert_array_equal(seg[:10, 10:], 0)
assert_array_equal(seg[10:, 10:], 3)
seg2 = quickshift(img, kernel_size=1, max_dist=2, random_seed=0,
convert2lab=False, sigma=0)
# very oversegmented:
assert_equal(len(np.unique(seg2)), 7)
# still don't cross lines
assert_true((seg2[9, :] != seg2[10, :]).all())
assert_true((seg2[:, 9] != seg2[:, 10]).all())
if __name__ == '__main__':
from numpy import testing
testing.run_module_suite()
| import numpy as np
from numpy.testing import assert_equal, assert_array_equal
from nose.tools import assert_true
from skimage._shared.testing import assert_greater
from skimage.segmentation import quickshift
def test_grey():
rnd = np.random.RandomState(0)
img = np.zeros((20, 21))
img[:10, 10:] = 0.2
img[10:, :10] = 0.4
img[10:, 10:] = 0.6
img += 0.1 * rnd.normal(size=img.shape)
seg = quickshift(img, kernel_size=2, max_dist=3, random_seed=0,
convert2lab=False, sigma=0)
# we expect 4 segments:
assert_equal(len(np.unique(seg)), 4)
# that mostly respect the 4 regions:
for i in range(4):
hist = np.histogram(img[seg == i], bins=[0, 0.1, 0.3, 0.5, 1])[0]
assert_greater(hist[i], 20)
def test_color():
rnd = np.random.RandomState(0)
img = np.zeros((20, 21, 3))
img[:10, :10, 0] = 1
img[10:, :10, 1] = 1
img[10:, 10:, 2] = 1
img += 0.01 * rnd.normal(size=img.shape)
img[img > 1] = 1
img[img < 0] = 0
seg = quickshift(img, random_seed=0, max_dist=30, kernel_size=10, sigma=0)
# we expect 4 segments:
assert_equal(len(np.unique(seg)), 4)
assert_array_equal(seg[:10, :10], 0)
assert_array_equal(seg[10:, :10], 3)
assert_array_equal(seg[:10, 10:], 1)
assert_array_equal(seg[10:, 10:], 2)
seg2 = quickshift(img, kernel_size=1, max_dist=2, random_seed=0,
convert2lab=False, sigma=0)
# very oversegmented:
assert_equal(len(np.unique(seg2)), 7)
# still don't cross lines
assert_true((seg2[9, :] != seg2[10, :]).all())
assert_true((seg2[:, 9] != seg2[:, 10]).all())
if __name__ == '__main__':
from numpy import testing
testing.run_module_suite()
| bsd-3-clause | Python |
81b69ed9fe844c4313badc8335fb2751d3800aef | Remove requests parameter | BakeCode/performance-testing,BakeCode/performance-testing | performance/routine.py | performance/routine.py | from performance import web
class Tool:
def __init__(self, config):
if not isinstance(config, Config):
raise TypeError('No performance.routine.Config object')
self.config = config
def run(self):
pass
class Config:
def __init__(self, host, requests_per_client=10, clients_count=1):
self.host = host
self.requests = []
self.requests_per_client = requests_per_client
self.clients_count = clients_count
def add_request(self, request):
if not isinstance(request, web.Request):
raise TypeError('No performance.web.Request object')
self.requests.append(request)
| from performance import web
class Tool:
def __init__(self, config):
if not isinstance(config, Config):
raise TypeError('No performance.routine.Config object')
self.config = config
def run(self):
pass
class Config:
def __init__(self, host, requests=None, requests_per_client=10, clients_count=1):
self.host = host
if requests is None:
self.requests = []
else:
self.requests = requests
self.requests_per_client = requests_per_client
self.clients_count = clients_count
def add_request(self, request):
if not isinstance(request, web.Request):
raise TypeError('No performance.web.Request object')
self.requests.append(request)
| mit | Python |
90188224e13cf8bcaf234be4da107bf1b0bb45ac | Update micro benchmark: generator; Add more loop variant instruction to the hot loop | wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy,wdv4758h/ZipPy | graal/edu.uci.python.benchmark/src/micro/generator.py | graal/edu.uci.python.benchmark/src/micro/generator.py | # zwei 12/17/13
# subscribe simple generator
import time
def generator(n):
for i in range(n):
yield i * 2
def call_generator(num, iteration):
item = 0
for t in range(iteration):
num += t % 5
for i in generator(num):
item = i + item % 5
return item
def measure():
print("Start timing...")
start = time.time()
num = 1000
last_item = call_generator(num, 10000) #1000000
print("Last item ", last_item)
duration = "%.3f\n" % (time.time() - start)
print("generator: " + duration)
#warm up
for run in range(1000):
call_generator(10, 100)
measure() | # zwei 12/17/13
# subscribe simple generator
import time
def gen(n):
for i in range(n):
yield i
def call_generator(num, iteration):
for t in range(iteration):
for i in gen(num):
item = i
return item
def measure():
print("Start timing...")
start = time.time()
num = 1000
last_item = call_generator(num, 100000) #1000000
print("Last item ", last_item)
duration = "%.3f\n" % (time.time() - start)
print("generator: " + duration)
#warm up
for run in range(1000):
call_generator(10, 10000)
measure() | bsd-3-clause | Python |
9028deadf3ebf6146ada17afdf71de68953206b0 | Fix plugin issue | RazvanRotari/iaP,RazvanRotari/iaP,RazvanRotari/iaP,RazvanRotari/iaP,RazvanRotari/iaP | services/inserter/plugins/categories_extractor.py | services/inserter/plugins/categories_extractor.py | from nltk import word_tokenize
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer
import string
def tokenize(sentence):
sentence = sentence.lower()
tokens = word_tokenize(sentence)
blacklisted_words = stopwords.words('english') + list(string.punctuation)
return [word for word in tokens if word not in blacklisted_words]
def normalize_tokens(tokens):
wnl = WordNetLemmatizer()
return [wnl.lemmatize(word) for word in tokens]
def should_process(item):
if item.categories:
return len(item.categories) == 0
else:
return True
#Do the processing here. Needs to return an item
def process(item):
title = item.title
if not title:
return item
tokens = tokenize(title)
item.categories = normalize_tokens(tokens)
return item
| from nltk import word_tokenize
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer
import string
def tokenize(sentence):
sentence = sentence.lower()
tokens = word_tokenize(sentence)
blacklisted_words = stopwords.words('english') + list(string.punctuation)
return [word for word in tokens if word not in blacklisted_words]
def normalize_tokens(tokens):
wnl = WordNetLemmatizer()
return [wnl.lemmatize(word) for word in tokens]
def should_process(item):
<<<<<<< HEAD
if item.categories:
return len(item.categories) == 0
else:
return True
=======
return len(item.categories) == 0
>>>>>>> 59d396b6571e17f7fda201e23c4481cde3f83914
#Do the processing here. Needs to return an item
def process(item):
title = item.title
if not title:
return item
tokens = tokenize(title)
item.categories = normalize_tokens(tokens)
return item
| mit | Python |
744fd29b2fb8f7201305037817c6c15aa07fb330 | Handle function-based middleware in null_technical_500_response | django-extensions/django-extensions,django-extensions/django-extensions,django-extensions/django-extensions | django_extensions/management/technical_response.py | django_extensions/management/technical_response.py | # -*- coding: utf-8 -*-
import threading
import six
from django.core.handlers.wsgi import WSGIHandler
tld = threading.local()
tld.wsgi_tb = None
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""Function to override django.views.debug.technical_500_response.
Django's convert_exception_to_response() wrapper is called on each 'Middleware' object to avoid
leaking exceptions. If an uncaught exception is raised, the wrapper calls technical_500_response()
to create a response for django's debug view.
Runserver_plus overrides the django debug view's technical_500_response() function to allow for
an enhanced WSGI debugger view to be displayed. However, because Django calls
convert_exception_to_response() on each object in the stack of Middleware objects, re-raising an
error quickly pollutes the traceback displayed.
Runserver_plus only needs needs traceback frames relevant to WSGIHandler Middleware objects, so
only store the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
eventually throws an error for not getting a valid response object for its debug view.
"""
try:
# Store the most recent tb for WSGI requests. The class can be found in the second frame of the tb
if isinstance(tb.tb_next.tb_frame.f_locals.get('self'), WSGIHandler):
tld.wsgi_tb = tb
elif tld.wsgi_tb:
tb = tld.wsgi_tb
except AttributeError:
pass
six.reraise(exc_type, exc_value, tb)
| # -*- coding: utf-8 -*-
import threading
import six
from django.core.handlers.wsgi import WSGIHandler
tld = threading.local()
tld.wsgi_tb = None
def null_technical_500_response(request, exc_type, exc_value, tb, status_code=500):
"""Function to override django.views.debug.technical_500_response.
Django's convert_exception_to_response() wrapper is called on each 'Middleware' object to avoid
leaking exceptions. If and uncaught exception is raised, the wrapper calls technical_500_response()
to create a response for django's debug view.
Runserver_plus overrides the django debug view's technical_500_response() function to allow for
an enhanced WSGI debugger view to be displayed. However, because Django calls
convert_exception_to_response() on each object in the stack of Middleware objects, re-raising an
error quickly pollutes the traceback displayed.
Runserver_plus only needs needs traceback frames relevant to WSGIHandler Middleware objects, so
only store the traceback if it is for a WSGIHandler. If an exception is not raised here, Django
eventually throws an error for not getting a valid response object for its debug view.
"""
try:
# Store the most recent tb for WSGI requests. The class can be found in the second frame of the tb
if isinstance(tb.tb_next.tb_frame.f_locals['self'], WSGIHandler):
tld.wsgi_tb = tb
elif tld.wsgi_tb:
tb = tld.wsgi_tb
except AttributeError:
pass
six.reraise(exc_type, exc_value, tb)
| mit | Python |
de23099e04d0a5823d6917f6f991d66e25b9002b | Add support for rendering with a URL prefix | hyperair/django-medusa | django_medusa/management/commands/staticsitegen.py | django_medusa/management/commands/staticsitegen.py | from django.conf import settings
from django.core.management.base import BaseCommand
from django.core.urlresolvers import set_script_prefix
from django_medusa.renderers import StaticSiteRenderer
from django_medusa.utils import get_static_renderers
class Command(BaseCommand):
can_import_settings = True
help = 'Looks for \'renderers.py\' in each INSTALLED_APP, which defines '\
'a class for processing one or more URL paths into static files.'
def handle(self, *args, **options):
StaticSiteRenderer.initialize_output()
url_prefix = getattr(settings, 'MEDUSA_URL_PREFIX')
if url_prefix is not None:
set_script_prefix(url_prefix)
for Renderer in get_static_renderers():
r = Renderer()
r.generate()
StaticSiteRenderer.finalize_output()
| from django.core.management.base import BaseCommand
from django_medusa.renderers import StaticSiteRenderer
from django_medusa.utils import get_static_renderers
class Command(BaseCommand):
can_import_settings = True
help = 'Looks for \'renderers.py\' in each INSTALLED_APP, which defines '\
'a class for processing one or more URL paths into static files.'
def handle(self, *args, **options):
StaticSiteRenderer.initialize_output()
for Renderer in get_static_renderers():
r = Renderer()
r.generate()
StaticSiteRenderer.finalize_output()
| mit | Python |
9ec79f738b1fcd5002590b5fc31a302b241561e2 | implement option to disable interactive mode | morgenst/PyAnalysisTools,morgenst/PyAnalysisTools,morgenst/PyAnalysisTools | run_scripts/print_cutflow.py | run_scripts/print_cutflow.py | #!/usr/bin/env python
import ROOT
ROOT.PyConfig.IgnoreCommandLineOptions = True
import os
import sys
from PyAnalysisTools.AnalysisTools.CutFlowAnalyser import CutflowAnalyser as ca
from PyAnalysisTools.AnalysisTools.CutFlowAnalyser import ExtendedCutFlowAnalyser as eca
from PyAnalysisTools.base import *
try:
from tabulate.tabulate import tabulate_formats
except ImportError:
from tabulate import tabulate_formats
def main(argv):
parser = get_default_argparser("Cutflow printer")
add_input_args(parser)
add_output_args(parser)
add_process_args(parser)
add_selection_args(parser)
parser.add_argument("--format", "-f", type=str, choices=map(str, tabulate_formats),
help="format of printed table")
parser.add_argument("--systematics", "-s", nargs="+", default=["Nominal"], help="systematics")
parser.add_argument("--no_merge", "-n", action='store_true', default=False, help="switch off merging")
parser.add_argument("--raw", "-r", action="store_true", default=False, help="print raw cutflow")
parser.add_argument('--disable_sm_total', '-dsm', default=False, action='store_true',
help="disable summing sm total")
parser.add_argument('--disable_signal_plots', '-dsp', action='store_true', default=False, help='Disable plots for '
'signal efficiency')
parser.add_argument('-disable_interactive', '-di', action='store_true', default=False, help="Disable interactive"
"mode")
args = default_init(parser)
args.file_list = [os.path.abspath(f) for f in args.input_file_list]
if args.selection_config is None:
cutflow_analyser = ca(**vars(args))
else:
cutflow_analyser = eca(**vars(args))
cutflow_analyser.execute()
cutflow_analyser.print_cutflow_table()
if __name__ == '__main__':
main(sys.argv[1:])
| #!/usr/bin/env python
import ROOT
ROOT.PyConfig.IgnoreCommandLineOptions = True
import os
import sys
from PyAnalysisTools.AnalysisTools.CutFlowAnalyser import CutflowAnalyser as ca
from PyAnalysisTools.AnalysisTools.CutFlowAnalyser import ExtendedCutFlowAnalyser as eca
from PyAnalysisTools.base import *
try:
from tabulate.tabulate import tabulate_formats
except ImportError:
from tabulate import tabulate_formats
def main(argv):
parser = get_default_argparser("Cutflow printer")
add_input_args(parser)
add_output_args(parser)
add_process_args(parser)
add_selection_args(parser)
parser.add_argument("--format", "-f", type=str, choices=map(str, tabulate_formats),
help="format of printed table")
parser.add_argument("--systematics", "-s", nargs="+", default=["Nominal"], help="systematics")
parser.add_argument("--no_merge", "-n", action='store_true', default=False, help="switch off merging")
parser.add_argument("--raw", "-r", action="store_true", default=False, help="print raw cutflow")
parser.add_argument('--disable_sm_total', '-dsm', default=False, action='store_true',
help="disable summing sm total")
parser.add_argument('--disable_signal_plots', '-dsp', action='store_true', default=False, help='Disable plots for '
'signal efficiency')
args = default_init(parser)
args.file_list = [os.path.abspath(f) for f in args.input_file_list]
if args.selection_config is None:
cutflow_analyser = ca(**vars(args))
else:
cutflow_analyser = eca(**vars(args))
cutflow_analyser.execute()
cutflow_analyser.print_cutflow_table()
if __name__ == '__main__':
main(sys.argv[1:])
| mit | Python |
a3d24859d23ef9d299c75fe5ebdfcf187451041b | make sure it works, well sort of... | akrherz/pyWWA,akrherz/pyWWA | meso_afd.py | meso_afd.py | # Process special AFD for BMX
import sys, re
import traceback
import StringIO
import secret
from pyIEM import nws_text
from pyxmpp.jid import JID
from pyxmpp.jabber.simple import send_message
errors = StringIO.StringIO()
raw = sys.stdin.read()
import pg
postgisdb = pg.connect(secret.dbname, secret.dbhost, user=secret.dbuser)
def calldb(sql):
try:
postgisdb.query(sql)
except:
errors.write("\n-----------\nSQL: %s\n" % (sql,) )
traceback.print_exc(file=errors)
errors.write("\n-----------\n")
def querydb(sql):
try:
return postgisdb.query(sql).dictresult()
except:
errors.write("\n-----------\nSQL: %s\n" % (sql,) )
traceback.print_exc(file=errors)
errors.write("\n-----------\n")
return []
def sendJabberMessage(jabberTxt):
jid=JID("iembot_ingestor@%s/Ingestor" % (secret.chatserver,) )
recpt=JID("iembot@%s/Echobot" % (secret.chatserver,) )
send_message(jid, secret.iembot_ingest_password, recpt, jabberTxt, 'Ba')
def process(raw):
afos = sys.argv[1]
pil = afos[:3]
wfo = afos[3:]
raw = raw.replace("'", "\\'")
tokens = re.findall("\.UPDATE\.\.\.MESOSCALE UPDATE", raw)
if (len(tokens) == 0):
return
sql = "INSERT into text_products(product) values ('%s')" % (raw,)
calldb(sql)
sql = "select last_value from text_products_id_seq"
rs = querydb(sql)
id = rs[0]['last_value']
mess = "%s: %s issues Mesoscale %s http://mesonet.agron.iastate.edu/p.php?id=%s" % \
(wfo, wfo, pil, id)
sendJabberMessage(mess)
errors.seek(0)
print errors.read()
if __name__ == "__main__":
process(raw)
| # Process special AFD for BMX
import sys, re
import traceback
import StringIO
import secret
from pyIEM import nws_text
from pyxmpp.jid import JID
from pyxmpp.jabber.simple import send_message
errors = StringIO.StringIO()
raw = sys.stdin.read()
import pg
postgisdb = pg.connect(secret.dbname, secret.dbhost, user=secret.dbuser)
def calldb(sql):
if (not HAVE_POSTGIS):
return
try:
postgisdb.query(sql)
except:
errors.write("\n-----------\nSQL: %s\n" % (sql,) )
traceback.print_exc(file=errors)
errors.write("\n-----------\n")
def querydb(sql):
if (not HAVE_POSTGIS):
return []
try:
return postgisdb.query(sql).dictresult()
except:
errors.write("\n-----------\nSQL: %s\n" % (sql,) )
traceback.print_exc(file=errors)
errors.write("\n-----------\n")
return []
def sendJabberMessage(jabberTxt):
jid=JID(jabber_from_jid)
recpt=JID(jabber_to_jid)
send_message(jid, jabber_passwd, recpt, jabberTxt, 'Ba')
def process(raw):
afos = sys.argv[1]
pil = afos[:3]
wfo = afos[3:]
raw = raw.replace("'", "\\'")
tokens = re.findall("\.UPDATE\.\.\.MESOSCALE UPDATE", raw)
if (len(tokens) == 0):
return
sql = "INSERT into text_products(product) values ('%s')" % (raw,)
calldb(sql)
sql = "select last_value from text_products_id_seq"
rs = querydb(sql)
id = rs[0]['last_value']
mess = "%s: %s issues Mesoscale %s http://mesonet.agron.iastate.edu/p.php?id=%s" % \
(wfo, wfo, pil, id)
sendJabberMessage(mess)
errors.seek(0)
print errors.read()
if __name__ == "__main__":
process(raw)
| mit | Python |
3c5d82ced00da284fc8dc48fea34462958a8883e | Fix cross-platform process communication. | joeyespo/grip,ssundarraj/grip,joeyespo/grip,ssundarraj/grip | tests/test_cli.py | tests/test_cli.py | """
Tests the Grip command-line interface.
"""
from __future__ import print_function, unicode_literals
from subprocess import PIPE, STDOUT, CalledProcessError, Popen
import pytest
from grip.command import usage, version
def run(*args, **kwargs):
command = kwargs.pop('command', 'grip')
stdin = kwargs.pop('stdin', None)
cmd = [command] + list(args)
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=STDOUT,
universal_newlines=True)
# Sent input as STDIN then close it
output, _ = p.communicate(input=stdin)
p.stdin.close()
# Wait for process to terminate
returncode = p.wait()
# Capture any more output that occurred during shutdown
try:
output += p.communicate()[0]
except ValueError:
pass
# Raise exception on failed process calls
if returncode != 0:
raise CalledProcessError(returncode, cmd, output=output)
return output
def test_help():
assert run('-h') == usage
assert run('--help') == usage
def test_version():
assert run('-V') == version + '\n'
assert run('--version') == version + '\n'
def test_bad_command():
simple_usage = '\n\n'.join(usage.split('\n\n')[:1])
with pytest.raises(CalledProcessError) as excinfo:
run('--does-not-exist')
assert excinfo.value.output == simple_usage + '\n'
# TODO: Figure out how to run the CLI and still capture requests
# TODO: Test all Grip CLI commands and arguments
# TODO: Test settings wire-up
| """
Tests the Grip command-line interface.
"""
from __future__ import print_function, unicode_literals
from subprocess import PIPE, STDOUT, CalledProcessError, Popen
import pytest
from grip.command import usage, version
def run(*args, **kwargs):
command = kwargs.pop('command', 'grip')
stdin = kwargs.pop('stdin', None)
cmd = [command] + list(args)
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=STDOUT,
universal_newlines=True)
out1, _ = p.communicate(input=stdin)
p.stdin.close()
returncode = p.wait()
out2, _ = p.communicate()
output = out1 + out2
if returncode != 0:
raise CalledProcessError(returncode, cmd, output=output)
return output
def test_help():
assert run('-h') == usage
assert run('--help') == usage
def test_version():
assert run('-V') == version + '\n'
assert run('--version') == version + '\n'
def test_bad_command():
simple_usage = '\n\n'.join(usage.split('\n\n')[:1])
with pytest.raises(CalledProcessError) as excinfo:
run('--does-not-exist')
assert excinfo.value.output == simple_usage + '\n'
# TODO: Figure out how to run the CLI and still capture requests
# TODO: Test all Grip CLI commands and arguments
# TODO: Test settings wire-up
| mit | Python |
8cf225cae6e3b57fa068ec56373905835f9bc64d | Add automatic selection of default value if not provided | uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged | csunplugged/resources/utils/resource_parameters.py | csunplugged/resources/utils/resource_parameters.py | from lxml import etree
from django.utils.translation import ugettext as _
class ResourceParameter(object):
def __init__(self, name="", description=""):
self.name = name
self.description = description
def html_element(self):
legend = etree.Element('legend')
legend.text = self.description
fieldset = etree.Element('fieldset')
fieldset.append(legend)
return fieldset
class EnumResourceParameter(ResourceParameter):
def __init__(self, values=[], default=None, **kwargs):
super().__init__(**kwargs)
self.values = values
self.default = default
if self.default not in self.values:
self.default = list(values.keys())[0] # Select first value
def html_element(self):
base_elem = super().html_element()
for value, value_desc in self.values.items():
input_elem = etree.Element(
'input',
type="radio",
name=self.name,
id='{}_{}'.format(self.name, value),
value=str(value)
)
if value == self.default:
input_elem.set("checked", "checked")
base_elem.append(input_elem)
label_elem = etree.Element(
"label",
)
label_elem.set("for", "{}_{}".format(self.name, value))
label_elem.text = value_desc
base_elem.append(label_elem)
base_elem.append(etree.Element('br'))
return base_elem
class BoolResourceParameter(EnumResourceParameter):
def __init__(self, default=True, true_text=_("Yes"), false_text=_("No"), **kwargs):
values = {
True: true_text,
False: false_text
}
super().__init__(values=values, default=default, **kwargs)
| from lxml import etree
from django.utils.translation import ugettext as _
class ResourceParameter(object):
def __init__(self, name="", description=""):
self.name = name
self.description = description
def html_element(self):
legend = etree.Element('legend')
legend.text = self.description
fieldset = etree.Element('fieldset')
fieldset.append(legend)
return fieldset
class EnumResourceParameter(ResourceParameter):
def __init__(self, values=[], default=None, **kwargs):
super().__init__(**kwargs)
self.values = values
self.default = default
if self.default not in self.values:
raise Exception(self.values)
def html_element(self):
base_elem = super().html_element()
for value, value_desc in self.values.items():
input_elem = etree.Element(
'input',
type="radio",
name=self.name,
id='{}_{}'.format(self.name, value),
value=str(value)
)
if value == self.default:
input_elem.set("checked", "checked")
base_elem.append(input_elem)
label_elem = etree.Element(
"label",
)
label_elem.set("for", "{}_{}".format(self.name, value))
label_elem.text = value_desc
base_elem.append(label_elem)
base_elem.append(etree.Element('br'))
return base_elem
class BoolResourceParameter(EnumResourceParameter):
def __init__(self, default=True, true_text=_("Yes"), false_text=_("No"), **kwargs):
values = {
True: true_text,
False: false_text
}
super().__init__(values=values, default=default, **kwargs)
| mit | Python |
5db86f0ede5c95f8b08ebb8c5f536e8d431662cb | fix daiquiri_path script | aipescience/django-daiquiri,aipescience/django-daiquiri,aipescience/django-daiquiri | daiquiri/core/management/commands/daiquiri_path.py | daiquiri/core/management/commands/daiquiri_path.py | import imp
from django.core.management.base import BaseCommand
class Command(BaseCommand):
requires_system_checks = False
can_import_settings = False
def handle(self, *args, **options):
print(imp.find_module('daiquiri')[1])
| import imp
from django.core.management.base import BaseCommand
class Command(BaseCommand):
def handle(self, *args, **options):
print(imp.find_module('daiquiri')[1])
| apache-2.0 | Python |
fc2ca6994c10e6f5781965f1d5ad2f651249f6c6 | cover test for warnings | Woile/commitizen,Woile/commitizen | tests/test_cli.py | tests/test_cli.py | import sys
import pytest
from commitizen import cli
from commitizen.__version__ import __version__
def test_sysexit_no_argv(mocker, capsys):
testargs = ["cz"]
mocker.patch.object(sys, "argv", testargs)
with pytest.raises(SystemExit):
cli.main()
out, _ = capsys.readouterr()
assert out.startswith("usage")
def test_cz_with_arg_but_without_command(mocker, capsys):
testargs = ["cz", "--name", "cz_jira"]
mocker.patch.object(sys, "argv", testargs)
with pytest.raises(SystemExit):
cli.main()
_, err = capsys.readouterr()
assert "Command is required" in err
def test_name(mocker, capsys):
testargs = ["cz", "-n", "cz_jira", "example"]
mocker.patch.object(sys, "argv", testargs)
cli.main()
out, _ = capsys.readouterr()
assert out.startswith("JRA")
@pytest.mark.usefixtures("tmp_git_project")
def test_name_default_value(mocker, capsys):
testargs = ["cz", "example"]
mocker.patch.object(sys, "argv", testargs)
cli.main()
out, _ = capsys.readouterr()
assert out.startswith("fix: correct minor typos in code")
def test_ls(mocker, capsys):
testargs = ["cz", "-n", "cz_jira", "ls"]
mocker.patch.object(sys, "argv", testargs)
cli.main()
out, err = capsys.readouterr()
assert "cz_conventional_commits" in out
assert isinstance(out, str)
def test_arg_version(mocker, capsys):
testargs = ["cz", "--version"]
mocker.patch.object(sys, "argv", testargs)
with pytest.warns(DeprecationWarning) as record:
cli.main()
out, _ = capsys.readouterr()
assert out.strip() == __version__
assert record[0].message.args[0] == (
"'cz --version' will be deprecated in next major version. "
"Please use 'cz version' command from your scripts"
)
def test_arg_debug(mocker):
testargs = ["cz", "--debug", "info"]
mocker.patch.object(sys, "argv", testargs)
with pytest.warns(DeprecationWarning) as record:
cli.main()
assert record[0].message.args[0] == (
"Debug will be deprecated in next major version. "
"Please remove it from your scripts"
)
| import sys
import pytest
from commitizen import cli
from commitizen.__version__ import __version__
def test_sysexit_no_argv(mocker, capsys):
testargs = ["cz"]
mocker.patch.object(sys, "argv", testargs)
with pytest.raises(SystemExit):
cli.main()
out, _ = capsys.readouterr()
assert out.startswith("usage")
def test_cz_with_arg_but_without_command(mocker, capsys):
testargs = ["cz", "--name", "cz_jira"]
mocker.patch.object(sys, "argv", testargs)
with pytest.raises(SystemExit):
cli.main()
_, err = capsys.readouterr()
assert "Command is required" in err
def test_name(mocker, capsys):
testargs = ["cz", "-n", "cz_jira", "example"]
mocker.patch.object(sys, "argv", testargs)
cli.main()
out, _ = capsys.readouterr()
assert out.startswith("JRA")
@pytest.mark.usefixtures("tmp_git_project")
def test_name_default_value(mocker, capsys):
testargs = ["cz", "example"]
mocker.patch.object(sys, "argv", testargs)
cli.main()
out, _ = capsys.readouterr()
assert out.startswith("fix: correct minor typos in code")
def test_ls(mocker, capsys):
testargs = ["cz", "-n", "cz_jira", "ls"]
mocker.patch.object(sys, "argv", testargs)
cli.main()
out, err = capsys.readouterr()
assert "cz_conventional_commits" in out
assert isinstance(out, str)
def test_version(mocker, capsys):
testargs = ["cz", "--version"]
mocker.patch.object(sys, "argv", testargs)
cli.main()
out, _ = capsys.readouterr()
assert out.strip() == __version__
| mit | Python |
5f09835f9aa62abb5f891c4d3896206eedd9fe12 | fix somewherein url | banglakit/corpus-builder | corpus_builder/spiders/public_blog/somewherein.py | corpus_builder/spiders/public_blog/somewherein.py | # -*- coding: utf-8 -*-
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import Rule
from corpus_builder.templates.spider import CommonSpider
class SomewhereInSpider(CommonSpider):
name = 'somewherein'
allowed_domains = ['www.somewhereinblog.net', 'somewhereinblog.net']
base_url = 'http://www.somewhereinblog.net/'
start_request_url = base_url
content_body = {
'css': '.blog-content::text'
}
rules = (
Rule(LinkExtractor(
restrict_css='h2.post-title'
),
callback='parse_content'),
)
allowed_configurations = [
['start_page'],
['start_page', 'end_page']
]
def request_index(self, response):
for page in range(self.start_page - 1, self.end_page ):
yield scrapy.Request(self.base_url + 'live/{page}'.format(page=page * 15))
| # -*- coding: utf-8 -*-
import scrapy
from scrapy.linkextractors import LinkExtractor
from scrapy.spiders import Rule
from corpus_builder.templates.spider import CommonSpider
class SomewhereInSpider(CommonSpider):
name = 'somewherein'
allowed_domains = ['www.somewhereinblog.net', 'somewhereinblog.net']
base_url = 'http://www.somewhereinblog.net/'
start_request_url = base_url
content_body = {
'css': '.blog-content::text'
}
rules = (
Rule(LinkExtractor(
restrict_css='h2.post-title'
),
callback='parse_content'),
)
allowed_configurations = [
['start_page'],
['start_page', 'end_page']
]
def request_index(self, response):
for page in range(self.start_page - 1, self.end_page ):
yield scrapy.Request(self.base_url + 'page/{page}'.format(page=page * 15))
| mit | Python |
38dba247fbc5855dd830a02e52531d8c98964693 | Update ipc_lista1.4.py | any1m1c/ipc20161 | lista1/ipc_lista1.4.py | lista1/ipc_lista1.4.py | #ipc_lista1.4
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um Programa que peça as 4 notas bimestrais e mostre a media
nota1 = int(input("Digite a primeira nota do bimestre: "))
nota2 = int(input("Digite a segunda nota do bimestre: "))
nota3 = int(input("Digite a terceira nota do bismestre: "))
nota4 - int
print
media = (nota1+nota2+nota3+nota4)/4.0
print" A sua média é: %s" %media
| #ipc_lista1.4
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um Programa que peça as 4 notas bimestrais e mostre a media
nota1 = int(input("Digite a primeira nota do bimestre: "))
nota2 = int(input("Digite a segunda nota do bimestre: "))
nota3 = int(input("Digite a terceira nota do bismestre: "))
print
media = (nota1+nota2+nota3+nota4)/4.0
print" A sua média é: %s" %media
| apache-2.0 | Python |
0311ff1b0d3bdb40a8840293aa1acc5667b5db43 | Update ipc_lista1.5.py | any1m1c/ipc20161 | lista1/ipc_lista1.5.py | lista1/ipc_lista1.5.py | #ipc_lista1.5
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que converta metros para centímetros.
| #ipc_lista1.5
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que converta metros para centímetros.
| apache-2.0 | Python |
6bc6487d2659b33a15e93b998058bde637e83993 | Update ipc_lista1.5.py | any1m1c/ipc20161 | lista1/ipc_lista1.5.py | lista1/ipc_lista1.5.py | #ipc_lista1.5
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que converta metros para centímetros.
metros = input("Digite o valor em metros que deseja
| #ipc_lista1.5
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que converta metros para centímetros.
metros = input("Digite o valor em metros que
| apache-2.0 | Python |
e579e03f70bfbea3942647059ebd2a8a3e0043a3 | update sphinx rendering to use built in jinja rendering for .js/.css files | zzzeek/mako,zzzeek/mako,sqlalchemy/mako,mindw/mako,sqlalchemy/mako,mindw/mako,Distrotech/mako,Distrotech/mako,wujuguang/mako,wujuguang/mako | doc/build/builder/builders.py | doc/build/builder/builders.py | from sphinx.application import TemplateBridge
from sphinx.builders.html import StandaloneHTMLBuilder
from sphinx.highlighting import PygmentsBridge
from sphinx.jinja2glue import BuiltinTemplateLoader
from pygments import highlight
from pygments.lexer import RegexLexer, bygroups, using
from pygments.token import *
from pygments.filter import Filter, apply_filters
from pygments.lexers import PythonLexer, PythonConsoleLexer
from pygments.formatters import HtmlFormatter, LatexFormatter
import re
from mako.lookup import TemplateLookup
from mako.template import Template
from mako.ext.pygmentplugin import MakoLexer
class MakoBridge(TemplateBridge):
    """Sphinx TemplateBridge that renders page templates through Mako.

    HTML page templates are looked up as ``.mako`` files in the configured
    template path; everything else (``.js``, ``.css``, ...) is delegated to
    Sphinx's builtin Jinja2 loader via :meth:`render_string`.
    """

    def init(self, builder, *args, **kw):
        # Keep a fully initialised Jinja2 loader around for render_string().
        self.jinja2_fallback = BuiltinTemplateLoader()
        self.jinja2_fallback.init(builder, *args, **kw)
        self.layout = builder.config.html_context.get('mako_layout', 'html')
        self.lookup = TemplateLookup(
            directories=builder.config.templates_path,
            imports=[
                "from builder import util"
            ],
        )

    def render(self, template, context):
        mako_template = template.replace(".html", ".mako")
        context['prevtopic'] = context.pop('prev', None)
        context['nexttopic'] = context.pop('next', None)
        if self.layout == 'html':
            context['mako_layout'] = 'static_base.mako'
        else:
            context['mako_layout'] = 'site_base.mako'
        # sphinx 1.0b2 doesn't seem to be providing _ for some reason...
        context.setdefault('_', lambda x: x)
        return self.lookup.get_template(mako_template).render_unicode(**context)

    def render_string(self, template, context):
        # this is used for .js, .css etc. and we don't have
        # local copies of that stuff here so use the jinja render.
        return self.jinja2_fallback.render_string(template, context)
class StripDocTestFilter(Filter):
    """Pygments filter that drops ``# doctest:`` directive comments."""

    def filter(self, lexer, stream):
        for token_type, text in stream:
            is_doctest_directive = (token_type is Token.Comment
                                    and re.match(r'#\s*doctest:', text))
            if not is_doctest_directive:
                yield token_type, text
def autodoc_skip_member(app, what, name, obj, skip, options):
    """Sphinx ``autodoc-skip-member`` hook.

    Force ``__init__`` methods of documented classes to be included even
    when autodoc would normally skip them; defer to autodoc's decision
    (``skip``) for every other member.
    """
    forced_include = (what == 'class' and name == '__init__' and skip)
    return False if forced_include else skip
def setup(app):
    """Sphinx extension entry point: register project-specific helpers."""
    # app.connect('autodoc-skip-member', autodoc_skip_member)
    # Mako is already in Pygments, adding the local
    # lexer here so that the latest syntax is available
    app.add_lexer('mako', MakoLexer())
| from sphinx.application import TemplateBridge
from sphinx.builders.html import StandaloneHTMLBuilder
from sphinx.highlighting import PygmentsBridge
from pygments import highlight
from pygments.lexer import RegexLexer, bygroups, using
from pygments.token import *
from pygments.filter import Filter, apply_filters
from pygments.lexers import PythonLexer, PythonConsoleLexer
from pygments.formatters import HtmlFormatter, LatexFormatter
import re
from mako.lookup import TemplateLookup
from mako.template import Template
from mako.ext.pygmentplugin import MakoLexer
class MakoBridge(TemplateBridge):
def init(self, builder, *args, **kw):
self.layout = builder.config.html_context.get('mako_layout', 'html')
self.lookup = TemplateLookup(directories=builder.config.templates_path,
imports=[
"from builder import util"
]
)
def render(self, template, context):
template = template.replace(".html", ".mako")
context['prevtopic'] = context.pop('prev', None)
context['nexttopic'] = context.pop('next', None)
context['mako_layout'] = self.layout == 'html' and 'static_base.mako' or 'site_base.mako'
# sphinx 1.0b2 doesn't seem to be providing _ for some reason...
context.setdefault('_', lambda x:x)
return self.lookup.get_template(template).render_unicode(**context)
def render_string(self, template, context):
context['prevtopic'] = context.pop('prev', None)
context['nexttopic'] = context.pop('next', None)
context['mako_layout'] = self.layout == 'html' and 'static_base.mako' or 'site_base.mako'
# sphinx 1.0b2 doesn't seem to be providing _ for some reason...
context.setdefault('_', lambda x:x)
return Template(template, lookup=self.lookup,
format_exceptions=True,
imports=[
"from builder import util"
]
).render_unicode(**context)
class StripDocTestFilter(Filter):
def filter(self, lexer, stream):
for ttype, value in stream:
if ttype is Token.Comment and re.match(r'#\s*doctest:', value):
continue
yield ttype, value
def autodoc_skip_member(app, what, name, obj, skip, options):
if what == 'class' and skip and name == '__init__':
return False
else:
return skip
def setup(app):
# app.connect('autodoc-skip-member', autodoc_skip_member)
# Mako is already in Pygments, adding the local
# lexer here so that the latest syntax is available
app.add_lexer('mako', MakoLexer())
| mit | Python |
6959a95e204599cb18db7211ac4cce005918142c | update utils | dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy,dssg/wikienergy | disaggregator/utils.py | disaggregator/utils.py | import appliance
import pandas
def concatenate_traces(traces, metadata=None, how="strict"):
    '''
    Given a list of appliance traces, returns a single concatenated
    trace. With how="strict" option, must be sampled at the same rate and
    consecutive, without overlapping datapoints.
    '''
    if not metadata:
        # Fall back to the metadata of the first trace.
        metadata = traces[0].metadata
    if how == "strict":
        # require ordered list of consecutive, similarly sampled traces with
        # no missing data.
        # BUG FIX: metadata was previously passed as the second positional
        # argument of concat() (i.e. as the axis) instead of to the trace
        # constructor, so the resulting trace lost its metadata.
        combined = pandas.concat([t.series for t in traces])
        # NOTE(review): this module only does `import appliance`; assuming
        # the trace class is exposed there -- confirm the attribute path.
        return appliance.ApplianceTrace(combined, metadata)
    else:
        raise NotImplementedError
def aggregate_traces(traces, metadata, how="strict"):
    '''
    Given a list of temporally aligned traces, aggregate them into a single
    signal.
    '''
    if how == "strict":
        # require that traces are exactly aligned
        summed_series = traces[0].series
        for trace in traces[1:]:
            # NOTE(review): += may mutate traces[0].series in place for
            # pandas objects; copy first if aliasing is a problem.
            summed_series += trace.series
        return ApplianceTrace(summed_series, metadata)
    else:
        # BUG FIX: was `return NotImplementedError`, which silently handed
        # the exception class back to the caller instead of raising it.
        raise NotImplementedError
def aggregate_instances(instances, how="strict"):
    '''
    Given a list of temporally aligned instances, aggregate them into a single
    signal.
    '''
    if how == "strict":
        traces = [instance.traces for instance in instances]
        traces = [list(t) for t in zip(*traces)]  # transpose
        traces = [aggregate_traces(t, {}) for t in traces]
        # TODO how to aggregate metadata?
        return ApplianceInstance(traces)
    else:
        # BUG FIX: was `return NotImplementedError` (returned the exception
        # class instead of raising it).
        raise NotImplementedError
def order_traces(self, traces):
    '''
    Given a set of traces, orders them chronologically by each trace's
    first sample.

    NOTE(review): `self` is a stray parameter on a module-level function
    (kept for backward compatibility with existing callers); the old
    docstring claimed overlap detection, but no such check is implemented.
    '''
    # BUG FIX: previously used np.argsort, but numpy is never imported in
    # this module, so every call raised NameError. sorted() with a key
    # produces the same ordering without the dependency.
    return sorted(traces, key=lambda trace: trace.series[0])
| from ApplianceTrace import ApplianceTrace
from ApplianceInstance import ApplianceInstance
from ApplianceType import ApplianceType
from ApplianceSet import ApplianceSet
from pandas import concat
def concatenate_traces(traces, metadata=None, how="strict"):
'''
Given a list of appliance traces, returns a single concatenated
trace. With how="strict" option, must be sampled at the same rate and
consecutive, without overlapping datapoints.
'''
if not metadata:
metadata = traces[0].metadata
if how == "strict":
# require ordered list of consecutive, similarly sampled traces with no
# missing data.
return ApplianceTrace(concat([t.series for t in traces],metadata))
else:
raise NotImplementedError
def aggregate_traces(traces, metadata, how="strict"):
'''
Given a list of temporally aligned traces, aggregate them into a single
signal.
'''
if how == "strict":
# require that traces are exactly aligned
summed_series = traces[0].series
for trace in traces[1:]:
summed_series += trace.series
return ApplianceTrace(summed_series,metadata)
else:
return NotImplementedError
def aggregate_instances(instances,how="strict"):
'''
Given a list of temporally aligned instances, aggregate them into a single
signal.
'''
if how == "strict":
traces = [instance.traces for instance in instances]
traces = [list(t) for t in zip(*traces)] # transpose
traces = [ aggregate_traces(t,{}) for t in traces]
# TODO how to aggregate metadata?
return ApplianceInstance(traces)
else:
return NotImplementedError
def order_traces(self,traces):
'''
Given a set of traces, orders them chronologically and catches
overlapping traces.
'''
order = np.argsort([t.series[0] for t in traces])
new_traces = [traces[i] for i in order]
return new_traces
| mit | Python |
03da52f9098e05f564ff7c258a6d60e84bd2cf56 | Make sure the test runner use the local filesystem. | us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite,us-ignite/us_ignite | us_ignite/settings/testing.py | us_ignite/settings/testing.py | # Testing settings for us_ignite
from us_ignite.settings import *

# Throwaway key: only needs to satisfy Django during test runs.
SECRET_KEY = 'c!lizso+53#4dhm*o2qyh9t(n14p#wr5!+%1bfjtrqa#vsc$@h'

# Local SQLite database so tests never touch a real database server.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'us-ignite-test.db',
        'USER': '',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    }
}

INSTALLED_APPS += (
    'django_nose',
)

# Apps that must not be loaded while testing.
EXCLUDED_APPS = (
    'south',
)
# NOTE(review): under Python 3 filter() returns an iterator, not a tuple;
# Django expects a sequence here -- confirm the project targets Python 2.
INSTALLED_APPS = filter(lambda a: a not in EXCLUDED_APPS, INSTALLED_APPS)

NOSE_ARGS = [
    '-s',
    '--failed',
    '--stop',
    '--nocapture',
    '--failure-detail',
    '--with-progressive',
    '--logging-filter=-south',
]

# Print outgoing mail to the console instead of sending it.
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'

TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'

# ignore South
SOUTH_TESTS_MIGRATE = False
SKIP_SOUTH_TESTS = True

# No caching during tests.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    }
}

# Faster tests with the MD5hasher.
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']

SITE_URL = 'http://testing-us-ignite.org'

# Use the local filesystem and plain static files so the test runner does
# not depend on any remote storage backend.
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
STATIC_URL = '/static/'
| # Testing settings for us_ignite
from us_ignite.settings import *
SECRET_KEY = 'c!lizso+53#4dhm*o2qyh9t(n14p#wr5!+%1bfjtrqa#vsc$@h'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'us-ignite-test.db',
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
INSTALLED_APPS += (
'django_nose',
)
EXCLUDED_APPS = (
'south',
)
INSTALLED_APPS = filter(lambda a: a not in EXCLUDED_APPS, INSTALLED_APPS)
NOSE_ARGS = [
'-s',
'--failed',
'--stop',
'--nocapture',
'--failure-detail',
'--with-progressive',
'--logging-filter=-south',
]
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
# ignore South
SOUTH_TESTS_MIGRATE = False
SKIP_SOUTH_TESTS = True
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
}
}
# Faster tests with the MD5hasher.
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
SITE_URL = 'http://testing-us-ignite.org'
| bsd-3-clause | Python |
85aa0455d5accb392cf6bc3b5fc44bc8b8da4350 | Add retry when tag reference was lost | pixelated-project/pixelated-user-agent,pixelated/pixelated-user-agent,pixelated/pixelated-user-agent,pixelated-project/pixelated-user-agent,pixelated-project/pixelated-user-agent,pixelated/pixelated-user-agent,pixelated/pixelated-user-agent,pixelated-project/pixelated-user-agent,pixelated-project/pixelated-user-agent,pixelated/pixelated-user-agent | service/test/functional/features/steps/tag_list.py | service/test/functional/features/steps/tag_list.py | #
# Copyright (c) 2014 ThoughtWorks, Inc.
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
from behave import when
from selenium.common.exceptions import TimeoutException, StaleElementReferenceException
from common import (
find_element_by_class_name,
find_element_by_css_selector,
wait_for_user_alert_to_disapear)
def click_first_element_with_class(context, classname):
    """Click the first element carrying the given CSS class."""
    find_element_by_class_name(context, classname).click()
def is_side_nav_expanded(context):
    """Return True when the side navigation drawer is currently open."""
    content_classes = find_element_by_class_name(context, 'content').get_attribute("class")
    return u'move-right' in content_classes
def expand_side_nav(context):
    """Open the side navigation drawer unless it is already open."""
    if not is_side_nav_expanded(context):
        find_element_by_css_selector(context, '.side-nav-toggle-icon i').click()
@when('I select the tag \'{tag}\'')
def select_tag(context, tag):
    """Open the given tag from the side nav, retrying once on flakiness."""
    wait_for_user_alert_to_disapear(context)
    expand_side_nav(context)
    # The tag list re-renders asynchronously, so the first click can hit a
    # stale or not-yet-present element; retry the whole interaction once.
    success = False
    for _attempt in range(2):
        try:
            find_element_by_css_selector(context, '#tag-%s' % tag).click()
            find_element_by_css_selector(
                context, ".mail-list-entry__item[href*='%s']" % tag)
        except (TimeoutException, StaleElementReferenceException):
            continue
        success = True
        break
    assert success
@when('I am in \'{tag}\'')
def assert_in_tag(context, tag):
    """Verify that the given tag is the currently selected one."""
    expand_side_nav(context)
    tag_element = find_element_by_css_selector(context, '#tag-%s' % tag)
    assert "selected" in tag_element.get_attribute("class")
| #
# Copyright (c) 2014 ThoughtWorks, Inc.
#
# Pixelated is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Pixelated is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Pixelated. If not, see <http://www.gnu.org/licenses/>.
from behave import when
from selenium.common.exceptions import TimeoutException
from common import (
find_element_by_class_name,
find_element_by_id,
find_element_by_css_selector,
wait_for_user_alert_to_disapear)
def click_first_element_with_class(context, classname):
element = find_element_by_class_name(context, classname)
element.click()
def is_side_nav_expanded(context):
e = find_element_by_class_name(context, 'content')
return u'move-right' in e.get_attribute("class")
def expand_side_nav(context):
if is_side_nav_expanded(context):
return
toggle = find_element_by_css_selector(context, '.side-nav-toggle-icon i')
toggle.click()
@when('I select the tag \'{tag}\'')
def impl(context, tag):
wait_for_user_alert_to_disapear(context)
expand_side_nav(context)
# try this multiple times as there are some race conditions
try_again = 2
success = False
while (not success) and (try_again > 0):
try:
find_element_by_css_selector(context, '#tag-%s' % tag)
e = find_element_by_id(context, 'tag-%s' % tag)
e.click()
find_element_by_css_selector(context, ".mail-list-entry__item[href*='%s']" % tag)
success = True
except TimeoutException:
pass
finally:
try_again -= 1
assert success
@when('I am in \'{tag}\'')
def impl(context, tag):
expand_side_nav(context)
find_element_by_css_selector(context, '#tag-%s' % tag)
e = find_element_by_id(context, 'tag-%s' % tag)
assert "selected" in e.get_attribute("class")
| agpl-3.0 | Python |
895a40c9e92af1700cc174e32472c833e83140be | Use replace instead of strip | jdgwartney/boundary-plugin-shell,jdgwartney/boundary-plugin-shell,boundary/boundary-plugin-shell,boundary/boundary-plugin-shell | exec_proc.py | exec_proc.py | #!/usr/bin/env python
# Copyright 2014 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen,PIPE
import shlex
import logging
from string import replace
class ExecProc:
    """Run a shell-style command line and return its (cleaned) stdout.

    The command is tokenised with shlex, executed without a shell, and any
    carriage returns are stripped from the captured output.
    """

    def __init__(self):
        self.command = None   # command line to run, set via setCommand()
        self.debug = False    # when True, log the parsed argv and output

    def setDebug(self, debug):
        self.debug = debug

    def setCommand(self, command):
        # Reject anything that is not a plain string up front.
        if not isinstance(command, str):
            raise ValueError
        self.command = command

    def execute(self):
        """Run the configured command; returns its stdout with CRs removed.

        Raises ValueError if no command has been set.
        """
        if self.command is None:
            raise ValueError
        args = shlex.split(self.command)
        if self.debug:
            logging.info("command=\"%s\"", args)
        p = Popen(args, stdout=PIPE)
        o, e = p.communicate()
        # bytearray iteration yields ints on both Python 2 and 3, so the
        # hex dump below is version-neutral.
        hex_before = ':'.join('%02x' % byte for byte in bytearray(o))
        logging.info("before: " + hex_before)
        # Remove carriage returns so Windows-style line endings do not leak
        # into the result. BUG FIX: the original called the Python-2-only
        # string.replace() free function, which is a NameError on Python 3.
        o = o.replace(b"\r", b"")
        hex_after = ':'.join('%02x' % byte for byte in bytearray(o))
        logging.info("after: " + hex_after)
        if self.debug:
            logging.info("output=\"%s\"", o)
            logging.info(hex_after)
        return o
| #!/usr/bin/env python
# Copyright 2014 Boundary, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from subprocess import Popen,PIPE
import shlex
import logging
class ExecProc:
def __init__(self):
self.command = None
self.debug = False
def setDebug(self,debug):
self.debug = debug
def setCommand(self,command):
if type(command) != str:
raise ValueError
self.command = command
def execute(self):
if self.command == None:
raise ValueError
# Remove Carriage Returns
args = shlex.split(self.command)
if self.debug == True:
logging.info("command=\"%s\"",args)
p = Popen(args,stdout=PIPE)
o,e = p.communicate()
logging.info("before: " + ':'.join(x.encode('hex') for x in o))
o = o.strip('\r')
logging.info("after: " + ':'.join(x.encode('hex') for x in o))
if self.debug == True:
logging.info("output=\"%s\"",o)
logging.info(':'.join(x.encode('hex') for x in o))
return o
| apache-2.0 | Python |
d5293bc83495e1d9ddcf8918dd99f983c185f475 | clean formatting | DeadSix27/python_cross_compile_script | packages/dependencies/libsndfile.py | packages/dependencies/libsndfile.py | {
'repo_type' : 'git',
'url' : 'https://github.com/erikd/libsndfile.git',
'configure_options' : '--host={target_host} --prefix={target_prefix} --disable-shared --enable-static --enable-sqlite --disable-test-coverage --enable-external-libs --enable-experimental',
'run_post_patch' : [
'autoreconf -fiv -I M4',
],
'run_post_install' : [
'sed -i.bak \'s/Libs: -L${{libdir}} -lsndfile/Libs: -L${{libdir}} -lsndfile -lFLAC -lvorbis -lvorbisenc -logg -lspeex/\' "{pkg_config_path}/sndfile.pc"', #issue with rubberband not using pkg-config option "--static" or so idk?
],
'depends_on' : [ 'libspeex' ],
'packages' : {
'arch' : [ 'autogen' ],
},
'_info' : { 'version' : None, 'fancy_name' : 'libsndfile' },
} | {
'repo_type' : 'git',
#'branch' : '81a71e08c09b20b0255aa66e40fce293008b9525', # 'd2ca7f4afc776d7c0c14c9a9a5ba94d9ae3affb8',
'url' : 'https://github.com/erikd/libsndfile.git',
'configure_options' : '--host={target_host} --prefix={target_prefix} --disable-shared --enable-static --enable-sqlite --disable-test-coverage --enable-external-libs --enable-experimental',
#'patches' : [ #patches courtesy of https://github.com/Alexpux/MINGW-packages/tree/master/mingw-w64-libsndfile
#('https://raw.githubusercontent.com/DeadSix27/python_cross_compile_script/master/patches/libsndfile/0001-more-elegant-and-foolproof-autogen-fallback.all.patch', '-p0'),
#('https://raw.githubusercontent.com/DeadSix27/python_cross_compile_script/master/patches/libsndfile/0003-fix-source-searches.mingw.patch', '-p0'),
#],
'run_post_patch' : [
'autoreconf -fiv -I M4',
],
'run_post_install' : [
'sed -i.bak \'s/Libs: -L${{libdir}} -lsndfile/Libs: -L${{libdir}} -lsndfile -lFLAC -lvorbis -lvorbisenc -logg -lspeex/\' "{pkg_config_path}/sndfile.pc"', #issue with rubberband not using pkg-config option "--static" or so idk?
],
'depends_on' : [ 'libspeex' ],
'packages' : {
'arch' : [ 'autogen' ],
},
'_info' : { 'version' : None, 'fancy_name' : 'libsndfile' },
} | mpl-2.0 | Python |
08eff1cd735e43a8697fb56047ca4e04120a12e6 | Set url scheme to 'https' when executing through AWS API Gateway | dgnorth/drift,dgnorth/drift,dgnorth/drift | drift/fixers.py | drift/fixers.py | # -*- coding: utf-8 -*-
"""
This module includes various helpers and fixers.
"""
from flask.json import JSONEncoder
from datetime import date
class CustomJSONEncoder(JSONEncoder):
    '''Extend the JSON encoder to treat date-time objects as strict
    rfc3339 types, and ORM models via their as_dict() representation.
    '''

    def default(self, obj):
        # Imported inside the method (presumably to avoid an import cycle
        # with drift.orm -- confirm).
        from drift.orm import ModelBase
        if isinstance(obj, date):
            return obj.isoformat() + "Z"
        if isinstance(obj, ModelBase):
            return obj.as_dict()
        return JSONEncoder.default(self, obj)
# Fixing SCRIPT_NAME/url_scheme when behind reverse proxy
# http://flask.pocoo.org/snippets/35/
class ReverseProxied(object):
    '''WSGI middleware for serving the app behind a reverse proxy.

    Configure the front-end server to add these headers, to let you
    quietly bind the app to a URL other than / and to an HTTP scheme that
    is different than what is used locally.

    In nginx:
    location /myprefix {
        proxy_pass http://192.168.0.1:5001;
        proxy_set_header Host $host;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
        proxy_set_header X-Scheme $scheme;
        proxy_set_header X-Script-Name /myprefix;
        }

    :param app: the WSGI application
    '''

    def __init__(self, app):
        self.app = app

    def __call__(self, environ, start_response):
        # Re-root the application under the proxy-supplied prefix, if any.
        prefix = environ.get('HTTP_X_SCRIPT_NAME', '')
        if prefix:
            environ['SCRIPT_NAME'] = prefix
            path = environ['PATH_INFO']
            if path.startswith(prefix):
                environ['PATH_INFO'] = path[len(prefix):]
        # Honour the scheme the proxy terminated (http vs https).
        scheme = environ.get('HTTP_X_SCHEME', '')
        if scheme:
            environ['wsgi.url_scheme'] = scheme
        elif environ.get('X-Amzn-Vpce-Id'):
            # This is a (poor) way to see if the request is coming through
            # AWS API Gateway, which only supports https and does not set
            # 'X-Scheme'.
            environ['wsgi.url_scheme'] = 'https'
        # Restore the externally visible host name.
        host = environ.get('HTTP_X_FORWARDED_SERVER', '')
        if host:
            environ['HTTP_HOST'] = host
        return self.app(environ, start_response)
| # -*- coding: utf-8 -*-
"""
This module includes various helpers and fixers.
"""
from flask.json import JSONEncoder
from datetime import date
class CustomJSONEncoder(JSONEncoder):
'''Extend the JSON encoder to treat date-time objects as strict
rfc3339 types.
'''
def default(self, obj):
from drift.orm import ModelBase
if isinstance(obj, date):
return obj.isoformat() + "Z"
elif isinstance(obj, ModelBase):
return obj.as_dict()
else:
return JSONEncoder.default(self, obj)
# Fixing SCRIPT_NAME/url_scheme when behind reverse proxy
# http://flask.pocoo.org/snippets/35/
class ReverseProxied(object):
'''Wrap the application in this middleware and configure the
front-end server to add these headers, to let you quietly bind
this to a URL other than / and to an HTTP scheme that is
different than what is used locally.
In nginx:
location /myprefix {
proxy_pass http://192.168.0.1:5001;
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Scheme $scheme;
proxy_set_header X-Script-Name /myprefix;
}
:param app: the WSGI application
'''
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
script_name = environ.get('HTTP_X_SCRIPT_NAME', '')
if script_name:
environ['SCRIPT_NAME'] = script_name
path_info = environ['PATH_INFO']
if path_info.startswith(script_name):
environ['PATH_INFO'] = path_info[len(script_name):]
scheme = environ.get('HTTP_X_SCHEME', '')
if scheme:
environ['wsgi.url_scheme'] = scheme
server = environ.get('HTTP_X_FORWARDED_SERVER', '')
if server:
environ['HTTP_HOST'] = server
return self.app(environ, start_response)
| mit | Python |
94705d1f4d63e47d2ef677bba13a54daa1d1745c | allow skextremes.models.classic import | kikocorreoso/scikit-extremes | skextremes/models/__init__.py | skextremes/models/__init__.py | from . import wind
from . import engineering
from . import classic
| from . import wind
from . import engineering
| mit | Python |
2111c840c11fe2046c87a2767f00b3781a5e2eac | Update dumpling.client.python.py | leculver/dotnet-reliability,leculver/dotnet-reliability,leculver/dotnet-reliability,leculver/dotnet-reliability,leculver/dotnet-reliability,leculver/dotnet-reliability | src/dumpling-service/dumpling.python-client/dumpling.client.python.py | src/dumpling-service/dumpling.python-client/dumpling.client.python.py | import requests
class DumplingService:
    """Thin HTTP client for the dumpling web service."""

    # Base URI of the dumpling service.
    _dumplingUri = 'http://dotnetrp.azurewebsites.net'

    @staticmethod
    def SayHelloAs(username):
        """Call the test endpoint and return the raw response body."""
        hello_url = DumplingService._dumplingUri + '/dumpling/test/hi/im/%s' % (username)
        response = requests.get(hello_url)
        return response.content

    @staticmethod
    def UploadZip(username, origin, filepath):
        """Upload a zip file as a single chunk; returns the raw response body.

        BUG FIX: the file handle is now closed deterministically via a
        context manager instead of being leaked until garbage collection.
        """
        upload_url = DumplingService._dumplingUri + '/dumpling/store/chunk/%s/%s/0/0' % (username, origin)
        with open(filepath, 'rb') as zip_file:
            response = requests.post(upload_url, files={'file': zip_file})
        return response.content
if __name__ == '__main__':
    # Ad-hoc smoke test against the live service (Python 2 print syntax).
    username = 'bryanar'
    origin = 'ubuntu'
    # NOTE(review): `file` shadows the Python 2 builtin of the same name.
    file = 'C:/temp/dumps/ubuntu/projectk-24025-00-amd64chk_00AB.zip'
    print DumplingService.SayHelloAs('bryanar')
    print DumplingService.UploadZip(username, origin, file)
| import requests
class DumplingService:
_dumplingUri = 'http://dotnetrp.azurewebsites.net';
@staticmethod
def SayHelloAs(username):
hello_url = DumplingService._dumplingUri + '/dumpling/test/hi/im/%s'%(username)
response = requests.get(hello_url)
return response.content
@staticmethod
def UploadZip(username, origin, filepath):
upload_url = DumplingService._dumplingUri + '/dumpling/store/chunk/%s/%s/0/0'%(username, origin);
files = {'file': open(file, 'rb')}
response = requests.post(upload_url, files = files)
return response.content
if __name__ == '__main__':
username = 'bryanar'
origin = 'ubuntu'
file = 'C:/temp/dumps/ubuntu/projectk-24025-00-amd64chk_00AB.zip'
print DumplingService.SayHelloAs('bryanar')
print DumplingService.UploadZip(username, origin, file)
| mit | Python |
182f1ab71ff7abcbf54340c59dc438cacf80d066 | Update report API ext method names to match URLs | cneill/designate,kiall/designate-py3,cneill/designate-testing,ionrock/designate,kiall/designate-py3,ionrock/designate,cneill/designate,melodous/designate,melodous/designate,grahamhayes/designate,kiall/designate-py3,ramsateesh/designate,openstack/designate,NeCTAR-RC/designate,kiall/designate-py3,melodous/designate,kiall/designate-py3,tonyli71/designate,cneill/designate-testing,ramsateesh/designate,melodous/designate,grahamhayes/designate,richm/designate,tonyli71/designate,muraliselva10/designate,muraliselva10/designate,muraliselva10/designate,grahamhayes/designate,cneill/designate,openstack/designate,cneill/designate,cneill/designate,cneill/designate-testing,tonyli71/designate,openstack/designate,ramsateesh/designate,NeCTAR-RC/designate,richm/designate,ionrock/designate | moniker/api/v1/extensions/reports.py | moniker/api/v1/extensions/reports.py | # Copyright 2012 Hewlett-Packard Development Company, L.P. All Rights Reserved.
#
# Author: Simon McCartney <simon.mccartney@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import flask
from moniker.openstack.common import log as logging
from moniker.central import rpcapi as central_rpcapi
LOG = logging.getLogger(__name__)

# RPC client used by every report endpoint below.
central_api = central_rpcapi.CentralAPI()

# Flask blueprint that the API extension loader mounts.
blueprint = flask.Blueprint('reports', __name__)
@blueprint.route('/reports/counts', methods=['GET'])
def reports_counts():
    """Return domain, record and tenant counts in a single response."""
    context = flask.request.environ.get('context')
    return flask.jsonify(
        domains=central_api.count_domains(context),
        records=central_api.count_records(context),
        tenants=central_api.count_tenants(context))
@blueprint.route('/reports/counts/domains', methods=['GET'])
def reports_counts_domains():
    """Return the total number of domains."""
    context = flask.request.environ.get('context')
    return flask.jsonify(domains=central_api.count_domains(context))
@blueprint.route('/reports/counts/records', methods=['GET'])
def reports_counts_records():
    """Return the total number of records."""
    context = flask.request.environ.get('context')
    return flask.jsonify(records=central_api.count_records(context))
@blueprint.route('/reports/counts/tenants', methods=['GET'])
def reports_counts_tenants():
    """Return the total number of tenants."""
    context = flask.request.environ.get('context')
    return flask.jsonify(tenants=central_api.count_tenants(context))
| # Copyright 2012 Hewlett-Packard Development Company, L.P. All Rights Reserved.
#
# Author: Simon McCartney <simon.mccartney@hp.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import flask
from moniker.openstack.common import log as logging
from moniker.central import rpcapi as central_rpcapi
LOG = logging.getLogger(__name__)
central_api = central_rpcapi.CentralAPI()
blueprint = flask.Blueprint('reports', __name__)
@blueprint.route('/reports/counts', methods=['GET'])
def reports():
context = flask.request.environ.get('context')
domains = central_api.count_domains(context)
records = central_api.count_records(context)
tenants = central_api.count_tenants(context)
return flask.jsonify(domains=domains, records=records, tenants=tenants)
@blueprint.route('/reports/counts/domains', methods=['GET'])
def reports_domains():
context = flask.request.environ.get('context')
count = central_api.count_domains(context)
return flask.jsonify(domains=count)
@blueprint.route('/reports/counts/records', methods=['GET'])
def reports_records():
context = flask.request.environ.get('context')
count = central_api.count_records(context)
return flask.jsonify(records=count)
@blueprint.route('/reports/counts/tenants', methods=['GET'])
def reports_tenants():
context = flask.request.environ.get('context')
count = central_api.count_tenants(context)
return flask.jsonify(tenants=count)
| apache-2.0 | Python |
91f7e96758f048f1473de11d8f6a919be8b98e23 | make filename of rected unique and copy files that were imported correctly to central dir | ddsc/ddsc-worker | ddsc_worker/localsettings_example_with_comments.py | ddsc_worker/localsettings_example_with_comments.py | DATABASES = {
'default': {
'NAME': 'ddsc',
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'USER': 'xxxx',
'PASSWORD': 'xxxxxxxx',
'HOST': 'xx.xx.xxx.xxx',
'PORT': '',
}
}
CASSANDRA = {
'servers': [
'xxx.xxx.xxx.xx:9160',
'xx.xxx.xxx.xx:9160',
'xx.xxx.xxx.xxx:9160',
],
'keyspace': 'ddsc',
'batch_size': 1000,
}
IMPORTER_PATH = {
'storage_base_path':
'/mnt/file/',
'rejected_file': # where to put the csv file when problem occurs
'rejected_csv/', #
'image': # where to put the image data, path will be created automatically
'images/',
'geotiff': # where to put the rs image data, path will be created automatically
'geo_tiff/',
'video': # where to put the video data, path will be created automatically
'video/',
'pdf': # where to put the pdf file, path will be created automatically
'pdf/',
'unrecognized': # where to put unrecognizable type of file, path will be created automatically
'unknown/',
'lmw':
'/mnt/ftp/lmw_ddsc/',
'accepted_file':
'accepted_file/',
}
IMPORTER_GEOSERVER = {
'geoserver_jar_pusher': # where is the geotif_pub.jar file
'/opt/ddsc-worker/ddsc_worker/geotif_pub.jar',
'geoserver_url':
'http://p-ddsc-map-d1:8080/geoserver',
'geoserver_username':
'admin',
'geoserver_password':
'geoserver',
}
LMW_URL = {
'url':
'https://www.rijkswaterstaat.nl/rws/opendata/meetdata/meetdata.zip',
}
SMTP = {
'host':
'10.10.10.110',
'port':
25,
'sender':
'no_reply@dijkdata.nl',
}
COMPENSATION = {
'csv_path':
'/mnt/file/xxxx'
}
| DATABASES = {
'default': {
'NAME': 'ddsc',
'ENGINE': 'django.contrib.gis.db.backends.postgis',
'USER': 'xxxx',
'PASSWORD': 'xxxxxxxx',
'HOST': 'xx.xx.xxx.xxx',
'PORT': '',
}
}
CASSANDRA = {
'servers': [
'xxx.xxx.xxx.xx:9160',
'xx.xxx.xxx.xx:9160',
'xx.xxx.xxx.xxx:9160',
],
'keyspace': 'ddsc',
'batch_size': 1000,
}
IMPORTER_PATH = {
'storage_base_path':
'/mnt/file/',
'rejected_file': # where to put the csv file when problem occurs
'rejected_csv/', #
'image': # where to put the image data, path will be created automatically
'images/',
'geotiff': # where to put the rs image data, path will be created automatically
'geo_tiff/',
'video': # where to put the video data, path will be created automatically
'video/',
'pdf': # where to put the pdf file, path will be created automatically
'pdf/',
'unrecognized': # where to put unrecognizable type of file, path will be created automatically
'unknown/',
'lmw':
'/mnt/ftp/lmw_ddsc/',
}
IMPORTER_GEOSERVER = {
'geoserver_jar_pusher': # where is the geotif_pub.jar file
'/opt/ddsc-worker/ddsc_worker/geotif_pub.jar',
'geoserver_url':
'http://p-ddsc-map-d1:8080/geoserver',
'geoserver_username':
'admin',
'geoserver_password':
'geoserver',
}
LMW_URL = {
'url':
'https://www.rijkswaterstaat.nl/rws/opendata/meetdata/meetdata.zip',
}
SMTP = {
'host':
'10.10.10.110',
'port':
25,
'sender':
'no_reply@dijkdata.nl',
}
COMPENSATION = {
'csv_path':
'/mnt/file/xxxx'
}
| mit | Python |
c7ae246fce12748f9e6a3e7584e2ca045a1d9138 | add version variable | devsim/devsim,devsim/devsim,devsim/devsim | dist/__init__.py | dist/__init__.py | import sys
#TODO:
#https://stackoverflow.com/questions/6677424/how-do-i-import-variable-packages-in-python-like-using-variable-variables-i
#imported = getattr(__import__(package, fromlist=[name]), name)
if sys.version_info[0] == 2:
from devsim_py27 import *
from devsim_py27 import __version__
elif sys.version_info[0] == 3 and sys.version_info[1] == 6:
from .devsim_py36 import *
from .devsim_py36 import __version__
elif sys.version_info[0] == 3 and sys.version_info[1] == 7:
from .devsim_py37 import *
from .devsim_py37 import __version__
else:
raise ImportError('module not available for Python %d.%d please contact technical support' % sys.version_info[0:2])
| import sys
#TODO:
#https://stackoverflow.com/questions/6677424/how-do-i-import-variable-packages-in-python-like-using-variable-variables-i
#imported = getattr(__import__(package, fromlist=[name]), name)
if sys.version_info[0] == 2:
from devsim_py27 import *
elif sys.version_info[0] == 3 and sys.version_info[1] == 6:
from .devsim_py36 import *
elif sys.version_info[0] == 3 and sys.version_info[1] == 7:
from .devsim_py37 import *
else:
raise ImportError('module not available for Python %d.%d please contact technical support' % sys.version_info[0:2])
| apache-2.0 | Python |
9caf6f3992c74613e0002f142982fce20f42ec4c | update tests that were not passing | mdietrichc2c/vertical-ngo,jorsea/vertical-ngo,yvaucher/vertical-ngo,jorsea/vertical-ngo | framework_agreement_sourcing/tests/test_check_sourcing.py | framework_agreement_sourcing/tests/test_check_sourcing.py | # Author: Leonardo Pistone
# Copyright 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openerp.tests.common import TransactionCase
class TestCheckSourcing(TransactionCase):
"""Check the _check_sourcing method of the source. """
def test_agreement_sourcing_without_agreement_is_not_sourced(self):
self.source.procurement_method = 'fw_agreement'
errors = self.source._check_sourcing()
self.assertEquals(1, len(errors))
self.assertIn('No Framework Agreement', errors[0])
def test_agreement_sourcing_with_running_agreement_is_sourced(self):
self.source.procurement_method = 'fw_agreement'
self.source.framework_agreement_id = self.Agreement.new({
'state': 'running'
})
self.assertEquals([], self.source._check_sourcing())
def test_other_sourcing_is_always_sourced(self):
self.source.procurement_method = 'other'
self.assertEquals([], self.source._check_sourcing())
def setUp(self):
"""Setup a source.
I use Model.new to get a model instance that is not saved to the
database, but has working methods.
"""
super(TestCheckSourcing, self).setUp()
Source = self.env['logistic.requisition.source']
self.PO = self.env['purchase.order']
self.PurcReq = self.env['purchase.requisition']
self.Agreement = self.env['framework.agreement']
self.source = Source.new()
| # Author: Leonardo Pistone
# Copyright 2014 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from openerp.tests.common import TransactionCase
class TestCheckSourcing(TransactionCase):
"""Check the _check_sourcing method of the source. """
def test_agreement_sourcing_without_po_is_not_sourced(self):
self.source.procurement_method = 'fw_agreement'
errors = self.source._check_sourcing()
self.assertEquals(1, len(errors))
self.assertIn('No Purchase Order Lines', errors[0])
def test_agreement_sourcing_with_po_is_sourced(self):
self.source.procurement_method = 'fw_agreement'
self.source._get_purchase_order_lines = lambda: self.PO.new()
self.assertEquals([], self.source._check_sourcing())
def test_other_sourcing_is_always_sourced(self):
self.source.procurement_method = 'other'
self.assertEquals([], self.source._check_sourcing())
def setUp(self):
"""Setup a source.
I use Model.new to get a model instance that is not saved to the
database, but has working methods.
"""
super(TestCheckSourcing, self).setUp()
Source = self.env['logistic.requisition.source']
self.PO = self.env['purchase.order']
self.PurcReq = self.env['purchase.requisition']
self.source = Source.new()
| agpl-3.0 | Python |
d16d715898bae287520841a5936217c6e53788e0 | Bump version to 3.5.0rc14 | platformio/platformio,platformio/platformio-core,platformio/platformio-core | platformio/__init__.py | platformio/__init__.py | # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
VERSION = (3, 5, "0rc14")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = ("An open source ecosystem for IoT development. "
"Cross-platform build system and library manager. "
"Continuous and IDE integration. "
"Arduino, ESP8266 and ARM mbed compatible")
__url__ = "http://platformio.org"
__author__ = "Ivan Kravets"
__email__ = "me@ikravets.com"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__apiurl__ = "https://api.platformio.org"
if sys.version_info < (2, 7, 0) or sys.version_info >= (3, 0, 0):
msg = ("PlatformIO Core v%s does not run under Python version %s.\n"
"Minimum supported version is 2.7, please upgrade Python.\n"
"Python 3 is not yet supported.\n")
sys.stderr.write(msg % (__version__, sys.version.split()[0]))
sys.exit(1)
| # Copyright (c) 2014-present PlatformIO <contact@platformio.org>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
VERSION = (3, 5, "0rc13")
__version__ = ".".join([str(s) for s in VERSION])
__title__ = "platformio"
__description__ = ("An open source ecosystem for IoT development. "
"Cross-platform build system and library manager. "
"Continuous and IDE integration. "
"Arduino, ESP8266 and ARM mbed compatible")
__url__ = "http://platformio.org"
__author__ = "Ivan Kravets"
__email__ = "me@ikravets.com"
__license__ = "Apache Software License"
__copyright__ = "Copyright 2014-present PlatformIO"
__apiurl__ = "https://api.platformio.org"
if sys.version_info < (2, 7, 0) or sys.version_info >= (3, 0, 0):
msg = ("PlatformIO Core v%s does not run under Python version %s.\n"
"Minimum supported version is 2.7, please upgrade Python.\n"
"Python 3 is not yet supported.\n")
sys.stderr.write(msg % (__version__, sys.version.split()[0]))
sys.exit(1)
| apache-2.0 | Python |
b115430447cc489e6cb83f7bfb954e72aef7927b | comment updated | scottpurdy/NAB,numenta/NAB,kimhungGCZ/combinedAL,scottpurdy/NAB,scottpurdy/NAB,BoltzmannBrain/NAB,kimhungGCZ/combinedAL,rhyolight/NAB,subutai/NAB,rhyolight/NAB,numenta/NAB,kimhungGCZ/combinedAL,breznak/NAB,rhyolight/NAB,BoltzmannBrain/NAB,numenta/NAB,subutai/NAB,breznak/NAB,breznak/NAB,numenta/NAB,subutai/NAB | nab/detectors/numenta/numentaTM_detector.py | nab/detectors/numenta/numentaTM_detector.py | # ----------------------------------------------------------------------
# Copyright (C) 2016, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import os
import math
import simplejson as json
from nupic.algorithms import anomaly_likelihood
from nupic.frameworks.opf.common_models.cluster_params import (
getScalarMetricWithTimeOfDayAnomalyParams)
from nupic.frameworks.opf.modelfactory import ModelFactory
from nab.detectors.numenta.numenta_detector import NumentaDetector
class NumentaTMDetector(NumentaDetector):
"""
This detector is derived from NumentaDetector and uses HTM with a modified temporal memory implementation i.e "tm_cpp" instead of "cpp". It differs from its parent detector in temporal memory implementation and its parameters.
"""
def __init__(self, *args, **kwargs):
super(NumentaTMDetector, self).__init__(*args, **kwargs)
self.model = None
self.sensorParams = None
self.anomalyLikelihood = None
def initialize(self):
# Get config params, setting the RDSE resolution
rangePadding = abs(self.inputMax - self.inputMin) * 0.2
modelParams = getScalarMetricWithTimeOfDayAnomalyParams(
metricData=[0],
minVal=self.inputMin-rangePadding,
maxVal=self.inputMax+rangePadding,
minResolution=0.001,
tmImplementation="tm_cpp"
)["modelConfig"]
self._setupEncoderParams(
modelParams["modelParams"]["sensorParams"]["encoders"])
self.model = ModelFactory.create(modelParams)
self.model.enableInference({"predictedField": "value"})
# Initialize the anomaly likelihood object
numentaLearningPeriod = math.floor(self.probationaryPeriod / 2.0)
self.anomalyLikelihood = anomaly_likelihood.AnomalyLikelihood(
claLearningPeriod=numentaLearningPeriod,
estimationSamples=self.probationaryPeriod-numentaLearningPeriod,
reestimationPeriod=100
)
| # ----------------------------------------------------------------------
# Copyright (C) 2016, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU Affero Public License for more details.
#
# You should have received a copy of the GNU Affero Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
import os
import math
import simplejson as json
from nupic.algorithms import anomaly_likelihood
from nupic.frameworks.opf.common_models.cluster_params import (
getScalarMetricWithTimeOfDayAnomalyParams)
from nupic.frameworks.opf.modelfactory import ModelFactory
from nab.detectors.numenta.numenta_detector import NumentaDetector
class NumentaTMDetector(NumentaDetector):
"""
This detector is derived from NumentaDetector and uses HTM with a modified temporal memory implementation i.e "tm_cpp" instead of "cpp"
It differs from its parent detector in temporal memory implementation and its parameters.
"""
def __init__(self, *args, **kwargs):
super(NumentaTMDetector, self).__init__(*args, **kwargs)
self.model = None
self.sensorParams = None
self.anomalyLikelihood = None
def initialize(self):
# Get config params, setting the RDSE resolution
rangePadding = abs(self.inputMax - self.inputMin) * 0.2
modelParams = getScalarMetricWithTimeOfDayAnomalyParams(
metricData=[0],
minVal=self.inputMin-rangePadding,
maxVal=self.inputMax+rangePadding,
minResolution=0.001,
tmImplementation="tm_cpp"
)["modelConfig"]
self._setupEncoderParams(
modelParams["modelParams"]["sensorParams"]["encoders"])
self.model = ModelFactory.create(modelParams)
self.model.enableInference({"predictedField": "value"})
# Initialize the anomaly likelihood object
numentaLearningPeriod = math.floor(self.probationaryPeriod / 2.0)
self.anomalyLikelihood = anomaly_likelihood.AnomalyLikelihood(
claLearningPeriod=numentaLearningPeriod,
estimationSamples=self.probationaryPeriod-numentaLearningPeriod,
reestimationPeriod=100
)
| agpl-3.0 | Python |
dc67190ae855de30f0ee33f4d8b34462d44667e9 | Change URL scheme `user` to `users` | avinassh/nightreads,avinassh/nightreads | nightreads/urls.py | nightreads/urls.py | """nightreads URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from nightreads.user_manager import urls as user_manager_urls
urlpatterns = [
url(r'^users/', include(user_manager_urls, namespace='users')),
url(r'^admin/', admin.site.urls),
]
| """nightreads URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import include, url
from django.contrib import admin
from nightreads.user_manager import urls as user_manager_urls
urlpatterns = [
url(r'^user/', include(user_manager_urls, namespace='user')),
url(r'^admin/', admin.site.urls),
]
| mit | Python |
7b0f08235a6a27b3f9d6615c1211ddb6f9c8fea1 | update setup | DynaSlum/SateliteImaging,DynaSlum/SatelliteImaging,DynaSlum/SatelliteImaging,DynaSlum/SateliteImaging | Python/setup.py | Python/setup.py | from __future__ import absolute_import, print_function
import io
import os
from setuptools import find_packages, setup
def read(*names, **kwargs):
with io.open(
os.path.join(os.path.dirname(__file__), *names),
encoding=kwargs.get('encoding', 'utf8'),
) as fp:
return fp.read()
readme = open('README.md').read()
setup(
name='satsense',
version='0.1.0',
url='https://github.com/DynaSlum/SateliteImaging',
license='Apache Software License',
author='Berend Weel, Elena Ranguelova',
tests_require=['pytest'],
install_requires=[
'gdal>=2.1.3',
'numpy>=1.12.1'
],
extras_require={
'test': ['pytest', 'pytest-flake8', 'pytest-cov'],
},
author_email='b.weel@esiencecenter.nl',
description=(
'Library for multispectral remote imaging.'
),
long_description=readme,
packages=find_packages(),
include_package_data=True,
platforms='any',
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
| from __future__ import absolute_import, print_function
import io
import os
from setuptools import find_packages, setup
def read(*names, **kwargs):
with io.open(
os.path.join(os.path.dirname(__file__), *names),
encoding=kwargs.get('encoding', 'utf8'),
) as fp:
return fp.read()
readme = open('README.md').read()
setup(
name='satsense',
version='0.1.0',
url='https://github.com/DynaSlum/SateliteImaging',
license='Apache Software License',
author='Berend Weel',
tests_require=['pytest'],
install_requires=[
'gdal>=2.1.3',
'numpy>=1.12.1'
],
extras_require={
'test': ['pytest', 'pytest-flake8', 'pytest-cov'],
},
author_email='b.weel@esiencecenter.nl',
description=(
'Library for multispectral remote imaging.'
),
long_description=readme,
packages=find_packages(),
include_package_data=True,
platforms='any',
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 3.5',
'Development Status :: 3 - Alpha',
'Natural Language :: English',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: OS Independent',
'Topic :: Software Development :: Libraries :: Python Modules',
]
)
| apache-2.0 | Python |
dee8c9791947c517cac6563234368362cbe5b1d7 | bump version | nluedtke/brochat-bot | common.py | common.py | VERSION_YEAR = 2019
VERSION_MONTH = 4
VERSION_DAY = 28
VERSION_REV = 0
whos_in = None
twitter = None
users = {}
twilio_client = None
ARGS = {}
smmry_api_key = None
pubg_api_key = None
pubg_api = None
# Variable hold trumps last tweet id
last_id = 0
trump_chance_roll_rdy = False
# Runtime stats
duels_conducted = 0
items_awarded = 0
trump_tweets_seen = 0
missed_trumps = 0
first = True
# Shot_duel acceptance and active
accepted = False
shot_duel_running = False
vict_name = ""
# News handles to pull from
news_handles = ['mashable', 'cnnbrk', 'whitehouse', 'cnn', 'nytimes',
'foxnews', 'reuters', 'npr', 'usatoday', 'cbsnews',
'abc', 'washingtonpost', 'msnbc', 'ap', 'aphealthscience',
'lifehacker', 'cnnnewsroom', 'theonion']
# Delays for Newsfeed and Check_trump, These are in minutes
# remember that news_del is fuzzed + (0-10)
trump_del = 25
news_del = 55
# Location of db.json and tokens.config
data_dir = "/data"
# Create/Load Local Database
db_file = '{}/db.json'.format(data_dir)
db = {}
# Global toggle for news feed
NEWS_FEED_ON = False
NEWS_FEED_CREATED = False
async def trigger_social(ctx):
"""Triggers a social """
for m in ctx.bot.get_all_members():
if m.display_name != 'brochat-bot' and m.status == 'online':
add_drink(m.display_name)
glass = ":tumbler_glass:"
await ctx.bot.say("Ah shit that's three in a row! ITS A SOCIAL! SHOTS! "
"SHOTS! SHOTS!\n{}{}{}".format(glass, glass, glass))
def add_drink(user):
"""
Adds a drink for the user.
:param user: users display name
:return:
"""
if user not in users:
users[user] = {}
if "drinks_owed" in users[user]:
users[user]['drinks_owed'] += 1
else:
users[user]['drinks_owed'] = 1
return users[user]['drinks_owed']
| VERSION_YEAR = 2019
VERSION_MONTH = 3
VERSION_DAY = 3
VERSION_REV = 0
whos_in = None
twitter = None
users = {}
twilio_client = None
ARGS = {}
smmry_api_key = None
pubg_api_key = None
pubg_api = None
# Variable hold trumps last tweet id
last_id = 0
trump_chance_roll_rdy = False
# Runtime stats
duels_conducted = 0
items_awarded = 0
trump_tweets_seen = 0
missed_trumps = 0
first = True
# Shot_duel acceptance and active
accepted = False
shot_duel_running = False
vict_name = ""
# News handles to pull from
news_handles = ['mashable', 'cnnbrk', 'whitehouse', 'cnn', 'nytimes',
'foxnews', 'reuters', 'npr', 'usatoday', 'cbsnews',
'abc', 'washingtonpost', 'msnbc', 'ap', 'aphealthscience',
'lifehacker', 'cnnnewsroom', 'theonion']
# Delays for Newsfeed and Check_trump, These are in minutes
# remember that news_del is fuzzed + (0-10)
trump_del = 25
news_del = 55
# Location of db.json and tokens.config
data_dir = "/data"
# Create/Load Local Database
db_file = '{}/db.json'.format(data_dir)
db = {}
# Global toggle for news feed
NEWS_FEED_ON = False
NEWS_FEED_CREATED = False
async def trigger_social(ctx):
"""Triggers a social """
for m in ctx.bot.get_all_members():
if m.display_name != 'brochat-bot' and m.status == 'online':
add_drink(m.display_name)
glass = ":tumbler_glass:"
await ctx.bot.say("Ah shit that's three in a row! ITS A SOCIAL! SHOTS! "
"SHOTS! SHOTS!\n{}{}{}".format(glass, glass, glass))
def add_drink(user):
"""
Adds a drink for the user.
:param user: users display name
:return:
"""
if user not in users:
users[user] = {}
if "drinks_owed" in users[user]:
users[user]['drinks_owed'] += 1
else:
users[user]['drinks_owed'] = 1
return users[user]['drinks_owed']
| mit | Python |
28770cf4d0995697f7b2c8edad7a56fb8aeabea5 | Send email to client working | shahariarrabby/Mail_Server | Sendy.py | Sendy.py | # coding: utf-8
# ! /usr/bin/python
__author__ = 'Shahariar Rabby'
# This will read details and send email to clint
# # Sendy
# ### Importing Send mail file
# In[6]:
from Sendmail import *
# ** Take user email, text plan massage, HTML file **
# In[7]:
TO_EMAIL = raw_input("Enter reciver email : ") #Taking Reciver email as input
subject = raw_input("Enter Mail Subject : ") #taking mail subject
text = raw_input("Enter Plain message(or html format) : ") #Taking plane massage as input
filename = raw_input('Enter file name with location(if any) : ')
try:
file = open(filename,'r') #reading HTML format message
html = file.read()
except:
html = text
# **Calling send mail and sending mail **
# In[8]:
Send_Mail(login(),TO_EMAIL=TO_EMAIL,text=text,html=html,subject=subject)
| # coding: utf-8
# ! /usr/bin/python
__author__ = 'Shahariar Rabby'
# # Sendy
# ### Importing Send mail file
# In[6]:
from Sendmail import *
# ** Take user email, text plain message, HTML file **
# In[7]:
TO_EMAIL = raw_input("Enter reciver email : ") #Taking receiver email as input
subject = raw_input("Enter Mail Subject : ") #taking mail subject
text = raw_input("Enter Plain message(or html format) : ") #Taking plain message as input
filename = raw_input('Enter file name with location(if any) : ')
# Prefer an HTML body read from the named file; on any failure fall
# back to the plain-text message entered above.
try:
    file = open(filename,'r') #reading HTML format message
    # NOTE(review): handle is never closed and `file` shadows the built-in
    html = file.read()
except:
    # NOTE(review): bare except swallows every error, not just a missing
    # file -- consider narrowing to IOError
    html = text
# **Calling send mail and sending mail **
# In[8]:
Send_Mail(login(),TO_EMAIL=TO_EMAIL,text=text,html=html,subject=subject)
| mit | Python |
48a63de70673ce96ead7a81709fdd81e872a847d | Revert changes | joshloyal/scikit-learn,alexeyum/scikit-learn,abhishekgahlot/scikit-learn,RomainBrault/scikit-learn,murali-munna/scikit-learn,RPGOne/scikit-learn,cauchycui/scikit-learn,Sentient07/scikit-learn,shahankhatch/scikit-learn,lin-credible/scikit-learn,Achuth17/scikit-learn,eg-zhang/scikit-learn,nrhine1/scikit-learn,jblackburne/scikit-learn,sanketloke/scikit-learn,q1ang/scikit-learn,sanketloke/scikit-learn,IssamLaradji/scikit-learn,YinongLong/scikit-learn,IshankGulati/scikit-learn,theoryno3/scikit-learn,pompiduskus/scikit-learn,rohanp/scikit-learn,HolgerPeters/scikit-learn,B3AU/waveTree,bikong2/scikit-learn,eg-zhang/scikit-learn,nvoron23/scikit-learn,Jimmy-Morzaria/scikit-learn,hsuantien/scikit-learn,justincassidy/scikit-learn,PrashntS/scikit-learn,fredhusser/scikit-learn,mhue/scikit-learn,mlyundin/scikit-learn,idlead/scikit-learn,ltiao/scikit-learn,jereze/scikit-learn,lesteve/scikit-learn,hrjn/scikit-learn,rrohan/scikit-learn,frank-tancf/scikit-learn,fabianp/scikit-learn,deepesch/scikit-learn,hitszxp/scikit-learn,luo66/scikit-learn,stylianos-kampakis/scikit-learn,anirudhjayaraman/scikit-learn,yask123/scikit-learn,abhishekkrthakur/scikit-learn,JPFrancoia/scikit-learn,DonBeo/scikit-learn,B3AU/waveTree,ldirer/scikit-learn,phdowling/scikit-learn,bigdataelephants/scikit-learn,beepee14/scikit-learn,thilbern/scikit-learn,0asa/scikit-learn,michigraber/scikit-learn,Achuth17/scikit-learn,sgenoud/scikit-learn,appapantula/scikit-learn,Vimos/scikit-learn,manhhomienbienthuy/scikit-learn,rahul-c1/scikit-learn,manhhomienbienthuy/scikit-learn,xiaoxiamii/scikit-learn,bnaul/scikit-learn,fbagirov/scikit-learn,petosegan/scikit-learn,ndingwall/scikit-learn,billy-inn/scikit-learn,thilbern/scikit-learn,TomDLT/scikit-learn,vortex-ape/scikit-learn,akionakamura/scikit-learn,MartinSavc/scikit-learn,costypetrisor/scikit-learn,cainiaocome/scikit-learn,mayblue9/scikit-learn,clemkoa/scikit-learn,aflaxman/scikit-learn,AlexandreAbraham/scikit-learn
,kaichogami/scikit-learn,loli/sklearn-ensembletrees,ningchi/scikit-learn,YinongLong/scikit-learn,aminert/scikit-learn,djgagne/scikit-learn,ngoix/OCRF,imaculate/scikit-learn,tomlof/scikit-learn,hainm/scikit-learn,billy-inn/scikit-learn,alexeyum/scikit-learn,madjelan/scikit-learn,arahuja/scikit-learn,cainiaocome/scikit-learn,devanshdalal/scikit-learn,kmike/scikit-learn,gclenaghan/scikit-learn,Srisai85/scikit-learn,tdhopper/scikit-learn,thilbern/scikit-learn,waterponey/scikit-learn,NelisVerhoef/scikit-learn,ashhher3/scikit-learn,fabianp/scikit-learn,nikitasingh981/scikit-learn,bigdataelephants/scikit-learn,abhishekgahlot/scikit-learn,mlyundin/scikit-learn,fabioticconi/scikit-learn,vinayak-mehta/scikit-learn,fyffyt/scikit-learn,beepee14/scikit-learn,vivekmishra1991/scikit-learn,xuewei4d/scikit-learn,terkkila/scikit-learn,hainm/scikit-learn,stylianos-kampakis/scikit-learn,pratapvardhan/scikit-learn,hdmetor/scikit-learn,ndingwall/scikit-learn,larsmans/scikit-learn,procoder317/scikit-learn,shyamalschandra/scikit-learn,JPFrancoia/scikit-learn,Titan-C/scikit-learn,aabadie/scikit-learn,LiaoPan/scikit-learn,trungnt13/scikit-learn,poryfly/scikit-learn,spallavolu/scikit-learn,hdmetor/scikit-learn,rishikksh20/scikit-learn,robin-lai/scikit-learn,lbishal/scikit-learn,lenovor/scikit-learn,depet/scikit-learn,cybernet14/scikit-learn,mrshu/scikit-learn,UNR-AERIAL/scikit-learn,hugobowne/scikit-learn,equialgo/scikit-learn,liberatorqjw/scikit-learn,aewhatley/scikit-learn,sinhrks/scikit-learn,potash/scikit-learn,pypot/scikit-learn,DSLituiev/scikit-learn,macks22/scikit-learn,mjgrav2001/scikit-learn,trankmichael/scikit-learn,pv/scikit-learn,jm-begon/scikit-learn,devanshdalal/scikit-learn,yanlend/scikit-learn,Fireblend/scikit-learn,espg/scikit-learn,fbagirov/scikit-learn,kmike/scikit-learn,xzh86/scikit-learn,CVML/scikit-learn,kaichogami/scikit-learn,pv/scikit-learn,jorik041/scikit-learn,dsquareindia/scikit-learn,huobaowangxi/scikit-learn,samzhang111/scikit-learn,billy-inn/scikit-learn,djgagne
/scikit-learn,3manuek/scikit-learn,RPGOne/scikit-learn,hsuantien/scikit-learn,luo66/scikit-learn,appapantula/scikit-learn,wazeerzulfikar/scikit-learn,bthirion/scikit-learn,siutanwong/scikit-learn,aetilley/scikit-learn,vermouthmjl/scikit-learn,olologin/scikit-learn,rahul-c1/scikit-learn,khkaminska/scikit-learn,henrykironde/scikit-learn,pianomania/scikit-learn,NelisVerhoef/scikit-learn,depet/scikit-learn,kjung/scikit-learn,yask123/scikit-learn,walterreade/scikit-learn,hitszxp/scikit-learn,ivannz/scikit-learn,alvarofierroclavero/scikit-learn,shahankhatch/scikit-learn,bhargav/scikit-learn,krez13/scikit-learn,mhdella/scikit-learn,saiwing-yeung/scikit-learn,r-mart/scikit-learn,ivannz/scikit-learn,krez13/scikit-learn,xwolf12/scikit-learn,Garrett-R/scikit-learn,glennq/scikit-learn,OshynSong/scikit-learn,wzbozon/scikit-learn,yanlend/scikit-learn,procoder317/scikit-learn,rsivapr/scikit-learn,pnedunuri/scikit-learn,mhue/scikit-learn,anntzer/scikit-learn,eg-zhang/scikit-learn,anntzer/scikit-learn,ankurankan/scikit-learn,deepesch/scikit-learn,maheshakya/scikit-learn,kevin-intel/scikit-learn,belltailjp/scikit-learn,Djabbz/scikit-learn,iismd17/scikit-learn,joernhees/scikit-learn,Titan-C/scikit-learn,themrmax/scikit-learn,anirudhjayaraman/scikit-learn,RPGOne/scikit-learn,larsmans/scikit-learn,ky822/scikit-learn,shangwuhencc/scikit-learn,nrhine1/scikit-learn,ivannz/scikit-learn,466152112/scikit-learn,jlegendary/scikit-learn,sonnyhu/scikit-learn,qifeigit/scikit-learn,davidgbe/scikit-learn,LiaoPan/scikit-learn,jkarnows/scikit-learn,Lawrence-Liu/scikit-learn,robbymeals/scikit-learn,btabibian/scikit-learn,voxlol/scikit-learn,michigraber/scikit-learn,jayflo/scikit-learn,mhue/scikit-learn,eickenberg/scikit-learn,fbagirov/scikit-learn,dingocuster/scikit-learn,murali-munna/scikit-learn,nesterione/scikit-learn,RayMick/scikit-learn,xyguo/scikit-learn,jorik041/scikit-learn,lesteve/scikit-learn,MartinSavc/scikit-learn,pkruskal/scikit-learn,RomainBrault/scikit-learn,aewhatley/scikit-learn,nvoron
23/scikit-learn,toastedcornflakes/scikit-learn,treycausey/scikit-learn,ningchi/scikit-learn,aflaxman/scikit-learn,PrashntS/scikit-learn,untom/scikit-learn,nmayorov/scikit-learn,glouppe/scikit-learn,belltailjp/scikit-learn,chrisburr/scikit-learn,ky822/scikit-learn,NelisVerhoef/scikit-learn,PrashntS/scikit-learn,xavierwu/scikit-learn,pythonvietnam/scikit-learn,bikong2/scikit-learn,ndingwall/scikit-learn,cl4rke/scikit-learn,terkkila/scikit-learn,JeanKossaifi/scikit-learn,pianomania/scikit-learn,robbymeals/scikit-learn,lazywei/scikit-learn,Akshay0724/scikit-learn,OshynSong/scikit-learn,zuku1985/scikit-learn,xwolf12/scikit-learn,AlexRobson/scikit-learn,kmike/scikit-learn,tosolveit/scikit-learn,scikit-learn/scikit-learn,mayblue9/scikit-learn,ChanderG/scikit-learn,jjx02230808/project0223,pythonvietnam/scikit-learn,petosegan/scikit-learn,dingocuster/scikit-learn,Nyker510/scikit-learn,ngoix/OCRF,MartinDelzant/scikit-learn,ClimbsRocks/scikit-learn,fzalkow/scikit-learn,vshtanko/scikit-learn,mattilyra/scikit-learn,jmetzen/scikit-learn,xubenben/scikit-learn,nelson-liu/scikit-learn,procoder317/scikit-learn,luo66/scikit-learn,appapantula/scikit-learn,mjudsp/Tsallis,jereze/scikit-learn,aetilley/scikit-learn,espg/scikit-learn,shusenl/scikit-learn,henridwyer/scikit-learn,mfjb/scikit-learn,zorroblue/scikit-learn,vybstat/scikit-learn,xubenben/scikit-learn,jorik041/scikit-learn,sergeyf/scikit-learn,vibhorag/scikit-learn,poryfly/scikit-learn,altairpearl/scikit-learn,Jimmy-Morzaria/scikit-learn,vinayak-mehta/scikit-learn,yunfeilu/scikit-learn,AlexandreAbraham/scikit-learn,pianomania/scikit-learn,akionakamura/scikit-learn,h2educ/scikit-learn,Nyker510/scikit-learn,kagayakidan/scikit-learn,JosmanPS/scikit-learn,kmike/scikit-learn,fabioticconi/scikit-learn,rohanp/scikit-learn,chrsrds/scikit-learn,khkaminska/scikit-learn,aflaxman/scikit-learn,huzq/scikit-learn,Lawrence-Liu/scikit-learn,fengzhyuan/scikit-learn,LohithBlaze/scikit-learn,ndingwall/scikit-learn,cdegroc/scikit-learn,carrillo/scikit-
learn,lenovor/scikit-learn,zaxtax/scikit-learn,simon-pepin/scikit-learn,ky822/scikit-learn,ogrisel/scikit-learn,ChanChiChoi/scikit-learn,jjx02230808/project0223,dsullivan7/scikit-learn,lbishal/scikit-learn,sarahgrogan/scikit-learn,ilyes14/scikit-learn,mojoboss/scikit-learn,btabibian/scikit-learn,jseabold/scikit-learn,dhruv13J/scikit-learn,HolgerPeters/scikit-learn,ltiao/scikit-learn,jayflo/scikit-learn,olologin/scikit-learn,treycausey/scikit-learn,mrshu/scikit-learn,alvarofierroclavero/scikit-learn,vermouthmjl/scikit-learn,tawsifkhan/scikit-learn,ephes/scikit-learn,massmutual/scikit-learn,joernhees/scikit-learn,jakirkham/scikit-learn,lucidfrontier45/scikit-learn,mxjl620/scikit-learn,jjx02230808/project0223,cwu2011/scikit-learn,TomDLT/scikit-learn,tomlof/scikit-learn,sumspr/scikit-learn,anurag313/scikit-learn,pnedunuri/scikit-learn,mayblue9/scikit-learn,kylerbrown/scikit-learn,justincassidy/scikit-learn,ivannz/scikit-learn,smartscheduling/scikit-learn-categorical-tree,rvraghav93/scikit-learn,jm-begon/scikit-learn,gotomypc/scikit-learn,moutai/scikit-learn,AlexRobson/scikit-learn,Vimos/scikit-learn,jseabold/scikit-learn,wzbozon/scikit-learn,schets/scikit-learn,zuku1985/scikit-learn,hsiaoyi0504/scikit-learn,IndraVikas/scikit-learn,Aasmi/scikit-learn,f3r/scikit-learn,fengzhyuan/scikit-learn,xubenben/scikit-learn,lin-credible/scikit-learn,cauchycui/scikit-learn,arjoly/scikit-learn,shyamalschandra/scikit-learn,amueller/scikit-learn,aewhatley/scikit-learn,jm-begon/scikit-learn,larsmans/scikit-learn,quheng/scikit-learn,liyu1990/sklearn,schets/scikit-learn,rvraghav93/scikit-learn,ogrisel/scikit-learn,Myasuka/scikit-learn,hitszxp/scikit-learn,liberatorqjw/scikit-learn,yyjiang/scikit-learn,alvarofierroclavero/scikit-learn,dsquareindia/scikit-learn,rrohan/scikit-learn,lazywei/scikit-learn,ycaihua/scikit-learn,arjoly/scikit-learn,dsullivan7/scikit-learn,Obus/scikit-learn,LohithBlaze/scikit-learn,ngoix/OCRF,macks22/scikit-learn,ClimbsRocks/scikit-learn,wazeerzulfikar/scikit-learn,
mjudsp/Tsallis,nmayorov/scikit-learn,NunoEdgarGub1/scikit-learn,fyffyt/scikit-learn,jseabold/scikit-learn,MatthieuBizien/scikit-learn,cl4rke/scikit-learn,rexshihaoren/scikit-learn,0x0all/scikit-learn,r-mart/scikit-learn,abhishekgahlot/scikit-learn,davidgbe/scikit-learn,adamgreenhall/scikit-learn,mblondel/scikit-learn,equialgo/scikit-learn,kashif/scikit-learn,ashhher3/scikit-learn,kashif/scikit-learn,B3AU/waveTree,russel1237/scikit-learn,kevin-intel/scikit-learn,pkruskal/scikit-learn,wazeerzulfikar/scikit-learn,mrshu/scikit-learn,vybstat/scikit-learn,maheshakya/scikit-learn,walterreade/scikit-learn,AnasGhrab/scikit-learn,voxlol/scikit-learn,NunoEdgarGub1/scikit-learn,Lawrence-Liu/scikit-learn,UNR-AERIAL/scikit-learn,manashmndl/scikit-learn,yonglehou/scikit-learn,henrykironde/scikit-learn,mfjb/scikit-learn,mlyundin/scikit-learn,kashif/scikit-learn,zorroblue/scikit-learn,hugobowne/scikit-learn,tmhm/scikit-learn,elkingtonmcb/scikit-learn,evgchz/scikit-learn,YinongLong/scikit-learn,Aasmi/scikit-learn,JsNoNo/scikit-learn,ankurankan/scikit-learn,ycaihua/scikit-learn,bnaul/scikit-learn,mugizico/scikit-learn,akionakamura/scikit-learn,B3AU/waveTree,MechCoder/scikit-learn,mikebenfield/scikit-learn,h2educ/scikit-learn,costypetrisor/scikit-learn,Adai0808/scikit-learn,xyguo/scikit-learn,spallavolu/scikit-learn,Nyker510/scikit-learn,potash/scikit-learn,lazywei/scikit-learn,MohammedWasim/scikit-learn,trungnt13/scikit-learn,jorge2703/scikit-learn,amueller/scikit-learn,0asa/scikit-learn,AlexanderFabisch/scikit-learn,kjung/scikit-learn,devanshdalal/scikit-learn,heli522/scikit-learn,rajat1994/scikit-learn,raghavrv/scikit-learn,thientu/scikit-learn,dhruv13J/scikit-learn,shahankhatch/scikit-learn,ephes/scikit-learn,lucidfrontier45/scikit-learn,alexsavio/scikit-learn,sanketloke/scikit-learn,ZenDevelopmentSystems/scikit-learn,huzq/scikit-learn,zorojean/scikit-learn,jaidevd/scikit-learn,toastedcornflakes/scikit-learn,wazeerzulfikar/scikit-learn,sgenoud/scikit-learn,manhhomienbienthuy/scikit
-learn,nrhine1/scikit-learn,jmetzen/scikit-learn,tdhopper/scikit-learn,wanggang3333/scikit-learn,IshankGulati/scikit-learn,lucidfrontier45/scikit-learn,waterponey/scikit-learn,kylerbrown/scikit-learn,plissonf/scikit-learn,jorge2703/scikit-learn,wanggang3333/scikit-learn,ahoyosid/scikit-learn,betatim/scikit-learn,hlin117/scikit-learn,mikebenfield/scikit-learn,fabianp/scikit-learn,tosolveit/scikit-learn,mayblue9/scikit-learn,hitszxp/scikit-learn,lucidfrontier45/scikit-learn,DSLituiev/scikit-learn,jm-begon/scikit-learn,abimannans/scikit-learn,sarahgrogan/scikit-learn,Sentient07/scikit-learn,toastedcornflakes/scikit-learn,MatthieuBizien/scikit-learn,rohanp/scikit-learn,meduz/scikit-learn,ankurankan/scikit-learn,Fireblend/scikit-learn,vshtanko/scikit-learn,treycausey/scikit-learn,vinayak-mehta/scikit-learn,andrewnc/scikit-learn,dhruv13J/scikit-learn,russel1237/scikit-learn,dingocuster/scikit-learn,abimannans/scikit-learn,madjelan/scikit-learn,jakobworldpeace/scikit-learn,Windy-Ground/scikit-learn,jaidevd/scikit-learn,wzbozon/scikit-learn,sinhrks/scikit-learn,jzt5132/scikit-learn,idlead/scikit-learn,yonglehou/scikit-learn,hlin117/scikit-learn,Akshay0724/scikit-learn,btabibian/scikit-learn,0x0all/scikit-learn,waterponey/scikit-learn,depet/scikit-learn,glemaitre/scikit-learn,shangwuhencc/scikit-learn,MechCoder/scikit-learn,idlead/scikit-learn,robbymeals/scikit-learn,jakirkham/scikit-learn,ngoix/OCRF,eickenberg/scikit-learn,beepee14/scikit-learn,CforED/Machine-Learning,ashhher3/scikit-learn,gclenaghan/scikit-learn,jjx02230808/project0223,liyu1990/sklearn,clemkoa/scikit-learn,aminert/scikit-learn,vigilv/scikit-learn,yask123/scikit-learn,samuel1208/scikit-learn,PatrickOReilly/scikit-learn,nomadcube/scikit-learn,RayMick/scikit-learn,giorgiop/scikit-learn,qifeigit/scikit-learn,mehdidc/scikit-learn,saiwing-yeung/scikit-learn,IssamLaradji/scikit-learn,simon-pepin/scikit-learn,lesteve/scikit-learn,Nyker510/scikit-learn,pompiduskus/scikit-learn,sgenoud/scikit-learn,IshankGulati/scik
it-learn,ogrisel/scikit-learn,joernhees/scikit-learn,RPGOne/scikit-learn,amueller/scikit-learn,luo66/scikit-learn,Clyde-fare/scikit-learn,hdmetor/scikit-learn,hlin117/scikit-learn,hsuantien/scikit-learn,JosmanPS/scikit-learn,kagayakidan/scikit-learn,pratapvardhan/scikit-learn,yunfeilu/scikit-learn,xiaoxiamii/scikit-learn,fzalkow/scikit-learn,icdishb/scikit-learn,xiaoxiamii/scikit-learn,massmutual/scikit-learn,glouppe/scikit-learn,mfjb/scikit-learn,bhargav/scikit-learn,pratapvardhan/scikit-learn,Vimos/scikit-learn,DonBeo/scikit-learn,CVML/scikit-learn,pv/scikit-learn,DSLituiev/scikit-learn,fbagirov/scikit-learn,russel1237/scikit-learn,elkingtonmcb/scikit-learn,IndraVikas/scikit-learn,mjudsp/Tsallis,jpautom/scikit-learn,harshaneelhg/scikit-learn,jlegendary/scikit-learn,hugobowne/scikit-learn,henridwyer/scikit-learn,lenovor/scikit-learn,AlexRobson/scikit-learn,clemkoa/scikit-learn,mjgrav2001/scikit-learn,murali-munna/scikit-learn,adamgreenhall/scikit-learn,rajat1994/scikit-learn,scikit-learn/scikit-learn,espg/scikit-learn,yyjiang/scikit-learn,PatrickChrist/scikit-learn,larsmans/scikit-learn,poryfly/scikit-learn,gotomypc/scikit-learn,pnedunuri/scikit-learn,BiaDarkia/scikit-learn,mwv/scikit-learn,mehdidc/scikit-learn,glemaitre/scikit-learn,fzalkow/scikit-learn,zaxtax/scikit-learn,arjoly/scikit-learn,0x0all/scikit-learn,AnasGhrab/scikit-learn,mjudsp/Tsallis,pratapvardhan/scikit-learn,Windy-Ground/scikit-learn,ishanic/scikit-learn,zuku1985/scikit-learn,Srisai85/scikit-learn,fabioticconi/scikit-learn,shusenl/scikit-learn,pompiduskus/scikit-learn,arabenjamin/scikit-learn,ankurankan/scikit-learn,costypetrisor/scikit-learn,shenzebang/scikit-learn,rvraghav93/scikit-learn,bthirion/scikit-learn,jereze/scikit-learn,f3r/scikit-learn,florian-f/sklearn,CVML/scikit-learn,hsiaoyi0504/scikit-learn,mxjl620/scikit-learn,betatim/scikit-learn,vibhorag/scikit-learn,q1ang/scikit-learn,sumspr/scikit-learn,tawsifkhan/scikit-learn,arabenjamin/scikit-learn,hrjn/scikit-learn,arahuja/scikit-learn,P
atrickChrist/scikit-learn,kaichogami/scikit-learn,rahul-c1/scikit-learn,frank-tancf/scikit-learn,nelson-liu/scikit-learn,liangz0707/scikit-learn,maheshakya/scikit-learn,dsquareindia/scikit-learn,ElDeveloper/scikit-learn,nikitasingh981/scikit-learn,tawsifkhan/scikit-learn,Barmaley-exe/scikit-learn,Clyde-fare/scikit-learn,pkruskal/scikit-learn,sinhrks/scikit-learn,carrillo/scikit-learn,loli/semisupervisedforests,sergeyf/scikit-learn,rishikksh20/scikit-learn,meduz/scikit-learn,yanlend/scikit-learn,sarahgrogan/scikit-learn,AIML/scikit-learn,rahuldhote/scikit-learn,davidgbe/scikit-learn,dhruv13J/scikit-learn,etkirsch/scikit-learn,murali-munna/scikit-learn,pythonvietnam/scikit-learn,vortex-ape/scikit-learn,mikebenfield/scikit-learn,hsiaoyi0504/scikit-learn,ahoyosid/scikit-learn,sumspr/scikit-learn,mojoboss/scikit-learn,bthirion/scikit-learn,abhishekkrthakur/scikit-learn,jakobworldpeace/scikit-learn,JsNoNo/scikit-learn,fengzhyuan/scikit-learn,wanggang3333/scikit-learn,tmhm/scikit-learn,samuel1208/scikit-learn,zorroblue/scikit-learn,aflaxman/scikit-learn,CforED/Machine-Learning,jmschrei/scikit-learn,rsivapr/scikit-learn,quheng/scikit-learn,rahuldhote/scikit-learn,f3r/scikit-learn,shangwuhencc/scikit-learn,mhdella/scikit-learn,MatthieuBizien/scikit-learn,xuewei4d/scikit-learn,mattilyra/scikit-learn,bnaul/scikit-learn,Vimos/scikit-learn,quheng/scikit-learn,treycausey/scikit-learn,anntzer/scikit-learn,mhdella/scikit-learn,3manuek/scikit-learn,3manuek/scikit-learn,kmike/scikit-learn,fzalkow/scikit-learn,TomDLT/scikit-learn,mfjb/scikit-learn,cwu2011/scikit-learn,PatrickChrist/scikit-learn,nesterione/scikit-learn,alexsavio/scikit-learn,466152112/scikit-learn,Akshay0724/scikit-learn,YinongLong/scikit-learn,anirudhjayaraman/scikit-learn,Obus/scikit-learn,ElDeveloper/scikit-learn,herilalaina/scikit-learn,TomDLT/scikit-learn,heli522/scikit-learn,rrohan/scikit-learn,pythonvietnam/scikit-learn,mwv/scikit-learn,sinhrks/scikit-learn,liberatorqjw/scikit-learn,lucidfrontier45/scikit-learn,
anntzer/scikit-learn,simon-pepin/scikit-learn,cwu2011/scikit-learn,mxjl620/scikit-learn,MartinSavc/scikit-learn,DonBeo/scikit-learn,voxlol/scikit-learn,roxyboy/scikit-learn,MartinDelzant/scikit-learn,simon-pepin/scikit-learn,B3AU/waveTree,Jimmy-Morzaria/scikit-learn,petosegan/scikit-learn,cybernet14/scikit-learn,mehdidc/scikit-learn,roxyboy/scikit-learn,altairpearl/scikit-learn,chrisburr/scikit-learn,nomadcube/scikit-learn,shahankhatch/scikit-learn,mikebenfield/scikit-learn,qifeigit/scikit-learn,anurag313/scikit-learn,JPFrancoia/scikit-learn,MatthieuBizien/scikit-learn,cainiaocome/scikit-learn,fredhusser/scikit-learn,IssamLaradji/scikit-learn,hlin117/scikit-learn,smartscheduling/scikit-learn-categorical-tree,cdegroc/scikit-learn,zhenv5/scikit-learn,raghavrv/scikit-learn,jzt5132/scikit-learn,mrshu/scikit-learn,jkarnows/scikit-learn,toastedcornflakes/scikit-learn,JsNoNo/scikit-learn,tosolveit/scikit-learn,spallavolu/scikit-learn,sonnyhu/scikit-learn,yyjiang/scikit-learn,AlexandreAbraham/scikit-learn,mwv/scikit-learn,RomainBrault/scikit-learn,mblondel/scikit-learn,theoryno3/scikit-learn,icdishb/scikit-learn,samuel1208/scikit-learn,huobaowangxi/scikit-learn,altairpearl/scikit-learn,cwu2011/scikit-learn,zhenv5/scikit-learn,jblackburne/scikit-learn,cybernet14/scikit-learn,tosolveit/scikit-learn,zhenv5/scikit-learn,jmetzen/scikit-learn,rvraghav93/scikit-learn,glennq/scikit-learn,giorgiop/scikit-learn,aabadie/scikit-learn,themrmax/scikit-learn,AIML/scikit-learn,ankurankan/scikit-learn,evgchz/scikit-learn,cauchycui/scikit-learn,sonnyhu/scikit-learn,trankmichael/scikit-learn,abhishekkrthakur/scikit-learn,beepee14/scikit-learn,mugizico/scikit-learn,BiaDarkia/scikit-learn,ChanChiChoi/scikit-learn,rexshihaoren/scikit-learn,sonnyhu/scikit-learn,florian-f/sklearn,kevin-intel/scikit-learn,justincassidy/scikit-learn,vivekmishra1991/scikit-learn,alexsavio/scikit-learn,xubenben/scikit-learn,appapantula/scikit-learn,ycaihua/scikit-learn,shyamalschandra/scikit-learn,cl4rke/scikit-learn,
JeanKossaifi/scikit-learn,arabenjamin/scikit-learn,ClimbsRocks/scikit-learn,liangz0707/scikit-learn,ishanic/scikit-learn,xavierwu/scikit-learn,arabenjamin/scikit-learn,rishikksh20/scikit-learn,PrashntS/scikit-learn,equialgo/scikit-learn,RayMick/scikit-learn,AnasGhrab/scikit-learn,mjudsp/Tsallis,tomlof/scikit-learn,evgchz/scikit-learn,thientu/scikit-learn,LohithBlaze/scikit-learn,tomlof/scikit-learn,ilyes14/scikit-learn,jpautom/scikit-learn,ElDeveloper/scikit-learn,mblondel/scikit-learn,nrhine1/scikit-learn,joshloyal/scikit-learn,pompiduskus/scikit-learn,yunfeilu/scikit-learn,russel1237/scikit-learn,Garrett-R/scikit-learn,0asa/scikit-learn,harshaneelhg/scikit-learn,mblondel/scikit-learn,evgchz/scikit-learn,RachitKansal/scikit-learn,rsivapr/scikit-learn,betatim/scikit-learn,vybstat/scikit-learn,mattgiguere/scikit-learn,quheng/scikit-learn,ssaeger/scikit-learn,henrykironde/scikit-learn,nomadcube/scikit-learn,khkaminska/scikit-learn,jmschrei/scikit-learn,mattgiguere/scikit-learn,rahuldhote/scikit-learn,shenzebang/scikit-learn,meduz/scikit-learn,pkruskal/scikit-learn,xuewei4d/scikit-learn,etkirsch/scikit-learn,samuel1208/scikit-learn,walterreade/scikit-learn,lesteve/scikit-learn,Barmaley-exe/scikit-learn,huobaowangxi/scikit-learn,pypot/scikit-learn,eickenberg/scikit-learn,deepesch/scikit-learn,potash/scikit-learn,CforED/Machine-Learning,iismd17/scikit-learn,wlamond/scikit-learn,moutai/scikit-learn,Achuth17/scikit-learn,wlamond/scikit-learn,huzq/scikit-learn,icdishb/scikit-learn,frank-tancf/scikit-learn,rishikksh20/scikit-learn,zihua/scikit-learn,aetilley/scikit-learn,Garrett-R/scikit-learn,massmutual/scikit-learn,untom/scikit-learn,Myasuka/scikit-learn,hrjn/scikit-learn,vigilv/scikit-learn,mattilyra/scikit-learn,justincassidy/scikit-learn,jpautom/scikit-learn,ngoix/OCRF,rajat1994/scikit-learn,imaculate/scikit-learn,loli/semisupervisedforests,AIML/scikit-learn,smartscheduling/scikit-learn-categorical-tree,ldirer/scikit-learn,robin-lai/scikit-learn,iismd17/scikit-learn,hsi
aoyi0504/scikit-learn,ahoyosid/scikit-learn,ZENGXH/scikit-learn,sgenoud/scikit-learn,PatrickOReilly/scikit-learn,michigraber/scikit-learn,depet/scikit-learn,jseabold/scikit-learn,betatim/scikit-learn,manashmndl/scikit-learn,mattilyra/scikit-learn,wanggang3333/scikit-learn,jmschrei/scikit-learn,bikong2/scikit-learn,lazywei/scikit-learn,giorgiop/scikit-learn,RomainBrault/scikit-learn,vermouthmjl/scikit-learn,larsmans/scikit-learn,eickenberg/scikit-learn,Srisai85/scikit-learn,loli/sklearn-ensembletrees,ilo10/scikit-learn,sgenoud/scikit-learn,fabianp/scikit-learn,amueller/scikit-learn,tmhm/scikit-learn,thilbern/scikit-learn,chrisburr/scikit-learn,liangz0707/scikit-learn,iismd17/scikit-learn,Clyde-fare/scikit-learn,ldirer/scikit-learn,466152112/scikit-learn,qifeigit/scikit-learn,shenzebang/scikit-learn,terkkila/scikit-learn,andrewnc/scikit-learn,victorbergelin/scikit-learn,pnedunuri/scikit-learn,mjgrav2001/scikit-learn,PatrickChrist/scikit-learn,imaculate/scikit-learn,yanlend/scikit-learn,glemaitre/scikit-learn,ilyes14/scikit-learn,JsNoNo/scikit-learn,phdowling/scikit-learn,gclenaghan/scikit-learn,henrykironde/scikit-learn,jorge2703/scikit-learn,mehdidc/scikit-learn,gclenaghan/scikit-learn,tdhopper/scikit-learn,loli/semisupervisedforests,gotomypc/scikit-learn,ycaihua/scikit-learn,nikitasingh981/scikit-learn,OshynSong/scikit-learn,macks22/scikit-learn,ningchi/scikit-learn,ilo10/scikit-learn,PatrickOReilly/scikit-learn,bigdataelephants/scikit-learn,florian-f/sklearn,evgchz/scikit-learn,herilalaina/scikit-learn,icdishb/scikit-learn,samzhang111/scikit-learn,vibhorag/scikit-learn,djgagne/scikit-learn,sumspr/scikit-learn,LohithBlaze/scikit-learn,victorbergelin/scikit-learn,ngoix/OCRF,fredhusser/scikit-learn,ssaeger/scikit-learn,glouppe/scikit-learn,glouppe/scikit-learn,huobaowangxi/scikit-learn,scikit-learn/scikit-learn,schets/scikit-learn,theoryno3/scikit-learn,massmutual/scikit-learn,lbishal/scikit-learn,xyguo/scikit-learn,saiwing-yeung/scikit-learn,IndraVikas/scikit-learn,j
zt5132/scikit-learn,vortex-ape/scikit-learn,nelson-liu/scikit-learn,hdmetor/scikit-learn,maheshakya/scikit-learn,equialgo/scikit-learn,MartinSavc/scikit-learn,IssamLaradji/scikit-learn,ZenDevelopmentSystems/scikit-learn,meduz/scikit-learn,zorojean/scikit-learn,fabioticconi/scikit-learn,pv/scikit-learn,ishanic/scikit-learn,Obus/scikit-learn,gotomypc/scikit-learn,mojoboss/scikit-learn,themrmax/scikit-learn,kashif/scikit-learn,Djabbz/scikit-learn,NunoEdgarGub1/scikit-learn,loli/sklearn-ensembletrees,cybernet14/scikit-learn,themrmax/scikit-learn,trungnt13/scikit-learn,RachitKansal/scikit-learn,nesterione/scikit-learn,mattgiguere/scikit-learn,imaculate/scikit-learn,joshloyal/scikit-learn,IndraVikas/scikit-learn,stylianos-kampakis/scikit-learn,wlamond/scikit-learn,wlamond/scikit-learn,henridwyer/scikit-learn,MohammedWasim/scikit-learn,kagayakidan/scikit-learn,arahuja/scikit-learn,procoder317/scikit-learn,davidgbe/scikit-learn,samzhang111/scikit-learn,hitszxp/scikit-learn,ssaeger/scikit-learn,cl4rke/scikit-learn,DonBeo/scikit-learn,btabibian/scikit-learn,thientu/scikit-learn,rahul-c1/scikit-learn,olologin/scikit-learn,herilalaina/scikit-learn,joshloyal/scikit-learn,andaag/scikit-learn,pypot/scikit-learn,shikhardb/scikit-learn,Titan-C/scikit-learn,chrsrds/scikit-learn,Clyde-fare/scikit-learn,yunfeilu/scikit-learn,JeanKossaifi/scikit-learn,AlexanderFabisch/scikit-learn,dsullivan7/scikit-learn,jblackburne/scikit-learn,ZENGXH/scikit-learn,eg-zhang/scikit-learn,ZenDevelopmentSystems/scikit-learn,jakirkham/scikit-learn,ZenDevelopmentSystems/scikit-learn,ChanChiChoi/scikit-learn,ephes/scikit-learn,ChanChiChoi/scikit-learn,loli/sklearn-ensembletrees,roxyboy/scikit-learn,0x0all/scikit-learn,vigilv/scikit-learn,akionakamura/scikit-learn,mjgrav2001/scikit-learn,altairpearl/scikit-learn,mugizico/scikit-learn,chrsrds/scikit-learn,plissonf/scikit-learn,roxyboy/scikit-learn,espg/scikit-learn,raghavrv/scikit-learn,heli522/scikit-learn,jakobworldpeace/scikit-learn,shikhardb/scikit-learn,h2
educ/scikit-learn,hrjn/scikit-learn,xavierwu/scikit-learn,waterponey/scikit-learn,phdowling/scikit-learn,OshynSong/scikit-learn,Sentient07/scikit-learn,glennq/scikit-learn,ldirer/scikit-learn,Adai0808/scikit-learn,HolgerPeters/scikit-learn,kjung/scikit-learn,yask123/scikit-learn,aminert/scikit-learn,zaxtax/scikit-learn,aetilley/scikit-learn,aabadie/scikit-learn,jayflo/scikit-learn,joernhees/scikit-learn,h2educ/scikit-learn,ogrisel/scikit-learn,PatrickOReilly/scikit-learn,liangz0707/scikit-learn,devanshdalal/scikit-learn,florian-f/sklearn,ClimbsRocks/scikit-learn,xzh86/scikit-learn,jakobworldpeace/scikit-learn,NelisVerhoef/scikit-learn,CVML/scikit-learn,nhejazi/scikit-learn,xuewei4d/scikit-learn,Barmaley-exe/scikit-learn,bhargav/scikit-learn,glennq/scikit-learn,ChanderG/scikit-learn,ltiao/scikit-learn,krez13/scikit-learn,anirudhjayaraman/scikit-learn,CforED/Machine-Learning,abhishekkrthakur/scikit-learn,LiaoPan/scikit-learn,sergeyf/scikit-learn,pypot/scikit-learn,anurag313/scikit-learn,HolgerPeters/scikit-learn,siutanwong/scikit-learn,nhejazi/scikit-learn,alexeyum/scikit-learn,ephes/scikit-learn,victorbergelin/scikit-learn,walterreade/scikit-learn,xavierwu/scikit-learn,vivekmishra1991/scikit-learn,manashmndl/scikit-learn,466152112/scikit-learn,Lawrence-Liu/scikit-learn,Barmaley-exe/scikit-learn,jakirkham/scikit-learn,harshaneelhg/scikit-learn,marcocaccin/scikit-learn,ningchi/scikit-learn,mlyundin/scikit-learn,nelson-liu/scikit-learn,tawsifkhan/scikit-learn,AlexanderFabisch/scikit-learn,jayflo/scikit-learn,xwolf12/scikit-learn,yonglehou/scikit-learn,victorbergelin/scikit-learn,Achuth17/scikit-learn,jaidevd/scikit-learn,nesterione/scikit-learn,glemaitre/scikit-learn,Djabbz/scikit-learn,Titan-C/scikit-learn,jaidevd/scikit-learn,schets/scikit-learn,kevin-intel/scikit-learn,loli/semisupervisedforests,mugizico/scikit-learn,trankmichael/scikit-learn,shenzebang/scikit-learn,Myasuka/scikit-learn,etkirsch/scikit-learn,zihua/scikit-learn,NunoEdgarGub1/scikit-learn,robin-lai/sci
kit-learn,plissonf/scikit-learn,rahuldhote/scikit-learn,plissonf/scikit-learn,kylerbrown/scikit-learn,phdowling/scikit-learn,robin-lai/scikit-learn,nomadcube/scikit-learn,mrshu/scikit-learn,BiaDarkia/scikit-learn,saiwing-yeung/scikit-learn,kaichogami/scikit-learn,0x0all/scikit-learn,RachitKansal/scikit-learn,adamgreenhall/scikit-learn,elkingtonmcb/scikit-learn,cdegroc/scikit-learn,ElDeveloper/scikit-learn,Windy-Ground/scikit-learn,ycaihua/scikit-learn,0asa/scikit-learn,jblackburne/scikit-learn,vibhorag/scikit-learn,hugobowne/scikit-learn,mxjl620/scikit-learn,kylerbrown/scikit-learn,marcocaccin/scikit-learn,rexshihaoren/scikit-learn,rrohan/scikit-learn,macks22/scikit-learn,AIML/scikit-learn,vortex-ape/scikit-learn,shikhardb/scikit-learn,liyu1990/sklearn,andrewnc/scikit-learn,Windy-Ground/scikit-learn,jorge2703/scikit-learn,zhenv5/scikit-learn,dsullivan7/scikit-learn,maheshakya/scikit-learn,AnasGhrab/scikit-learn,Akshay0724/scikit-learn,marcocaccin/scikit-learn,arahuja/scikit-learn,jlegendary/scikit-learn,MechCoder/scikit-learn,shyamalschandra/scikit-learn,AlexandreAbraham/scikit-learn,ishanic/scikit-learn,aminert/scikit-learn,liberatorqjw/scikit-learn,abhishekgahlot/scikit-learn,Myasuka/scikit-learn,andaag/scikit-learn,ilo10/scikit-learn,mwv/scikit-learn,MartinDelzant/scikit-learn,3manuek/scikit-learn,alvarofierroclavero/scikit-learn,rsivapr/scikit-learn,bigdataelephants/scikit-learn,MohammedWasim/scikit-learn,vinayak-mehta/scikit-learn,ZENGXH/scikit-learn,trungnt13/scikit-learn,Garrett-R/scikit-learn,IshankGulati/scikit-learn,xzh86/scikit-learn,BiaDarkia/scikit-learn,Garrett-R/scikit-learn,moutai/scikit-learn,yyjiang/scikit-learn,costypetrisor/scikit-learn,q1ang/scikit-learn,harshaneelhg/scikit-learn,xwolf12/scikit-learn,zorojean/scikit-learn,fengzhyuan/scikit-learn,clemkoa/scikit-learn,jmschrei/scikit-learn,djgagne/scikit-learn,MartinDelzant/scikit-learn,mojoboss/scikit-learn,ky822/scikit-learn,untom/scikit-learn,khkaminska/scikit-learn,LiaoPan/scikit-learn,hsuanti
en/scikit-learn,DSLituiev/scikit-learn,potash/scikit-learn,voxlol/scikit-learn,untom/scikit-learn,bikong2/scikit-learn,dsquareindia/scikit-learn,cdegroc/scikit-learn,shusenl/scikit-learn,UNR-AERIAL/scikit-learn,manhhomienbienthuy/scikit-learn,samzhang111/scikit-learn,giorgiop/scikit-learn,siutanwong/scikit-learn,zihua/scikit-learn,vshtanko/scikit-learn,trankmichael/scikit-learn,MohammedWasim/scikit-learn,ChanderG/scikit-learn,lenovor/scikit-learn,herilalaina/scikit-learn,JPFrancoia/scikit-learn,spallavolu/scikit-learn,andaag/scikit-learn,JosmanPS/scikit-learn,moutai/scikit-learn,jmetzen/scikit-learn,thientu/scikit-learn,mattilyra/scikit-learn,dingocuster/scikit-learn,belltailjp/scikit-learn,sergeyf/scikit-learn,alexeyum/scikit-learn,Djabbz/scikit-learn,tmhm/scikit-learn,nmayorov/scikit-learn,lbishal/scikit-learn,henridwyer/scikit-learn,cainiaocome/scikit-learn,ltiao/scikit-learn,vivekmishra1991/scikit-learn,jkarnows/scikit-learn,chrsrds/scikit-learn,nmayorov/scikit-learn,deepesch/scikit-learn,JosmanPS/scikit-learn,JeanKossaifi/scikit-learn,marcocaccin/scikit-learn,poryfly/scikit-learn,eickenberg/scikit-learn,abimannans/scikit-learn,f3r/scikit-learn,elkingtonmcb/scikit-learn,ssaeger/scikit-learn,arjoly/scikit-learn,lin-credible/scikit-learn,theoryno3/scikit-learn,zihua/scikit-learn,aabadie/scikit-learn,fredhusser/scikit-learn,hainm/scikit-learn,bnaul/scikit-learn,idlead/scikit-learn,loli/sklearn-ensembletrees,ahoyosid/scikit-learn,AlexRobson/scikit-learn,RachitKansal/scikit-learn,ChanderG/scikit-learn,0asa/scikit-learn,krez13/scikit-learn,jereze/scikit-learn,r-mart/scikit-learn,carrillo/scikit-learn,jorik041/scikit-learn,Jimmy-Morzaria/scikit-learn,depet/scikit-learn,siutanwong/scikit-learn,robbymeals/scikit-learn,AlexanderFabisch/scikit-learn,Srisai85/scikit-learn,madjelan/scikit-learn,mhue/scikit-learn,fyffyt/scikit-learn,Aasmi/scikit-learn,kagayakidan/scikit-learn,florian-f/sklearn,vermouthmjl/scikit-learn,treycausey/scikit-learn,manashmndl/scikit-learn,aewhatley/
scikit-learn,zaxtax/scikit-learn,pianomania/scikit-learn,Fireblend/scikit-learn,frank-tancf/scikit-learn,shangwuhencc/scikit-learn,vybstat/scikit-learn,adamgreenhall/scikit-learn,olologin/scikit-learn,bthirion/scikit-learn,kjung/scikit-learn,lin-credible/scikit-learn,jlegendary/scikit-learn,tdhopper/scikit-learn,Aasmi/scikit-learn,shusenl/scikit-learn,alexsavio/scikit-learn,rajat1994/scikit-learn,zorroblue/scikit-learn,madjelan/scikit-learn,abimannans/scikit-learn,xiaoxiamii/scikit-learn,xzh86/scikit-learn,liyu1990/sklearn,jkarnows/scikit-learn,ilo10/scikit-learn,smartscheduling/scikit-learn-categorical-tree,anurag313/scikit-learn,belltailjp/scikit-learn,shikhardb/scikit-learn,nhejazi/scikit-learn,ashhher3/scikit-learn,etkirsch/scikit-learn,scikit-learn/scikit-learn,RayMick/scikit-learn,wzbozon/scikit-learn,rsivapr/scikit-learn,zuku1985/scikit-learn,rexshihaoren/scikit-learn,fyffyt/scikit-learn,petosegan/scikit-learn,UNR-AERIAL/scikit-learn,abhishekgahlot/scikit-learn,rohanp/scikit-learn,jzt5132/scikit-learn,sarahgrogan/scikit-learn,ilyes14/scikit-learn,bhargav/scikit-learn,mattgiguere/scikit-learn,Adai0808/scikit-learn,billy-inn/scikit-learn,raghavrv/scikit-learn,zorojean/scikit-learn,r-mart/scikit-learn,huzq/scikit-learn,carrillo/scikit-learn,Sentient07/scikit-learn,q1ang/scikit-learn,vigilv/scikit-learn,nvoron23/scikit-learn,terkkila/scikit-learn,sanketloke/scikit-learn,Fireblend/scikit-learn,nvoron23/scikit-learn,nikitasingh981/scikit-learn,andaag/scikit-learn,hainm/scikit-learn,Adai0808/scikit-learn,mhdella/scikit-learn,heli522/scikit-learn,andrewnc/scikit-learn,cauchycui/scikit-learn,xyguo/scikit-learn,michigraber/scikit-learn,ZENGXH/scikit-learn,stylianos-kampakis/scikit-learn,nhejazi/scikit-learn,MechCoder/scikit-learn,jpautom/scikit-learn,Obus/scikit-learn,vshtanko/scikit-learn,chrisburr/scikit-learn,yonglehou/scikit-learn | examples/ensemble/plot_forest_importances_faces.py | examples/ensemble/plot_forest_importances_faces.py | """
=======================================
Pixel importances with forests of trees
=======================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
"""
print __doc__
import pylab as pl
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Loading the Olivetti faces dataset (downloaded/cached on first use)
data = fetch_olivetti_faces()
# Flatten each image to a 1-D vector: one feature per pixel
X = data.images.reshape((len(data.images), -1))
y = data.target
mask = y < 5 # Limit to 5 classes
X = X[mask]
y = y[mask]
# Build a forest and compute the pixel importances
forest = ExtraTreesClassifier(n_estimators=1000,
                              max_features=128,
                              compute_importances=True,
                              random_state=0)
forest.fit(X, y)
# Per-feature (= per-pixel) importances, reshaped back to the image grid
importances = forest.feature_importances_
importances = importances.reshape(data.images[0].shape)
# Plot pixel importances (hotter pixel = more important)
pl.matshow(importances, cmap=pl.cm.hot)
pl.title("Pixel importances with forests of trees")
pl.show()
| """
=======================================
Pixel importances with forests of trees
=======================================
This example shows the use of forests of trees to evaluate the importance
of the pixels in an image classification task (faces). The hotter the pixel,
the more important.
"""
print __doc__
import pylab as pl
from sklearn.datasets import fetch_olivetti_faces
from sklearn.ensemble import ExtraTreesClassifier
# Loading the digits dataset
data = fetch_olivetti_faces()
X = data.images.reshape((len(data.images), -1))
y = data.target
mask = y < 5 # Limit to 5 classes
X = X[mask]
y = y[mask]
# Build a forest and compute the pixel importances
forest = ExtraTreesClassifier(n_estimators=1000,
max_features=128,
compute_importances=True,
n_jobs=2,
random_state=0)
forest.fit(X, y)
importances = forest.feature_importances_
importances = importances.reshape(data.images[0].shape)
# Plot pixel importances
pl.matshow(importances, cmap=pl.cm.hot)
pl.title("Pixel importances with forests of trees")
pl.show()
| bsd-3-clause | Python |
918421a26c762a5afc0161d874ab4fec6ff11cb0 | add url | dictoss/osmmarkerstorage,dictoss/osmmarkerstorage,dictoss/osmmarkerstorage,dictoss/osmmarkerstorage | webapp/osmproj1/markerstorage/urls.py | webapp/osmproj1/markerstorage/urls.py | from django.conf.urls import url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from . import views
urlpatterns = [
url(r'^markerdata/$', views.markerdata_list),
url(r'^markerdata/(?P<pk>[0-9]+)/$', views.markerdata_detail),
#
url(r'^osm/index.html', views.osm_index),
url(r'^osm/first.html', views.osm_first),
url(r'^osm/marker1.html', views.osm_marker1),
url(r'^osm/marker2.html', views.osm_marker2),
url(r'^osm/marker2.json', views.osm_marker2_json),
url(r'^osm/marker3.html', views.osm_marker3),
url(r'^osm/tile1.html', views.osm_tile1),
url(r'^osm/tiles/(?P<tile_ver>\d+)/testtile1/(?P<tile_z>\d+)/(?P<tile_x>\d+)/(?P<tile_y>\d+).png', views.osm_tile1_image),
url(r'^osm/$', views.osm_index),
url(r'^$', views.osm_index),
]
urlpatterns += staticfiles_urlpatterns()
| from django.conf.urls import url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from . import views
urlpatterns = [
url(r'^markerdata/$', views.markerdata_list),
url(r'^markerdata/(?P<pk>[0-9]+)/$', views.markerdata_detail),
#
url(r'^osm/index.html', views.osm_index),
url(r'^osm/first.html', views.osm_first),
url(r'^osm/marker1.html', views.osm_marker1),
url(r'^osm/marker2.html', views.osm_marker2),
url(r'^osm/marker2.json', views.osm_marker2_json),
url(r'^osm/marker3.html', views.osm_marker3),
url(r'^osm/tile1.html', views.osm_tile1),
url(r'^osm/tiles/(?P<tile_ver>\d+)/testtile1/(?P<tile_z>\d+)/(?P<tile_x>\d+)/(?P<tile_y>\d+).png', views.osm_tile1_image),
url(r'^osm/$', views.osm_index),
]
urlpatterns += staticfiles_urlpatterns()
| bsd-2-clause | Python |
d316513501d21bd04cb64091133b93cacd9bcd3b | Switch RawConfigParser -> ConfigParser. | pkexcellent/luigi,Yoone/luigi,ChrisBeaumont/luigi,graingert/luigi,rayrrr/luigi,realgo/luigi,leafjungle/luigi,ehdr/luigi,JackDanger/luigi,theoryno3/luigi,fabriziodemaria/luigi,ChrisBeaumont/luigi,alkemics/luigi,dstandish/luigi,mfcabrera/luigi,samuell/luigi,moritzschaefer/luigi,fw1121/luigi,aeron15/luigi,slvnperron/luigi,humanlongevity/luigi,javrasya/luigi,alkemics/luigi,slvnperron/luigi,SeedScientific/luigi,kalaidin/luigi,soxofaan/luigi,ehdr/luigi,mortardata/luigi,kevhill/luigi,vine/luigi,mbruggmann/luigi,fw1121/luigi,rayrrr/luigi,qpxu007/luigi,17zuoye/luigi,Dawny33/luigi,ContextLogic/luigi,mbruggmann/luigi,altaf-ali/luigi,javrasya/luigi,wakamori/luigi,Magnetic/luigi,leafjungle/luigi,hellais/luigi,bowlofstew/luigi,glenndmello/luigi,pkexcellent/luigi,riga/luigi,cpcloud/luigi,anyman/luigi,pkexcellent/luigi,hadesbox/luigi,ivannotes/luigi,jw0201/luigi,glenndmello/luigi,SeedScientific/luigi,meyerson/luigi,laserson/luigi,penelopy/luigi,anyman/luigi,samuell/luigi,LamCiuLoeng/luigi,stephenpascoe/luigi,lichia/luigi,oldpa/luigi,LamCiuLoeng/luigi,mbruggmann/luigi,dkroy/luigi,samuell/luigi,Yoone/luigi,upworthy/luigi,dylanjbarth/luigi,mfcabrera/luigi,mortardata/luigi,Tarrasch/luigi,moritzschaefer/luigi,gpoulin/luigi,drincruz/luigi,harveyxia/luigi,drincruz/luigi,dkroy/luigi,vine/luigi,LamCiuLoeng/luigi,jamesmcm/luigi,adaitche/luigi,altaf-ali/luigi,stephenpascoe/luigi,JackDanger/luigi,spotify/luigi,linearregression/luigi,linearregression/luigi,huiyi1990/luigi,wakamori/luigi,percyfal/luigi,casey-green/luigi,jw0201/luigi,vine/luigi,torypages/luigi,alkemics/luigi,torypages/luigi,belevtsoff/luigi,harveyxia/luigi,joeshaw/luigi,stroykova/luigi,Tarrasch/luigi,casey-green/luigi,thejens/luigi,adaitche/luigi,thejens/luigi,dhruvg/luigi,ivannotes/luigi,oldpa/luigi,edx/luigi,laserson/luigi,Houzz/luigi,mortardata/luigi,edx/luigi,ThQ/luigi,dstandish/luigi,tuulos/luigi,anyman/luigi,meyerson/luigi,gpoulin/luigi,pen
elopy/luigi,foursquare/luigi,dhruvg/luigi,stroykova/luigi,spotify/luigi,vine/luigi,stroykova/luigi,foursquare/luigi,springcoil/luigi,soxofaan/luigi,gpoulin/luigi,upworthy/luigi,rayrrr/luigi,republic-analytics/luigi,lichia/luigi,linsomniac/luigi,lungetech/luigi,huiyi1990/luigi,ChrisBeaumont/luigi,SeedScientific/luigi,h3biomed/luigi,dylanjbarth/luigi,ThQ/luigi,wakamori/luigi,belevtsoff/luigi,glenndmello/luigi,17zuoye/luigi,bmaggard/luigi,DomainGroupOSS/luigi,humanlongevity/luigi,joeshaw/luigi,PeteW/luigi,bmaggard/luigi,bowlofstew/luigi,anyman/luigi,edx/luigi,Tarrasch/luigi,spotify/luigi,dlstadther/luigi,ContextLogic/luigi,dhruvg/luigi,DomainGroupOSS/luigi,jw0201/luigi,fabriziodemaria/luigi,bmaggard/luigi,SkyTruth/luigi,walkers-mv/luigi,spotify/luigi,jamesmcm/luigi,Magnetic/luigi,foursquare/luigi,humanlongevity/luigi,drincruz/luigi,soxofaan/luigi,Dawny33/luigi,hellais/luigi,kevhill/luigi,linsomniac/luigi,moandcompany/luigi,meyerson/luigi,lungetech/luigi,graingert/luigi,graingert/luigi,DomainGroupOSS/luigi,kevhill/luigi,linsomniac/luigi,ZhenxingWu/luigi,linearregression/luigi,ehdr/luigi,laserson/luigi,qpxu007/luigi,Wattpad/luigi,rizzatti/luigi,Dawny33/luigi,altaf-ali/luigi,alkemics/luigi,moritzschaefer/luigi,aeron15/luigi,bowlofstew/luigi,ViaSat/luigi,mfcabrera/luigi,ivannotes/luigi,stephenpascoe/luigi,PeteW/luigi,casey-green/luigi,bowlofstew/luigi,mortardata/luigi,huiyi1990/luigi,riga/luigi,belevtsoff/luigi,springcoil/luigi,oldpa/luigi,meyerson/luigi,republic-analytics/luigi,fw1121/luigi,mbruggmann/luigi,Yoone/luigi,lungetech/luigi,walkers-mv/luigi,rizzatti/luigi,stephenpascoe/luigi,DomainGroupOSS/luigi,realgo/luigi,leafjungle/luigi,lichia/luigi,javrasya/luigi,dstandish/luigi,ivannotes/luigi,neilisaac/luigi,Dawny33/luigi,ContextLogic/luigi,dkroy/luigi,kalaidin/luigi,hellais/luigi,ContextLogic/luigi,humanlongevity/luigi,linsomniac/luigi,republic-analytics/luigi,graingert/luigi,SkyTruth/luigi,springcoil/luigi,tuulos/luigi,ChrisBeaumont/luigi,hadesbox/luigi,SkyTruth/luigi
,soxofaan/luigi,fabriziodemaria/luigi,rayrrr/luigi,linearregression/luigi,slvnperron/luigi,tuulos/luigi,fw1121/luigi,belevtsoff/luigi,dylanjbarth/luigi,ViaSat/luigi,percyfal/luigi,dkroy/luigi,neilisaac/luigi,torypages/luigi,slvnperron/luigi,realgo/luigi,percyfal/luigi,glenndmello/luigi,h3biomed/luigi,pkexcellent/luigi,dlstadther/luigi,Wattpad/luigi,sahitya-pavurala/luigi,bmaggard/luigi,altaf-ali/luigi,moritzschaefer/luigi,tuulos/luigi,PeteW/luigi,penelopy/luigi,upworthy/luigi,ehdr/luigi,neilisaac/luigi,penelopy/luigi,aeron15/luigi,joeshaw/luigi,ViaSat/luigi,theoryno3/luigi,lungetech/luigi,huiyi1990/luigi,dhruvg/luigi,sahitya-pavurala/luigi,moandcompany/luigi,dlstadther/luigi,moandcompany/luigi,adaitche/luigi,qpxu007/luigi,dylanjbarth/luigi,PeteW/luigi,moandcompany/luigi,percyfal/luigi,h3biomed/luigi,cpcloud/luigi,Magnetic/luigi,joeshaw/luigi,qpxu007/luigi,casey-green/luigi,upworthy/luigi,sahitya-pavurala/luigi,ZhenxingWu/luigi,Magnetic/luigi,Houzz/luigi,harveyxia/luigi,kevhill/luigi,Wattpad/luigi,fabriziodemaria/luigi,dstandish/luigi,samepage-labs/luigi,edx/luigi,hellais/luigi,oldpa/luigi,ThQ/luigi,adaitche/luigi,dlstadther/luigi,riga/luigi,drincruz/luigi,h3biomed/luigi,theoryno3/luigi,17zuoye/luigi,ViaSat/luigi,sahitya-pavurala/luigi,17zuoye/luigi,walkers-mv/luigi,jamesmcm/luigi,rizzatti/luigi,jw0201/luigi,leafjungle/luigi,rizzatti/luigi,republic-analytics/luigi,hadesbox/luigi,riga/luigi,wakamori/luigi,stroykova/luigi,JackDanger/luigi,theoryno3/luigi,foursquare/luigi,ZhenxingWu/luigi,neilisaac/luigi,laserson/luigi,thejens/luigi,SeedScientific/luigi,mfcabrera/luigi,SkyTruth/luigi,aeron15/luigi,Houzz/luigi,springcoil/luigi,ZhenxingWu/luigi,samuell/luigi,realgo/luigi,harveyxia/luigi,samepage-labs/luigi,Houzz/luigi,jamesmcm/luigi,lichia/luigi,walkers-mv/luigi,ThQ/luigi,gpoulin/luigi,torypages/luigi,Yoone/luigi,kalaidin/luigi,thejens/luigi,javrasya/luigi,LamCiuLoeng/luigi,hadesbox/luigi,JackDanger/luigi,kalaidin/luigi,samepage-labs/luigi,samepage-labs/luigi,Tarrasch/lui
gi | luigi/configuration.py | luigi/configuration.py |
import logging
from ConfigParser import ConfigParser, NoOptionError, NoSectionError
class LuigiConfigParser(ConfigParser):
NO_DEFAULT = object()
_instance = None
_config_paths = ['/etc/luigi/client.cfg', 'client.cfg']
@classmethod
def add_config_path(cls, path):
cls._config_paths.append(path)
cls._instance.reload()
@classmethod
def instance(cls, *args, **kwargs):
""" Singleton getter """
if cls._instance is None:
cls._instance = cls(*args, **kwargs)
loaded = cls._instance.reload()
logging.getLogger('luigi-interface').info('Loaded %r' % loaded)
return cls._instance
def reload(self):
return self._instance.read(self._config_paths)
def _get_with_default(self, method, section, option, default, expected_type=None):
""" Gets the value of the section/option using method. Returns default if value
is not found. Raises an exception if the default value is not None and doesn't match
the expected_type.
"""
try:
return method(self, section, option)
except (NoOptionError, NoSectionError):
if default is LuigiConfigParser.NO_DEFAULT:
raise
if expected_type is not None and default is not None and \
not isinstance(default, expected_type):
raise
return default
def get(self, section, option, default=NO_DEFAULT):
return self._get_with_default(ConfigParser.get, section, option, default)
def getboolean(self, section, option, default=NO_DEFAULT):
return self._get_with_default(ConfigParser.getboolean, section, option, default, bool)
def getint(self, section, option, default=NO_DEFAULT):
return self._get_with_default(ConfigParser.getint, section, option, default, int)
def getfloat(self, section, option, default=NO_DEFAULT):
return self._get_with_default(ConfigParser.getfloat, section, option, default, float)
def get_config():
""" Convenience method (for backwards compatibility) for accessing config singleton """
return LuigiConfigParser.instance()
|
import logging
from ConfigParser import RawConfigParser, NoOptionError, NoSectionError
class LuigiConfigParser(RawConfigParser):
NO_DEFAULT = object()
_instance = None
_config_paths = ['/etc/luigi/client.cfg', 'client.cfg']
@classmethod
def add_config_path(cls, path):
cls._config_paths.append(path)
cls._instance.reload()
@classmethod
def instance(cls, *args, **kwargs):
""" Singleton getter """
if cls._instance is None:
cls._instance = cls(*args, **kwargs)
loaded = cls._instance.reload()
logging.getLogger('luigi-interface').info('Loaded %r' % loaded)
return cls._instance
def reload(self):
return self._instance.read(self._config_paths)
def _get_with_default(self, method, section, option, default, expected_type=None):
""" Gets the value of the section/option using method. Returns default if value
is not found. Raises an exception if the default value is not None and doesn't match
the expected_type.
"""
try:
return method(self, section, option)
except (NoOptionError, NoSectionError):
if default is LuigiConfigParser.NO_DEFAULT:
raise
if expected_type is not None and default is not None and \
not isinstance(default, expected_type):
raise
return default
def get(self, section, option, default=NO_DEFAULT):
return self._get_with_default(RawConfigParser.get, section, option, default)
def getboolean(self, section, option, default=NO_DEFAULT):
return self._get_with_default(RawConfigParser.getboolean, section, option, default, bool)
def getint(self, section, option, default=NO_DEFAULT):
return self._get_with_default(RawConfigParser.getint, section, option, default, int)
def getfloat(self, section, option, default=NO_DEFAULT):
return self._get_with_default(RawConfigParser.getfloat, section, option, default, float)
def get_config():
""" Convenience method (for backwards compatibility) for accessing config singleton """
return LuigiConfigParser.instance()
| apache-2.0 | Python |
550fd8bc4a9f9e7f780a99b9d925b95883e046ea | remove extra log statement | CitrineInformatics/pif-dft | dfttopif/web.py | dfttopif/web.py | import sys
import json
import logging
import requests
from pypif import pif
from flask import Flask, request
from flask_cors import CORS
from dfttopif import *
# Configure flask
app = Flask(__name__)
CORS(app)
# Configure logging
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
@app.route('/convert/from/tarfile', methods=['POST'])
def convert_from_tarfile():
# Create a temporary directory to save the files and cleanup when
# finished with it
temp_dir_name = '/tmp/' + str(uuid.uuid4())
os.makedirs(temp_dir_name)
try:
data = json.loads(request.get_data(as_text=True))
response = requests.get(data['url'], stream=True)
filename = temp_dir_name + '/file_to_process'
with open(filename, 'wb') as output:
shutil.copyfileobj(response.raw, output)
return pif.dumps({'system': tarfile_to_pif(filename, '/tmp/')})
finally:
shutil.rmtree(temp_dir_name)
| import sys
import json
import logging
import requests
from pypif import pif
from flask import Flask, request
from flask_cors import CORS
from dfttopif import *
# Configure flask
app = Flask(__name__)
CORS(app)
# Configure logging
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
@app.route('/convert/from/tarfile', methods=['POST'])
def convert_from_tarfile():
# Create a temporary directory to save the files and cleanup when
# finished with it
temp_dir_name = '/tmp/' + str(uuid.uuid4())
os.makedirs(temp_dir_name)
try:
data = json.loads(request.get_data(as_text=True))
response = requests.get(data['url'], stream=True)
filename = temp_dir_name + '/file_to_process'
with open(filename, 'wb') as output:
shutil.copyfileobj(response.raw, output)
logging.info(os.listdir(temp_dir_name))
return pif.dumps({'system': tarfile_to_pif(filename, '/tmp/')})
finally:
shutil.rmtree(temp_dir_name)
| apache-2.0 | Python |
36b15658906685ea7eaaa3289678c416bc2d23af | Remove sleep from sample script, it was slowing down the tests. | cfe-lab/Kive,cfe-lab/Kive,cfe-lab/Kive,cfe-lab/Kive,cfe-lab/Kive | samplecode/generic_script.py | samplecode/generic_script.py | #! /usr/bin/env python
import argparse
import csv
import sys
# In order to work with kive, scripts which having a inputs
# and b inputs must have a+b command line arguments, the first a
# arguments specifying paths of input files, the subsequent b
# arguments specifying the paths of where outputs are written]
scriptDescription = "Convert CSV (str a, int b) to (str c, int d)"
parser = argparse.ArgumentParser(scriptDescription)
parser.add_argument("input_csv", help="CSV containing (str a, int b) doublets")
parser.add_argument("output_csv", help="CSV containing (int c, str d) doublets")
args = parser.parse_args()
try:
with open(args.input_csv, "rb") as f:
output = open(args.output_csv, "wb")
# csv.reader() returns list inside an iterable
# Iterables can be used in for/in blocks
string_csv = csv.reader(f, delimiter=',')
try:
for i, row in enumerate(string_csv):
if i == 0:
# Output column names must be registered in kive
output.write("c,d\n")
continue
print "Processing row {}".format(i)
a = str(row[0])
b = int(row[1])
c = 2*b
d = a
output.write(str(c) + "," + str(d) + "\n")
# If csv iterable method __next__() throws error, exit
except csv.Error as e:
print("Error at line {}: {}".format(string_csv.line_num, e))
sys.exit(1)
output.close()
# If no errors, return with code 0 (success)
sys.exit(0)
# Return error code 2 if file cannot be opened
except IOError as e:
print(e)
sys.exit(2)
| #! /usr/bin/env python
import argparse
import csv
import sys
import time
# In order to work with kive, scripts which having a inputs
# and b inputs must have a+b command line arguments, the first a
# arguments specifying paths of input files, the subsequent b
# arguments specifying the paths of where outputs are written]
scriptDescription = "Convert CSV (str a, int b) to (str c, int d)"
parser = argparse.ArgumentParser(scriptDescription)
parser.add_argument("input_csv", help="CSV containing (str a, int b) doublets")
parser.add_argument("output_csv", help="CSV containing (int c, str d) doublets")
args = parser.parse_args()
try:
with open(args.input_csv, "rb") as f:
output = open(args.output_csv, "wb")
# csv.reader() returns list inside an iterable
# Iterables can be used in for/in blocks
string_csv = csv.reader(f, delimiter=',')
try:
for i, row in enumerate(string_csv):
time.sleep(0.5)
if i == 0:
# Output column names must be registered in kive
output.write("c,d\n")
continue
print "Processing row {}".format(i)
a = str(row[0])
b = int(row[1])
c = 2*b
d = a
output.write(str(c) + "," + str(d) + "\n")
# If csv iterable method __next__() throws error, exit
except csv.Error as e:
print("Error at line {}: {}".format(string_csv.line_num, e))
sys.exit(1)
output.close()
# If no errors, return with code 0 (success)
sys.exit(0)
# Return error code 2 if file cannot be opened
except IOError as e:
print(e)
sys.exit(2)
| bsd-3-clause | Python |
b4c1eb9acba9e5ce74da3ccfda177ccf41c07ab5 | Bump revision number | cread/ecks,cread/ecks | ecks/__init__.py | ecks/__init__.py | """
A simple way to get data out of a remote machine using SNMP without having to deal with a single MIB or OID
The goal of Ecks is simple - make it really easy to get get any data
from an SNMP service.
Ecks is made up of a core class that will collect data via SNMP,
and a set of plugins that contain the OID and the code needed to
transform the results from nested OID's to usable data.
See help(ecks.Ecks) for more info
Copyright 2011 Chris Read (chris.read@gmail.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__version__ = '0.5-dev'
__author__ = 'Chris Read <chris.read@gmail.com>'
from ecks import Ecks
| """
A simple way to get data out of a remote machine using SNMP without having to deal with a single MIB or OID
The goal of Ecks is simple - make it really easy to get get any data
from an SNMP service.
Ecks is made up of a core class that will collect data via SNMP,
and a set of plugins that contain the OID and the code needed to
transform the results from nested OID's to usable data.
See help(ecks.Ecks) for more info
Copyright 2011 Chris Read (chris.read@gmail.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
__version__ = '0.4'
__author__ = 'Chris Read <chris.read@gmail.com>'
from ecks import Ecks
| apache-2.0 | Python |
39980bd4063cd55e86f715ecf4c1a0eef48dadce | Update bucket listing to return key names | bahoo/django-dbbackup,bahoo/django-dbbackup | dbbackup/storage/s3_storage.py | dbbackup/storage/s3_storage.py | """
S3 Storage object.
"""
import os
import boto
from boto.s3.key import Key
from django.conf import settings
from .base import BaseStorage, StorageError
################################
# S3 Storage Object
################################
class Storage(BaseStorage):
""" S3 API Storage. """
S3_BUCKET = getattr(settings, 'DBBACKUP_S3_BUCKET', None)
S3_ACCESS_KEY = getattr(settings, 'DBBACKUP_S3_ACCESS_KEY', None)
S3_SECRET_KEY = getattr(settings, 'DBBACKUP_S3_SECRET_KEY', None)
S3_DOMAIN = getattr(settings, 'DBBACKUP_S3_DOMAIN', 'https://s3.amazonaws.com/')
S3_DIRECTORY = getattr(settings, 'DBBACKUP_S3_DIRECTORY', "django-dbbackups/")
S3_DIRECTORY = '%s/' % S3_DIRECTORY.strip('/')
def __init__(self, server_name=None):
self._check_filesystem_errors()
self.name = 'AmazonS3'
self.conn = boto.connect_s3(self.S3_ACCESS_KEY, self.S3_SECRET_KEY)
self.bucket = self.conn.get_bucket(self.S3_BUCKET)
BaseStorage.__init__(self)
def _check_filesystem_errors(self):
""" Check we have all the required settings defined. """
if not self.S3_BUCKET:
raise StorageError('Filesystem storage requires DBBACKUP_S3_BUCKET to be defined in settings.')
if not self.S3_ACCESS_KEY:
raise StorageError('Filesystem storage requires DBBACKUP_S3_ACCESS_KEY to be defined in settings.')
if not self.S3_SECRET_KEY:
raise StorageError('Filesystem storage requires DBBACKUP_S3_SECRET_KEY to be defined in settings.')
###################################
# DBBackup Storage Methods
###################################
@property
def bucket(self):
return self.bucket
def backup_dir(self):
return self.S3_DIRECTORY
def delete_file(self, filepath):
""" Delete the specified filepath. """
self.bucket.delete_key(filepath)
def list_directory(self):
""" List all stored backups for the specified. """
return [k.name for k in
self.bucket.get_all_keys(prefix=self.S3_DIRECTORY)]
def write_file(self, filehandle):
""" Write the specified file. """
filepath = os.path.join(self.S3_DIRECTORY, filehandle.name)
key = Key(self.bucket)
key.key = filepath
filehandle.seek(0)
key.set_contents_from_file(filehandle)
def read_file(self, filepath):
""" Read the specified file and return it's handle. """
return self.bucket.get_key(filepath)
| """
S3 Storage object.
"""
import os
import boto
from boto.s3.key import Key
from django.conf import settings
from .base import BaseStorage, StorageError
################################
# S3 Storage Object
################################
class Storage(BaseStorage):
""" S3 API Storage. """
S3_BUCKET = getattr(settings, 'DBBACKUP_S3_BUCKET', None)
S3_ACCESS_KEY = getattr(settings, 'DBBACKUP_S3_ACCESS_KEY', None)
S3_SECRET_KEY = getattr(settings, 'DBBACKUP_S3_SECRET_KEY', None)
S3_DOMAIN = getattr(settings, 'DBBACKUP_S3_DOMAIN', 'https://s3.amazonaws.com/')
S3_DIRECTORY = getattr(settings, 'DBBACKUP_S3_DIRECTORY', "django-dbbackups/")
S3_DIRECTORY = '%s/' % S3_DIRECTORY.strip('/')
def __init__(self, server_name=None):
self._check_filesystem_errors()
self.name = 'AmazonS3'
self.conn = boto.connect_s3(self.S3_ACCESS_KEY, self.S3_SECRET_KEY)
self.bucket = self.conn.get_bucket(self.S3_BUCKET)
BaseStorage.__init__(self)
def _check_filesystem_errors(self):
""" Check we have all the required settings defined. """
if not self.S3_BUCKET:
raise StorageError('Filesystem storage requires DBBACKUP_S3_BUCKET to be defined in settings.')
if not self.S3_ACCESS_KEY:
raise StorageError('Filesystem storage requires DBBACKUP_S3_ACCESS_KEY to be defined in settings.')
if not self.S3_SECRET_KEY:
raise StorageError('Filesystem storage requires DBBACKUP_S3_SECRET_KEY to be defined in settings.')
###################################
# DBBackup Storage Methods
###################################
@property
def bucket(self):
return self.bucket
def backup_dir(self):
return self.S3_DIRECTORY
def delete_file(self, filepath):
""" Delete the specified filepath. """
self.bucket.delete_key(filepath)
def list_directory(self):
""" List all stored backups for the specified. """
return self.bucket.list(prefix=self.S3_DIRECTORY)
def write_file(self, filehandle):
""" Write the specified file. """
filepath = os.path.join(self.S3_DIRECTORY, filehandle.name)
key = Key(self.bucket)
key.key = filepath
filehandle.seek(0)
key.set_contents_from_file(filehandle)
def read_file(self, filepath):
""" Read the specified file and return it's handle. """
return self.bucket.get_key(filepath)
| bsd-3-clause | Python |
812031ff7e3017dfcbff4c3434fbd3c2437dcb33 | print the information of testcase which is on failure | F30/storm,0x726d77/storm,kevpeek/storm,ujfjhz/storm,kevinconaway/storm,carl34/storm,hmcl/storm-apache,kevpeek/storm,ujfjhz/storm,srdo/storm,ujfjhz/storm,kevpeek/storm,0x726d77/storm,srishtyagrawal/storm,kevpeek/storm,F30/storm,kamleshbhatt/storm,srdo/storm,kamleshbhatt/storm,carl34/storm,pczb/storm,srishtyagrawal/storm,F30/storm,kishorvpatil/incubator-storm,pczb/storm,kishorvpatil/incubator-storm,raviperi/storm,kevinconaway/storm,knusbaum/incubator-storm,raviperi/storm,kishorvpatil/incubator-storm,erikdw/storm,srishtyagrawal/storm,hmcc/storm,kevinconaway/storm,0x726d77/storm,sakanaou/storm,erikdw/storm,hmcc/storm,hmcc/storm,kevinconaway/storm,kishorvpatil/incubator-storm,cluo512/storm,knusbaum/incubator-storm,kamleshbhatt/storm,erikdw/storm,carl34/storm,srdo/storm,sakanaou/storm,kevpeek/storm,hmcc/storm,pczb/storm,roshannaik/storm,hmcc/storm,roshannaik/storm,knusbaum/incubator-storm,hmcl/storm-apache,Crim/storm,pczb/storm,ujfjhz/storm,knusbaum/incubator-storm,knusbaum/incubator-storm,sakanaou/storm,srdo/storm,0x726d77/storm,erikdw/storm,kevinconaway/storm,raviperi/storm,hmcl/storm-apache,carl34/storm,kishorvpatil/incubator-storm,roshannaik/storm,srishtyagrawal/storm,raviperi/storm,roshannaik/storm,Crim/storm,Crim/storm,srdo/storm,0x726d77/storm,adityasharad/storm,srishtyagrawal/storm,adityasharad/storm,hmcl/storm-apache,roshannaik/storm,cluo512/storm,adityasharad/storm,srdo/storm,cluo512/storm,hmcl/storm-apache,roshannaik/storm,adityasharad/storm,raviperi/storm,carl34/storm,erikdw/storm,cluo512/storm,kamleshbhatt/storm,F30/storm,pczb/storm,adityasharad/storm,kishorvpatil/incubator-storm,kevpeek/storm,srdo/storm,kevinconaway/storm,kevinconaway/storm,sakanaou/storm,sakanaou/storm,roshannaik/storm,hmcl/storm-apache,cluo512/storm,erikdw/storm,F30/storm,hmcc/storm,kishorvpatil/incubator-storm,F30/storm,pczb/storm,pczb/storm,hmcc/storm,adityasharad/storm,kamleshbhatt/storm,0
x726d77/storm,srishtyagrawal/storm,ujfjhz/storm,cluo512/storm,Crim/storm,sakanaou/storm,ujfjhz/storm,knusbaum/incubator-storm,Crim/storm,F30/storm,sakanaou/storm,erikdw/storm,knusbaum/incubator-storm,Crim/storm,Crim/storm,kamleshbhatt/storm,cluo512/storm,adityasharad/storm,carl34/storm,carl34/storm,0x726d77/storm,hmcl/storm-apache,srishtyagrawal/storm,ujfjhz/storm,kevpeek/storm,raviperi/storm,kamleshbhatt/storm,raviperi/storm | dev-tools/travis/print-errors-from-test-reports.py | dev-tools/travis/print-errors-from-test-reports.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import glob
import traceback
from xml.etree.ElementTree import ElementTree
def print_detail_information(testcase, fail_or_error):
print "-" * 50
print "classname: %s / testname: %s" % (testcase.get("classname"), testcase.get("name"))
print fail_or_error.text
stdout = testcase.find("system-out")
if stdout != None:
print "-" * 20, "system-out", "-"*20
print stdout.text
stderr = testcase.find("system-err")
if stderr != None:
print "-" * 20, "system-err", "-"*20
print stderr.text
print "-" * 50
def print_error_reports_from_report_file(file_path):
tree = ElementTree()
try:
tree.parse(file_path)
except:
print "-" * 50
print "Error parsing %s"%file_path
f = open(file_path, "r");
print f.read();
print "-" * 50
return
testcases = tree.findall(".//testcase")
for testcase in testcases:
error = testcase.find("error")
if error is not None:
print_detail_information(testcase, error)
fail = testcase.find("fail")
if fail is not None:
print_detail_information(testcase, fail)
failure = testcase.find("failure")
if failure is not None:
print_detail_information(testcase, failure)
def main(report_dir_path):
for test_report in glob.iglob(report_dir_path + '/*.xml'):
file_path = os.path.abspath(test_report)
try:
print "Checking %s" % test_report
print_error_reports_from_report_file(file_path)
except Exception, e:
print "Error while reading report file, %s" % file_path
print "Exception: %s" % e
traceback.print_exc()
if __name__ == "__main__":
if sys.argv < 2:
print "Usage: %s [report dir path]" % sys.argv[0]
sys.exit(1)
main(sys.argv[1])
| #!/usr/bin/python
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import glob
import traceback
from xml.etree.ElementTree import ElementTree
def print_detail_information(testcase, fail_or_error):
print "-" * 50
print "classname: %s / testname: %s" % (testcase.get("classname"), testcase.get("name"))
print fail_or_error.text
stdout = testcase.find("system-out")
if stdout != None:
print "-" * 20, "system-out", "-"*20
print stdout.text
stderr = testcase.find("system-err")
if stderr != None:
print "-" * 20, "system-err", "-"*20
print stderr.text
print "-" * 50
def print_error_reports_from_report_file(file_path):
tree = ElementTree()
try:
tree.parse(file_path)
except:
print "-" * 50
print "Error parsing %s"%file_path
f = open(file_path, "r");
print f.read();
print "-" * 50
return
testcases = tree.findall(".//testcase")
for testcase in testcases:
error = testcase.find("error")
if error is not None:
print_detail_information(testcase, error)
fail = testcase.find("fail")
if fail is not None:
print_detail_information(testcase, fail)
def main(report_dir_path):
for test_report in glob.iglob(report_dir_path + '/*.xml'):
file_path = os.path.abspath(test_report)
try:
print "Checking %s" % test_report
print_error_reports_from_report_file(file_path)
except Exception, e:
print "Error while reading report file, %s" % file_path
print "Exception: %s" % e
traceback.print_exc()
if __name__ == "__main__":
if sys.argv < 2:
print "Usage: %s [report dir path]" % sys.argv[0]
sys.exit(1)
main(sys.argv[1])
| apache-2.0 | Python |
8c9da7ada644e806926937087d6b4b06e5b62d54 | Allow publish_trajectory to be gracefully interrupted without killing kernel | openhumanoids/exotica,openhumanoids/exotica,openhumanoids/exotica,openhumanoids/exotica | exotica_python/src/pyexotica/publish_trajectory.py | exotica_python/src/pyexotica/publish_trajectory.py | from time import sleep
import matplotlib.pyplot as plt
import signal
def sigIntHandler(signal, frame):
    """SIGINT handler: raise KeyboardInterrupt so playback loops can stop."""
    raise KeyboardInterrupt
def publishPose(q, problem, t=0.0):
    """Update the problem's scene to configuration *q* at time *t* and
    publish the resulting frames for visualisation."""
    problem.getScene().Update(q, t)
    problem.getScene().getSolver().publishFrames()
def publishTrajectory(traj, T, problem):
    """Play back *traj* (a sequence of configurations) over *T* seconds,
    looping indefinitely until interrupted with Ctrl+C."""
    # Install the handler so Ctrl+C breaks the loop instead of killing the
    # interpreter/kernel.
    signal.signal(signal.SIGINT, sigIntHandler)
    print('Playing back trajectory '+str(T)+'s')
    dt = float(T)/float(len(traj))  # uniform time step between states
    t = 0
    while True:
        try:
            publishPose(traj[t], problem, float(t)*dt)
            sleep(dt)
            t = (t+1) % len(traj)  # wrap around for continuous playback
        except KeyboardInterrupt:
            break
def publishTimeIndexedTrajectory(traj, Ts, problem, once=False):
    """Play back *traj* using the explicit per-state timestamps *Ts*.

    Repeats forever unless *once* is True; stop with Ctrl+C.
    """
    signal.signal(signal.SIGINT, sigIntHandler)
    print('Playing back trajectory '+str(len(Ts)) +
          ' states in '+str(Ts[len(Ts)-1]))
    idx = 0  # NOTE(review): unused variable
    while True:
        try:
            # NOTE(review): range(1, len(Ts)-1) skips both the first and the
            # last state of the trajectory — confirm this is intentional.
            for i in range(1, len(Ts)-1):
                publishPose(traj[i], problem, Ts[i])
                sleep(Ts[i]-Ts[i-1])
            if once:
                break
        except KeyboardInterrupt:
            break
def plot(solution):
    """Plot the solution with matplotlib; blocks until the window is closed."""
    print('Plotting the solution')
    plt.plot(solution, '.-')
    plt.show()
| #!/usr/bin/env python
from time import sleep
import matplotlib.pyplot as plt
import signal
trajectoryPlaybackIsShutdown = False
def sigIntHandler(signal, frame):
global trajectoryPlaybackIsShutdown
trajectoryPlaybackIsShutdown = True
raise KeyboardInterrupt
def is_shutdown():
signal.signal(signal.SIGINT, sigIntHandler)
global trajectoryPlaybackIsShutdown
return trajectoryPlaybackIsShutdown
def publishPose(q, problem, t=0.0):
problem.getScene().Update(q, t)
problem.getScene().getSolver().publishFrames()
def publishTrajectory(traj, T, problem):
print('Playing back trajectory '+str(T)+'s')
dt = float(T)/float(len(traj))
t=0
while not is_shutdown():
publishPose(traj[t], problem, float(t)*dt)
sleep(dt)
t=(t+1)%len(traj)
def publishTimeIndexedTrajectory(traj, Ts, problem, once=False):
print('Playing back trajectory '+str(len(Ts))+' states in '+str(Ts[len(Ts)-1]))
idx=0
while not is_shutdown():
for i in range(1, len(Ts)-1):
if not is_shutdown():
publishPose(traj[i], problem, Ts[i])
sleep(Ts[i]-Ts[i-1])
if once:
break
def plot(solution):
print('Plotting the solution')
plt.plot(solution,'.-')
plt.show()
| bsd-3-clause | Python |
f92d054a6ea8db18ef225b75af34aafb045d3492 | Add DOCUMENTS_URL to config | alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,mtekel/digitalmarketplace-admin-frontend,alphagov/digitalmarketplace-admin-frontend | config.py | config.py | import os
import jinja2
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
    """Base configuration; secrets and endpoints come from the environment."""
    DEBUG = True
    S3_DOCUMENT_BUCKET = os.getenv('DM_S3_DOCUMENT_BUCKET')
    # Public asset host for supplier documents; overridden per environment.
    DOCUMENTS_URL = 'https://assets.dev.digitalmarketplace.service.gov.uk'
    API_URL = os.getenv('DM_API_URL')
    API_AUTH_TOKEN = os.getenv('DM_ADMIN_FRONTEND_API_AUTH_TOKEN')
    BASE_TEMPLATE_DATA = {}
    SECRET_KEY = os.getenv('DM_ADMIN_FRONTEND_COOKIE_SECRET')
    PASSWORD_HASH = os.getenv('DM_ADMIN_FRONTEND_PASSWORD_HASH')

    @staticmethod
    def init_app(app):
        """Point the app's Jinja loader at the govuk template layouts and
        the application's own templates directory."""
        repo_root = os.path.abspath(os.path.dirname(__file__))
        template_folders = [
            os.path.join(repo_root,
                         'bower_components/govuk_template/views/layouts'),
            os.path.join(repo_root, 'app/templates')
        ]
        jinja_loader = jinja2.FileSystemLoader(template_folders)
        app.jinja_loader = jinja_loader


class Test(Config):
    """Settings for the test suite: fixed secret key and password hash."""
    DEBUG = True
    AUTHENTICATION = True
    DOCUMENTS_URL = 'https://assets.test.digitalmarketplace.service.gov.uk'
    SECRET_KEY = "test_secret"
    PASSWORD_HASH = "JHA1azIkMjcxMCQwYmZiN2Y5YmJlZmI0YTg4YmNkZjQ1ODY0NWUzOGEwNCRoeDBwbUpHZVhSalREUFBGREFydmJQWnlFYnhWU1g1ag=="  # noqa
    BASE_TEMPLATE_DATA = {
        'asset_path': '/static/',
        'header_class': 'with-proposition'
    }


class Development(Config):
    """Local development settings (inherits the dev DOCUMENTS_URL)."""
    DEBUG = True
    AUTHENTICATION = True
    BASE_TEMPLATE_DATA = {
        'asset_path': '/static/',
        'header_class': 'with-proposition'
    }


class Live(Config):
    """Production settings."""
    DEBUG = False
    AUTHENTICATION = True
    DOCUMENTS_URL = 'https://assets.digitalmarketplace.service.gov.uk'
    BASE_TEMPLATE_DATA = {
        'asset_path': '/static/',
        'header_class': 'with-proposition'
    }


# Map of environment name -> config class used at app creation time.
config = {
    'live': Live,
    'development': Development,
    'test': Test,
    'default': Development
}
| import os
import jinja2
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
DEBUG = True
S3_DOCUMENT_BUCKET = os.getenv('DM_S3_DOCUMENT_BUCKET')
API_URL = os.getenv('DM_API_URL')
API_AUTH_TOKEN = os.getenv('DM_ADMIN_FRONTEND_API_AUTH_TOKEN')
BASE_TEMPLATE_DATA = {}
SECRET_KEY = os.getenv('DM_ADMIN_FRONTEND_COOKIE_SECRET')
PASSWORD_HASH = os.getenv('DM_ADMIN_FRONTEND_PASSWORD_HASH')
@staticmethod
def init_app(app):
repo_root = os.path.abspath(os.path.dirname(__file__))
template_folders = [
os.path.join(repo_root,
'bower_components/govuk_template/views/layouts'),
os.path.join(repo_root, 'app/templates')
]
jinja_loader = jinja2.FileSystemLoader(template_folders)
app.jinja_loader = jinja_loader
class Test(Config):
DEBUG = True
AUTHENTICATION = True
SECRET_KEY = "test_secret"
PASSWORD_HASH = "JHA1azIkMjcxMCQwYmZiN2Y5YmJlZmI0YTg4YmNkZjQ1ODY0NWUzOGEwNCRoeDBwbUpHZVhSalREUFBGREFydmJQWnlFYnhWU1g1ag==" # noqa
BASE_TEMPLATE_DATA = {
'asset_path': '/static/',
'header_class': 'with-proposition'
}
class Development(Config):
DEBUG = True
AUTHENTICATION = True
BASE_TEMPLATE_DATA = {
'asset_path': '/static/',
'header_class': 'with-proposition'
}
class Live(Config):
DEBUG = False
AUTHENTICATION = True
BASE_TEMPLATE_DATA = {
'asset_path': '/static/',
'header_class': 'with-proposition'
}
config = {
'live': Live,
'development': Development,
'test': Test,
'default': Development
}
| mit | Python |
0cb45bbc1c7b6b5f1a2722e85159b97c8a555e0c | Update the docblock of the example | ets-labs/dependency_injector,rmk135/dependency_injector,ets-labs/python-dependency-injector,rmk135/objects | examples/providers/factory_deep_init_injections.py | examples/providers/factory_deep_init_injections.py | """`Factory` providers - building a complex object graph with deep init injections example."""
from dependency_injector import providers
class Regularizer:
    """Regularization term parameterized by its strength ``alpha``."""

    def __init__(self, alpha):
        self.alpha = alpha


class Loss:
    """Loss function built on top of a regularizer."""

    def __init__(self, regularizer):
        self.regularizer = regularizer


class ClassificationTask:
    """Classification task driven by a loss function."""

    def __init__(self, loss):
        self.loss = loss


class Algorithm:
    """Top-level algorithm configured with a task."""

    def __init__(self, task):
        self.task = task
# Declarative factory graph: each nested Factory builds one dependency of the
# object above it (Algorithm -> ClassificationTask -> Loss -> Regularizer).
algorithm_factory = providers.Factory(
    Algorithm,
    task=providers.Factory(
        ClassificationTask,
        loss=providers.Factory(
            Loss,
            regularizer=providers.Factory(
                Regularizer,
            ),
        ),
    ),
)


if __name__ == '__main__':
    # Deeply nested init arguments are addressed with double-underscore paths.
    algorithm_1 = algorithm_factory(task__loss__regularizer__alpha=0.5)
    assert algorithm_1.task.loss.regularizer.alpha == 0.5

    algorithm_2 = algorithm_factory(task__loss__regularizer__alpha=0.7)
    assert algorithm_2.task.loss.regularizer.alpha == 0.7

    # A ready-made instance can also be injected in place of a nested factory.
    algorithm_3 = algorithm_factory(task__loss__regularizer=Regularizer(alpha=0.8))
    assert algorithm_3.task.loss.regularizer.alpha == 0.8
| """`Factory` providers deep init injections example."""
from dependency_injector import providers
class Regularizer:
def __init__(self, alpha):
self.alpha = alpha
class Loss:
def __init__(self, regularizer):
self.regularizer = regularizer
class ClassificationTask:
def __init__(self, loss):
self.loss = loss
class Algorithm:
def __init__(self, task):
self.task = task
algorithm_factory = providers.Factory(
Algorithm,
task=providers.Factory(
ClassificationTask,
loss=providers.Factory(
Loss,
regularizer=providers.Factory(
Regularizer,
),
),
),
)
if __name__ == '__main__':
algorithm_1 = algorithm_factory(task__loss__regularizer__alpha=0.5)
assert algorithm_1.task.loss.regularizer.alpha == 0.5
algorithm_2 = algorithm_factory(task__loss__regularizer__alpha=0.7)
assert algorithm_2.task.loss.regularizer.alpha == 0.7
algorithm_3 = algorithm_factory(task__loss__regularizer=Regularizer(alpha=0.8))
assert algorithm_3.task.loss.regularizer.alpha == 0.8
| bsd-3-clause | Python |
b98839de990e3c4b1cd1a5730df512c2f9bc472a | add test models | LegoStormtroopr/django-spaghetti-and-meatballs,LegoStormtroopr/django-spaghetti-and-meatballs | django_spaghetti/tests/models.py | django_spaghetti/tests/models.py | from django.db import models
class PoliceOfficer(models.Model):
    """
    An officer of the NYPD
    """
    # NOTE(review): ``max_length`` is not a valid IntegerField option; Django
    # ignores it here.
    badge_number = models.IntegerField(max_length=10, primary_key=True)
    first_name = models.CharField(max_length=200)
    surname = models.CharField(max_length=200)
    rank = models.CharField(max_length=200)
    arrests = models.ManyToManyField("Arrest",related_name="arresting_officers")


# NOTE(review): the base class ``PoliceStation`` is not defined anywhere in
# this file (a previous revision declared ``class PoliceStation(models.Model):
# pass``), so importing this module raises NameError.  Either restore the base
# model or derive Precinct/Division from models.Model.
class Precinct(PoliceStation):
    number = models.IntegerField(max_length=10)
    burrough = models.CharField(max_length=20)
    captain = models.OneToOneField(PoliceOfficer)
    # NOTE(review): a ForeignKey here means each Precinct references a single
    # officer; to model "a precinct has many officers" the FK belongs on
    # PoliceOfficer pointing at the station instead.
    officers = models.ForeignKey("PoliceOfficer",related_name="precinct")

    class Meta:
        unique_together = ("burrough","number")

    def natural_key(self):
        """Natural key used by serialization: (burrough, number)."""
        return (self.burrough,self.number)


class Division(PoliceStation):
    name = models.CharField(max_length=200)
    officers = models.ForeignKey("PoliceOfficer",related_name="division")


class Arrest(models.Model):
    alleged_crime = models.CharField(max_length=20)
    perp = models.ForeignKey("Perpetrator")
    arrest_date = models.DateField()
    processing_date = models.DateField()


class Perpetrator(models.Model):
    first_name = models.CharField(max_length=200)
    surname = models.CharField(max_length=200)
    birth_date = models.DateField()
| from django.db import models
class PoliceOfficer(models.Model):
"""
An officer of the NYPD
"""
badge_number = models.IntegerField(max_length=10, primary_key=True)
first_name = models.CharField(max_length=200)
surname = models.CharField(max_length=200)
rank = models.CharField(max_length=200)
arrests = models.ManyToManyField("Arrest",related_name="arresting_officers")
station = models.ForeignKey("PoliceStation",related_name="officers")
class PoliceStation(models.Model):
pass
class Precinct(PoliceStation):
number = models.IntegerField(max_length=10, primary_key=True)
burrough = models.CharField(max_length=20)
captain = models.OneToOneField(PoliceOfficer)
class Meta:
unique_together = ("burrough","number")
def natural_key(self):
return (self.burrough,self.number)
class Division(PoliceStation):
name = models.CharField(max_length=200)
class Arrest(models.Model):
alleged_crime = models.CharField(max_length=20)
perp = models.ForeignKey("Perpetrator")
class Perpetrator(models.Model):
first_name = models.CharField(max_length=200)
surname = models.CharField(max_length=200)
| mit | Python |
2e69a9d7512faed13d876d4f58375fbb454e7fcb | test to get_group method use of mock | Jarsa/addons-jarsa,odoo-jarsa/addons-jarsa,odoo-jarsa/addons-jarsa,Jarsa/addons-jarsa,Jarsa-dev/addons-jarsa,Jarsa-dev/addons-jarsa | connector_cva/tests/test_cva_config_settings.py | connector_cva/tests/test_cva_config_settings.py | # -*- coding: utf-8 -*-
# © <2016> <Jarsa Sistemas, S.A. de C.V.>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp.tests.common import TransactionCase
from mock import MagicMock
from lxml import etree
class TestCvaConfigSettings(TransactionCase):
    """
    This will test model cva config settings
    """

    def setUp(self):
        """
        Define global variables
        """
        super(TestCvaConfigSettings, self).setUp()
        self.cva = self.env['cva.config.settings']

    def test_10_cva_config_settings_get_products(self):
        # '40762' is used as the account name — presumably a CVA test client
        # number; verify against the connector fixtures.
        cva = self.cva.create({
            'name': '40762',
            'allowed_groups': [(0, 0, {'name': 'AIRE ACONDICIONADO'})],
        })
        cva.execute()
        # NOTE(review): smoke test only — no assertions on the result.
        cva.get_products()

    def test_20_cva_config_settings_get_groups(self):
        # Canned CVA XML response so get_groups() can be exercised without a
        # network round-trip.
        xml = ('<articulos><item><grupo>BACK PACK (MOCHILA)</grupo></item>'
               '</articulos>')
        cva = self.cva.create({
            'name': '40762',
            'main_location': self.env.ref('connector_cva.loc_torreon').id})
        cva.execute()
        # Stub the remote call and return the canned XML tree instead.
        cva.connect_cva = MagicMock()
        cva.connect_cva.return_value = etree.XML(xml)
        cva.get_groups()
| # -*- coding: utf-8 -*-
# © <2016> <Jarsa Sistemas, S.A. de C.V.>
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from openerp.tests.common import TransactionCase
class TestCvaConfigSettings(TransactionCase):
"""
This will test model cva config settings
"""
def setUp(self):
"""
Define global variables
"""
super(TestCvaConfigSettings, self).setUp()
def test_10_cva_config_settings_get_products(self):
cva_obj = self.env['cva.config.settings']
cva = cva_obj.create({
'name': '40762',
'allowed_groups': [(0, 0, {'name': 'AIRE ACONDICIONADO'})],
})
cva.execute()
cva.get_products()
| agpl-3.0 | Python |
e394d8b0550c0398d96b01ff3d49bb1e7b692908 | Bump version | thombashi/DateTimeRange | datetimerange/__version__.py | datetimerange/__version__.py | # encoding: utf-8
from datetime import datetime
# Package metadata; the copyright range extends to the current year at
# import time.
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.6.1"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| # encoding: utf-8
from datetime import datetime
__author__ = "Tsuyoshi Hombashi"
__copyright__ = "Copyright 2016-{}, {}".format(datetime.now().year, __author__)
__license__ = "MIT License"
__version__ = "0.6.0"
__maintainer__ = __author__
__email__ = "tsuyoshi.hombashi@gmail.com"
| mit | Python |
945a90c7e1ad379b8310382ce7ceb61cf79f1e1d | make config.py output directly applicable | b3yond/ticketfrei,b3yond/ticketfrei,b3yond/ticketfrei | config.py | config.py | import pytoml as toml
import os
def load_env():
    """
    load environment variables from the environment. If empty, use default
    values from config.toml.example.

    :return: config dictionary of dictionaries.
    """
    with open('config.toml.example') as defaultconf:
        configdict = toml.load(defaultconf)

    # (section, key, environment variable) triples: each env var, when set,
    # overrides the corresponding default from config.toml.example.  This
    # replaces seven identical copy-pasted try/except blocks.
    overrides = (
        ('twitter', 'consumer_key', 'CONSUMER_KEY'),
        ('twitter', 'consumer_secret', 'CONSUMER_SECRET'),
        ('web', 'host', 'HOST'),
        ('web', 'port', 'PORT'),
        ('web', 'contact', 'CONTACT'),
        ('mail', 'mbox_user', 'MBOX_USER'),
        ('database', 'db_path', 'DB_PATH'),
    )
    for section, key, envvar in overrides:
        try:
            configdict[section][key] = os.environ[envvar]
        except KeyError:
            # Env var unset (or section missing from the defaults): keep the
            # default, exactly as the previous per-key handlers did.
            pass
    return configdict
# read config in TOML format (https://github.com/toml-lang/toml#toml)
try:
    with open('config.toml') as configfile:
        config = toml.load(configfile)
except FileNotFoundError:
    # No config.toml present: fall back to the defaults from
    # config.toml.example plus any environment-variable overrides.
    config = load_env()

if __name__ == "__main__":
    # Dump the flattened config as KEY=value lines, directly usable as an
    # environment file.
    for category in config:
        for key in config[category]:
            print(key + "=" + str(config[category][key]))
| import pytoml as toml
import os
def load_env():
"""
load environment variables from the environment. If empty, use default
values from config.toml.example.
:return: config dictionary of dictionaries.
"""
with open('config.toml.example') as defaultconf:
configdict = toml.load(defaultconf)
try:
configdict['twitter']['consumer_key'] = os.environ['CONSUMER_KEY']
except KeyError:
pass
try:
configdict['twitter']['consumer_secret'] = os.environ['CONSUMER_SECRET']
except KeyError:
pass
try:
configdict['web']['host'] = os.environ['HOST']
except KeyError:
pass
try:
configdict['web']['port'] = os.environ['PORT']
except KeyError:
pass
try:
configdict['web']['contact'] = os.environ['CONTACT']
except KeyError:
pass
try:
configdict['mail']['mbox_user'] = os.environ['MBOX_USER']
except KeyError:
pass
try:
configdict['database']['db_path'] = os.environ['DB_PATH']
except KeyError:
pass
return configdict
# read config in TOML format (https://github.com/toml-lang/toml#toml)
try:
with open('config.toml') as configfile:
config = toml.load(configfile)
except FileNotFoundError:
config = load_env()
if __name__ == "__main__":
for category in config:
for key in config[category]:
print(key + " = " + str(config[category][key]))
| isc | Python |
83d0f621daa538524f381408081a86fcd7412573 | Improve YAML examples - windows/win_iis_webapplication.py (#19402) | thaim/ansible,thaim/ansible | lib/ansible/modules/windows/win_iis_webapplication.py | lib/ansible/modules/windows/win_iis_webapplication.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Henrik Wallström <henrik@wallstroms.nu>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Module maturity metadata consumed by Ansible's documentation tooling.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'version': '1.0'}
# Module documentation rendered by ansible-doc.  Typos fixed relative to the
# previous revision: "applicatio" -> "application", "applicatiojn" ->
# "application", "a IIS" -> "an IIS", singular/plural agreement.
DOCUMENTATION = '''
---
module: win_iis_webapplication
version_added: "2.0"
short_description: Configures an IIS Web application.
description:
     - Creates, removes and configures an IIS Web application
options:
  name:
    description:
      - Name of the Web application
    required: true
    default: null
    aliases: []
  site:
    description:
      - Name of the site on which the application is created.
    required: true
    default: null
    aliases: []
  state:
    description:
      - State of the web application
    choices:
      - present
      - absent
    required: false
    default: null
    aliases: []
  physical_path:
    description:
      - The physical path on the remote host to use for the new application. The specified folder must already exist.
    required: false
    default: null
    aliases: []
  application_pool:
    description:
      - The application pool in which the new site executes.
    required: false
    default: null
    aliases: []
author: Henrik Wallström
'''
EXAMPLES = r'''
- name: Add ACME webapplication on IIS
win_iis_webapplication:
name: api
site: acme
state: present
physical_path: C:\apps\acme\api
'''
| #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Henrik Wallström <henrik@wallstroms.nu>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: win_iis_webapplication
version_added: "2.0"
short_description: Configures a IIS Web application.
description:
- Creates, Removes and configures a IIS Web applications
options:
name:
description:
- Name of the Web applicatio
required: true
default: null
aliases: []
site:
description:
- Name of the site on which the application is created.
required: true
default: null
aliases: []
state:
description:
- State of the web application
choices:
- present
- absent
required: false
default: null
aliases: []
physical_path:
description:
- The physical path on the remote host to use for the new applicatiojn. The specified folder must already exist.
required: false
default: null
aliases: []
application_pool:
description:
- The application pool in which the new site executes.
required: false
default: null
aliases: []
author: Henrik Wallström
'''
EXAMPLES = '''
$ ansible -i hosts -m win_iis_webapplication -a "name=api site=acme physical_path=c:\\apps\\acme\\api" host
'''
| mit | Python |
913ecdd419943af603f4556e4f44c5b59df17462 | Improve plan filters | globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service | dbaas/physical/admin/plan.py | dbaas/physical/admin/plan.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.contrib import admin
from django_services import admin as services_admin
from ..service.plan import PlanService
from ..models import PlanAttribute
from dbaas_cloudstack.models import PlanAttr
from dbaas_nfsaas.models import PlanAttr as PlanAttrNfsaas
from dbaas_dnsapi.models import PlanAttr as PlanAttrDNSAPI
from .. import forms
class PlanAttributeInline(admin.TabularInline):
    """Inline editor for a plan's generic key/value attributes."""
    model = PlanAttribute
    formset = forms.PlanAttributeInlineFormset


class PlanAttrInline(admin.StackedInline):
    """CloudStack-specific plan attributes (exactly one record per plan)."""
    model = PlanAttr
    max_num = 1
    template = 'admin/physical/shared/inline_form.html'

    def has_delete_permission(self, request, obj=None):
        # The attribute record must always exist alongside the plan.
        return False


class PlanAttrNfsaasInline(admin.StackedInline):
    """NFS-as-a-service plan attributes (exactly one record per plan)."""
    model = PlanAttrNfsaas
    max_num = 1
    template = 'admin/physical/shared/inline_form.html'

    def has_delete_permission(self, request, obj=None):
        return False


class PlanAttrDNSAPIInline(admin.StackedInline):
    """DNS API plan attributes (exactly one record per plan)."""
    model = PlanAttrDNSAPI
    max_num = 1
    template = 'admin/physical/shared/inline_form.html'

    def has_delete_permission(self, request, obj=None):
        return False


class PlanAdmin(services_admin.DjangoServicesAdmin):
    """Admin for database plans with all provider attribute inlines attached."""
    form = forms.PlanForm
    service_class = PlanService
    save_on_top = True
    search_fields = ["name"]
    # Filter sidebar: active flag plus engine/environment/HA facets.
    list_filter = ("is_active", "engine", "environments", "is_ha")
    list_display = ("name", "engine", "environment",
                    "is_active", "is_default", "provider", "is_ha")
    filter_horizontal = ("environments",)
    inlines = [
        PlanAttributeInline,
        PlanAttrInline,
        PlanAttrNfsaasInline,
        PlanAttrDNSAPIInline,
    ]
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.contrib import admin
from django_services import admin as services_admin
from ..service.plan import PlanService
from ..models import PlanAttribute
from dbaas_cloudstack.models import PlanAttr
from dbaas_nfsaas.models import PlanAttr as PlanAttrNfsaas
from dbaas_dnsapi.models import PlanAttr as PlanAttrDNSAPI
from .. import forms
class PlanAttributeInline(admin.TabularInline):
model = PlanAttribute
formset = forms.PlanAttributeInlineFormset
class PlanAttrInline(admin.StackedInline):
model = PlanAttr
max_num = 1
template = 'admin/physical/shared/inline_form.html'
def has_delete_permission(self, request, obj=None):
return False
class PlanAttrNfsaasInline(admin.StackedInline):
model = PlanAttrNfsaas
max_num = 1
template = 'admin/physical/shared/inline_form.html'
def has_delete_permission(self, request, obj=None):
return False
class PlanAttrDNSAPIInline(admin.StackedInline):
model = PlanAttrDNSAPI
max_num = 1
template = 'admin/physical/shared/inline_form.html'
def has_delete_permission(self, request, obj=None):
return False
class PlanAdmin(services_admin.DjangoServicesAdmin):
form = forms.PlanForm
service_class = PlanService
save_on_top = True
search_fields = ["name"]
list_filter = ("is_active", )
list_display = ("name", "engine", "environment",
"is_active", "is_default", "provider", "is_ha")
filter_horizontal = ("environments",)
inlines = [
PlanAttributeInline,
PlanAttrInline,
PlanAttrNfsaasInline,
PlanAttrDNSAPIInline,
]
| bsd-3-clause | Python |
a4603e1233aaf816d68c10376799679ae36b4e42 | use string format to generate cache key | jairhenrique/django-elephant | elephant/keys.py | elephant/keys.py | # -*- coding: utf-8 -*-
from collections import OrderedDict
import inspect
def _namespace(obj):
module = obj.__module__ or __name__
if hasattr(obj, '__qualname__'):
name = obj.__qualname__
return '.'.join((module, name))
klass = getattr(obj, '__self__', None)
if klass and not inspect.isclass(klass):
klass = klass.__class__
if not klass:
klass = getattr(obj, 'im_class', None)
if klass:
name = '{}.{}'.format(
klass.__name__,
obj.__name__
)
else:
name = obj.__name__
return '.'.join((module, name))
def generic(obj, *args, **kwargs):
_args = None
_kwargs = None
generic_key = _namespace(obj)
if args:
_args = '.'.join(map(str, args))
generic_key = '{}.{}'.format(
generic_key,
_args
)
if kwargs:
kwargs = OrderedDict(sorted(kwargs.items(), key=lambda t: t[0]))
_kwargs = '.'.join(
['_'.join(map(str, item)) for item in kwargs.items()]
)
generic_key = '{}.{}'.format(
generic_key,
_kwargs
)
return generic_key
| # -*- coding: utf-8 -*-
from collections import OrderedDict
import inspect
def _namespace(obj):
module = obj.__module__ or __name__
if hasattr(obj, '__qualname__'):
name = obj.__qualname__
return '.'.join((module, name))
klass = getattr(obj, '__self__', None)
if klass and not inspect.isclass(klass):
klass = klass.__class__
if not klass:
klass = getattr(obj, 'im_class', None)
if klass:
name = klass.__name__ + '.' + obj.__name__
else:
name = obj.__name__
return '.'.join((module, name))
def generic(obj, *args, **kwargs):
_args = None
_kwargs = None
generic_key = _namespace(obj)
if args:
_args = '.'.join(map(str, args))
generic_key = generic_key + '.' + _args
if kwargs:
kwargs = OrderedDict(sorted(kwargs.items(), key=lambda t: t[0]))
_kwargs = '.'.join(
['_'.join(map(str, item)) for item in kwargs.items()]
)
generic_key = generic_key + '.' + _kwargs
return generic_key
| bsd-2-clause | Python |
465c36fddb0727622a901ead3a0c936aa13b56ce | Disable CSRF_TOKEN in config.py | lasa/website,lasa/website,lasa/website | config.py | config.py | import os
# Absolute path of the directory containing this config file.
basedir = os.path.abspath(os.path.dirname(__file__))

# NOTE(review): disabling CSRF protection app-wide removes a core security
# safeguard; confirm this is intentional outside of automated testing.
WTF_CSRF_ENABLED = False

SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
| import os
basedir = os.path.abspath(os.path.dirname(__file__))
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, 'app.db')
SQLALCHEMY_MIGRATE_REPO = os.path.join(basedir, 'db_repository')
| agpl-3.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.