commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
1af1a7acface58cd8a6df5671d83b0a1a3ad4f3e | Set the tiddlywebwiki binary limit to 1MB. | FND/tiddlyspace,FND/tiddlyspace,TiddlySpace/tiddlyspace,FND/tiddlyspace,TiddlySpace/tiddlyspace,TiddlySpace/tiddlyspace | tiddlywebplugins/tiddlyspace/config.py | tiddlywebplugins/tiddlyspace/config.py | """
Base configuration for TiddlySpace.
This provides the basics which may be changed in tidlywebconfig.py.
"""
from tiddlywebplugins.instancer.util import get_tiddler_locations
from tiddlywebplugins.tiddlyspace.instance import store_contents
PACKAGE_NAME = 'tiddlywebplugins.tiddlyspace'
config = {
'instance_tiddlers': get_tiddler_locations(store_contents, PACKAGE_NAME),
'atom.default_filter': 'select=tag:!excludeLists;sort=-modified;limit=20',
'auth_systems': ['cookie_form', 'tiddlywebplugins.tiddlyspace.openid'],
'bag_create_policy': 'ANY',
'recipe_create_policy': 'ANY',
'css_uri': '/bags/common/tiddlers/tiddlyweb.css',
'socialusers.reserved_names': ['www', 'about', 'help', 'announcements',
'dev', 'info', 'api', 'status', 'login', 'frontpage'],
'cookie_age': '2592000', # 1 month
'server_store': ['tiddlywebplugins.mysql', {
'db_config': 'mysql:///tiddlyspace?charset=utf8&use_unicode=0'}],
'indexer': 'tiddlywebplugins.mysql',
'tiddlywebwiki.binary_limit': 1048576, # 1MB
}
| """
Base configuration for TiddlySpace.
This provides the basics which may be changed in tidlywebconfig.py.
"""
from tiddlywebplugins.instancer.util import get_tiddler_locations
from tiddlywebplugins.tiddlyspace.instance import store_contents
PACKAGE_NAME = 'tiddlywebplugins.tiddlyspace'
config = {
'instance_tiddlers': get_tiddler_locations(store_contents, PACKAGE_NAME),
'atom.default_filter': 'select=tag:!excludeLists;sort=-modified;limit=20',
'auth_systems': ['cookie_form', 'tiddlywebplugins.tiddlyspace.openid'],
'bag_create_policy': 'ANY',
'recipe_create_policy': 'ANY',
'css_uri': '/bags/common/tiddlers/tiddlyweb.css',
'socialusers.reserved_names': ['www', 'about', 'help', 'announcements',
'dev', 'info', 'api', 'status', 'login', 'frontpage'],
'cookie_age': '2592000', # 1 month
'server_store': ['tiddlywebplugins.mysql', {
'db_config': 'mysql:///tiddlyspace?charset=utf8&use_unicode=0'}],
'indexer': 'tiddlywebplugins.mysql',
}
| bsd-3-clause | Python |
d3591104c0300216bf4c91c23e7befb7d152086a | Update hoomd/pytest/test_dcd.py | joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue | hoomd/pytest/test_dcd.py | hoomd/pytest/test_dcd.py | import hoomd
import pytest
import numpy as np
def test_attach(simulation_factory, two_particle_snapshot_factory, tmp_path):
d = tmp_path / "sub"
d.mkdir()
filename = d / "temporary_test_file.dcd"
sim = simulation_factory(two_particle_snapshot_factory())
dcd_dump = hoomd.write.DCD(filename, hoomd.trigger.Periodic(1))
sim.operations.add(dcd_dump)
sim.run(10)
def test_set_period(tmp_path):
d = tmp_path / "sub"
d.mkdir()
filename = d / "temporary_test_file.dcd"
dcd_dump = hoomd.write.DCD(filename, hoomd.trigger.Periodic(1))
with pytest.raises(RuntimeError):
dcd_dump.set_period(1)
def test_enabled(tmp_path):
d = tmp_path / "sub"
d.mkdir()
filename = d / "temporary_test_file.dcd"
dcd_dump = hoomd.write.DCD(filename, hoomd.trigger.Periodic(1))
dcd_dump.enabled = False
with pytest.raises(RuntimeError):
dcd_dump.enable()
def test_write(simulation_factory, two_particle_snapshot_factory, tmp_path):
garnett = pytest.importorskip("garnett")
dcd_reader = garnett.reader.DCDFileReader()
d = tmp_path / "sub"
d.mkdir()
filename = d / "temporary_test_file.dcd"
sim = simulation_factory(two_particle_snapshot_factory())
dcd_dump = hoomd.write.DCD(filename, hoomd.trigger.Periodic(1))
sim.operations.add(dcd_dump)
sim.operations._schedule()
snap = sim.state.snapshot
positions = []
for i in range(10):
snap = sim.state.snapshot
position1 = np.asarray(snap.particles.position[0])
position2 = np.asarray(snap.particles.position[1])
position1 += 0.1 * i * (-1)**i
position2 += 0.1 * (i + 1) * (-1)**(i - 1)
if snap.exists:
snap.particles.position[0] = position1
snap.particles.position[1] = position2
sim.state.snapshot = snap
sim.run(1)
positions.append([list(position1), list(position2)])
with open(filename, 'rb') as dcdfile:
traj = dcd_reader.read(dcdfile)
traj.load()
for i in range(len(traj)):
for j in [0, 1]:
np.testing.assert_allclose(traj[i].position[j], positions[i][j])
| import hoomd
import pytest
import numpy as np
def test_attach(simulation_factory, two_particle_snapshot_factory, tmp_path):
d = tmp_path / "sub"
d.mkdir()
filename = d / "temporary_test_file.dcd"
sim = simulation_factory(two_particle_snapshot_factory())
dcd_dump = hoomd.write.DCD(filename, hoomd.trigger.Periodic(1))
sim.operations.add(dcd_dump)
sim.operations._schedule()
for i in range(10):
sim.run(1)
def test_set_period(tmp_path):
d = tmp_path / "sub"
d.mkdir()
filename = d / "temporary_test_file.dcd"
dcd_dump = hoomd.write.DCD(filename, hoomd.trigger.Periodic(1))
with pytest.raises(RuntimeError):
dcd_dump.set_period(1)
def test_enabled(tmp_path):
d = tmp_path / "sub"
d.mkdir()
filename = d / "temporary_test_file.dcd"
dcd_dump = hoomd.write.DCD(filename, hoomd.trigger.Periodic(1))
dcd_dump.enabled = False
with pytest.raises(RuntimeError):
dcd_dump.enable()
def test_write(simulation_factory, two_particle_snapshot_factory, tmp_path):
garnett = pytest.importorskip("garnett")
dcd_reader = garnett.reader.DCDFileReader()
d = tmp_path / "sub"
d.mkdir()
filename = d / "temporary_test_file.dcd"
sim = simulation_factory(two_particle_snapshot_factory())
dcd_dump = hoomd.write.DCD(filename, hoomd.trigger.Periodic(1))
sim.operations.add(dcd_dump)
sim.operations._schedule()
snap = sim.state.snapshot
positions = []
for i in range(10):
snap = sim.state.snapshot
position1 = np.asarray(snap.particles.position[0])
position2 = np.asarray(snap.particles.position[1])
position1 += 0.1 * i * (-1)**i
position2 += 0.1 * (i + 1) * (-1)**(i - 1)
if snap.exists:
snap.particles.position[0] = position1
snap.particles.position[1] = position2
sim.state.snapshot = snap
sim.run(1)
positions.append([list(position1), list(position2)])
with open(filename, 'rb') as dcdfile:
traj = dcd_reader.read(dcdfile)
traj.load()
for i in range(len(traj)):
for j in [0, 1]:
np.testing.assert_allclose(traj[i].position[j], positions[i][j])
| bsd-3-clause | Python |
17e14c82c84dda4cbe6ad76056c53bbce918da55 | use super | SiLab-Bonn/basil,MarcoVogt/basil,SiLab-Bonn/basil | host/TL/TransferLayer.py | host/TL/TransferLayer.py | #
# ------------------------------------------------------------
# Copyright (c) SILAB , Physics Institute of Bonn University
# ------------------------------------------------------------
#
# SVN revision information:
# $Rev:: $:
# $Author:: $:
# $Date:: $:
#
from pydaq import Base
class TransferLayer(Base):
'''Transfer Layer
'''
def __init__(self, conf):
super(TransferLayer, self).__init__(conf)
def read(self, addr, size):
pass
def write(self, addr, data):
pass
def init(self):
pass
| #
# ------------------------------------------------------------
# Copyright (c) SILAB , Physics Institute of Bonn University
# ------------------------------------------------------------
#
# SVN revision information:
# $Rev:: $:
# $Author:: $:
# $Date:: $:
#
from pydaq import Base
class TransferLayer(Base):
def __init__(self, conf):
Base.__init__(self, conf)
def read(self, addr, size):
pass
def write(self, addr, data):
pass
def init(self):
pass
| bsd-3-clause | Python |
18f29b2b1a99614b09591df4a60c1670c845aa9b | Add first set of exercises. | Baumelbi/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,Baumelbi/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016,weidnem/IntroPython2016,UWPCE-PythonCert/IntroPython2016 | students/crobison/session04/dict_lab.py | students/crobison/session04/dict_lab.py | # Charles Robison
# 2016.10.18
# Dictionary and Set Lab
# Create a dictionary containing “name”, “city”, and “cake”
# for “Chris” from “Seattle” who likes “Chocolate”.
d = {'name': 'Chris', 'city': 'Seattle', 'cake': 'Chocolate'}
#Display the dictionary.
d
# Delete the entry for “cake”.
del d['cake']
# Display the dictionary.
d
# Add an entry for “fruit” with “Mango” and display the dictionary.
d['fruit'] = 'Mango'
d
# Display the dictionary keys.
d.keys()
# Display the dictionary values.
d.values()
# Display whether or not “cake” is a key in the dictionary
# (i.e. False) (now).
'cake' in d
# Display whether or not “Mango” is a value in the dictionary
# (i.e. True).
'Mango' in d.values()
# Using the dictionary from item 1: Make a dictionary using
# the same keys but with the number of ‘t’s in each value.
# Create sets s2, s3 and s4 that contain numbers from zero through
# twenty, divisible 2, 3 and 4.
# Display the sets.
# Display if s3 is a subset of s2 (False)
# and if s4 is a subset of s2 (True).
# Create a set with the letters in ‘Python’ and add ‘i’ to the set.
# Create a frozenset with the letters in ‘marathon’
# display the union and intersection of the two sets.
| # Charles Robison
# 2016.10.18
# Dictionary and Set Lab
# Create a dictionary containing “name”, “city”, and “cake”
# for “Chris” from “Seattle” who likes “Chocolate”.
d = {'name': 'Chris', 'city': 'Seattle', 'cake': 'Chocolate'}
| unlicense | Python |
1314a5b99f64f96deee423e5189381f332c4419c | add fluct_limit to signal dividend | dyno/LMK,dyno/LMK | ATRCalculator.py | ATRCalculator.py | import log
class ATRCalculator(object):
def __init__(self, atr_period, fluct_limit=0.2):
self.atr_period = atr_period
self.tr_list = []
self.last_tick = None
self.atr = None
self.fluct_limit = fluct_limit
def __call__(self, tick):
# if not self.last_tick:
# => ValueError: 'The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()'
HL = tick["High"] - tick["Low"]
if not self.last_tick is None:
HCp = abs(tick["High"] - self.last_tick["Close"])
LCp = abs(tick["Low"] - self.last_tick["Close"])
tr = max(HL, HCp, LCp)
# stock devidend
if self.fluct_limit > 0:
if tr / self.last_tick["Close"] > self.fluct_limit:
log.logger.debug("%s: %.2f(tr) / %.2f(close) = %.2f > self.fluct_limit",
repr(tick), tr, self.last_tick["Close"], tr / self.last_tick["Close"])
self.tr_list = []
tr = HL
else:
tr = HL
self.last_tick = tick.copy()
if len(self.tr_list) < self.atr_period:
if tr != 0.0:
self.tr_list.append(tr)
self.atr = sum(self.tr_list) / len(self.tr_list)
else:
#self.atr = (self.atr * (atr_period - 1) + self.tr) / atr_period
self.atr += (tr - self.atr) / self.atr_period
return self.atr
if __name__ == "__main__":
from common import probe_proxy
from stock import Stock
probe_proxy()
log.init()
# http://stockcharts.com/help/doku.php?id=chart_school:technical_indicators:average_true_range_a
# stk = Stock("QQQ")
# stk.retrieve_history(start="2010/4/1", use_cache=False, no_volume=True)
# history = stk.history
# c = ATRCalculator(atr_period=14)
# history["ATR"] = history.apply(c, axis=1)
# print history["ATR"].loc["2010-4-21":]
stk = Stock("300027.SZ")
stk.retrieve_history(start="2013/1/1", use_cache=False, no_volume=False)
history = stk.history
c = ATRCalculator(atr_period=14, fluct_limit=0.2)
history["ATR"] = history.apply(c, axis=1)
print history["ATR"]
| class ATRCalculator(object):
def __init__(self, atr_period):
self.atr_period = atr_period
self.tr_list = []
self.last_tick = None
self.atr = None
def __call__(self, tick):
# if not self.last_tick:
# => ValueError: 'The truth value of an array with more than one element is ambiguous. Use a.any() or a.all()'
if not self.last_tick is None:
HL = tick["High"] - tick["Low"]
HCp = abs(tick["High"] - self.last_tick["Close"])
LCp = abs(tick["Low"] - self.last_tick["Close"])
tr = max(HL, HCp, LCp)
else:
tr = tick["High"] - tick["Low"]
self.last_tick = tick.copy()
if len(self.tr_list) < self.atr_period:
if tr != 0.0:
self.tr_list.append(tr)
self.atr = sum(self.tr_list) / len(self.tr_list)
else:
#self.atr = (self.atr * (atr_period - 1) + self.tr) / atr_period
self.atr += (tr - self.atr) / self.atr_period
return self.atr
if __name__ == "__main__":
from pandas.io.data import DataReader
# http://stockcharts.com/help/doku.php?id=chart_school:technical_indicators:average_true_range_a
history = DataReader("QQQ", "yahoo", start="2010/4/1", end="2010/5/13")
c = ATRCalculator(atr_period=14)
history["ATR"] = history.apply(c, axis=1)
print history["ATR"].loc["2010-4-21":]
| mit | Python |
c9b6387702baee3da0a3bca8b302b619e69893f7 | Customize a ChunkList just for IFF chunks | gulopine/steel | steel/chunks/iff.py | steel/chunks/iff.py | import collections
import io
from steel.fields.numbers import BigEndian
from steel import fields
from steel.chunks import base
__all__ = ['Chunk', 'ChunkList', 'Form']
class Chunk(base.Chunk):
id = fields.String(size=4, encoding='ascii')
size = fields.Integer(size=4, endianness=BigEndian)
payload = base.Payload(size=size)
class ChunkList(base.ChunkList):
def __init__(self, *args, **kwargs):
# Just a simple override to default to a list of IFF chunks
return super(ChunkList, self).__init__(Chunk, *args, **kwargs)
class Form(base.Chunk, encoding='ascii'):
tag = fields.FixedString('FORM')
size = fields.Integer(size=4, endianness=BigEndian)
id = fields.String(size=4)
payload = base.Payload(size=size)
| import collections
import io
from steel.fields.numbers import BigEndian
from steel import fields
from steel.chunks import base
__all__ = ['Chunk', 'Form']
class Chunk(base.Chunk):
id = fields.String(size=4, encoding='ascii')
size = fields.Integer(size=4, endianness=BigEndian)
payload = base.Payload(size=size)
class Form(base.Chunk, encoding='ascii'):
tag = fields.FixedString('FORM')
size = fields.Integer(size=4, endianness=BigEndian)
id = fields.String(size=4)
payload = base.Payload(size=size)
| bsd-3-clause | Python |
2c991dd13e0d8b5242e3e46cb4a782074ad46bed | Remove commented code | knyghty/strapmin,knyghty/strapmin,knyghty/strapmin | strapmin/widgets.py | strapmin/widgets.py | from django import forms
from django.forms.util import flatatt
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.utils.safestring import mark_safe
class RichTextEditorWidget(forms.Textarea):
class Media:
js = ('admin/js/ckeditor/ckeditor.js',
'admin/js/ckeditor/jquery-ckeditor.js')
def render(self, name, value, attrs={}):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('ckeditor/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': force_text(value),
'id': final_attrs['id'],
}))
| from django import forms
from django.forms.util import flatatt
from django.template.loader import render_to_string
from django.utils.encoding import force_text
from django.utils.safestring import mark_safe
class RichTextEditorWidget(forms.Textarea):
#def __init__(self, *args, **kwargs):
# kwargs['attrs'] = {'class': 'ckeditor'}
# super(RichTextEditorWidget, self).__init__(*args, **kwargs)
def render(self, name, value, attrs={}):
if value is None:
value = ''
final_attrs = self.build_attrs(attrs, name=name)
return mark_safe(render_to_string('ckeditor/widget.html', {
'final_attrs': flatatt(final_attrs),
'value': force_text(value),
'id': final_attrs['id'],
}))
class Media:
js = ('admin/js/ckeditor/ckeditor.js',
'admin/js/ckeditor/jquery-ckeditor.js')
| bsd-2-clause | Python |
3e0e79bbed4b1a8854c90163ea77603beb1f0742 | Switch heroku to english | AmatanHead/collective-blog,AmatanHead/collective-blog,AmatanHead/collective-blog,AmatanHead/collective-blog | collective_blog/settings/prod_settings.py | collective_blog/settings/prod_settings.py | """Production settings
See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
"""
from __future__ import unicode_literals
from django.utils.translation import ugettext_lazy as _
import dj_database_url
print('\033[00;32mLoading production settings\033[0;00m')
DEBUG = False
from .settings import *
ADMINS = (('Zelta', 'dev.zelta@gmail.com'), )
MANAGERS = ()
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
# Use sqlite for heroku app
'default': dj_database_url.config(),
}
# Hosts setup
# https://docs.djangoproject.com/en/1.9/ref/settings/#std:setting-ALLOWED_HOSTS
ALLOWED_HOSTS = [
'collective-blog-hse-project.herokuapp.com',
'www.collective-blog-hse-project.herokuapp.com',
]
# Email
DEFAULT_FROM_EMAIL = os.getenv('DEFAULT_FROM_EMAIL', '')
EMAIL_HOST = os.getenv('EMAIL_HOST', '')
EMAIL_HOST_PASSWORD = os.getenv('EMAIL_HOST_PASSWORD', '')
EMAIL_HOST_USER = os.getenv('EMAIL_HOST_USER', '')
EMAIL_PORT = 465
SERVER_EMAIL = os.getenv('SERVER_EMAIL', '')
EMAIL_SUBJECT_PREFIX = '[heroku collective blog] '
EMAIL_USE_SSL = True
if DEFAULT_FROM_EMAIL and EMAIL_HOST and EMAIL_HOST_PASSWORD and EMAIL_HOST_USER and SERVER_EMAIL:
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
else:
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
print('\033[01;31mWarning: mail server config is missing!\033[0;00m')
print('\033[01;31mWarning: using console mail backend!\033[0;00m')
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
INSTALLED_APPS += [
'gunicorn',
]
STATIC_ROOT = os.path.join(BASE_DIR, 'collective_blog', 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'collective_blog', 'static'),
)
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
LANGUAGES = [
# ('ru', _('Russian')),
('en', _('English')),
]
LANGUAGE_CODE = 'en'
| """Production settings
See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
"""
from __future__ import unicode_literals
import dj_database_url
print('\033[00;32mLoading production settings\033[0;00m')
DEBUG = False
from .settings import *
ADMINS = (('Zelta', 'dev.zelta@gmail.com'), )
MANAGERS = ()
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
# Use sqlite for heroku app
'default': dj_database_url.config(),
}
# Hosts setup
# https://docs.djangoproject.com/en/1.9/ref/settings/#std:setting-ALLOWED_HOSTS
ALLOWED_HOSTS = [
'collective-blog-hse-project.herokuapp.com',
'www.collective-blog-hse-project.herokuapp.com',
]
# Email
DEFAULT_FROM_EMAIL = os.getenv('DEFAULT_FROM_EMAIL', '')
EMAIL_HOST = os.getenv('EMAIL_HOST', '')
EMAIL_HOST_PASSWORD = os.getenv('EMAIL_HOST_PASSWORD', '')
EMAIL_HOST_USER = os.getenv('EMAIL_HOST_USER', '')
EMAIL_PORT = 465
SERVER_EMAIL = os.getenv('SERVER_EMAIL', '')
EMAIL_SUBJECT_PREFIX = '[heroku collective blog] '
EMAIL_USE_SSL = True
if DEFAULT_FROM_EMAIL and EMAIL_HOST and EMAIL_HOST_PASSWORD and EMAIL_HOST_USER and SERVER_EMAIL:
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
else:
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
print('\033[01;31mWarning: mail server config is missing!\033[0;00m')
print('\033[01;31mWarning: using console mail backend!\033[0;00m')
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
INSTALLED_APPS += [
'gunicorn',
]
STATIC_ROOT = os.path.join(BASE_DIR, 'collective_blog', 'staticfiles')
STATIC_URL = '/static/'
# Extra places for collectstatic to find static files.
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'collective_blog', 'static'),
)
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
| mit | Python |
3ed854140723ee7f0527ba15d9cfe7bba8bbc6e6 | Make the perl wrappers work with python 3 | dnanexus/dx-toolkit,dnanexus/dx-toolkit,dnanexus/dx-toolkit,dnanexus/dx-toolkit,dnanexus/dx-toolkit,dnanexus/dx-toolkit,dnanexus/dx-toolkit,dnanexus/dx-toolkit | contrib/perl/generatePerlAPIWrappers.py | contrib/perl/generatePerlAPIWrappers.py | #!/usr/bin/env python
#
# Copyright (C) 2013-2016 DNAnexus, Inc.
#
# This file is part of dx-toolkit (DNAnexus platform client libraries).
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys, json
preamble = '''# Do not modify this file by hand.
#
# It is automatically generated by src/api_wrappers/generatePerlAPIWrappers.py.
# (Run make api_wrappers to update it.)
package DNAnexus::API;
use strict;
use Exporter;
use DNAnexus qw(DXHTTPRequest);
'''
postscript = '''
our @ISA = "Exporter";
our @EXPORT_OK = qw({all_method_names});
'''
class_method_template = '''
sub {method_name}(;$%) {{
my ($input_params, %kwargs) = @_;
%kwargs = () unless %kwargs;
return DXHTTPRequest('{route}', $input_params, %kwargs);
}}
'''
object_method_template = '''
sub {method_name}($;$%) {{
my ($object_id, $input_params, %kwargs) = @_;
%kwargs = () unless %kwargs;
return DXHTTPRequest('/'.$object_id.'/{method_route}', $input_params, %kwargs);
}}
'''
app_object_method_template = '''
sub {method_name}($;$%) {{
my ($app_id_or_name, $input_params, %kwargs) = @_;
%kwargs = () unless %kwargs;
return DXHTTPRequest('/'.$app_id_or_name.'/{method_route}', $input_params, %kwargs);
}}
sub {method_name}WithAlias($;$%) {{
my ($app_name, $app_alias, $input_params, %kwargs) = @_;
%kwargs = () unless %kwargs;
return {method_name}($app_name.'/'.$app_alias, $input_params, %kwargs);
}}
'''
print(preamble)
all_method_names = []
for method in json.loads(sys.stdin.read()):
route, signature, opts = method
method_name = signature.split("(")[0]
if (opts['objectMethod']):
root, oid_route, method_route = route.split("/")
if oid_route == 'app-xxxx':
print(app_object_method_template.format(method_name=method_name, method_route=method_route))
else:
print(object_method_template.format(method_name=method_name, method_route=method_route))
else:
print(class_method_template.format(method_name=method_name, route=route))
all_method_names.append(method_name)
print(postscript.format(all_method_names=" ".join(all_method_names)))
| #!/usr/bin/env python2.7
#
# Copyright (C) 2013-2016 DNAnexus, Inc.
#
# This file is part of dx-toolkit (DNAnexus platform client libraries).
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys, json
preamble = '''# Do not modify this file by hand.
#
# It is automatically generated by src/api_wrappers/generatePerlAPIWrappers.py.
# (Run make api_wrappers to update it.)
package DNAnexus::API;
use strict;
use Exporter;
use DNAnexus qw(DXHTTPRequest);
'''
postscript = '''
our @ISA = "Exporter";
our @EXPORT_OK = qw({all_method_names});
'''
class_method_template = '''
sub {method_name}(;$%) {{
my ($input_params, %kwargs) = @_;
%kwargs = () unless %kwargs;
return DXHTTPRequest('{route}', $input_params, %kwargs);
}}
'''
object_method_template = '''
sub {method_name}($;$%) {{
my ($object_id, $input_params, %kwargs) = @_;
%kwargs = () unless %kwargs;
return DXHTTPRequest('/'.$object_id.'/{method_route}', $input_params, %kwargs);
}}
'''
app_object_method_template = '''
sub {method_name}($;$%) {{
my ($app_id_or_name, $input_params, %kwargs) = @_;
%kwargs = () unless %kwargs;
return DXHTTPRequest('/'.$app_id_or_name.'/{method_route}', $input_params, %kwargs);
}}
sub {method_name}WithAlias($;$%) {{
my ($app_name, $app_alias, $input_params, %kwargs) = @_;
%kwargs = () unless %kwargs;
return {method_name}($app_name.'/'.$app_alias, $input_params, %kwargs);
}}
'''
print preamble
all_method_names = []
for method in json.loads(sys.stdin.read()):
route, signature, opts = method
method_name = signature.split("(")[0]
if (opts['objectMethod']):
root, oid_route, method_route = route.split("/")
if oid_route == 'app-xxxx':
print app_object_method_template.format(method_name=method_name, method_route=method_route)
else:
print object_method_template.format(method_name=method_name, method_route=method_route)
else:
print class_method_template.format(method_name=method_name, route=route)
all_method_names.append(method_name)
print postscript.format(all_method_names=" ".join(all_method_names))
| apache-2.0 | Python |
f7d4a3df11a67e3ae679b4c8f25780538c4c3c32 | Use the newer PostUpdate instead of PostMedia | osamak/student-portal,enjaz/enjaz,osamak/student-portal,enjaz/enjaz,enjaz/enjaz,osamak/student-portal,enjaz/enjaz,osamak/student-portal,osamak/student-portal,enjaz/enjaz | core/management/commands/send_tweets.py | core/management/commands/send_tweets.py | import twitter
from django.core.management.base import BaseCommand
from django.conf import settings
from core.models import Tweet
class Command(BaseCommand):
help = "Send out tweets."
def handle(self, *args, **options):
for tweet in Tweet.objects.filter(was_sent=False, failed_trails__lte=5):
user_tokens = tweet.user.social_auth.all()[0].tokens
api = twitter.Api(consumer_key=settings.SOCIAL_AUTH_TWITTER_KEY,
consumer_secret=settings.SOCIAL_AUTH_TWITTER_SECRET,
access_token_key=user_tokens['oauth_token'],
access_token_secret=user_tokens['oauth_token_secret'],)
try:
if tweet.media_path:
status = api.PostUpdate(tweet.text, media=tweet.media_path)
else:
status = api.PostUpdate(tweet.text)
except twitter.TwitterError, e:
print "Something went wrong with #{}: ".format(tweet.pk), e
tweet.failed_trails += 1
tweet.save()
continue
tweet.tweet_id = status.id
tweet.was_sent = True
tweet.save()
| import twitter
from django.core.management.base import BaseCommand
from django.conf import settings
from core.models import Tweet
class Command(BaseCommand):
help = "Send out tweets."
def handle(self, *args, **options):
for tweet in Tweet.objects.filter(was_sent=False, failed_trails__lte=5):
user_tokens = tweet.user.social_auth.all()[0].tokens
api = twitter.Api(consumer_key=settings.SOCIAL_AUTH_TWITTER_KEY,
consumer_secret=settings.SOCIAL_AUTH_TWITTER_SECRET,
access_token_key=user_tokens['oauth_token'],
access_token_secret=user_tokens['oauth_token_secret'],)
try:
if tweet.media_path:
status = api.PostMedia(tweet.text, tweet.media_path)
else:
status = api.PostUpdate(tweet.text)
except twitter.TwitterError, e:
print "Something went wrong with #{}: ".format(tweet.pk), e
tweet.failed_trails += 1
tweet.save()
continue
tweet.tweet_id = status.id
tweet.was_sent = True
tweet.save()
| agpl-3.0 | Python |
16bada3156354ef4d41505b37e31be054c949d93 | Add descripiton to policies in virtual_interfaces.py | jianghuaw/nova,jianghuaw/nova,jianghuaw/nova,vmturbo/nova,rahulunair/nova,gooddata/openstack-nova,vmturbo/nova,mikalstill/nova,jianghuaw/nova,gooddata/openstack-nova,rajalokan/nova,rajalokan/nova,Juniper/nova,rajalokan/nova,vmturbo/nova,phenoxim/nova,gooddata/openstack-nova,openstack/nova,mahak/nova,openstack/nova,Juniper/nova,phenoxim/nova,Juniper/nova,mahak/nova,vmturbo/nova,openstack/nova,rahulunair/nova,Juniper/nova,mikalstill/nova,mikalstill/nova,klmitch/nova,klmitch/nova,mahak/nova,gooddata/openstack-nova,rajalokan/nova,klmitch/nova,rahulunair/nova,klmitch/nova | nova/policies/virtual_interfaces.py | nova/policies/virtual_interfaces.py | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-virtual-interfaces'
POLICY_ROOT = 'os_compute_api:os-virtual-interfaces:%s'
virtual_interfaces_policies = [
policy.RuleDefault(
name=POLICY_ROOT % 'discoverable',
check_str=base.RULE_ANY),
base.create_rule_default(
BASE_POLICY_NAME,
base.RULE_ADMIN_OR_OWNER,
"""List Virtual Interfaces.
This works only with the nova-network service, which is now deprecated""",
[
{
'method': 'GET',
'path': '/servers/{server_id}/os-virtual-interfaces'
}
]),
]
def list_rules():
return virtual_interfaces_policies
| # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-virtual-interfaces'
POLICY_ROOT = 'os_compute_api:os-virtual-interfaces:%s'
virtual_interfaces_policies = [
policy.RuleDefault(
name=POLICY_ROOT % 'discoverable',
check_str=base.RULE_ANY),
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_OR_OWNER),
]
def list_rules():
return virtual_interfaces_policies
| apache-2.0 | Python |
3c984a10d0665498f3d1d5f6abf72532cd9d58d5 | bump version | Infinidat/infi.conf,vmalloc/confetti | infi/conf/__version__.py | infi/conf/__version__.py | __version__ = "0.0.3"
| __version__ = "0.0.2"
| bsd-3-clause | Python |
7a952d605b629b9c8ef2c96c451ee4db4274d545 | Set max celery connections to 1. | SuddenDevs/SuddenDev,SuddenDevs/SuddenDev,SuddenDevs/SuddenDev,SuddenDevs/SuddenDev | suddendev/config.py | suddendev/config.py | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
DEBUG = False
TESTING = False
CSRF_ENABLED = True
SQLALCHEMY_TRACK_MODIFICATIONS = False
SECRET_KEY = os.urandom(32)
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
CELERY_BROKER_URL = os.environ['CLOUDAMQP_URL']
CELERY_RESULT_BACKEND = 'rpc'
REDIS_URL = os.environ['REDIS_URL']
CLIENT_ID = '690133088753-kk72josco183eb8smpq4dgkrqmd0eovm.apps.googleusercontent.com'
AUTH_URI = 'https://accounts.google.com/o/oauth2/auth'
TOKEN_URI = 'https://accounts.google.com/o/oauth2/token'
USER_INFO = 'https://www.googleapis.com/userinfo/v2/me'
SCOPE = ['https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/userinfo.profile']
CLIENT_SECRET = os.environ['CLIENT_SECRET']
REDIS_MAX_CONNECTIONS = 10
CELERY_MAX_CONNECTIONS = 1
class ProductionConfig(Config):
DEBUG = False
class StagingConfig(Config):
DEVELOPMENT = True
DEBUG = True
class DevelopmentConfig(Config):
DEVELOPMENT = True
DEBUG = True
class TestingConfig(Config):
TESTING = True
| import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
DEBUG = False
TESTING = False
CSRF_ENABLED = True
SQLALCHEMY_TRACK_MODIFICATIONS = False
SECRET_KEY = os.urandom(32)
SQLALCHEMY_DATABASE_URI = os.environ['DATABASE_URL']
CELERY_BROKER_URL = os.environ['CLOUDAMQP_URL']
CELERY_RESULT_BACKEND = 'rpc'
REDIS_URL = os.environ['REDIS_URL']
CLIENT_ID = '690133088753-kk72josco183eb8smpq4dgkrqmd0eovm.apps.googleusercontent.com'
AUTH_URI = 'https://accounts.google.com/o/oauth2/auth'
TOKEN_URI = 'https://accounts.google.com/o/oauth2/token'
USER_INFO = 'https://www.googleapis.com/userinfo/v2/me'
SCOPE = ['https://www.googleapis.com/auth/userinfo.email',
'https://www.googleapis.com/auth/userinfo.profile']
CLIENT_SECRET = os.environ['CLIENT_SECRET']
REDIS_MAX_CONNECTIONS = 10
CELERY_MAX_CONNECTIONS = 10
class ProductionConfig(Config):
DEBUG = False
class StagingConfig(Config):
DEVELOPMENT = True
DEBUG = True
class DevelopmentConfig(Config):
DEVELOPMENT = True
DEBUG = True
class TestingConfig(Config):
TESTING = True
| mit | Python |
2927c6bc4c4e0c975a875d7eb5aa736b6abd66cd | bump version | matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse,matrix-org/synapse | synapse/__init__.py | synapse/__init__.py | # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-9 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" This is a reference implementation of a Matrix home server.
"""
import sys
# Check that we're not running on an unsupported Python version.
if sys.version_info < (3, 5):
print("Synapse requires Python 3.5 or above.")
sys.exit(1)
try:
from twisted.internet import protocol
from twisted.internet.protocol import Factory
from twisted.names.dns import DNSDatagramProtocol
protocol.Factory.noisy = False
Factory.noisy = False
DNSDatagramProtocol.noisy = False
except ImportError:
pass
__version__ = "1.4.0rc1"
| # -*- coding: utf-8 -*-
# Copyright 2014-2016 OpenMarket Ltd
# Copyright 2018-9 New Vector Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" This is a reference implementation of a Matrix home server.
"""
import sys
# Check that we're not running on an unsupported Python version.
if sys.version_info < (3, 5):
print("Synapse requires Python 3.5 or above.")
sys.exit(1)
try:
from twisted.internet import protocol
from twisted.internet.protocol import Factory
from twisted.names.dns import DNSDatagramProtocol
protocol.Factory.noisy = False
Factory.noisy = False
DNSDatagramProtocol.noisy = False
except ImportError:
pass
__version__ = "1.3.1"
| apache-2.0 | Python |
5a23e9d7f412f040c12b1f48d258b83e9eeea5d3 | add crude error checking | jasonrhaas/ducking-adventure | mysite/skynet/views.py | mysite/skynet/views.py | from skynet.models import Messages
from skynet.serializers import SkynetSerializer
from rest_framework import generics
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework import renderers
import sys
class SkynetList(generics.ListCreateAPIView):
queryset = Messages.objects.all()
serializer_class = SkynetSerializer
def get(self, request, format=None):
e1 = None
e2 = None
try:
city_count = Messages.objects.values_list('city', flat=True).count()
except:
e1 = sys.exc_info()[0]
try:
username_count = Messages.objects.values_list('username', flat=True).count()
except:
e2 = sys.exc_info()[0]
if e1 or e2:
errormsg = 'errors: {}, {}'.format(e1, e2)
content = {'result': 'error', 'error': errormsg}
else:
content = {'result': 'success', 'cities': city_count, 'users': username_count}
return Response(content)
| from skynet.models import Messages
from skynet.serializers import SkynetSerializer
from rest_framework import generics
from rest_framework.decorators import api_view
from rest_framework.response import Response
from rest_framework.reverse import reverse
from rest_framework import renderers
class SkynetList(generics.ListCreateAPIView):
queryset = Messages.objects.all()
serializer_class = SkynetSerializer
# @api_view(('GET',))
# def api_root(request, format=None):
# return Response({
# # 'users': reverse('user-list', request=request, format=format),
# 'messages': reverse('message-list', request=request, format=format)
# })
# def get(self, request, format=None):
# user_count = User.objects.count()
def get(self, request, format=None):
total_count = Messages.objects.count()
content = {'total_count': total_count}
city_count = Messages.objects.values_list('city', flat=True).count()
username_count = Messages.objects.values_list('username', flat=True).count()
content = {'cities': city_count, 'users': username_count}
return Response(content)
| mit | Python |
68dfaecce5d7162201d4851752df185cc2827d7f | Bump version 0.1.1 | slok/prometheus-python,slok/prometheus-python | prometheus/__init__.py | prometheus/__init__.py | __title__ = 'prometheus'
__version__ = '0.1.1'
__author__ = 'Xabier Larrakoetxea'
__license__ = 'MIT License'
__copyright__ = 'Copyright 2015 Xabier Larrakoetxea' | __title__ = 'prometheus'
__version__ = '0.1'
__author__ = 'Xabier Larrakoetxea'
__license__ = 'MIT License'
__copyright__ = 'Copyright 2015 Xabier Larrakoetxea' | mit | Python |
f393b9056169ac86276c867c5b548b91f738b890 | fix sklearn_api | stanfordmlgroup/ngboost,stanfordmlgroup/ngboost | ngboost/sklearn_api.py | ngboost/sklearn_api.py | import numpy as np
from sklearn.base import ClassifierMixin, RegressorMixin
from ngboost.ngboost import NGBoost
from ngboost.distns import Bernoulli, Normal
class NGBRegressor(NGBoost, RegressorMixin):
"""NGBoost for regression with Sklean API."""
def __init__(self, *args, **kwargs):
super(NGBRegressor, self).__init__(Dist=Normal, *args, **kwargs)
class NGBClassifier(NGBoost, ClassifierMixin):
"""NGBoost for classification with Sklean API.
Warning:
Dist need to be Bernoulli.
You can use this model for only binary classification.
"""
def __init__(self, *args, **kwargs):
super(NGBClassifier, self).__init__(Dist=Bernoulli, *args, **kwargs)
def predict(self, X):
dist = self.pred_dist(X)
return np.round(dist.prob)
| import numpy as np
from sklearn.base import ClassifierMixin, RegressorMixin
from ngboost.ngboost import NGBoost
class NGBRegressor(NGBoost, RegressorMixin):
"""NGBoost for regression with Sklean API."""
pass
class NGBClassifier(NGBoost, ClassifierMixin):
"""NGBoost for classification with Sklean API.
Warning:
Dist need to be Bernoulli.
You can use this model for only binary classification.
"""
def predict(self, X):
dist = self.pred_dist(X)
return np.round(dist.prob)
| apache-2.0 | Python |
c8f78f21a2855241c7e1eabfae6e837bd8f8c451 | Corrige migración | abertal/alpha,migonzalvar/alpha,abertal/alpha,migonzalvar/alpha,migonzalvar/alpha,abertal/alpha,abertal/alpha,migonzalvar/alpha | core/migrations/0012_membership_person.py | core/migrations/0012_membership_person.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-19 17:43
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0011_auto_20161219_1826'),
]
operations = [
migrations.AddField(
model_name='membership',
name='person',
field=models.ForeignKey(default='3c8ce552-47ba-42cc-be9a-6625117072ab', on_delete=django.db.models.deletion.CASCADE, to='core.Person'),
preserve_default=False,
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-19 17:43
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('core', '0011_auto_20161219_1826'),
]
operations = [
migrations.AddField(
model_name='membership',
name='person',
field=models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='core.Person'),
preserve_default=False,
),
]
| bsd-3-clause | Python |
f284768b9d844967c1b8d7c06422fb28a3e88176 | update the streamup.com plugin | bastimeyer/streamlink,wlerin/streamlink,back-to/streamlink,streamlink/streamlink,ethanhlc/streamlink,ethanhlc/streamlink,fishscene/streamlink,chhe/streamlink,mmetak/streamlink,gravyboat/streamlink,beardypig/streamlink,fishscene/streamlink,melmorabity/streamlink,javiercantero/streamlink,chhe/streamlink,sbstp/streamlink,bastimeyer/streamlink,melmorabity/streamlink,sbstp/streamlink,wlerin/streamlink,mmetak/streamlink,javiercantero/streamlink,streamlink/streamlink,gravyboat/streamlink,beardypig/streamlink,back-to/streamlink | src/livestreamer/plugins/streamupcom.py | src/livestreamer/plugins/streamupcom.py | import re
from livestreamer.compat import urljoin
from livestreamer.plugin import Plugin
from livestreamer.plugin.api import http, validate
from livestreamer.stream import RTMPStream, HLSStream
_url_re = re.compile("http(s)?://(\w+\.)?streamup.com/(?P<channel>[^/?]+)")
_hls_manifest_re = re.compile('HlsManifestUrl:\\s*"//"\\s*\\+\\s*response\\s*\\+\\s*"(.+)"')
class StreamupCom(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
res = http.get(self.url)
if not res: return
match = _hls_manifest_re.search(res.text)
url = match.group(1)
hls_url = "http://video-cdn.streamup.com{}".format(url)
return HLSStream.parse_variant_playlist(self.session, hls_url)
__plugin__ = StreamupCom
| import re
from livestreamer.compat import urljoin
from livestreamer.plugin import Plugin
from livestreamer.plugin.api import http, validate
from livestreamer.stream import RTMPStream
RTMP_URL = "rtmp://{0}/app/{1}"
CHANNEL_DETAILS_URI = "https://api.streamup.com/v1/channels/{0}?access_token={1}"
REDIRECT_SERVICE_URI = "https://lancer.streamup.com/api/redirect/{0}"
_url_re = re.compile("http(s)?://(\w+\.)?streamup.com/(?P<channel>[^/?]+)")
_flashvars_re = re.compile("flashvars\.(?P<var>\w+)\s?=\s?'(?P<value>[^']+)';")
_swf_url_re = re.compile("swfobject.embedSWF\(\s*\"(?P<player_url>[^\"]+)\",")
_schema = validate.Schema(
validate.union({
"vars": validate.all(
validate.transform(_flashvars_re.findall),
validate.transform(dict),
{
"owner": validate.text,
validate.optional("token"): validate.text
}
),
"swf": validate.all(
validate.transform(_swf_url_re.search),
validate.get("player_url"),
validate.endswith(".swf")
)
})
)
_channel_details_schema = validate.Schema({
"channel": {
"live": bool,
"slug": validate.text
}
})
class StreamupCom(Plugin):
@classmethod
def can_handle_url(cls, url):
return _url_re.match(url)
def _get_streams(self):
res = http.get(self.url, schema=_schema)
if not res:
return
owner = res["vars"]["owner"]
token = res["vars"].get("token", "null")
swf_url = res["swf"]
# Check if the stream is online
res = http.get(CHANNEL_DETAILS_URI.format(owner, token))
channel_details = http.json(res, schema=_channel_details_schema)
if not channel_details["channel"]["live"]:
return
stream_ip = http.get(REDIRECT_SERVICE_URI.format(owner)).text
streams = {}
streams["live"] = RTMPStream(self.session, {
"rtmp": RTMP_URL.format(stream_ip, channel_details["channel"]["slug"]),
"pageUrl": self.url,
"swfUrl": urljoin(self.url, swf_url),
"live": True
})
return streams
__plugin__ = StreamupCom
| bsd-2-clause | Python |
7feb26a6478e3a96da57f2825821285c17651545 | Add a missing newline | scolby33/OCSPdash,scolby33/OCSPdash,scolby33/OCSPdash | src/ocspdash/web/blueprints/__init__.py | src/ocspdash/web/blueprints/__init__.py | # -*- coding: utf-8 -*-
from .api import api
from .ui import ui
__all__ = [
'api',
'ui'
]
| # -*- coding: utf-8 -*-
from .api import api
from .ui import ui
__all__ = [
'api',
'ui'
] | mit | Python |
26e915e391d8554f6e775dd962d63e565066708c | add missing HTTP headers | bastimeyer/streamlink,bastimeyer/streamlink,streamlink/streamlink,streamlink/streamlink,chhe/streamlink,chhe/streamlink | src/streamlink/plugins/goltelevision.py | src/streamlink/plugins/goltelevision.py | """
$description Spanish live TV sports channel owned by Gol Network.
$url goltelevision.com
$type live
$region Spain
"""
import re
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream
@pluginmatcher(re.compile(
r"https?://(?:www\.)?goltelevision\.com/en-directo"
))
class GOLTelevision(Plugin):
def _get_streams(self):
self.session.http.headers.update({
"Origin": "https://goltelevision.com",
"Referer": "https://goltelevision.com/",
})
url = self.session.http.get(
"https://play.goltelevision.com/api/stream/live",
schema=validate.Schema(
validate.parse_json(),
{"manifest": validate.url()},
validate.get("manifest"),
)
)
return HLSStream.parse_variant_playlist(self.session, url)
__plugin__ = GOLTelevision
| """
$description Spanish live TV sports channel owned by Gol Network.
$url goltelevision.com
$type live
$region Spain
"""
import re
from streamlink.plugin import Plugin, pluginmatcher
from streamlink.plugin.api import validate
from streamlink.stream.hls import HLSStream
@pluginmatcher(re.compile(
r"https?://(?:www\.)?goltelevision\.com/en-directo"
))
class GOLTelevision(Plugin):
def _get_streams(self):
url = self.session.http.get(
"https://play.goltelevision.com/api/stream/live",
schema=validate.Schema(
validate.parse_json(),
{"manifest": validate.url()},
validate.get("manifest")
)
)
return HLSStream.parse_variant_playlist(self.session, url)
__plugin__ = GOLTelevision
| bsd-2-clause | Python |
70a40f50e9988fadfbc42f236881c1e3e78f40f1 | Extend base settings for test settings. Don't use live cache backend for tests. | ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit | icekit/project/settings/_test.py | icekit/project/settings/_test.py | from ._base import *
# DJANGO ######################################################################
ALLOWED_HOSTS = ('*', )
CSRF_COOKIE_SECURE = False # Don't require HTTPS for CSRF cookie
SESSION_COOKIE_SECURE = False # Don't require HTTPS for session cookie
DATABASES['default'].update({
'TEST': {
'NAME': DATABASES['default']['NAME'],
# See: https://docs.djangoproject.com/en/1.7/ref/settings/#serialize
'SERIALIZE': False,
},
})
INSTALLED_APPS += (
'fluent_pages.pagetypes.fluentpage',
'icekit.tests',
)
ROOT_URLCONF = 'icekit.tests.urls'
TEMPLATES_DJANGO['DIRS'].insert(
0, os.path.join(BASE_DIR, 'icekit', 'tests', 'templates')),
# ICEKIT ######################################################################
# RESPONSE_PAGE_PLUGINS = ['ImagePlugin', ]
# HAYSTACK ####################################################################
# HAYSTACK_CONNECTIONS = {
# 'default': {
# 'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
# },
# }
# TRAVIS ######################################################################
if 'TRAVIS' in os.environ:
NOSE_ARGS.remove('--with-progressive')
| from ._develop import *
# DJANGO ######################################################################
DATABASES['default'].update({
'TEST': {
'NAME': DATABASES['default']['NAME'],
# See: https://docs.djangoproject.com/en/1.7/ref/settings/#serialize
'SERIALIZE': False,
},
})
INSTALLED_APPS += (
'fluent_pages.pagetypes.fluentpage',
'icekit.tests',
)
ROOT_URLCONF = 'icekit.tests.urls'
TEMPLATES_DJANGO['DIRS'].insert(
0, os.path.join(BASE_DIR, 'icekit', 'tests', 'templates')),
# ICEKIT ######################################################################
# RESPONSE_PAGE_PLUGINS = ['ImagePlugin', ]
# HAYSTACK ####################################################################
# HAYSTACK_CONNECTIONS = {
# 'default': {
# 'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
# },
# }
# TRAVIS ######################################################################
if 'TRAVIS' in os.environ:
NOSE_ARGS.remove('--with-progressive')
| mit | Python |
44a6da5f6bc61924c234fc49e90f679b2d9b4c52 | Bump @graknlabs_benchmark | graknlabs/grakn,lolski/grakn,lolski/grakn,lolski/grakn,graknlabs/grakn,lolski/grakn,graknlabs/grakn,graknlabs/grakn | dependencies/graknlabs/dependencies.bzl | dependencies/graknlabs/dependencies.bzl | #
# GRAKN.AI - THE KNOWLEDGE GRAPH
# Copyright (C) 2018 Grakn Labs Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
def graknlabs_build_tools():
git_repository(
name = "graknlabs_build_tools",
remote = "https://github.com/graknlabs/build-tools",
commit = "2231eb17fca4268c9bd63c088c641b7b934c2346", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_build_tools
)
def graknlabs_graql():
git_repository(
name = "graknlabs_graql",
remote = "https://github.com/graknlabs/graql",
commit = "8a9ccf9d3192acf64566d26679b52b5d66620c8f", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_graql
)
def graknlabs_protocol():
git_repository(
name = "graknlabs_protocol",
remote = "https://github.com/graknlabs/protocol",
commit = "ec8fea570a1436ddce06287f43154d32223157d0", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_protocol
)
def graknlabs_client_java():
git_repository(
name = "graknlabs_client_java",
remote = "https://github.com/graknlabs/client-java",
commit = "3b56478c6aa6a7dfca90a637605a2719f2188b96",
)
def graknlabs_benchmark():
git_repository(
name = "graknlabs_benchmark",
remote = "https://github.com/graknlabs/benchmark.git",
commit = "aa717279af73de2572cd754630b0796a22cbe6d0" # keep in sync with protocol changes
)
| #
# GRAKN.AI - THE KNOWLEDGE GRAPH
# Copyright (C) 2018 Grakn Labs Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
load("@bazel_tools//tools/build_defs/repo:git.bzl", "git_repository")
def graknlabs_build_tools():
git_repository(
name = "graknlabs_build_tools",
remote = "https://github.com/graknlabs/build-tools",
commit = "2231eb17fca4268c9bd63c088c641b7b934c2346", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_build_tools
)
def graknlabs_graql():
git_repository(
name = "graknlabs_graql",
remote = "https://github.com/graknlabs/graql",
commit = "8a9ccf9d3192acf64566d26679b52b5d66620c8f", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_graql
)
def graknlabs_protocol():
git_repository(
name = "graknlabs_protocol",
remote = "https://github.com/graknlabs/protocol",
commit = "ec8fea570a1436ddce06287f43154d32223157d0", # sync-marker: do not remove this comment, this is used for sync-dependencies by @graknlabs_protocol
)
def graknlabs_client_java():
git_repository(
name = "graknlabs_client_java",
remote = "https://github.com/graknlabs/client-java",
commit = "3b56478c6aa6a7dfca90a637605a2719f2188b96",
)
def graknlabs_benchmark():
git_repository(
name = "graknlabs_benchmark",
remote = "https://github.com/graknlabs/benchmark.git",
commit = "5fbc895720c199da403e7cc163aec5b603a2aa86" # keep in sync with protocol changes
)
| agpl-3.0 | Python |
c787d090adcfdaaf8d507fd5ca7d54b16d1670b9 | switch to staging_rack | agrc/surface-water-quality,agrc/surface-water-quality,agrc/surface-water-quality | scripts/surface_water_quality_pallet.py | scripts/surface_water_quality_pallet.py | #!/usr/bin/env python
# * coding: utf8 *
'''
surface_water_quality_pallet.py
A module that contains a pallet definition for the surface water quality project
'''
from forklift.models import Pallet
from os.path import join
class SurfaceWaterQualityPallet(Pallet):
def build(self, configuration):
self.arcgis_services = [('SurfaceWaterQuality/MapService', 'MapServer'),
('SurfaceWaterQuality/Toolbox', 'GPServer')]
self.sgid = join(self.garage, 'SGID10.sde')
self.water = join(self.staging_rack, 'water.gdb')
self.copy_data = [self.water]
self.add_crate(('StreamsNHDHighRes', self.sgid, self.water))
| #!/usr/bin/env python
# * coding: utf8 *
'''
surface_water_quality_pallet.py
A module that contains a pallet definition for the surface water quality project
'''
from forklift.models import Pallet
from os.path import join
class SurfaceWaterQualityPallet(Pallet):
def build(self, configuration):
self.arcgis_services = [('SurfaceWaterQuality/MapService', 'MapServer'),
('SurfaceWaterQuality/Toolbox', 'GPServer')]
self.staging = 'C:\\Scheduled\\staging'
self.sgid = join(self.garage, 'SGID10.sde')
self.water = join(self.staging, 'water.gdb')
self.copy_data = [self.water]
self.add_crate(('StreamsNHDHighRes', self.sgid, self.water))
| mit | Python |
1e3693eb60edaea9698ba3c761a9964ed51b55a7 | bump version to 0.0.8 | aromanovich/jinja2schema,aromanovich/jinja2schema,aromanovich/jinja2schema | jinja2schema/__init__.py | jinja2schema/__init__.py | # coding: utf-8
"""
jinja2schema
============
Type inference for Jinja2 templates.
See http://jinja2schema.rtfd.org/ for documentation.
:copyright: (c) 2014 Anton Romanovich
:license: BSD
"""
__title__ = 'jinja2schema'
__author__ = 'Anton Romanovich'
__license__ = 'BSD'
__copyright__ = 'Copyright 2014 Anton Romanovich'
__version__ = '0.0.8'
__version_info__ = tuple(int(i) for i in __version__.split('.'))
from .config import Config
from .core import parse, infer, infer_from_ast, to_json_schema, JSONSchemaDraft4Encoder
from .exceptions import InferException, MergeException, InvalidExpression, UnexpectedExpression
| # coding: utf-8
"""
jinja2schema
============
Type inference for Jinja2 templates.
See http://jinja2schema.rtfd.org/ for documentation.
:copyright: (c) 2014 Anton Romanovich
:license: BSD
"""
__title__ = 'jinja2schema'
__author__ = 'Anton Romanovich'
__license__ = 'BSD'
__copyright__ = 'Copyright 2014 Anton Romanovich'
__version__ = '0.0.7'
__version_info__ = tuple(int(i) for i in __version__.split('.'))
from .config import Config
from .core import parse, infer, infer_from_ast, to_json_schema, JSONSchemaDraft4Encoder
from .exceptions import InferException, MergeException, InvalidExpression, UnexpectedExpression
| bsd-3-clause | Python |
4d9b2cecc592cc1075ddea6fcab980997434807f | remove include for admin | thelabnyc/django-activity-stream,justquick/django-activity-stream,justquick/django-activity-stream,pombredanne/django-activity-stream,thelabnyc/django-activity-stream,pombredanne/django-activity-stream | actstream/runtests/urls.py | actstream/runtests/urls.py | import os
from django.contrib import admin
from django.views.static import serve
try:
from django.urls import include, url
except ImportError:
from django.conf.urls import include, url
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^media/(?P<path>.*)$', serve,
{'document_root': os.path.join(os.path.dirname(__file__), 'media')}),
url(r'auth/', include('django.contrib.auth.urls')),
url(r'testapp/', include('testapp.urls')),
url(r'', include('actstream.urls')),
]
| import os
from django.contrib import admin
from django.views.static import serve
try:
from django.urls import include, url
except ImportError:
from django.conf.urls import include, url
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'^media/(?P<path>.*)$', serve,
{'document_root': os.path.join(os.path.dirname(__file__), 'media')}),
url(r'auth/', include('django.contrib.auth.urls')),
url(r'testapp/', include('testapp.urls')),
url(r'', include('actstream.urls')),
]
| bsd-3-clause | Python |
a1b6735b49ddebb63e43012fd17a4e467cb0ab1b | Read all files in subdirectories and train one big model | mikedelong/aarhus | demos/matrix_factorization.py | demos/matrix_factorization.py | import cPickle as pickle
import json
import logging
import os
import numpy
import sklearn.feature_extraction.text as text
from sklearn import decomposition
logging.basicConfig(format='%(asctime)s : %(levelname)s :: %(message)s', level=logging.DEBUG)
with open('./matrix_factorization_input.json') as data_file:
data = json.load(data_file)
logging.debug(data)
input_folder = data['input_folder']
pickle_file_name = data['pickle_file_name']
max_file_count = data['max_file_count']
topics_count = data['topics_count']
top_words_count = data['top_words_count']
file_names = [os.path.join(root, current) for root, subdirectories,files in os.walk(input_folder) for current in files]
# truncate
if max_file_count < len(file_names) and max_file_count != -1:
file_names = file_names[:max_file_count]
logging.debug('we are working with %d files.' % len(file_names))
# todo what is min_df
vectorizer = text.CountVectorizer(input='filename', stop_words='english', min_df=20, decode_error='ignore')
logging.debug('created vectorizer')
dtm = vectorizer.fit_transform(file_names).toarray()
logging.debug('created matrix')
vocabulary = numpy.array(vectorizer.get_feature_names())
logging.debug('matrix shape: %s, vocabulary size: %d', dtm.shape, len(vocabulary))
clf = decomposition.NMF(n_components=topics_count, random_state=0)
logging.debug('decomposition complete.')
doctopic = clf.fit_transform(dtm)
logging.debug('fit-transform complete.')
topic_words = []
for topic in clf.components_:
word_idx = numpy.argsort(topic)[::-1][0:top_words_count]
topic_words.append([vocabulary[word] for word in word_idx])
doctopic /= numpy.sum(doctopic, axis=1, keepdims=True)
names = []
for file_name in file_names:
basename = os.path.basename(file_name)
names.append(basename)
names = numpy.asarray(names)
doctopic_orig = doctopic.copy()
groups_count = len(set(names))
doctopic_grouped = numpy.zeros((groups_count, topics_count))
for i, name in enumerate(sorted(set(names))):
doctopic_grouped[i, :] = numpy.mean(doctopic[names == name, :], axis=0)
doctopic = doctopic_grouped
out_pickle = {
'doctopic' : doctopic,
'topic_words' : topic_words
}
pickle.dump(out_pickle, open( pickle_file_name, 'wb' ))
logging.debug('pickle file written.')
t0 = sorted(set(names))
logging.info("Top NMF topics in...")
for i in range(len(doctopic)):
top_topics = numpy.argsort(doctopic[i, :])[::-1][0:3]
top_topics_str = ' '.join(str(t) for t in top_topics)
# logging.info("{}: {}".format(names[i], top_topics_str))
for t in range(len(topic_words)):
logging.info("Topic {}: {}".format(t, ' '.join(topic_words[t][:top_words_count])))
| import cPickle as pickle
import json
import logging
import os
import numpy
import sklearn.feature_extraction.text as text
from sklearn import decomposition
logging.basicConfig(format='%(asctime)s : %(levelname)s :: %(message)s', level=logging.DEBUG)
with open('./matrix_factorization_input.json') as data_file:
data = json.load(data_file)
logging.debug(data)
input_folder = data['input_folder']
pickle_file_name = data['pickle_file_name']
max_file_count = data['max_file_count']
topics_count = data['topics_count']
top_words_count = data['top_words_count']
filenames = sorted([os.path.join(input_folder, file_name) for file_name in os.listdir(input_folder)])
# truncate
if max_file_count < len(filenames) and max_file_count != -1:
filenames = filenames[:max_file_count]
# todo what is min_df
vectorizer = text.CountVectorizer(input='filename', stop_words='english', min_df=20, decode_error='ignore')
logging.debug('created vectorizer')
dtm = vectorizer.fit_transform(filenames).toarray()
logging.debug('created matrix')
vocabulary = numpy.array(vectorizer.get_feature_names())
logging.debug('matrix shape: %s, vocabulary size: %d', dtm.shape, len(vocabulary))
clf = decomposition.NMF(n_components=topics_count, random_state=0)
logging.debug('decomposition complete.')
doctopic = clf.fit_transform(dtm)
logging.debug('fit-transform complete.')
topic_words = []
for topic in clf.components_:
word_idx = numpy.argsort(topic)[::-1][0:top_words_count]
topic_words.append([vocabulary[word] for word in word_idx])
doctopic /= numpy.sum(doctopic, axis=1, keepdims=True)
names = []
for file_name in filenames:
basename = os.path.basename(file_name)
names.append(basename)
names = numpy.asarray(names)
doctopic_orig = doctopic.copy()
groups_count = len(set(names))
doctopic_grouped = numpy.zeros((groups_count, topics_count))
for i, name in enumerate(sorted(set(names))):
doctopic_grouped[i, :] = numpy.mean(doctopic[names == name, :], axis=0)
doctopic = doctopic_grouped
t0 = sorted(set(names))
logging.info("Top NMF topics in...")
for i in range(len(doctopic)):
top_topics = numpy.argsort(doctopic[i, :])[::-1][0:3]
top_topics_str = ' '.join(str(t) for t in top_topics)
# logging.info("{}: {}".format(names[i], top_topics_str))
for t in range(len(topic_words)):
logging.info("Topic {}: {}".format(t, ' '.join(topic_words[t][:top_words_count])))
out_pickle = {
'doctopic' : doctopic,
'topic_words' : topic_words
}
pickle.dump(out_pickle, open( pickle_file_name, 'wb' ))
| apache-2.0 | Python |
dc76e57883a96ba26d1845c7d0633027d4fcf658 | add login url | yueyongyue/saltshaker,yueyongyue/saltshaker,yueyongyue/saltshaker | saltshaker/urls.py | saltshaker/urls.py | """saltshaker URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
import dashboard,minions,execute,jobs,states_config,code_update,groups,system_setup,account
urlpatterns = [
url(r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root':settings.STATIC_ROOT}),
url(r'^admin/', include(admin.site.urls)),
url(r'^index.html', include('dashboard.urls')),
url(r'^$', include('dashboard.urls')),
url(r'minions/', include('minions.urls')),
url(r'execute/', include('execute.urls')),
url(r'jobs/', include('jobs.urls')),
url(r'states_config/', include('states_config.urls')),
#url(r'code_update/', include('code_update.urls')),
url(r'groups/', include('groups.urls')),
#url(r'system_setup/', include('system_setup.urls')),
url(r'login.html/', include('account.urls')),
]
| """saltshaker URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.8/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import include, url
from django.contrib import admin
from django.conf import settings
from django.conf.urls.static import static
import dashboard,minions,execute,jobs,states_config,code_update,groups,system_setup
urlpatterns = [
url(r'^static/(?P<path>.*)$','django.views.static.serve',{'document_root':settings.STATIC_ROOT}),
url(r'^admin/', include(admin.site.urls)),
url(r'^index.html',include('dashboard.urls')),
url(r'^$', include('dashboard.urls')),
url(r'minions/', include('minions.urls')),
url(r'execute/', include('execute.urls')),
url(r'jobs/', include('jobs.urls')),
url(r'states_config/', include('states_config.urls')),
#url(r'code_update/', include('code_update.urls')),
url(r'groups/', include('groups.urls')),
#url(r'system_setup/', include('system_setup.urls')),
url(r'login/', include('dashboard.urls')),
]
| apache-2.0 | Python |
0d3a1adaf2ea6a57a44aa22b9e5715b562712a41 | change test array | windkeepblow/FastDot,windkeepblow/FastDot | test_utils.py | test_utils.py | import time
import numpy as np
from numpy import float32 as REAL
from fast_utils import fast_dot
from fast_utils import fast_dot_blas
def main():
    """Benchmark fast_dot_blas against numpy.dot on large random REAL
    (float32) matrices and print wall-clock timings (Python 2 script)."""
    # Large random operands; shapes chosen so the products are 200x200.
    A = np.array(np.random.random((200,300000)),dtype=REAL)
    B = np.array(np.random.random((300000,200)),dtype=REAL)
    C = np.array(np.random.random((200,300000)),dtype=REAL)
    # Small hand-written fixtures kept (disabled) for eyeball verification.
    '''
    A = np.array([[1.2,2.3,3.4,5.5],[31,4,3,5],[2,3,67,45]], dtype=REAL)
    B = np.array([[5.9,6,3],[7,8,3],[1,2,3],[11,34,55.5]], dtype=REAL)
    C = np.array([[5.9,7,1,11],[6,8,2,34],[3,3,3,55.5]], dtype=REAL)
    '''
    # fast_dot benchmark currently disabled.
    '''
    st = time.time()
    result_1 = np.empty((A.shape[0], B.shape[1]), dtype=REAL)
    fast_dot(A, B, result_1, 1)
    ed = time.time()
    print "fast_dot time:%fs"%(ed-st)
    '''
    # Reference timing: plain numpy dot product.
    st = time.time()
    result_2 = np.dot(A,B)
    ed = time.time()
    print "np.dot time:%fs"%(ed-st)
    # Candidate timing: BLAS-backed fast_dot_blas writing into a
    # preallocated buffer.  It multiplies A by C (same shape as A), not
    # A by B -- presumably C plays the role of B transposed; TODO confirm
    # against fast_utils.
    st = time.time()
    result_3 = np.empty((A.shape[0], C.shape[0]), dtype=REAL)
    fast_dot_blas(A, C, result_3, 1)
    ed = time.time()
    print "fast_dot_blas time:%fs"%(ed-st)
    '''
    print result_1
    print result_2
    print result_3
    '''

if __name__=="__main__":
    main()
| import time
import numpy as np
from numpy import float32 as REAL
from fast_utils import fast_dot
from fast_utils import fast_dot_blas
def main():
'''
A = np.array(np.random.random((2000,3000)),dtype=REAL)
B = np.array(np.random.random((3000,2000)),dtype=REAL)
C = np.array(np.random.random((2000,3000)),dtype=REAL)
'''
A = np.array([[1.2,2.3,3.4,5.5],[31,4,3,5],[2,3,67,45]], dtype=REAL)
B = np.array([[5.9,6,3],[7,8,3],[1,2,3],[11,34,55.5]], dtype=REAL)
C = np.array([[5.9,7,1,11],[6,8,2,34],[3,3,3,55.5]], dtype=REAL)
st = time.time()
result_1 = np.empty((A.shape[0], B.shape[1]), dtype=REAL)
fast_dot(A, B, result_1, 1)
ed = time.time()
print "fast_dot time:%fs"%(ed-st)
st = time.time()
result_2 = np.dot(A,B)
ed = time.time()
print "np.dot time:%fs"%(ed-st)
st = time.time()
result_3 = np.empty((A.shape[0], C.shape[0]), dtype=REAL)
fast_dot_blas(A, C, result_3, 1)
ed = time.time()
print "fast_dot_blas time:%fs"%(ed-st)
print result_1
print result_2
print result_3
if __name__=="__main__":
main()
| mit | Python |
7a94badfecd929028fb61d365c799dbc01c2833c | add setUpClass and tearDownClass in tests.base | hrbonz/django-flexisettings | tests/base.py | tests/base.py | import unittest2
import sys
import os
import shutil
from django.core.management import call_command
class BaseTestCase(unittest2.TestCase):
    """Shared fixture: generates a throwaway Django project once per test
    class and points the working directory / sys.path at it."""

    # Directory the scratch project is generated into.
    test_folder = 't'
    # Project name passed to `startproject`.
    test_project = 'testProject'
    # Cleared after each test so a settings override cannot leak between tests.
    envvar = 'FLEXI_WRAPPED_MODULE'

    @classmethod
    def setUpClass(cls):
        # create test folder
        os.mkdir(cls.test_folder)
        # create a sample project
        call_command('startproject', cls.test_project, cls.test_folder)
        # add this location to sys.path for import
        sys.path.insert(0, os.path.join(os.getcwd(), cls.test_folder))

    def setUp(self):
        # change current directory to test folder
        os.chdir(self.test_folder)

    def tearDown(self):
        # Drop the wrapped-module override, then return to the parent dir.
        if self.envvar in os.environ:
            os.environ.pop(self.envvar)
        os.chdir('..')

    @classmethod
    def tearDownClass(cls):
        # Remove the generated project tree.
        # NOTE(review): the sys.path entry added in setUpClass is never
        # removed -- confirm that is intentional.
        shutil.rmtree(cls.test_folder)
| import unittest2
import sys
import os
import shutil
from django.core.management import call_command
class BaseTestCase(unittest2.TestCase):
test_folder = 't'
test_project = 'testProject'
envvar = 'FLEXI_WRAPPED_MODULE'
def setUp(self):
# create test folder
os.mkdir(self.test_folder)
# create a sample project
call_command('startproject', self.test_project, self.test_folder)
# change current directory to test folder
os.chdir(self.test_folder)
# add this location to sys.path for import
sys.path.insert(0, os.getcwd())
def tearDown(self):
if self.envvar in os.environ:
os.environ.pop(self.envvar)
sys.path.pop(0)
os.chdir('..')
shutil.rmtree(self.test_folder)
| bsd-3-clause | Python |
f56c3df433906d89d35762b423c995cd779a9211 | Fix the run step. | Kraus-Lab/active-enhancers,Kraus-Lab/active-enhancers,Kraus-Lab/active-enhancers | tests/base.py | tests/base.py | import subprocess
import pytest
def check_docker_output(tool):
    """Execute *tool* inside the ``active-enhancers`` Docker image.

    Returns the ``(stdout, stderr)`` tuple from the finished process;
    stderr is folded into stdout, so the second element is always None.
    """
    cmd = 'docker run active-enhancers ' + tool
    proc = subprocess.Popen(
        cmd,
        shell=True,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
    )
    return proc.communicate()
| import subprocess
import pytest
def check_docker_output(tool):
command = 'docker run --rm -ti active-enhancers ' + tool
process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = process.communicate()
return output
| mit | Python |
dc5868414fc7c5ed22978f524463ffbef6d6d392 | Make cookies lazy to evaluate Give access to cookie Morsels | funkybob/antfarm | antfarm/request.py | antfarm/request.py |
from http.cookies import SimpleCookie
from urllib.parse import parse_qs
from .utils.functional import buffered_property
import logging
log = logging.getLogger(__name__)
DEFAULT_ENCODING = 'ISO-8859-1'
class Request(object):
    """Read-only wrapper around a WSGI ``environ`` dict.

    Exposes the request path, method, parsed content type, cookies and
    query/body data via eager attributes and lazily-computed
    ``buffered_property`` accessors.
    """

    def __init__(self, environ):
        self.environ = environ
        # XXX Handle encoding
        self.path = environ.get('PATH_INFO', '/')
        self.method = environ['REQUEST_METHOD']
        self.content_type, self.content_params = self.parse_content_type()
        self.data = self.parse_query_data()

    @buffered_property
    def query_data(self):
        """Parsed query-string parameters ({name: [values]})."""
        # BUG FIX: previously read the bare name ``environ`` (a NameError
        # at runtime) -- the environ dict lives on the instance.
        return parse_qs(
            self.environ.get('QUERY_STRING', ''),
            keep_blank_values=True
        )

    @buffered_property
    def raw_cookies(self):
        '''Raw access to cookies (a SimpleCookie, or {} when none sent).'''
        cookie_data = self.environ.get('HTTP_COOKIE', '')
        if not cookie_data:
            return {}
        cookies = SimpleCookie()
        cookies.load(cookie_data)
        return cookies

    @buffered_property
    def cookies(self):
        '''Simplified Cookie access: a plain {name: value} mapping.'''
        # BUG FIX: previously iterated an undefined name ``c``; derive the
        # mapping from the parsed raw_cookies instead.  Works for both the
        # empty-dict and SimpleCookie return shapes of raw_cookies above.
        raw = self.raw_cookies
        return {
            key: raw[key].value
            for key in raw.keys()
        }

    def parse_query_data(self):
        """Parse a POST body as form-encoded data.

        NOTE(review): implicitly returns None for non-POST requests --
        callers may rely on that, so the behaviour is left unchanged.
        """
        if self.method == 'POST':
            # Should test content type
            size = int(self.environ.get('CONTENT_LENGTH', 0))
            if not size:
                return {}
            return parse_qs(self.environ['wsgi.input'].read(size))

    def parse_content_type(self):
        """Split CONTENT_TYPE into (media type, {param: value})."""
        content_type, _, params = self.environ.get('CONTENT_TYPE', '').partition(';')
        content_params = {}
        for param in params.split(';'):
            k, _, v = param.strip().partition('=')
            content_params[k] = v
        return content_type, content_params
|
from http.cookies import SimpleCookie
from urllib.parse import parse_qs
from .utils.functional import buffered_property
import logging
log = logging.getLogger(__name__)
DEFAULT_ENCODING = 'ISO-8859-1'
class Request(object):
def __init__(self, environ):
self.environ = environ
# XXX Handle encoding
self.path = environ.get('PATH_INFO', '/')
self.method = environ['REQUEST_METHOD']
self.content_type, self.content_params = self.parse_content_type()
self.cookies = self.parse_cookies()
self.data = self.parse_query_data()
@buffered_property
def query_data(self):
return parse_qs(
environ.get('QUERY_STRING', ''),
keep_blank_values=True
)
def parse_cookies(self):
cookies = self.environ.get('HTTP_COOKIE', '')
if cookies == '':
return {}
else:
c = SimpleCookie()
c.load(cookies)
return {
key: c.get(key).value
for key in c.keys()
}
def parse_query_data(self):
if self.method == 'POST':
# Should test content type
size = int(self.environ.get('CONTENT_LENGTH', 0))
if not size:
return {}
return parse_qs(self.environ['wsgi.input'].read(size))
def parse_content_type(self):
content_type, _, params = self.environ.get('CONTENT_TYPE', '').partition(';')
content_params = {}
for param in params.split(';'):
k, _, v = param.strip().partition('=')
content_params[k] = v
return content_type, content_params
| mit | Python |
88f02fbea11390ec8866c29912ed8beadc31e736 | Exclude preprints from queryset from account/register in the admin app. | adlius/osf.io,mfraezz/osf.io,cslzchen/osf.io,mfraezz/osf.io,baylee-d/osf.io,felliott/osf.io,mfraezz/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,pattisdr/osf.io,aaxelb/osf.io,adlius/osf.io,adlius/osf.io,felliott/osf.io,felliott/osf.io,mfraezz/osf.io,saradbowman/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,mattclark/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,baylee-d/osf.io,cslzchen/osf.io,felliott/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,mattclark/osf.io,aaxelb/osf.io,saradbowman/osf.io,cslzchen/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io | admin/common_auth/forms.py | admin/common_auth/forms.py | from __future__ import absolute_import
from django import forms
from django.db.models import Q
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
    """Credentials form for the admin-app login page."""
    email = forms.CharField(label=u'Email', required=True)
    # render_value=False: the password is never echoed back into the form.
    password = forms.CharField(
        label=u'Password',
        widget=forms.PasswordInput(render_value=False),
        required=True
    )
class UserRegistrationForm(forms.Form):
    """ A form that finds an existing OSF User, and grants permissions to that
    user so that they can use the admin app"""
    # OSF ids are exactly five characters long.
    osf_id = forms.CharField(required=True, max_length=5, min_length=5)
    # TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
    # Object-permission groups (collections_* / preprint_*) are excluded so
    # only admin-style groups can be granted here.
    group_perms = forms.ModelMultipleChoiceField(
        queryset=Group.objects.exclude(Q(name__startswith='collections_') | Q(name__startswith='preprint_')),
        required=False,
        widget=forms.CheckboxSelectMultiple
    )
class DeskUserForm(forms.ModelForm):
    """Edit the Desk API credentials stored on an AdminProfile."""
    class Meta:
        model = AdminProfile
        fields = ['desk_token', 'desk_token_secret']
| from __future__ import absolute_import
from django import forms
from django.contrib.auth.models import Group
from osf.models import AdminProfile
class LoginForm(forms.Form):
email = forms.CharField(label=u'Email', required=True)
password = forms.CharField(
label=u'Password',
widget=forms.PasswordInput(render_value=False),
required=True
)
class UserRegistrationForm(forms.Form):
""" A form that finds an existing OSF User, and grants permissions to that
user so that they can use the admin app"""
osf_id = forms.CharField(required=True, max_length=5, min_length=5)
# TODO: Moving to guardian, find a better way to distinguish "admin-like" groups from object permission groups
group_perms = forms.ModelMultipleChoiceField(
queryset=Group.objects.exclude(name__startswith='collections_'),
required=False,
widget=forms.CheckboxSelectMultiple
)
class DeskUserForm(forms.ModelForm):
class Meta:
model = AdminProfile
fields = ['desk_token', 'desk_token_secret']
| apache-2.0 | Python |
378100d978a2bb91bb1e18ef81776ae0d5785a1c | Update Ascii.py | corpnewt/CorpBot.py,corpnewt/CorpBot.py | Cogs/Ascii.py | Cogs/Ascii.py | from discord.ext import commands
from Cogs import Utils, DisplayName, PickList, FuzzySearch, Message
import pyfiglet
def setup(bot):
# Add the bot
bot.add_cog(Ascii(bot))
class Ascii(commands.Cog):
    """Discord cog exposing ASCII-art text rendering via pyfiglet."""

    def __init__(self, bot):
        self.bot = bot
        global Utils, DisplayName
        Utils = self.bot.get_cog("Utils")
        DisplayName = self.bot.get_cog("DisplayName")
        # Cache the figlet font names once; used for listing and validation.
        self.font_list = pyfiglet.FigletFont.getFonts()

    @commands.command(aliases=["font","fonts","fontlist"])
    async def asciifonts(self, ctx, search_term = None):
        """Lists the available ascii fonts."""
        if search_term is None:
            # No search term: page through the full font list.
            return await PickList.PagePicker(
                title="Available ASCII Fonts ({:,} total)".format(len(self.font_list)),
                description="\n".join(["{}. {}".format(str(i).rjust(3),x) for i,x in enumerate(self.font_list,start=1)]),
                d_header="```\n",
                d_footer="\n```",
                ctx=ctx
            ).pick()
        # Let's see if it's a full match
        if search_term.lower() in self.font_list:
            return await Message.Embed(
                title="Font Exists",
                description="`{}` is in the font list.".format(search_term.lower()),
                color=ctx.author
            ).send(ctx)
        # Let's get 3 close matches
        font_match = FuzzySearch.search(search_term.lower(), self.font_list)
        font_mess = "\n".join(["`└─ {}`".format(x["Item"]) for x in font_match])
        # NOTE(review): "Not Fount" looks like a typo for "Not Found" in the
        # user-facing title; the runtime string is left untouched here.
        await Message.Embed(
            title="Font \"{}\" Not Fount".format(search_term),
            fields=[{"name":"Close Font Matches:","value":font_mess}],
            color=ctx.author
        ).send(ctx)

    @commands.command()
    async def ascii(self, ctx, *, text : str = None):
        """Beautify some text."""
        if text is None: return await ctx.channel.send('Usage: `{}ascii [font (optional)] [text]`'.format(ctx.prefix))
        font = None
        # Split text by space - and see if the first word is a font
        parts = text.split()
        if len(parts) > 1 and parts[0].lower() in self.font_list:
            # We got a font!
            font = parts[0]
            text = " ".join(parts[1:])
        output = pyfiglet.figlet_format(text,font=font or pyfiglet.DEFAULT_FONT)[:1993] # Limit to 2000 chars
        if not output: return await ctx.send("I couldn't beautify that text :(")
        await ctx.send("```\n{}```".format(output))
| from discord.ext import commands
from Cogs import Utils, DisplayName, PickList, FuzzySearch, Message
import pyfiglet
def setup(bot):
# Add the bot
bot.add_cog(Ascii(bot))
class Ascii(commands.Cog):
def __init__(self, bot):
self.bot = bot
global Utils, DisplayName
Utils = self.bot.get_cog("Utils")
DisplayName = self.bot.get_cog("DisplayName")
self.font_list = pyfiglet.FigletFont.getFonts()
@commands.command(aliases=["font","fonts","fontlist"])
async def asciifonts(self, ctx, search_term = None):
"""Lists the available ascii fonts."""
if search_term is None:
return await PickList.PagePicker(
title="Available ASCII Fonts ({:,} total)".format(len(self.font_list)),
description="\n".join(["{}. {}".format(str(i).rjust(3),x) for i,x in enumerate(self.font_list,start=1)]),
d_header="```\n",
d_footer="\n```",
ctx=ctx
).pick()
# Let's see if it's a full match
if search_term.lower() in self.font_list:
return await Message.Embed(
title="Font Exists",
description="`{}` is in the font list.".format(search_term.lower()),
color=ctx.author
).send(ctx)
# Let's get 3 close matches
font_match = FuzzySearch.search(search_term.lower(), self.font_list)
font_mess = "\n".join(["`└─ {}`".format(x["Item"]) for x in font_match])
await Message.Embed(
title="Font \"{}\" Not Fount".format(search_term),
fields=[{"name":"Close Font Matches:","value":font_mess}],
color=ctx.author
).send(ctx)
@commands.command(pass_context=True, no_pm=True)
async def ascii(self, ctx, *, text : str = None):
"""Beautify some text."""
if text is None: return await ctx.channel.send('Usage: `{}ascii [font (optional)] [text]`'.format(ctx.prefix))
font = None
# Split text by space - and see if the first word is a font
parts = text.split()
if len(parts) > 1 and parts[0].lower() in self.font_list:
# We got a font!
font = parts[0]
text = " ".join(parts[1:])
output = pyfiglet.figlet_format(text,font=font if font else pyfiglet.DEFAULT_FONT)[:1993] # Limit to 2000 chars
if not output: return await ctx.send("I couldn't beautify that text :(")
await ctx.send("```\n{}```".format(output))
| mit | Python |
756821bb357cb07d979b556dca2b9cd7324e8ff3 | add correct way to handle form usage | tassolom/twq-app,tassolom/twq-app,tassolom/twq-app,teamworkquality/twq-app,teamworkquality/twq-app,teamworkquality/twq-app,teamworkquality/twq-app,tassolom/twq-app | api/forms/views.py | api/forms/views.py | from django.core.exceptions import ObjectDoesNotExist
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import Form
from .serializers import FormSerializer
class FormsView(APIView):
    """List/retrieve endpoint for Form records (creation is handled by post)."""
    queryset = Form.objects.all()

    def get(self, request, format=None, **kwargs):
        # Single-object fetch when the URL captured a form_id, else list-all.
        # NOTE(review): both branches currently return an empty body, and the
        # except clause is unreachable (Response(None) cannot raise
        # ObjectDoesNotExist) -- the actual lookup appears unimplemented.
        if kwargs.get('form_id'):
            try:
                return Response(None)
            except ObjectDoesNotExist:
                return Response({"error": "could not find user"} , status=400)
        else:
            return Response(None)
def post(self, request, format=None, **kwargs):
response = Response()
serializer = FormSerializer(data=request.data)
if serializer.is_valid():
serializer.save()
response.status_code = 201
response.data = serializer.validated_data
else:
response.status_code = 400
return response | from django.core.exceptions import ObjectDoesNotExist
from rest_framework.views import APIView
from rest_framework.response import Response
from .models import Form
class FormsView(APIView):
queryset = Form.objects.all()
def get(self, request, format=None, **kwargs):
if kwargs.get('form_id'):
try:
return Response(None)
except ObjectDoesNotExist:
return Response({"error": "could not find user"} , status=400)
else:
return Response(None)
def post(self, request, format=None, **kwargs):
response = Response()
new_form = Form(**kwargs)
new_form.save()
if new_form:
response.status_code = 201
else:
response.status_code = 400
return response | mit | Python |
00eae4ad9eea6e161a4c6dee70f49f3946cf8916 | Fix database creation. | AKVorrat/ueberwachungspaket.at,AKVorrat/ueberwachungspaket.at,AKVorrat/ueberwachungspaket.at | ueberwachungspaket/__init__.py | ueberwachungspaket/__init__.py | from flask import Flask
from config import *
from database import init_db, db_session
from .representatives import Representatives
app = Flask(__name__)
app.config.from_pyfile("config.py")
app.config["TWILIO_NUMBERS"] = TWILIO_NUMBERS
# Load representative data and create the database schema at import time
# (init_db runs unconditionally, not only under __main__).
reps = Representatives()
init_db()
if __name__ == "__main__":
    app.run()
# Dispose of the scoped SQLAlchemy session at the end of each app context.
@app.teardown_appcontext
def shutdown_session(exception=None):
    db_session.remove()
# NOTE(review): imported after `app` is created, presumably to avoid a
# circular import -- views needs this package's module-level objects.
from . import views
| from flask import Flask
from config import *
from database import init_db, db_session
from .representatives import Representatives
app = Flask(__name__)
app.config.from_pyfile("config.py")
app.config["TWILIO_NUMBERS"] = TWILIO_NUMBERS
reps = Representatives()
if __name__ == "__main__":
app.run()
initdb()
@app.teardown_appcontext
def shutdown_session(exception=None):
db_session.remove()
from . import views
| mit | Python |
9b1d616ac857902dcb25f0cccc1203435d4852cb | Bump to 0.3.0 | aio-libs/aiohttp_jinja2 | aiohttp_jinja2/__init__.py | aiohttp_jinja2/__init__.py | import asyncio
import functools
import jinja2
from aiohttp import web
__version__ = '0.3.0'
__all__ = ('setup', 'get_env', 'render_template', 'template')
APP_KEY = 'aiohttp_jinja2_environment'
def setup(app, *args, app_key=APP_KEY, **kwargs):
    """Create a jinja2 Environment, register it on *app* under *app_key*
    and return it.  All extra arguments are forwarded to Environment()."""
    env = jinja2.Environment(*args, **kwargs)
    app[app_key] = env
    return env
def get_env(app, *, app_key=APP_KEY):
    """Return the Environment previously stored by setup(), or None."""
    return app.get(app_key)
def _render_template(template_name, request, response, context, *,
                     app_key, encoding):
    """Render *template_name* with *context* into *response* in place.

    Raises HTTPInternalServerError if the jinja2 environment was never
    registered on the app, or if the template cannot be found.
    """
    env = request.app.get(app_key)
    if env is None:
        raise web.HTTPInternalServerError(
            text=("Template engine is not initialized, "
                  "call aiohttp_jinja2.setup(app_key={}) first"
                  "".format(app_key)))
    try:
        template = env.get_template(template_name)
    except jinja2.TemplateNotFound:
        raise web.HTTPInternalServerError(
            text="Template {} not found".format(template_name))
    text = template.render(context)
    # Populate the response in place; the text setter applies the charset.
    response.content_type = 'text/html'
    response.charset = encoding
    response.text = text
def render_template(template_name, request, context, *,
                    app_key=APP_KEY, encoding='utf-8'):
    """Render *template_name* with *context* and return a fresh Response."""
    resp = web.Response()
    _render_template(template_name, request, resp, context,
                     app_key=app_key, encoding=encoding)
    return resp
def template(template_name, *, app_key=APP_KEY, encoding='utf-8', status=200):
    """Decorator for web handlers: render the context dict the handler
    returns with *template_name* and produce a web.Response.

    The wrapped handler receives the request as its last positional
    argument and must return a template context mapping.
    """
    def wrapper(func):
        # Hoisted out of the per-request path: whether *func* is a
        # coroutine function is fixed at decoration time, so resolve it
        # once here instead of re-checking on every call.
        if asyncio.iscoroutinefunction(func):
            coro = func
        else:
            coro = asyncio.coroutine(func)

        @asyncio.coroutine
        @functools.wraps(func)
        def wrapped(*args):
            response = web.Response()
            context = yield from coro(*args)
            request = args[-1]
            _render_template(template_name, request, response, context,
                             app_key=app_key, encoding=encoding)
            response.set_status(status)
            return response
        return wrapped
    return wrapper
| import asyncio
import functools
import jinja2
from aiohttp import web
__version__ = '0.2.1'
__all__ = ('setup', 'get_env', 'render_template', 'template')
APP_KEY = 'aiohttp_jinja2_environment'
def setup(app, *args, app_key=APP_KEY, **kwargs):
env = jinja2.Environment(*args, **kwargs)
app[app_key] = env
return env
def get_env(app, *, app_key=APP_KEY):
return app.get(app_key)
def _render_template(template_name, request, response, context, *,
app_key, encoding):
env = request.app.get(app_key)
if env is None:
raise web.HTTPInternalServerError(
text=("Template engine is not initialized, "
"call aiohttp_jinja2.setup(app_key={}) first"
"".format(app_key)))
try:
template = env.get_template(template_name)
except jinja2.TemplateNotFound:
raise web.HTTPInternalServerError(
text="Template {} not found".format(template_name))
text = template.render(context)
response.content_type = 'text/html'
response.charset = encoding
response.text = text
def render_template(template_name, request, context, *,
app_key=APP_KEY, encoding='utf-8'):
response = web.Response()
_render_template(template_name, request, response, context,
app_key=app_key, encoding=encoding)
return response
def template(template_name, *, app_key=APP_KEY, encoding='utf-8', status=200):
def wrapper(func):
@asyncio.coroutine
@functools.wraps(func)
def wrapped(*args):
if asyncio.iscoroutinefunction(func):
coro = func
else:
coro = asyncio.coroutine(func)
response = web.Response()
context = yield from coro(*args)
request = args[-1]
_render_template(template_name, request, response, context,
app_key=app_key, encoding=encoding)
response.set_status(status)
return response
return wrapped
return wrapper
| apache-2.0 | Python |
d12fecd2eb012862b8d7654c879dccf5ccce833f | Enable Python RSA backend as a fallback. | mpdavis/python-jose | jose/backends/__init__.py | jose/backends/__init__.py |
# RSA backend selection: prefer the PyCrypto-based implementation, fall
# back to cryptography, and finally to the pure-Python rsa backend.
try:
    from jose.backends.pycrypto_backend import RSAKey
except ImportError:
    try:
        from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
    except ImportError:
        from jose.backends.rsa_backend import RSAKey
# EC backend selection: cryptography when available, otherwise ecdsa.
try:
    from jose.backends.cryptography_backend import CryptographyECKey as ECKey
except ImportError:
    from jose.backends.ecdsa_backend import ECDSAECKey as ECKey
|
try:
from jose.backends.pycrypto_backend import RSAKey
except ImportError:
from jose.backends.cryptography_backend import CryptographyRSAKey as RSAKey
try:
from jose.backends.cryptography_backend import CryptographyECKey as ECKey
except ImportError:
from jose.backends.ecdsa_backend import ECDSAECKey as ECKey
| mit | Python |
6e9deb97f951f0ce2ab4e7710f9cc79af6532dd1 | test connection | vmthunder/virtman | tests/demo.py | tests/demo.py | # -*- coding: utf-8 -*-
import os
import mock
import tests
from tests import test_demo
from tests.test_demo import FunDemo
from tests import base
from oslo_concurrency import processutils as putils
# Python 2 scratch script demonstrating mock.patch name-binding scope.
class MyDemo():
    def show(self):
        print 'funDemo'
class FakeDemo():
    def show(self):
        print 'FakeDemo'
# Unpatched: the FunDemo imported at the top of this module.
fun = FunDemo()
print fun
# Patching the attribute on tests.test_demo does NOT rebind the FunDemo
# name already imported into this module, so this still constructs the
# original class.
with mock.patch('tests.test_demo.FunDemo', FakeDemo):
    fun = FunDemo()
    print fun
    fun.show()
# Patching __main__.MyDemo DOES rebind the module-global name looked up
# here, so show() prints 'FakeDemo'.
with mock.patch('__main__.MyDemo', FakeDemo):
    fun = MyDemo()
    print fun
    fun.show()
# Decorator form: the patch is active only while test() runs.
# NOTE(review): test() is never invoked in this version of the script.
@mock.patch('tests.test_demo.FunDemo', FakeDemo)
def test():
    tests.test_demo.FunDemo().show()
print "hello" | # -*- coding: utf-8 -*-
import os
import mock
import tests
from tests import test_demo
from tests.test_demo import FunDemo
from tests import base
from oslo_concurrency import processutils as putils
class MyDemo():
def show(self):
print 'funDemo'
class FakeDemo():
def show(self):
print 'FakeDemo'
fun = FunDemo()
print fun
with mock.patch('tests.test_demo.FunDemo', FakeDemo):
fun = FunDemo()
print fun
fun.show()
with mock.patch('__main__.MyDemo', FakeDemo):
fun = MyDemo()
print fun
fun.show()
@mock.patch('tests.test_demo.FunDemo', FakeDemo)
def test():
tests.test_demo.FunDemo().show()
test() | apache-2.0 | Python |
8d6c275b771c73c3b36b38c0511e40f029bb1cfd | Refactor tests | Hipo/university-domains-list | tests/main.py | tests/main.py | import json
import unittest
import requests
class DomainsTests(unittest.TestCase):
    """Schema checks for the universities dataset."""

    def test_json_is_valid(self):
        # The dataset must parse as JSON and every record must carry the
        # full set of expected keys (the values themselves are not checked).
        with open("../world_universities_and_domains.json") as json_file:
            valid_json = json.load(json_file)
        for university in valid_json:
            self.assertIn("name", university)
            self.assertIn("domains", university)
            self.assertIn("web_pages", university)
            self.assertIn("alpha_two_code", university)
            self.assertIn("state-province", university)
            self.assertIn("country", university)
def check_is_alive():
    """ check url then if url isn't alive, add to file """
    # Manual helper (not a unittest): probe every listed web page and
    # report connection failures.  NOTE(review): despite the docstring,
    # nothing is written back to the file -- dead URLs are only printed.
    with open('../world_universities_and_domains.json') as json_raw:
        universities = json.load(json_raw)
    for university in universities[:]:
        try:
            for web_page in university["web_pages"]:
                print(web_page)
                requests.get(web_page, allow_redirects=False, timeout=10.0)
        except requests.exceptions.ConnectionError as exc:
            print('- Website doesn\'t exists: ', exc)
if __name__ == '__main__':
unittest.main(verbosity=2)
| import json
import unittest
import requests
class DomainsTests(unittest.TestCase):
def test_json_is_valid(self):
with open("../world_universities_and_domains.json") as json_file:
valid_json = json.load(json_file)
for university in valid_json:
university["name"]
university["domains"]
university["web_pages"]
university["alpha_two_code"]
university["state-province"]
university["country"]
def check_is_alive():
""" check url then if url isn't alive, add to file """
with open('../world_universities_and_domains.json') as json_raw:
universities = json.load(json_raw)
for university in universities[:]:
try:
for web_page in university["web_pages"]:
print(web_page)
requests.get(web_page, allow_redirects=False, timeout=10.0)
except requests.exceptions.ConnectionError as exc:
print('- Website doesn\'t exists: ', exc)
if __name__ == '__main__':
unittest.main(verbosity=2)
| mit | Python |
7d6559a450b52bae3e402bacb41fc1a7a4d77a77 | Make class declarations consistent. | HubbeKing/Hubbot_Twisted | IRCResponse.py | IRCResponse.py | from enumType import enum
ResponseType = enum('Say', 'Do', 'Notice', 'Raw')
class IRCResponse(object):
def __init__(self, messageType, response, target):
self.Type = messageType
try:
self.Response = unicode(response, 'utf-8')
except TypeError: # Already utf-8?
self.Response = response
self.Target = target
| from enumType import enum
ResponseType = enum('Say', 'Do', 'Notice', 'Raw')
class IRCResponse:
def __init__(self, messageType, response, target):
self.Type = messageType
try:
self.Response = unicode(response, 'utf-8')
except TypeError: # Already utf-8?
self.Response = response
self.Target = target | mit | Python |
3bdf9f01fa3e0454a3e28ab58886742a235f5215 | change config.py to use properties | googleinterns/cloud-monitoring-notification-delivery-integration-sample-code,googleinterns/cloud-monitoring-notification-delivery-integration-sample-code | config.py | config.py | # Copyright 2020 Google, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import secrets
"""Flask config."""
class Config:
    """Base config."""
    # Production-safe defaults; DevConfig overrides these below.
    FLASK_ENV = 'production'
    TESTING = False
    DEBUG = False
class ProdConfig(Config):
    """Production settings.

    Philips Hue credentials are fetched lazily from Google Secret Manager
    on first access and cached on the instance thereafter.
    """

    def __init__(self):
        # Caches backing the lazy property accessors below.
        self._philips_hue_ip = None
        self._philips_hue_username = None

    @property
    def PHILIPS_HUE_IP(self):
        if self._philips_hue_ip is None:
            ip_secret = secrets.GoogleSecretManagerSecret(
                'alertmanager-2020-intern-r', 'philips_ip')
            self._philips_hue_ip = ip_secret.get_secret_value()
        return self._philips_hue_ip

    @property
    def PHILIPS_HUE_USERNAME(self):
        if self._philips_hue_username is None:
            username_secret = secrets.GoogleSecretManagerSecret(
                'alertmanager-2020-intern-r', 'philips_username')
            self._philips_hue_username = username_secret.get_secret_value()
        return self._philips_hue_username
class DevConfig(Config):
    """Development settings: debug on, secrets read lazily from
    environment variables instead of Google Secret Manager."""
    FLASK_ENV = 'development'
    DEBUG = True
    TESTING = True

    def __init__(self):
        # Caches backing the lazy property accessors below.
        self._philips_hue_ip = None
        self._philips_hue_username = None

    @property
    def PHILIPS_HUE_IP(self):
        # Fetch once from the environment, then serve the cached value.
        if self._philips_hue_ip is None:
            secret = secrets.EnvironmentVariableSecret('PHILIPS_HUE_IP')
            self._philips_hue_ip = secret.get_secret_value()
        return self._philips_hue_ip

    @property
    def PHILIPS_HUE_USERNAME(self):
        if self._philips_hue_username is None:
            secret = secrets.EnvironmentVariableSecret('PHILIPS_HUE_USERNAME')
            self._philips_hue_username = secret.get_secret_value()
        return self._philips_hue_username
return self._philips_hue_username
# Name -> config-class registry; unknown environments fall back to prod.
configs = {
    'prod': ProdConfig,
    'dev': DevConfig,
    'default': ProdConfig
}
| # Copyright 2020 Google, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import secrets
"""Flask config."""
class Config:
"""Base config."""
FLASK_ENV = 'production'
TESTING = False
DEBUG = False
class ProdConfig(Config):
PHILIPS_HUE_IP = secrets.GoogleSecretManagerSecret(
'alertmanager-2020-intern-r', 'philips_ip')
PHILIPS_HUE_USERNAME = secrets.GoogleSecretManagerSecret(
'alertmanager-2020-intern-r', 'philips_username')
def __init__(self):
self.PHILIPS_HUE_IP = self.PHILIPS_HUE_IP.get_secret_value()
self.PHILIPS_HUE_USERNAME = self.PHILIPS_HUE_USERNAME.get_secret_value()
class DevConfig(Config):
FLASK_ENV = 'development'
DEBUG = True
TESTING = True
PHILIPS_HUE_IP = secrets.EnvironmentVariableSecret('PHILIPS_HUE_IP')
PHILIPS_HUE_USERNAME = secrets.EnvironmentVariableSecret('PHILIPS_HUE_USERNAME')
def __init__(self):
self.PHILIPS_HUE_IP = self.PHILIPS_HUE_IP.get_secret_value()
self.PHILIPS_HUE_USERNAME = self.PHILIPS_HUE_USERNAME.get_secret_value()
configs = {
'prod': ProdConfig,
'dev': DevConfig,
'default': ProdConfig
}
| apache-2.0 | Python |
fef337b4b161aaf30a9352dd5ae4200ec667c3c5 | Add ASSETS_AUTO_BUILD set to False and set LESS_BIN to location of lessc executable. | mbucknell/PubsWarehouse_UI,jkreft-usgs/PubsWarehouse_UI,ayan-usgs/PubsWarehouse_UI,USGS-CIDA/PubsWarehouse_UI,jkreft-usgs/PubsWarehouse_UI,mbucknell/PubsWarehouse_UI,USGS-CIDA/PubsWarehouse_UI,USGS-CIDA/PubsWarehouse_UI,mbucknell/PubsWarehouse_UI,jkreft-usgs/PubsWarehouse_UI,USGS-CIDA/PubsWarehouse_UI,mbucknell/PubsWarehouse_UI,jkreft-usgs/PubsWarehouse_UI,ayan-usgs/PubsWarehouse_UI,ayan-usgs/PubsWarehouse_UI,ayan-usgs/PubsWarehouse_UI | config.py | config.py | from datetime import timedelta
import os
import sys
PROJECT_HOME = os.path.dirname(__file__)
DEBUG = False
JS_DEBUG = False
SECRET_KEY = ''
VERIFY_CERT = True
COLLECT_STATIC_ROOT = 'static/'
COLLECT_STORAGE = 'flask.ext.collect.storage.file'
MAIL_USERNAME = 'PUBSV2_NO_REPLY'
PUB_URL = ''
LOOKUP_URL = ''
SUPERSEDES_URL = ''
BROWSE_URL = ''
BASE_SEARCH_URL = ''
BASE_CITATION_URL = ''
BROWSE_REPLACE = ''
RECAPTCHA_PUBLIC_KEY = '6LfisP0SAAAAAKcg5-a5bEeV4slFfQZr5_7XfqXf' # using google's recaptcha API
RECAPTCHA_PRIVATE_KEY = '' # see RECAPTCHA_PRIVATE_KEY in instance/config.py
WSGI_STR = ''
GOOGLE_ANALYTICS_CODE = ''
JSON_LD_ID_BASE_URL = ''
GOOGLE_WEBMASTER_TOOLS_CODE = 'ertoifsdbnerngdjnasdw9rsdn' # random string, set real code in instance/config.py on prod
ANNOUNCEMENT_BLOCK = ''
LOGGING_ON = False
REPLACE_PUBS_WITH_PUBS_TEST = False
ROBOTS_WELCOME = False
REMEMBER_COOKIE_NAME = 'remember_token'
REMEMBER_COOKIE_DURATION = timedelta(days=1)
AUTH_ENDPOINT_URL = ''
PREVIEW_ENDPOINT_URL = ''
LOGIN_PAGE_PATH = ''
CACHE_CONFIG = {'CACHE_TYPE': 'null'}
REDIS_CONFIG = ''
SCIENCEBASE_PARENT_UUID = '' #set to the sciecebase folder id for the core publications warehouse SB folder
# set to solve problem with backgrid-paginator
BOWER_TRY_MINIFIED = False
#Config for Flask-Assets
ASSETS_DEBUG = False # to disable compression of js and css set to True
ASSETS_AUTO_BUILD = False #Local developers will typically set this to True in their instance/config.py.
LESS_BIN = os.path.join(PROJECT_HOME, 'node_modules', 'less', 'bin', 'lessc')
CONTACT_RECIPIENTS = ['servicedesk@usgs.gov']
# Location of file containing the google analytics service account's JSON key.
GA_KEY_FILE_PATH = ''
GA_OAUTH2_SCOPE = 'https://www.googleapis.com/auth/analytics.readonly'
GA_PUBS_VIEW_ID = 'ga:20354817'
try:
from deploy_date import *
except ImportError:
pass
# variables used for testing purposes
nose_testing = sys.argv[0].endswith('nosetests') # returns True if 'nosetests' is a command line argument
if 'lettuce' in sys.argv[0]: # determine if a lettuce is being run
lettuce_testing = True
else:
lettuce_testing = False
if nose_testing or lettuce_testing:
WTF_CSRF_ENABLED = False
TESTING = True
BASE_SEARCH_URL = 'https://pubs-fake.er.usgs.gov/pubs-services/publication/'
PUB_URL = 'https://pubs-fake.er.usgs.gov/pubs-services/'
SUPERSEDES_URL = 'http://cida-eros-pubsfake.er.usgs.gov:8080/pubs2_ui/service/citation/json/extras?' | from datetime import timedelta
import sys
DEBUG = False
JS_DEBUG = False
SECRET_KEY = ''
VERIFY_CERT = True
COLLECT_STATIC_ROOT = 'static/'
COLLECT_STORAGE = 'flask.ext.collect.storage.file'
MAIL_USERNAME = 'PUBSV2_NO_REPLY'
PUB_URL = ''
LOOKUP_URL = ''
SUPERSEDES_URL = ''
BROWSE_URL = ''
BASE_SEARCH_URL = ''
BASE_CITATION_URL = ''
BROWSE_REPLACE = ''
RECAPTCHA_PUBLIC_KEY = '6LfisP0SAAAAAKcg5-a5bEeV4slFfQZr5_7XfqXf' # using google's recaptcha API
RECAPTCHA_PRIVATE_KEY = '' # see RECAPTCHA_PRIVATE_KEY in instance/config.py
WSGI_STR = ''
GOOGLE_ANALYTICS_CODE = ''
JSON_LD_ID_BASE_URL = ''
GOOGLE_WEBMASTER_TOOLS_CODE = 'ertoifsdbnerngdjnasdw9rsdn' # random string, set real code in instance/config.py on prod
ANNOUNCEMENT_BLOCK = ''
LOGGING_ON = False
REPLACE_PUBS_WITH_PUBS_TEST = False
ROBOTS_WELCOME = False
REMEMBER_COOKIE_NAME = 'remember_token'
REMEMBER_COOKIE_DURATION = timedelta(days=1)
AUTH_ENDPOINT_URL = ''
PREVIEW_ENDPOINT_URL = ''
LOGIN_PAGE_PATH = ''
CACHE_CONFIG = {'CACHE_TYPE': 'null'}
REDIS_CONFIG = ''
SCIENCEBASE_PARENT_UUID = '' #set to the sciecebase folder id for the core publications warehouse SB folder
# set to solve problem with backgrid-paginator
BOWER_TRY_MINIFIED = False
#Config for Flask-Assets
ASSETS_DEBUG = False # to disable compression of js and css set to True
CONTACT_RECIPIENTS = ['servicedesk@usgs.gov']
# Location of file containing the google analytics service account's JSON key.
GA_KEY_FILE_PATH = ''
GA_OAUTH2_SCOPE = 'https://www.googleapis.com/auth/analytics.readonly'
GA_PUBS_VIEW_ID = 'ga:20354817'
try:
from deploy_date import *
except ImportError:
pass
# variables used for testing purposes
nose_testing = sys.argv[0].endswith('nosetests') # returns True if 'nosetests' is a command line argument
if 'lettuce' in sys.argv[0]: # determine if a lettuce is being run
lettuce_testing = True
else:
lettuce_testing = False
if nose_testing or lettuce_testing:
WTF_CSRF_ENABLED = False
TESTING = True
BASE_SEARCH_URL = 'https://pubs-fake.er.usgs.gov/pubs-services/publication/'
PUB_URL = 'https://pubs-fake.er.usgs.gov/pubs-services/'
SUPERSEDES_URL = 'http://cida-eros-pubsfake.er.usgs.gov:8080/pubs2_ui/service/citation/json/extras?' | unlicense | Python |
d21b0898df0d745251735450bceebb341951e807 | Add SECRET_KEY | gordio/prom_test,gordio/prom_test | config.py | config.py | import os
from socket import gethostname
# Project relative -> absolute root path
_PROJECT_ROOT = os.path.realpath(os.path.dirname(__file__)).replace('\\', '/')
# Development hostnames, auto-switch to DEBUG mode
DEV_HOSTS = ('Sun', )
# Administrator auth
LOGIN = 'demo'
PASSWORD = 'demo'
SECRET_KEY = "random.get()"
# Auto switch to debug?
DEBUG = gethostname() in DEV_HOSTS
SQLALCHEMY_DATABASE_URI = "sqlite:///" + _PROJECT_ROOT + "/database.sqlite3"
| import os
from socket import gethostname
# Project relative -> absolute root path
_PROJECT_ROOT = os.path.realpath(os.path.dirname(__file__)).replace('\\', '/')
# Development hostnames, auto-switch to DEBUG mode
DEV_HOSTS = ('Sun', )
# Administrator auth
LOGIN = 'demo'
PASSWORD = 'demo'
# Auto switch to debug?
DEBUG = gethostname() in DEV_HOSTS
SQLALCHEMY_DATABASE_URI = "sqlite:///" + _PROJECT_ROOT + "/database.sqlite3"
| mit | Python |
5017f2bbdc22bafc3173c28f7cb076bffdb94201 | move configs from secret file to config.py | who-emro/meerkat_hermes,meerkat-code/meerkat_hermes,who-emro/meerkat_hermes,meerkat-code/meerkat_hermes | config.py | config.py | """
config.py
Configuration and settings
"""
import os
def from_env(env_var, default):
"""
Gets value from envrionment variable or uses default
Args:
env_var: name of envrionment variable
default: the default value
"""
new = os.environ.get(env_var)
if new:
return new
else:
return default
class Config(object):
DEBUG = False
TESTING = False
PRODUCTION = False
SUBSCRIBERS = 'hermes_subscribers'
SUBSCRIPTIONS = 'hermes_subscriptions'
LOG = 'hermes_log'
DB_URL = from_env("DB_URL", "http://dynamodb:8000")
ROOT_URL = from_env("MEERKAT_HERMES_ROOT", "/hermes")
SENDER = 'Notifications <notifications@emro.info>'
CHARSET = 'UTF-8'
FROM = 'Meerkat'
API_KEY = "test-hermes"
PUBLISH_RATE_LIMIT = int(from_env("MESSAGE RATE LIMIT", "40"))
CALL_TIMES = []
NEXMO_PUBLIC_KEY = ''
NEXMO_PRIVATE_KEY = ''
ERROR_REPORTING = ['error-reporting']
NOTIFY_DEV = ['notify-dev']
GCM_API_URL = "https://gcm-http.googleapis.com/gcm/send"
GCM_AUTHENTICATION_KEY = ''
GCM_ALLOWED_TOPICS = ['/topics/demo']
GCM_MOCK_RESPONSE_ONLY = 1
class Production(Config):
PRODUCTION = True
DB_URL = from_env("DB_URL", "https://dynamodb.eu-west-1.amazonaws.com")
GCM_MOCK_RESPONSE_ONLY = 0
GCM_ALLOWED_TOPICS = ['/topics/demo','/topics/jordan','/topics/madagascar','/topics/somalia','/topics/somaliland','/topics/puntland']
class Development(Config):
DEBUG = True
TESTING = True
class Testing(Config):
TESTING = True
API_KEY = ""
SUBSCRIBERS = 'test_hermes_subscribers'
SUBSCRIPTIONS = 'test_hermes_subscriptions'
LOG = 'test_hermes_log'
DB_URL = "https://dynamodb.eu-west-1.amazonaws.com"
GCM_MOCK_RESPONSE_ONLY = 0
| """
config.py
Configuration and settings
"""
import os
def from_env(env_var, default):
"""
Gets value from envrionment variable or uses default
Args:
env_var: name of envrionment variable
default: the default value
"""
new = os.environ.get(env_var)
if new:
return new
else:
return default
class Config(object):
DEBUG = False
TESTING = False
PRODUCTION = False
SUBSCRIBERS = 'hermes_subscribers'
SUBSCRIPTIONS = 'hermes_subscriptions'
LOG = 'hermes_log'
DB_URL = from_env("DB_URL", "http://dynamodb:8000")
ROOT_URL = from_env("MEERKAT_HERMES_ROOT", "/hermes")
SENDER = 'Notifications <notifications@emro.info>'
CHARSET = 'UTF-8'
FROM = 'Meerkat'
API_KEY = "test-hermes"
PUBLISH_RATE_LIMIT = int(from_env("MESSAGE RATE LIMIT", "40"))
CALL_TIMES = []
NEXMO_PUBLIC_KEY = ''
NEXMO_PRIVATE_KEY = ''
ERROR_REPORTING = ['error-reporting']
NOTIFY_DEV = ['notify-dev']
GCM_API_URL = "https://gcm-http.googleapis.com/gcm/send"
GCM_AUTHENTICATION_KEY = ''
GCM_ALLOWED_TOPICS = ['/topics/demo']
GCM_MOCK_RESPONSE_ONLY = 1
class Production(Config):
PRODUCTION = True
DB_URL = from_env("DB_URL", "https://dynamodb.eu-west-1.amazonaws.com")
GCM_DRYRUN = True
GCM_MOCK_RESPONSE_ONLY = 0
class Development(Config):
DEBUG = True
TESTING = True
class Testing(Config):
TESTING = True
API_KEY = ""
SUBSCRIBERS = 'test_hermes_subscribers'
SUBSCRIPTIONS = 'test_hermes_subscriptions'
LOG = 'test_hermes_log'
DB_URL = "https://dynamodb.eu-west-1.amazonaws.com"
GCM_MOCK_RESPONSE_ONLY = 0
| mit | Python |
dd2643db72ee1bb8560319565244c02e7ab7b6d2 | Allow configuring Gunicorn workers via env. | sgmap/mes-aides-ui,sgmap/mes-aides-ui,sgmap/mes-aides-ui,sgmap/mes-aides-ui | config.py | config.py | import os
bind = os.getenv('OPENFISCA_BIND_HOST', '127.0.0.1:2000')
timeout = 60
workers = os.getenv('OPENFISCA_WORKERS', 4)
| import os
bind = os.getenv('OPENFISCA_BIND_HOST', '127.0.0.1:2000')
timeout = 60
workers = 4
| agpl-3.0 | Python |
2d004e4132b07801d5e3246d114850b0e546c69f | Add news data scraper | gyanesh-m/Sentiment-analysis-of-financial-news-data | scrape_with_bs4.py | scrape_with_bs4.py | import requests
from bs4 import BeautifulSoup
import pandas as pd
import datetime
import os
import _pickle as pickle
def tracker(filename):
f = open("links/tracker.data",'a+')
f.write(filename+"\n")
f.close()
def tracked():
return [line.rstrip('\n') for line in open('links/tracker.data')]
def list_files(path):
# returns a list of names (with extension, without full path) of all files
# in folder path
# excludes temporary files also.
files = []
for name in os.listdir(path):
if os.path.isfile(os.path.join(path, name)) and name.find('tracker.data')==-1 and name.find('empty.txt')==-1 and name not in tracked() and name.find('~')==-1:
files.append(name)
else:
print("Skipping ",name)
return files
def make_directory(company):
try:
os.makedirs('content/'+company)
except Exception as e:
pass
def sc_reuters(bs):
data=[]
d=bs.find_all(id='article-text')
content=[i.get_text() for i in d]
n=content[-1].rfind("(")
content[-1]=content[-1][:n]
data.extend(content)
return data
def sc_thehindu(bs):
t=[]
temp=bs.select('div > p')
temp2=[j.get_text() for i,j in enumerate(temp) if i<len(temp)-3]
t.extend(temp2)
return t
def sc_econt(bs):
t=[]
data=bs.find_all(class_='Normal')
t.append([i.get_text() for i in data])
return t
NEWS={'reuters.com':sc_reuters,'thehindu.com':sc_thehindu,'economictimes.indiatimes':sc_econt}
for file in list_files('links/'):
print(file)
company = file.split('_')[2]
links = [line.rstrip('\n') for line in open('links/'+file)]
webp=file.split('_')[1]
print(webp)
b = {}
date = []
content = []
for url in links:
c,d= url.split('::')
r = requests.get(d)
print("Scraping url ",d)
soup = BeautifulSoup(r.content,"html.parser")
a=NEWS[webp](soup)
#head = soup.find_all('h1')
# for x in head:
# a.append(x.text)
str1 = ''.join(a)
c = datetime.datetime.strptime(c, '%d-%b-%Y')
date.append(c)
content.append(str1)
temp = {c:str1}
#print(url)
b.update(temp)
# with open('scraped_data.data', 'w', encoding='utf-8') as f:
# print(b, file=f)
# import json
# with open('content/result_'+file.split('data')[0]+'.json', 'w') as fp:
# json.dump(b, fp,indent=4)
make_directory(company)
with open('content/'+company+'/raw_'+file.split('.data')[0]+'.pkl', 'wb') as fp:
pickle.dump(b, fp)
temp = {'date':date,
'data':content}
df = pd.DataFrame(temp)
df.set_index('date',inplace=True)
df.to_pickle('content/'+company+'/'+file.split('.data')[0]+'_content.pkl')
df.to_csv('content/'+company+'/'+file.split('.data')[0]+'_content.csv')
tracker(file) | import requests
from bs4 import BeautifulSoup
import pandas as pd
import datetime
import os
import _pickle as pickle
def tracker(filename):
f = open("links/tracker.data",'a+')
f.write(filename+"\n")
f.close()
def tracked():
return [line.rstrip('\n') for line in open('links/tracker.data')]
def list_files(path):
# returns a list of names (with extension, without full path) of all files
# in folder path
files = []
for name in os.listdir(path):
if os.path.isfile(os.path.join(path, name)) and name != 'tracker.data' and name != 'empty.txt' and name not in tracked():
files.append(name)
else:
print("Skipping ",name)
return files
def make_directory(company):
try:
os.makedirs('content/'+company)
except Exception as e:
pass
for file in list_files('links/'):
print(file)
company = file.split('_')[1]
links = [line.rstrip('\n') for line in open('links/'+file)]
b = {}
date = []
content = []
for url in links:
c,d= url.split('::')
r = requests.get(d)
print("Scraping url ",d)
soup = BeautifulSoup(r.content,"html.parser")
link = soup.find_all("p")
#head = soup.find_all('h1')
a = []
# for x in head:
# a.append(x.text)
for data in link:
a.append(data.text);
str1 = ''.join(a)
c = datetime.datetime.strptime(c, '%d-%b-%Y')
date.append(c)
content.append(str1)
temp = {c:str1}
#print(url)
b.update(temp)
# with open('scraped_data.data', 'w', encoding='utf-8') as f:
# print(b, file=f)
# import json
# with open('content/result_'+file.split('data')[0]+'.json', 'w') as fp:
# json.dump(b, fp,indent=4)
make_directory(company)
with open('content/'+company+'/raw_'+file.split('.data')[0]+'.pkl', 'wb') as fp:
pickle.dump(b, fp)
temp = {'date':date,
'data':content}
df = pd.DataFrame(temp)
df.set_index('date',inplace=True)
df.to_pickle('content/'+company+'/'+file.split('.data')[0]+'_content.pkl')
df.to_csv('content/'+company+'/'+file.split('.data')[0]+'_content.csv')
tracker(file) | mit | Python |
518a66fa6c0fe8fe1cec87a679aa319553e2413e | Use new databse decorators for model management | CenterForOpenScience/scrapi,mehanig/scrapi,jeffreyliu3230/scrapi,mehanig/scrapi,felliott/scrapi,CenterForOpenScience/scrapi,icereval/scrapi,erinspace/scrapi,alexgarciac/scrapi,erinspace/scrapi,felliott/scrapi,fabianvf/scrapi,fabianvf/scrapi,ostwald/scrapi | scrapi/requests.py | scrapi/requests.py | from __future__ import absolute_import
import json
import logging
import functools
from datetime import datetime
import requests
import cqlengine
from cqlengine import columns
from scrapi import database
from scrapi import settings
logger = logging.getLogger(__name__)
@database.register_model
class HarvesterResponse(cqlengine.Model):
__table_name__ = 'responses'
__keyspace__ = settings.CASSANDRA_KEYSPACE
method = columns.Text(primary_key=True)
url = columns.Text(primary_key=True, required=True, index=True)
# Raw request data
content = columns.Bytes()
headers_str = columns.Text()
status_code = columns.Integer()
time_made = columns.DateTime(default=datetime.now)
@property
def json(self):
return json.loads(self.content)
@property
def headers(self):
# TODO: make case insensitive multidict
return json.loads(self.headers_str)
def record_or_load_response(method, url, **kwargs):
try:
return HarvesterResponse.get(url=url, method=method)
except HarvesterResponse.DoesNotExist:
response = requests.request(method, url, **kwargs)
return HarvesterResponse(
url=url,
method=method,
content=response.content,
status_code=response.status_code,
headers_str=json.dumps(response.headers)
).save()
def request(method, url, **kwargs):
if settings.RECORD_HTTP_TRANSACTIONS:
return record_or_load_response(method, url, **kwargs)
return requests.request(method, url, **kwargs)
get = functools.partial(request, 'get')
put = functools.partial(request, 'put')
post = functools.partial(request, 'post')
delete = functools.partial(request, 'delete')
| from __future__ import absolute_import
import json
import logging
import functools
from datetime import datetime
import requests
import cqlengine
from cqlengine import columns
from scrapi import database # noqa
from scrapi import settings
logger = logging.getLogger(__name__)
class HarvesterResponse(cqlengine.Model):
__table_name__ = 'responses'
__keyspace__ = settings.CASSANDRA_KEYSPACE
method = columns.Text(primary_key=True)
url = columns.Text(primary_key=True, required=True, index=True)
# Raw request data
content = columns.Bytes()
headers_str = columns.Text()
status_code = columns.Integer()
time_made = columns.DateTime(default=datetime.now)
@property
def json(self):
return json.loads(self.content)
@property
def headers(self):
# TODO: make case insensitive multidict
return json.loads(self.headers_str)
def record_or_load_response(method, url, **kwargs):
try:
return HarvesterResponse.get(url=url, method=method)
except HarvesterResponse.DoesNotExist:
response = requests.request(method, url, **kwargs)
return HarvesterResponse(
url=url,
method=method,
content=response.content,
status_code=response.status_code,
headers_str=json.dumps(response.headers)
).save()
def request(method, url, **kwargs):
if settings.RECORD_HTTP_TRANSACTIONS:
return record_or_load_response(method, url, **kwargs)
return requests.request(method, url, **kwargs)
get = functools.partial(request, 'get')
put = functools.partial(request, 'put')
post = functools.partial(request, 'post')
delete = functools.partial(request, 'delete')
| apache-2.0 | Python |
6d2e9f69f809c270cee57de5f761f5d915524546 | add default model | viraintel/OWASP-Nettacker,viraintel/OWASP-Nettacker,viraintel/OWASP-Nettacker,viraintel/OWASP-Nettacker | config.py | config.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
import string
from core._time import now
def get_config():
return { # OWASP Nettacker Default Configuration
"language": "en",
"verbose_level": 0,
"show_version": False,
"check_update": False,
"log_in_file": "results/results_{0}_{1}.html".format(now(model="%Y_%m_%d_%H_%M_%S"),
''.join(random.choice(string.ascii_lowercase) for x in range(10))),
"graph_flag": "d3_tree_v1_graph",
"help_menu_flag": False,
"targets": None,
"targets_list": None,
"scan_method": None,
"exclude_method": None,
"users": None,
"users_list": None,
"passwds": None,
"passwds_list": None,
"ports": None,
"timeout_sec": 3.0,
"time_sleep": 0.0,
"check_ranges": False,
"check_subdomains": False,
"thread_number": 100,
"thread_number_host": 30,
"socks_proxy": None,
"retries": 3,
"ping_flag": False,
"methods_args": None,
"method_args_list": False,
"startup_check_for_update": False
}
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
import string
from core._time import now
def get_config():
return { # OWASP Nettacker Default Configuration
"language": "en",
"verbose_level": 0,
"show_version": False,
"check_update": False,
"log_in_file": "results/results_{0}_{1}.html".format(now(),
''.join(random.choice(string.ascii_lowercase) for x in range(10))),
"graph_flag": "d3_tree_v1_graph",
"help_menu_flag": False,
"targets": None,
"targets_list": None,
"scan_method": None,
"exclude_method": None,
"users": None,
"users_list": None,
"passwds": None,
"passwds_list": None,
"ports": None,
"timeout_sec": 3.0,
"time_sleep": 0.0,
"check_ranges": False,
"check_subdomains": False,
"thread_number": 100,
"thread_number_host": 30,
"socks_proxy": None,
"retries": 3,
"ping_flag": False,
"methods_args": None,
"method_args_list": False,
"startup_check_for_update": False
}
| apache-2.0 | Python |
03d47a0011dc7841bffc2856d49ab3d43ba157f7 | check if cache/flash-dir is writable at configload | balrok/Flashget | config.py | config.py | import os
class config(object):
    """Static flashget settings (Python 2).

    The writability checks below execute once, while the class body runs
    (i.e. at module import time); they only warn and do not abort.
    """
    cache_dir = '/mnt/sda6/prog/flashget/cache'  # machine-specific path; edit to suit
    flash_dir = '/mnt/sda6/prog/flashget/flash'  # machine-specific path; edit to suit
    if not os.access(cache_dir, os.W_OK):
        print "your cache-dir isn't writeable please edit config.py"
    if not os.access(flash_dir, os.W_OK):
        print "your flash-dir isn't writeable please edit config.py"
|
class config(object):
    """Static flashget settings; paths are machine-specific."""
    cache_dir = '/mnt/sda6/prog/flashget/cache'
    flash_dir = '/mnt/sda6/prog/flashget/flash'
    # TODO: verify the directories are writable with os.access(path, os.W_OK)
| mit | Python |
247e363c01e9ebbc98885f73d0d3bbc5f1699d6d | fix config for heroku | uhjish/4man,uhjish/4man,uhjish/4man,uhjish/4man | config.py | config.py | from datetime import timedelta
import os
pgurl = os.environ.get('HEROKU_POSTGRESQL_SILVER_URL')
class Config(object):
    """Base application config shared by all environments.

    Includes JWT auth and Flask-Security-style settings; subclasses
    override the database URI and debug flags.
    """
    DEBUG = False
    TESTING = False
    SQLALCHEMY_DATABASE_URI = ''
    APP_NAME = 'ApplicationName'
    SECRET_KEY = 'add_secret'  # placeholder -- replace before deploying
    JWT_EXPIRATION_DELTA = timedelta(days=30)
    JWT_AUTH_URL_RULE = '/api/v1/auth'
    SECURITY_REGISTERABLE = True
    SECURITY_RECOVERABLE = True
    SECURITY_TRACKABLE = True
    SECURITY_PASSWORD_HASH = 'sha512_crypt'
    SECURITY_PASSWORD_SALT = 'add_salt'  # placeholder -- replace before deploying
class ProductionConfig(Config):
    """Production environment: local Postgres database."""
    SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/4man'
class DevelopmentConfig(Config):
    """Development environment.

    NOTE(review): the active URI embeds database credentials in source
    control -- consider moving them to an environment variable.
    """
    #SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/4man'
    SQLALCHEMY_DATABASE_URI = 'postgresql://dvyjrgvbtlqzqq:X-beURc-TZPoKNEs4nYZcGwROM@ec2-54-83-5-151.compute-1.amazonaws.com:5432/d51l61lh2nmpl0'
    #SQLALCHEMY_DATABASE_URI = 'sqlite:///data.sqlite'
    DEBUG = True
    IMAGE_BUCKET = '4man-static-storage'
class HerokuConfig(Config):
    """Heroku deployment.

    NOTE(review): credentials are hard-coded here even though ``pgurl``
    (HEROKU_POSTGRESQL_SILVER_URL) is read above -- consider using it.
    """
    SQLALCHEMY_DATABASE_URI = 'postgresql://dvyjrgvbtlqzqq:X-beURc-TZPoKNEs4nYZcGwROM@ec2-54-83-5-151.compute-1.amazonaws.com:5432/d51l61lh2nmpl0'
    DEBUG = True
class TestingConfig(Config):
    """Test runs: in-memory SQLite database."""
    SQLALCHEMY_DATABASE_URI = 'sqlite://'
    TESTING = True
| from datetime import timedelta
import os
pgurl = os.environ.get('HEROKU_POSTGRESQL_SILVER_URL')
class Config(object):
    """Base application config shared by all environments (JWT + security settings)."""
    DEBUG = False
    TESTING = False
    SQLALCHEMY_DATABASE_URI = ''
    APP_NAME = 'ApplicationName'
    SECRET_KEY = 'add_secret'  # placeholder -- replace before deploying
    JWT_EXPIRATION_DELTA = timedelta(days=30)
    JWT_AUTH_URL_RULE = '/api/v1/auth'
    SECURITY_REGISTERABLE = True
    SECURITY_RECOVERABLE = True
    SECURITY_TRACKABLE = True
    SECURITY_PASSWORD_HASH = 'sha512_crypt'
    SECURITY_PASSWORD_SALT = 'add_salt'  # placeholder -- replace before deploying
class ProductionConfig(Config):
    """Production environment: local Postgres database."""
    SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/4man'
class DevelopmentConfig(Config):
    """Development environment: local Postgres, debug on."""
    SQLALCHEMY_DATABASE_URI = 'postgresql://localhost/4man'
    #SQLALCHEMY_DATABASE_URI = 'postgresql://dvyjrgvbtlqzqq:X-beURc-TZPoKNEs4nYZcGwROM@ec2-54-83-5-151.compute-1.amazonaws.com:5432/d51l61lh2nmpl0'
    #SQLALCHEMY_DATABASE_URI = 'sqlite:///data.sqlite'
    DEBUG = True
    IMAGE_BUCKET = '4man-static-storage'
class HerokuConfig(Config):
    """Heroku deployment.

    NOTE(review): credentials are committed in source -- ``pgurl`` read
    above suggests they should come from the environment instead.
    """
    SQLALCHEMY_DATABASE_URI = 'postgresql://dvyjrgvbtlqzqq:X-beURc-TZPoKNEs4nYZcGwROM@ec2-54-83-5-151.compute-1.amazonaws.com:5432/d51l61lh2nmpl0'
    DEBUG = True
class TestingConfig(Config):
    """Test runs: in-memory SQLite database."""
    SQLALCHEMY_DATABASE_URI = 'sqlite://'
    TESTING = True
| bsd-2-clause | Python |
882e140c98ffd28d4117e5a3247cb1aaa836b792 | Update config.py formatting | theDrake/asteroids-py | config.py | config.py | #------------------------------------------------------------------------------
# Filename: config.py
#
# Author: David C. Drake (http://davidcdrake.com)
#
# Description: Configuration file for an Asteroids game written in Python 2.7.
#------------------------------------------------------------------------------
TITLE = 'Asteroids!'
SCREEN_WIDTH = 800
SCREEN_HEIGHT = 600
FRAMES_PER_SECOND = 30
BACKGROUND_COLOR = (0, 0, 0)
BACKGROUND_MUSIC = 'asteroids.mp3'
SHIP_POINTS = [(30, 15),
(10, 20),
(15, 25),
(10, 30),
(0, 25),
(10, 20),
(0, 15),
(10, 10),
(0, 5),
(10, 0),
(15, 5),
(10, 10)]
SHIP_INITIAL_ROTATION = -90.0
SHIP_ACCELERATION_RATE = 0.5
SHIP_ROTATION_RATE = 10.0
SHIP_COLOR = (140, 140, 255)
ASTEROID_COUNT = 15
ASTEROID_MIN_POINTS = 6
ASTEROID_MAX_POINTS = 12
ASTEROID_MIN_RADIUS = 10.0
ASTEROID_MAX_RADIUS = 40.0
ASTEROID_COLOR = (139, 69, 19)
ASTEROID_COLOR_DEVIATION = 20
ASTEROID_MIN_SPEED = 1.0
ASTEROID_MAX_SPEED = 4.0
ASTEROID_MIN_ROTATION_SPEED = 1.0
ASTEROID_MAX_ROTATION_SPEED = 6.0
BULLET_COUNT = 10
BULLET_RADIUS = 3.0
BULLET_COLOR = (255, 255, 0)
BULLET_SPEED = 30.0
UPGRADE_RADIUS = 6.0
UPGRADE_REQUIREMENT = 5 # number of asteroids to destroy to earn upgrade
MAX_UPGRADE_LEVEL = 7
STAR_COUNT = 200
STAR_RADIUS = 2.0
STAR_TWINKLE_SPEED = 20
RESPAWN_DELAY = 50 # game cycles
VULNERABILITY_DELAY = 50 # game cycles
CIRCLE_POINT_COUNT = 8 # number of points to use for circle collision detection
| #-------------------------------------------------------------------------------
# Filename: config.py
#
# Author: David C. Drake (http://davidcdrake.com)
#
# Description: Configuration file for an Asteroids game. Developed using Python
# 2.7.
#-------------------------------------------------------------------------------
TITLE = 'Asteroids!'
SCREEN_WIDTH = 800
SCREEN_HEIGHT = 600
FRAMES_PER_SECOND = 30
BACKGROUND_COLOR = (0, 0, 0)
BACKGROUND_MUSIC = 'asteroids.mp3'
SHIP_POINTS = [(30, 15),
(10, 20),
(15, 25),
(10, 30),
(0, 25),
(10, 20),
(0, 15),
(10, 10),
(0, 5),
(10, 0),
(15, 5),
(10, 10)]
SHIP_INITIAL_ROTATION = -90.0
SHIP_ACCELERATION_RATE = 0.5
SHIP_ROTATION_RATE = 10.0
SHIP_COLOR = (140, 140, 255)
ASTEROID_COUNT = 15
ASTEROID_MIN_POINTS = 6
ASTEROID_MAX_POINTS = 12
ASTEROID_MIN_RADIUS = 10.0
ASTEROID_MAX_RADIUS = 40.0
ASTEROID_COLOR = (139, 69, 19)
ASTEROID_COLOR_DEVIATION = 20
ASTEROID_MIN_SPEED = 1.0
ASTEROID_MAX_SPEED = 4.0
ASTEROID_MIN_ROTATION_SPEED = 1.0
ASTEROID_MAX_ROTATION_SPEED = 6.0
BULLET_COUNT = 10
BULLET_RADIUS = 3.0
BULLET_COLOR = (255, 255, 0)
BULLET_SPEED = 30.0
UPGRADE_RADIUS = 6.0
UPGRADE_REQUIREMENT = 5 # Number of asteroids to destroy to earn upgrade.
MAX_UPGRADE_LEVEL = 7
STAR_COUNT = 200
STAR_RADIUS = 2.0
STAR_TWINKLE_SPEED = 20
RESPAWN_DELAY = 50 # Game cycles.
VULNERABILITY_DELAY = 50 # Game cycles.
CIRCLE_POINT_COUNT = 8 # Number of points to use for circle collision detection.
| mit | Python |
a5b08698e22b8c46e2f6bbdda2b980dbd8d580ec | move up time for testing again | MinnPost/salesforce-stripe,MinnPost/salesforce-stripe,MinnPost/salesforce-stripe,texastribune/salesforce-stripe,texastribune/salesforce-stripe,texastribune/salesforce-stripe | config.py | config.py | from celery.schedules import crontab
# from datetime import timedelta
import os
def bool_env(val):
    """Return True iff the environment variable *val* is the string 'True'.

    Any other value -- including an unset variable -- yields False.
    """
    return os.environ.get(val) == 'True'
TIMEZONE = os.getenv('TIMEZONE', "US/Central")
#######
# Flask
#
FLASK_SECRET_KEY = os.getenv('FLASK_SECRET_KEY')
########
# Celery
#
WHEN = '4,15,16,21'
CELERY_BROKER_URL = os.getenv('CELERY_BROKER_URL')
CELERY_RESULT_BACKEND = os.getenv('CELERY_RESULT_BACKEND')
CELERY_ALWAYS_EAGER = bool_env('CELERY_ALWAYS_EAGER')
CHARGE_MINUTES_FREQUENCY = int(os.getenv('CHARGE_MINUTES_FREQUENCY', 1440))
CELERYBEAT_SCHEDULE = {
'every-day': {
'task': 'batch.charge_cards',
'schedule': crontab(minute='0, 10, 15', hour=WHEN)
},
}
######
# SMTP
#
MAIL_SERVER = os.getenv('MAIL_SERVER', 'localhost')
MAIL_USERNAME = os.getenv('MAIL_USERNAME', 'user')
MAIL_PASSWORD = os.getenv('MAIL_PASSWORD', 'pass')
MAIL_PORT = os.getenv('MAIL_PORT', '2525')
MAIL_USE_TLS = bool_env('MAIL_USE_TLS')
DEFAULT_MAIL_SENDER = os.getenv('DEFAULT_MAIL_SENDER', 'me@myplace.org')
MULTIPLE_ACCOUNT_WARNING_MAIL_RECIPIENT = os.getenv(
'MULTIPLE_ACCOUNT_WARNING_MAIL_RECIPIENT', '')
ACCOUNTING_MAIL_RECIPIENT = os.getenv('ACCOUNTING_MAIL_RECIPIENT', '')
############
# Salesforce
#
MEMBERSHIP_RECORDTYPEID = '01216000001IhHp'
DONATION_RECORDTYPEID = '01216000001IhI9'
TEXASWEEKLY_RECORDTYPEID = '01216000001IhQNAA0'
SALESFORCE = {
"CLIENT_ID": os.getenv('SALESFORCE_CLIENT_ID'),
"CLIENT_SECRET": os.getenv('SALESFORCE_CLIENT_SECRET'),
"USERNAME": os.getenv('SALESFORCE_USERNAME'),
"PASSWORD": os.getenv('SALESFORCE_PASSWORD'),
"HOST": os.getenv("SALESFORCE_HOST")
}
########
# Stripe
#
STRIPE_KEYS = {
'secret_key': os.getenv('SECRET_KEY'),
'publishable_key': os.getenv('PUBLISHABLE_KEY')
}
#######
# Slack
#
ENABLE_SLACK = bool_env('ENABLE_SLACK')
SLACK_CHANNEL = os.getenv('SLACK_CHANNEL', '#stripe')
SLACK_API_KEY = os.getenv('SLACK_API_KEY')
########
# Sentry
#
ENABLE_SENTRY = bool_env('ENABLE_SENTRY')
SENTRY_DSN = os.getenv('SENTRY_DSN')
| from celery.schedules import crontab
# from datetime import timedelta
import os
def bool_env(val):
    """Return True iff the environment variable *val* equals the string 'True'."""
    raw = os.environ.get(val, '')
    return raw == 'True'
TIMEZONE = os.getenv('TIMEZONE', "US/Central")
#######
# Flask
#
FLASK_SECRET_KEY = os.getenv('FLASK_SECRET_KEY')
########
# Celery
#
WHEN = '4,15,16'
CELERY_BROKER_URL = os.getenv('CELERY_BROKER_URL')
CELERY_RESULT_BACKEND = os.getenv('CELERY_RESULT_BACKEND')
CELERY_ALWAYS_EAGER = bool_env('CELERY_ALWAYS_EAGER')
CHARGE_MINUTES_FREQUENCY = int(os.getenv('CHARGE_MINUTES_FREQUENCY', 1440))
CELERYBEAT_SCHEDULE = {
'every-day': {
'task': 'batch.charge_cards',
'schedule': crontab(minute='0, 10', hour=WHEN)
},
}
######
# SMTP
#
MAIL_SERVER = os.getenv('MAIL_SERVER', 'localhost')
MAIL_USERNAME = os.getenv('MAIL_USERNAME', 'user')
MAIL_PASSWORD = os.getenv('MAIL_PASSWORD', 'pass')
MAIL_PORT = os.getenv('MAIL_PORT', '2525')
MAIL_USE_TLS = bool_env('MAIL_USE_TLS')
DEFAULT_MAIL_SENDER = os.getenv('DEFAULT_MAIL_SENDER', 'me@myplace.org')
MULTIPLE_ACCOUNT_WARNING_MAIL_RECIPIENT = os.getenv(
'MULTIPLE_ACCOUNT_WARNING_MAIL_RECIPIENT', '')
ACCOUNTING_MAIL_RECIPIENT = os.getenv('ACCOUNTING_MAIL_RECIPIENT', '')
############
# Salesforce
#
MEMBERSHIP_RECORDTYPEID = '01216000001IhHp'
DONATION_RECORDTYPEID = '01216000001IhI9'
TEXASWEEKLY_RECORDTYPEID = '01216000001IhQNAA0'
SALESFORCE = {
"CLIENT_ID": os.getenv('SALESFORCE_CLIENT_ID'),
"CLIENT_SECRET": os.getenv('SALESFORCE_CLIENT_SECRET'),
"USERNAME": os.getenv('SALESFORCE_USERNAME'),
"PASSWORD": os.getenv('SALESFORCE_PASSWORD'),
"HOST": os.getenv("SALESFORCE_HOST")
}
########
# Stripe
#
STRIPE_KEYS = {
'secret_key': os.getenv('SECRET_KEY'),
'publishable_key': os.getenv('PUBLISHABLE_KEY')
}
#######
# Slack
#
ENABLE_SLACK = bool_env('ENABLE_SLACK')
SLACK_CHANNEL = os.getenv('SLACK_CHANNEL', '#stripe')
SLACK_API_KEY = os.getenv('SLACK_API_KEY')
########
# Sentry
#
ENABLE_SENTRY = bool_env('ENABLE_SENTRY')
SENTRY_DSN = os.getenv('SENTRY_DSN')
| mit | Python |
69e8798137ca63b78adf0c41582e89973d2ea129 | Work on model file handling | edx/ease,edx/ease | create.py | create.py | import os
import sys
base_path = os.path.dirname(__file__)
sys.path.append(base_path)
one_up_path = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.append(one_up_path)
import model_creator
import util_functions
def create(text, score, prompt_string, model_path):
    """Train a scoring model from example essays and persist it to disk.

    Args:
        text: example essay texts.
        score: scores aligned with ``text``.
        prompt_string: the essay prompt the examples respond to.
        model_path: path (resolved relative to this package) for the model file.

    Returns:
        dict with keys 'errors' (list of failure messages) and 'created'
        (True only if the model was trained and written successfully).
    """
    model_path = util_functions.create_model_path(model_path)
    results = {'errors': [], 'created': False}
    try:
        e_set = model_creator.create_essay_set(text, score, prompt_string)
    except Exception:
        # Without an essay set nothing further can run; return instead of
        # falling through to a NameError on the undefined e_set below.
        results['errors'].append("essay set creation failed.")
        return results
    try:
        feature_ext, classifier = model_creator.extract_features_and_generate_model(e_set)
    except Exception:
        results['errors'].append("feature extraction and model creation failed.")
        return results
    full_path = os.path.join(base_path, model_path)
    try:
        util_functions.create_directory(full_path)
        model_creator.dump_model_to_file(prompt_string, feature_ext, classifier, text, score, full_path)
        results['created'] = True
    except Exception:
        # Report write failures instead of raising (restores the handling
        # that was previously left commented out).
        results['errors'].append("could not write model to: {0}".format(model_path))
    return results
def check(model_path):
    """Return True when a readable model file exists at model_path."""
    resolved = util_functions.create_model_path(model_path)
    try:
        open(resolved).close()
    except IOError:
        return False
    return True
| import os
import sys
base_path = os.path.dirname(__file__)
sys.path.append(base_path)
one_up_path = os.path.abspath(os.path.join(os.path.dirname(__file__),'..'))
sys.path.append(one_up_path)
import model_creator
import util_functions
def create(text, score, prompt_string, model_path):
    """Train a scoring model from example essays and persist it to disk.

    Args:
        text: example essay texts.
        score: scores aligned with ``text``.
        prompt_string: the essay prompt the examples respond to.
        model_path: path for the model file.

    Returns:
        dict with keys 'errors' (list of failure messages) and 'created'
        (True only if the model was trained and written successfully).
    """
    model_path = util_functions.create_model_path(model_path)
    results = {'errors': [], 'created': False}
    try:
        e_set = model_creator.create_essay_set(text, score, prompt_string)
    except Exception:
        # Without an essay set nothing further can run; return instead of
        # falling through to a NameError on the undefined e_set below.
        results['errors'].append("essay set creation failed.")
        return results
    try:
        feature_ext, classifier = model_creator.extract_features_and_generate_model(e_set)
    except Exception:
        results['errors'].append("feature extraction and model creation failed.")
        return results
    try:
        util_functions.create_directory(model_path)
        # Fix: the original passed args.model_path, an undefined name in
        # this scope (NameError); use the resolved model_path instead.
        model_creator.dump_model_to_file(prompt_string, feature_ext, classifier, text, score, model_path)
        results['created'] = True
    except Exception:
        results['errors'].append("could not write model to: {0}".format(model_path))
    return results
def check(model_path):
model_path=util_functions.create_model_path(model_path)
try:
with open(model_path) as f: pass
except IOError as e:
return False
return True
| agpl-3.0 | Python |
98d0bebbf74c42676a373a3c77bd1bd192129995 | Update trace perf output with struct deserialize example | iovisor/bcc,tuxology/bcc,romain-intel/bcc,iovisor/bcc,mcaleavya/bcc,shodoco/bcc,zaafar/bcc,brendangregg/bcc,romain-intel/bcc,tuxology/bcc,mbudiu-bfn/bcc,mbudiu-bfn/bcc,romain-intel/bcc,zaafar/bcc,zaafar/bcc,brendangregg/bcc,zaafar/bcc,shodoco/bcc,mbudiu-bfn/bcc,mcaleavya/bcc,shodoco/bcc,iovisor/bcc,tuxology/bcc,tuxology/bcc,mkacik/bcc,iovisor/bcc,tuxology/bcc,brendangregg/bcc,mcaleavya/bcc,mbudiu-bfn/bcc,mkacik/bcc,iovisor/bcc,brendangregg/bcc,mbudiu-bfn/bcc,mkacik/bcc,mcaleavya/bcc,zaafar/bcc,shodoco/bcc,romain-intel/bcc,brendangregg/bcc,shodoco/bcc,mkacik/bcc,mcaleavya/bcc,romain-intel/bcc,mkacik/bcc | examples/tracing/trace_perf_output.py | examples/tracing/trace_perf_output.py | #!/usr/bin/env python
# Copyright (c) PLUMgrid, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
# This is an example of tracing an event and printing custom fields.
# run in project examples directory with:
# sudo ./trace_fields.py"
import atexit
from bcc import BPF
import ctypes as ct
class Data(ct.Structure):
_fields_ = [("ts", ct.c_ulonglong),
("magic", ct.c_ulonglong)]
counter = 0
def cb(cpu, data, size):
assert size >= ct.sizeof(Data)
event = ct.cast(data, ct.POINTER(Data)).contents
print("[%0d] %f: %x" % (cpu, float(event.ts) / 1000000, event.magic))
global counter
counter += 1
prog = """
BPF_PERF_OUTPUT(events);
BPF_TABLE("array", int, u64, counters, 10);
int kprobe__sys_clone(void *ctx) {
struct {
u64 ts;
u64 magic;
} data = {bpf_ktime_get_ns(), 0x12345678};
int rc;
if ((rc = events.perf_submit(ctx, &data, sizeof(data))) < 0)
bpf_trace_printk("perf_output failed: %d\\n", rc);
int zero = 0;
u64 *val = counters.lookup(&zero);
if (val) lock_xadd(val, 1);
return 0;
}
"""
b = BPF(text=prog)
b["events"].open_perf_buffer(cb)
@atexit.register
def print_counter():
global counter
global b
print("counter = %d vs %d" % (counter, b["counters"][ct.c_int(0)].value))
print("Tracing sys_write, try `dd if=/dev/zero of=/dev/null`")
print("Tracing... Hit Ctrl-C to end.")
while 1:
b.kprobe_poll()
| #!/usr/bin/env python
# Copyright (c) PLUMgrid, Inc.
# Licensed under the Apache License, Version 2.0 (the "License")
# This is an example of tracing an event and printing custom fields.
# run in project examples directory with:
# sudo ./trace_fields.py"
import atexit
from bcc import BPF
import ctypes
counter = 0
def cb(cpu, data, size):
global counter
counter += 1
prog = """
BPF_PERF_OUTPUT(events);
BPF_TABLE("array", int, u64, counters, 10);
int kprobe__sys_write(void *ctx) {
struct {
u64 ts;
} data = {bpf_ktime_get_ns()};
int rc;
if ((rc = events.perf_submit(ctx, &data, sizeof(data))) < 0)
bpf_trace_printk("perf_output failed: %d\\n", rc);
int zero = 0;
u64 *val = counters.lookup(&zero);
if (val) lock_xadd(val, 1);
return 0;
}
"""
b = BPF(text=prog)
b["events"].open_perf_buffer(cb)
@atexit.register
def print_counter():
global counter
global b
print("counter = %d vs %d" % (counter, b["counters"][ctypes.c_int(0)].value))
print("Tracing sys_write, try `dd if=/dev/zero of=/dev/null`")
print("Tracing... Hit Ctrl-C to end.")
while 1:
b.kprobe_poll()
| apache-2.0 | Python |
3654c65fef4eb28ca67ec1a6a63d1b2eed1b50c2 | Update traitmenu example | frostidaho/dynmen | examples/traitmenu_people_n_places.py | examples/traitmenu_people_n_places.py | #!/usr/bin/env python
from __future__ import print_function
import logging
logger = logging.getLogger()
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(levelname)-8s %(name)-12s %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
# from faker import Factory
# from pprint import pprint
# fake = Factory.create('en_US')
# exdict = dict(((fake.name(), (fake.city(), fake.zipcode())) for i in range(20)))
# pprint(exdict)
exdict = {
'Alyssa Boyd': ('Brownmouth', '09044'),
'Amy Martin': ('Mikechester', '33477'),
'Angela Mcdonald': ('North Gwendolynberg', '29053'),
'Bradley Santos': ('Andrewsmouth', '72513'),
'Brittany Manning': ('South Danielmouth', '44482'),
'Candice Huber': ('New Kimberly', '11698'),
'Cheyenne Thornton': ('New Anthony', '88618'),
'Dr. Kelli Sharp MD': ('North Rhondashire', '71761'),
'Evan Osborne': ('Andrewsside', '14378'),
'Gary Hernandez': ('Burnshaven', '62267'),
'George Elliott': ('Calebton', '55053'),
'Hannah Williams': ('North Stacy', '50983'),
'James Taylor': ('Gallegoshaven', '95677'),
'John White': ('Hansenhaven', '44559'),
'Monique Mccoy': ('Katherinemouth', '42023'),
'Randy Campos': ('South Scotthaven', '47692'),
'Rebecca Wolfe': ('Torresburgh', '37979'),
'Ronald Parks': ('Turnerland', '96367'),
'Russell Schroeder': ('Smithfurt', '39696'),
'Trevor Kelly': ('South Jenniferport', '73366'),
}
from dynmen.rofi import Rofi
menu = Rofi(lines=5, hide_scrollbar=True)
menu.prompt = "Name of person: "
menu.case_insensitive = True
out = menu(exdict)
print('Output from rofi:', out)
from dynmen.dmenu import DMenu
menu = DMenu()
menu.font = 'Satisfy-20'
menu.nf = '#6FC3DF'
menu.nb = '#0C141F'
menu.sf = '#FFE64D'
menu.sb = '#0C141F'
menu.lines = 20
menu.prompt = "Name of person: "
menu.case_insensitive = True
out = menu(exdict)
print('Output from dmenu:', out)
| #!/usr/bin/env python
from __future__ import print_function
import logging
logger = logging.getLogger()
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(levelname)-8s %(name)-12s %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)
# from faker import Factory
# from pprint import pprint
# fake = Factory.create('en_US')
# exdict = dict(((fake.name(), (fake.city(), fake.zipcode())) for i in range(20)))
# pprint(exdict)
exdict = {
'Alyssa Boyd': ('Brownmouth', '09044'),
'Amy Martin': ('Mikechester', '33477'),
'Angela Mcdonald': ('North Gwendolynberg', '29053'),
'Bradley Santos': ('Andrewsmouth', '72513'),
'Brittany Manning': ('South Danielmouth', '44482'),
'Candice Huber': ('New Kimberly', '11698'),
'Cheyenne Thornton': ('New Anthony', '88618'),
'Dr. Kelli Sharp MD': ('North Rhondashire', '71761'),
'Evan Osborne': ('Andrewsside', '14378'),
'Gary Hernandez': ('Burnshaven', '62267'),
'George Elliott': ('Calebton', '55053'),
'Hannah Williams': ('North Stacy', '50983'),
'James Taylor': ('Gallegoshaven', '95677'),
'John White': ('Hansenhaven', '44559'),
'Monique Mccoy': ('Katherinemouth', '42023'),
'Randy Campos': ('South Scotthaven', '47692'),
'Rebecca Wolfe': ('Torresburgh', '37979'),
'Ronald Parks': ('Turnerland', '96367'),
'Russell Schroeder': ('Smithfurt', '39696'),
'Trevor Kelly': ('South Jenniferport', '73366'),
}
from dynmen.rofi import Rofi
menu = Rofi(lines=5, hide_scrollbar=True)
menu.prompt = "Name of person: "
menu.case_insensitive = True
out = menu(exdict)
print('Output from rofi:', out)
from dynmen.dmenu import DMenu
menu = DMenu()
menu.font = 'Satisfy-20'
menu.color_fg_norm = '#6FC3DF'
menu.color_bg_norm = '#0C141F'
menu.color_fg_sel = '#FFE64D'
menu.color_bg_sel = '#0C141F'
menu.lines = 20
menu.prompt = "Name of person: "
menu.case_insensitive = True
out = menu(exdict)
print('Output from dmenu:', out)
from dynmen.fzf import FZF
menu = FZF()
menu.prompt = "Name of person: "
menu.case_insensitive = True
out = menu(exdict)
print('Output from fzf:', out)
| mit | Python |
37b28aed6002f56f4813436ca2ced1f6c98ab84a | Use MOOC title for list name and campaign name | p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc,p2pu/mechanical-mooc | sequence/models.py | sequence/models.py | from django.conf import settings
import db
from mailgun import api as mailgun_api
import datetime
def sequence_list_name( sequence_number ):
if settings.DEBUG:
return '{0}-{1}-all-test@{2}'.format(settings.MOOC_TITLE.replace(' ', '-').lower(), sequence_number, settings.EMAIL_DOMAIN)
return '{0}-{1}-all@{2}'.format(settings.MOOC_TITLE.replace(' ', '-').lower(), sequence_number, settings.EMAIL_DOMAIN)
def sequence_campaign( sequence_number ):
if settings.DEBUG:
return '{0}-{1}-campaign-test'.format(settings.MOOC_TITLE.replace(' ', '-').lower(), sequence_number)
return '{0}-{1}-campaign'.format(settings.MOOC_TITLE.replace(' ', '-').lower(), sequence_number)
def sequence2dict( sequence_db ):
return {
'id': sequence_db.id,
'start_date': sequence_db.start_date,
'signup_close_date': sequence_db.signup_close_date,
'global_list': sequence_list_name(sequence_db.id),
'campaign_id': sequence_campaign(sequence_db.id)
}
def create_sequence( start_date, signup_close_date ):
sequence_db = db.Sequence(
start_date = start_date,
signup_close_date = signup_close_date
)
sequence_db.save()
mailgun_api.create_list(
sequence_list_name(sequence_db.id),
'Sequence {0} global list'.format(sequence_db.id),
'List for all members of sequence {0}'.format(sequence_db.id),
'readonly'
)
mailgun_api.create_campaign(
sequence_campaign(sequence_db.id),
'Sequence {0} campaign'.format(sequence_db.id)
)
return sequence2dict(sequence_db)
def get_all_sequences( ):
return [ sequence2dict(seq) for seq in db.Sequence.objects.all() ]
def get_current_sequence( ):
""" return the first sequence where signup_close_date is in the future """
sequence_db = db.Sequence.objects.filter(signup_close_date__gte=datetime.datetime.utcnow().date()).order_by('start_date')
if sequence_db.count() == 0:
return None
return sequence2dict(sequence_db[0])
def get_current_sequence_number( ):
sequence_db = db.Sequence.objects.filter(signup_close_date__gte=datetime.datetime.utcnow().date()).order_by('start_date')
if sequence_db.count() == 0:
return None
return sequence_db[0].id
| from django.conf import settings
import db
from mailgun import api as mailgun_api
import datetime
def sequence_list_name( sequence_number ):
if settings.DEBUG:
return 'sequence-{0}-all-test@{1}'.format(sequence_number, settings.EMAIL_DOMAIN)
return 'sequence-{0}-all@{1}'.format(sequence_number, settings.EMAIL_DOMAIN)
def sequence_campaign( sequence_number ):
if settings.DEBUG:
return 'sequence-{0}-campaign-test'.format(sequence_number)
return 'sequence-{0}-campaign'.format(sequence_number)
def sequence2dict( sequence_db ):
return {
'id': sequence_db.id,
'start_date': sequence_db.start_date,
'signup_close_date': sequence_db.signup_close_date,
'global_list': sequence_list_name(sequence_db.id),
'campaign_id': sequence_campaign(sequence_db.id)
}
def create_sequence( start_date, signup_close_date ):
sequence_db = db.Sequence(
start_date = start_date,
signup_close_date = signup_close_date
)
sequence_db.save()
mailgun_api.create_list(
sequence_list_name(sequence_db.id),
'Sequence {0} global list'.format(sequence_db.id),
'List for all members of sequence {0}'.format(sequence_db.id),
'readonly'
)
mailgun_api.create_campaign(
sequence_campaign(sequence_db.id),
'Sequence {0} campaign'.format(sequence_db.id)
)
return sequence2dict(sequence_db)
def get_all_sequences( ):
return [ sequence2dict(seq) for seq in db.Sequence.objects.all() ]
def get_current_sequence( ):
""" return the first sequence where signup_close_date is in the future """
sequence_db = db.Sequence.objects.filter(signup_close_date__gte=datetime.datetime.utcnow().date()).order_by('start_date')
if sequence_db.count() == 0:
return None
return sequence2dict(sequence_db[0])
def get_current_sequence_number( ):
sequence_db = db.Sequence.objects.filter(signup_close_date__gte=datetime.datetime.utcnow().date()).order_by('start_date')
if sequence_db.count() == 0:
return None
return sequence_db[0].id
| mit | Python |
fb0a639061f3bb092f0334564b3737da7d00bb8c | Add logger to providers | beeworking/voyant,beeworking/voyant,beeworking/voyant,beeworking/voyant | server/provider.py | server/provider.py | import logging
class Provider(object):
"""Base provider class"""
regions = {}
def __init__(self, key):
self.key = key
self.logger = logging.getLogger('Provider')
def create(self, region):
raise NotImplemented()
def start(self):
raise NotImplemented()
def stop(self):
raise NotImplemented()
def destroy(self):
raise NotImplemented()
def list_servers(self):
raise NotImplemented()
def status(self):
raise NotImplemented()
@staticmethod
def server_to_json(server):
raise NotImplemented()
| class Provider(object):
"""Base provider class"""
regions = {}
def __init__(self, key):
self.key = key
def create(self, region):
raise NotImplemented()
def start(self):
raise NotImplemented()
def stop(self):
raise NotImplemented()
def destroy(self):
raise NotImplemented()
def list_servers(self):
raise NotImplemented()
def status(self):
raise NotImplemented()
@staticmethod
def server_to_json(server):
raise NotImplemented()
| mit | Python |
fb787e678641c68271fbecebe88d0a9d5371615b | Update tests: "login" & "logout" are no longer on the Kenya homepage | mysociety/pombola,geoffkilpin/pombola,mysociety/pombola,ken-muturi/pombola,patricmutwiri/pombola,patricmutwiri/pombola,geoffkilpin/pombola,mysociety/pombola,geoffkilpin/pombola,geoffkilpin/pombola,ken-muturi/pombola,hzj123/56th,patricmutwiri/pombola,hzj123/56th,patricmutwiri/pombola,hzj123/56th,patricmutwiri/pombola,hzj123/56th,geoffkilpin/pombola,patricmutwiri/pombola,hzj123/56th,geoffkilpin/pombola,ken-muturi/pombola,ken-muturi/pombola,ken-muturi/pombola,ken-muturi/pombola,mysociety/pombola,mysociety/pombola,hzj123/56th,mysociety/pombola | mzalendo/core/tests/test_accounts.py | mzalendo/core/tests/test_accounts.py | import re
from django.conf import settings
from django.core import mail
from django_webtest import WebTest
from core import models
from django.test.client import Client
from django.contrib.auth.models import User
class AccountTest(WebTest):
def setUp(self):
pass
def test_create_account(self):
app = self.app
# Start at the login page and click 'Need an account'
res = (
app
.get( '/accounts/login/?next=/' )
.click( description='Need an account' )
)
# TODO - allow better usernames.
#
# Django (since 1.2) has allowed more chars in usernames but the
# registration app has lagged behind:
# https://bitbucket.org/ubernostrum/django-registration/issue/80/username-regex-shouldnt-be-more
# create an account
form = res.forms[1]
form['username'] = 'test_user'
form['email'] = 'test@example.com'
form['password1'] = 's3cr3t'
form['password2'] = 's3cr3t'
res = form.submit()
# check that user created but not active
user = User.objects.get(username='test_user')
self.assertFalse( user.is_active )
# check that an email has been sent
self.assertEqual(len(mail.outbox), 1)
confirm_url = re.search( r'/accounts/activate/\S+', mail.outbox[0].body ).group()
res = app.get( confirm_url, auto_follow=True )
self.assertContains( res, 'activation complete' )
# check that user now active
user = User.objects.get(username='test_user')
self.assertTrue( user.is_active )
# check that the user con login
res = res.click(description='login', index=2)
form = res.forms[1]
form['username'] = 'test_user'
form['password'] = 's3cr3t'
res = form.submit().follow()
# check that we are back on homepage and logged in
self.assertEqual('/', res.request.path, 'back on home page')
# logout
res = app.get('/accounts/logout/')
self.assertContains(res, 'Successfully logged out!')
self.assertEqual('/accounts/logout/', res.request.path)
| import re
from django.conf import settings
from django.core import mail
from django_webtest import WebTest
from core import models
from django.test.client import Client
from django.contrib.auth.models import User
class AccountTest(WebTest):
def setUp(self):
pass
def test_create_account(self):
app = self.app
# go to home page, go to login page, go to new account page
res = (
app
.get( '/' )
.click(description='login', index=1)
.click( description='Need an account' )
)
# TODO - allow better usernames.
#
# Django (since 1.2) has allowed more chars in usernames but the
# registration app has lagged behind:
# https://bitbucket.org/ubernostrum/django-registration/issue/80/username-regex-shouldnt-be-more
# create an account
form = res.forms[1]
form['username'] = 'test_user'
form['email'] = 'test@example.com'
form['password1'] = 's3cr3t'
form['password2'] = 's3cr3t'
res = form.submit()
# check that user created but not active
user = User.objects.get(username='test_user')
self.assertFalse( user.is_active )
# check that an email has been sent
self.assertEqual(len(mail.outbox), 1)
confirm_url = re.search( r'/accounts/activate/\S+', mail.outbox[0].body ).group()
res = app.get( confirm_url, auto_follow=True )
self.assertContains( res, 'activation complete' )
# check that user now active
user = User.objects.get(username='test_user')
self.assertTrue( user.is_active )
# check that the user con login
res = res.click(description='login', index=2)
form = res.forms[1]
form['username'] = 'test_user'
form['password'] = 's3cr3t'
res = form.submit().follow()
# check that we are back on homepage and logged in
self.assertEqual('/', res.request.path, 'back on home page')
self.assertContains( res, 'test_user' )
# logout
res = res.click( description='logout', index=1 )
self.assertEqual('/accounts/logout/', res.request.path)
| agpl-3.0 | Python |
23ca85e0911fa49bc2bd784e45ece42c047f830e | Bump to version 0.56.1 | nerevu/riko,nerevu/riko | riko/__init__.py | riko/__init__.py | # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
riko
~~~~
Provides functions for analyzing and processing streams of structured data
Examples:
basic usage::
>>> from itertools import chain
>>> from functools import partial
>>> from riko.modules import itembuilder, strreplace
>>> from riko.collections import SyncPipe
>>>
>>> ib_conf = {
... 'attrs': [
... {'key': 'link', 'value': 'www.google.com', },
... {'key': 'title', 'value': 'google', },
... {'key': 'author', 'value': 'Tommy'}]}
>>>
>>> sr_conf = {
... 'rule': [{'find': 'Tom', 'param': 'first', 'replace': 'Tim'}]}
>>>
>>> items = itembuilder.pipe(conf=ib_conf)
>>> pipe = partial(strreplace.pipe, conf=sr_conf, field='author')
>>> replaced = map(pipe, items)
>>> next(chain.from_iterable(replaced)) == {
... 'link': 'www.google.com', 'title': 'google',
... 'strreplace': 'Timmy', 'author': 'Tommy'}
True
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from os import path as p
from builtins import * # noqa # pylint: disable=unused-import
__version__ = '0.56.1'
__title__ = 'riko'
__package_name__ = 'riko'
__author__ = 'Reuben Cummings'
__description__ = 'A stream processing engine modeled after Yahoo! Pipes.'
__email__ = 'reubano@gmail.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
PARENT_DIR = p.abspath(p.dirname(__file__))
ENCODING = 'utf-8'
def get_path(name):
return 'file://%s' % p.join(PARENT_DIR, 'data', name)
| # -*- coding: utf-8 -*-
# vim: sw=4:ts=4:expandtab
"""
riko
~~~~
Provides functions for analyzing and processing streams of structured data
Examples:
basic usage::
>>> from itertools import chain
>>> from functools import partial
>>> from riko.modules import itembuilder, strreplace
>>> from riko.collections import SyncPipe
>>>
>>> ib_conf = {
... 'attrs': [
... {'key': 'link', 'value': 'www.google.com', },
... {'key': 'title', 'value': 'google', },
... {'key': 'author', 'value': 'Tommy'}]}
>>>
>>> sr_conf = {
... 'rule': [{'find': 'Tom', 'param': 'first', 'replace': 'Tim'}]}
>>>
>>> items = itembuilder.pipe(conf=ib_conf)
>>> pipe = partial(strreplace.pipe, conf=sr_conf, field='author')
>>> replaced = map(pipe, items)
>>> next(chain.from_iterable(replaced)) == {
... 'link': 'www.google.com', 'title': 'google',
... 'strreplace': 'Timmy', 'author': 'Tommy'}
True
"""
from __future__ import (
absolute_import, division, print_function, unicode_literals)
from os import path as p
from builtins import * # noqa # pylint: disable=unused-import
__version__ = '0.56.0'
__title__ = 'riko'
__package_name__ = 'riko'
__author__ = 'Reuben Cummings'
__description__ = 'A stream processing engine modeled after Yahoo! Pipes.'
__email__ = 'reubano@gmail.com'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015 Reuben Cummings'
PARENT_DIR = p.abspath(p.dirname(__file__))
ENCODING = 'utf-8'
def get_path(name):
return 'file://%s' % p.join(PARENT_DIR, 'data', name)
| mit | Python |
1187a9895d2ecada06780760c29fe3046eab0715 | Update ipc_lista1.5.py | any1m1c/ipc20161 | lista1/ipc_lista1.5.py | lista1/ipc_lista1.5.py | #ipc_lista1.5
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que converta metros para centímetros.
metros = input("Digite o valor em metros que deseja converter em centímetros: ")
centimetros = metros * 100
print "Esse valor equivale a:
| #ipc_lista1.5
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que converta metros para centímetros.
metros = input("Digite o valor em metros que deseja converter em centímetros: ")
centimetros = metros * 100
print "Esse valor equivale a
| apache-2.0 | Python |
78b9cd6ee72f59743f52d3a8c050d21d368b8ba6 | Update ipc_lista1.5.py | any1m1c/ipc20161 | lista1/ipc_lista1.5.py | lista1/ipc_lista1.5.py | #ipc_lista1.5
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que converta metros para centímetros.
metros = input("Digite o valor em metros que deseja converter em
| #ipc_lista1.5
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que converta metros para centímetros.
metros = input("Digite o valor em metros que deseja converter em
| apache-2.0 | Python |
431aed87ed9ba90178117fa738e8c57ee542b7cc | Update ipc_lista1.8.py | any1m1c/ipc20161 | lista1/ipc_lista1.8.py | lista1/ipc_lista1.8.py | #ipc_lista1.8
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que pergunte quanto você ganha por hora e o número de horas trabalhadas no mês.
#Calcule e mostre o total do seu salário no referido mês.
QntHora = input("Entre com o valor de seu rendimento por hora: ")
hT = input("Entre com a quantidade de horas trabalhadas no mês: )
Salario =
| #ipc_lista1.8
#Professor: Jucimar Junior
#Any Mendes Carvalho - 1615310044
#
#
#
#
#Faça um programa que pergunte quanto você ganha por hora e o número de horas trabalhadas no mês.
#Calcule e mostre o total do seu salário no referido mês.
QntHora = input("Entre com o valor de seu rendimento por hora: ")
hT = input("Entre com a quantidade de horas trabalhadas no mês: )
Salario
| apache-2.0 | Python |
ad3a7d9bef598d1cdb31cce2f23faf5aa1608c42 | Bump to 0.5.1 | dinoperovic/djangoshop-shopit,dinoperovic/djangoshop-shopit,dinoperovic/djangoshop-shopit,dinoperovic/djangoshop-shopit | shopit/__init__.py | shopit/__init__.py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
"""
Release logic:
1. Bump the __version__.
2. git add shopit/__init__.py
3. git commit -m 'Bump to <version>'
4. git push
5. Make sure all tests pass on https://travis-ci.com/dinoperovic/django-shopit
6. git tag <version>
7. git push --tags
8. python setup.py sdist bdist_wheel
9. twine upload dist/*
10. Done!
"""
__version__ = '0.5.1'
default_app_config = 'shopit.apps.ShopitConfig'
| # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
"""
Release logic:
1. Bump the __version__.
2. git add shopit/__init__.py
3. git commit -m 'Bump to <version>'
4. git push
5. Make sure all tests pass on https://travis-ci.com/dinoperovic/django-shopit
6. git tag <version>
7. git push --tags
8. python setup.py sdist bdist_wheel
9. twine upload dist/*
10. Done!
"""
__version__ = '0.5.0'
default_app_config = 'shopit.apps.ShopitConfig'
| bsd-3-clause | Python |
0f1697c589de985c11c92cd9d74377023759875c | Make Elemental autodetect handle lib/ and lib64/ | davidsd/sdpb,davidsd/sdpb,davidsd/sdpb | elemental.py | elemental.py | #! /usr/bin/env python
# encoding: utf-8
def configure(conf):
def get_param(varname,default):
return getattr(Options.options,varname,'')or default
import os
# Find Elemental
if conf.options.elemental_dir:
if not conf.options.elemental_incdir:
conf.options.elemental_incdir=conf.options.elemental_dir + "/include"
if not conf.options.elemental_libdir:
lib=conf.options.elemental_dir + "/lib"
if os.path.isdir(lib):
conf.options.elemental_libdir=lib
lib64=conf.options.elemental_dir + "/lib64"
if os.path.isdir(lib64):
conf.options.elemental_libdir+=" " + lib64
if conf.options.elemental_incdir:
elemental_incdir=conf.options.elemental_incdir.split()
else:
elemental_incdir=[]
if conf.options.elemental_libdir:
elemental_libdir=conf.options.elemental_libdir.split()
else:
elemental_libdir=[]
if conf.options.elemental_libs:
elemental_libs=conf.options.elemental_libs.split()
else:
elemental_libs=['El', 'pmrrr', 'ElSuiteSparse', 'pthread', 'm', 'metis' ]
conf.check_cxx(msg="Checking for Elemental",
fragment="#include <El.hpp>\nint main(int argc, char* argv[]) {El::Environment env( argc, argv ); El::BigFloat big;}\n",
includes=elemental_incdir,
uselib_store='elemental',
libpath=elemental_libdir,
rpath=elemental_libdir,
lib=elemental_libs,
use=['cxx14','gmpxx'])
def options(opt):
elemental=opt.add_option_group('Elemental Options')
elemental.add_option('--elemental-dir',
help='Base directory where elemental is installed')
elemental.add_option('--elemental-incdir',
help='Directory where elemental include files are installed')
elemental.add_option('--elemental-libdir',
help='Directory where elemental library files are installed')
elemental.add_option('--elemental-libs',
help='Names of the elemental libraries without prefix or suffix\n'
'(e.g. "El pmrrr ElSuiteSparse")')
| #! /usr/bin/env python
# encoding: utf-8
def configure(conf):
def get_param(varname,default):
return getattr(Options.options,varname,'')or default
# Find Elemental
if conf.options.elemental_dir:
if not conf.options.elemental_incdir:
conf.options.elemental_incdir=conf.options.elemental_dir + "/include"
if not conf.options.elemental_libdir:
conf.options.elemental_libdir=conf.options.elemental_dir + "/lib"
if conf.options.elemental_incdir:
elemental_incdir=conf.options.elemental_incdir.split()
else:
elemental_incdir=[]
if conf.options.elemental_libdir:
elemental_libdir=conf.options.elemental_libdir.split()
else:
elemental_libdir=[]
if conf.options.elemental_libs:
elemental_libs=conf.options.elemental_libs.split()
else:
elemental_libs=['El', 'pmrrr', 'ElSuiteSparse', 'pthread', 'm', 'metis' ]
conf.check_cxx(msg="Checking for Elemental",
fragment="#include <El.hpp>\nint main(int argc, char* argv[]) {El::Environment env( argc, argv ); El::BigFloat big;}\n",
includes=elemental_incdir,
uselib_store='elemental',
libpath=elemental_libdir,
rpath=elemental_libdir,
lib=elemental_libs,
use=['cxx14','gmpxx'])
def options(opt):
elemental=opt.add_option_group('Elemental Options')
elemental.add_option('--elemental-dir',
help='Base directory where elemental is installed')
elemental.add_option('--elemental-incdir',
help='Directory where elemental include files are installed')
elemental.add_option('--elemental-libdir',
help='Directory where elemental library files are installed')
elemental.add_option('--elemental-libs',
help='Names of the elemental libraries without prefix or suffix\n'
'(e.g. "El pmrrr ElSuiteSparse")')
| mit | Python |
5c296010417b0a7c284d443b710f23edd747d3a6 | Fix demo setup.py. | bhy/cython-haoyu,bhy/cython-haoyu,bhy/cython-haoyu,bhy/cython-haoyu | Demos/setup.py | Demos/setup.py | import glob
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
try:
from numpy.distutils.misc_util import get_numpy_include_dirs
numpy_include_dirs = get_numpy_include_dirs()
except:
numpy_include_dirs = []
ext_modules=[
Extension("primes", ["primes.pyx"]),
Extension("spam", ["spam.pyx"]),
]
for file in glob.glob("*.pyx"):
if file != "numeric_demo.pyx":
ext_modules.append(Extension(file[:-4], [file], include_dirs = numpy_include_dirs))
setup(
name = 'Demos',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules,
)
| import glob
from distutils.core import setup
from distutils.extension import Extension
from Cython.Distutils import build_ext
try:
from numpy.distutils.misc_util import get_numpy_include_dirs
numpy_include_dirs = get_numpy_include_dirs()
except:
numpy_include_dirs = []
ext_modules=[
Extension("primes", ["primes.pyx"]),
Extension("spam", ["spam.pyx"]),
Extension("square", ["square.pyx"], language="c++"),
]
for file in glob.glob("*.pyx"):
if file != "numeric_demo.pyx" and file != "square.pyx":
ext_modules.append(Extension(file[:-4], [file], include_dirs = numpy_include_dirs))
setup(
name = 'Demos',
cmdclass = {'build_ext': build_ext},
ext_modules = ext_modules,
)
| apache-2.0 | Python |
fb159c1278b9a68e53223ebdf33a0764414f6301 | Fix 404 | otlet/JestemGraczem.pl,otlet/JestemGraczem.pl,otlet/JestemGraczem.pl | JestemGraczem/urls.py | JestemGraczem/urls.py | """JestemGraczem URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.conf.urls import include
from django.contrib.auth.models import User
from rest_framework import routers, serializers, viewsets
from stream.views import TwitchViewSet
from service import views as services
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
router = routers.DefaultRouter()
router.register(r'users', UserViewSet)
router.register(r'twitch', TwitchViewSet)
urlpatterns = [
path('admin/', admin.site.urls),
path('stream/', include('stream.urls'), name='stream'),
path('api/', include(router.urls)),
path('api-auth/', include('rest_framework.urls', namespace='rest_framework')),
path('', include('service.urls'), name='service'),
]
handler404 = 'service.page_not_found'
| """JestemGraczem URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from django.conf.urls import include
from django.contrib.auth.models import User
from rest_framework import routers, serializers, viewsets
from stream.views import TwitchViewSet
from service import views as services
class UserSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = User
fields = ('url', 'username', 'email', 'is_staff')
class UserViewSet(viewsets.ModelViewSet):
queryset = User.objects.all()
serializer_class = UserSerializer
router = routers.DefaultRouter()
router.register(r'users', UserViewSet)
router.register(r'twitch', TwitchViewSet)
urlpatterns = [
path('admin/', admin.site.urls),
path('stream/', include('stream.urls'), name='stream'),
path('api/', include(router.urls)),
path('api-auth/', include('rest_framework.urls', namespace='rest_framework')),
path('', include('service.urls'), name='service'),
]
handler404 = services.page_not_found
| agpl-3.0 | Python |
6e23132432bdbfa654b9ba7aeaa77259205bb29f | Make the test for Elemental more thorough | davidsd/sdpb,davidsd/sdpb,davidsd/sdpb | elemental.py | elemental.py | #! /usr/bin/env python
# encoding: utf-8
def configure(conf):
def get_param(varname,default):
return getattr(Options.options,varname,'')or default
conf.load('compiler_cxx cxx14')
# Find Elemental
if conf.options.elemental_dir:
if not conf.options.elemental_incdir:
conf.options.elemental_incdir=conf.options.elemental_dir + "/include"
if not conf.options.elemental_libdir:
conf.options.elemental_libdir=conf.options.elemental_dir + "/lib"
if conf.options.elemental_incdir:
elemental_incdir=conf.options.elemental_incdir.split()
else:
elemental_incdir=[]
if conf.options.elemental_libdir:
elemental_libdir=conf.options.elemental_libdir.split()
else:
elemental_libdir=[]
if conf.options.elemental_libs:
elemental_libs=conf.options.elemental_libs.split()
else:
elemental_libs=['El', 'pmrrr', 'ElSuiteSparse', 'pthread', 'm', 'mpc',
'mpfr', 'gmp', 'metis' ]
conf.check_cxx(msg="Checking for Elemental",
header_name='El.hpp',
fragment="#include <El.hpp>\nint main(int argc, char* argv[]) {El::Environment env( argc, argv ); El::BigFloat big;}\n",
includes=elemental_incdir,
uselib_store='elemental',
libpath=elemental_libdir,
rpath=elemental_libdir,
lib=elemental_libs,
use='cxx14')
def options(opt):
elemental=opt.add_option_group('Elemental Options')
elemental.add_option('--elemental-dir',
help='Base directory where elemental is installed')
elemental.add_option('--elemental-incdir',
help='Directory where elemental include files are installed')
elemental.add_option('--elemental-libdir',
help='Directory where elemental library files are installed')
elemental.add_option('--elemental-libs',
help='Names of the elemental libraries without prefix or suffix\n'
'(e.g. "El pmrrr ElSuiteSparse")')
| #! /usr/bin/env python
# encoding: utf-8
def configure(conf):
def get_param(varname,default):
return getattr(Options.options,varname,'')or default
conf.load('compiler_cxx cxx14')
# Find Elemental
if conf.options.elemental_dir:
if not conf.options.elemental_incdir:
conf.options.elemental_incdir=conf.options.elemental_dir + "/include"
if not conf.options.elemental_libdir:
conf.options.elemental_libdir=conf.options.elemental_dir + "/lib"
if conf.options.elemental_incdir:
elemental_incdir=conf.options.elemental_incdir.split()
else:
elemental_incdir=[]
if conf.options.elemental_libdir:
elemental_libdir=conf.options.elemental_libdir.split()
else:
elemental_libdir=[]
if conf.options.elemental_libs:
elemental_libs=conf.options.elemental_libs.split()
else:
elemental_libs=['El', 'pmrrr', 'ElSuiteSparse', 'pthread', 'm', 'mpc',
'mpfr', 'gmp', 'metis' ]
conf.check_cxx(msg="Checking for Elemental",
header_name='El.hpp',
includes=elemental_incdir,
uselib_store='elemental',
libpath=elemental_libdir,
rpath=elemental_libdir,
lib=elemental_libs,
use='cxx14')
def options(opt):
elemental=opt.add_option_group('Elemental Options')
elemental.add_option('--elemental-dir',
help='Base directory where elemental is installed')
elemental.add_option('--elemental-incdir',
help='Directory where elemental include files are installed')
elemental.add_option('--elemental-libdir',
help='Directory where elemental library files are installed')
elemental.add_option('--elemental-libs',
help='Names of the elemental libraries without prefix or suffix\n'
'(e.g. "El pmrrr ElSuiteSparse")')
| mit | Python |
ac20902bad9623806010fbea6ea3e61fbb294664 | Add `MissingSettingException` | shoopio/shoop,suutari-ai/shoop,shoopio/shoop,shoopio/shoop,shawnadelic/shuup,hrayr-artunyan/shuup,hrayr-artunyan/shuup,hrayr-artunyan/shuup,suutari-ai/shoop,suutari/shoop,suutari/shoop,suutari-ai/shoop,shawnadelic/shuup,shawnadelic/shuup,suutari/shoop | shuup/core/excs.py | shuup/core/excs.py | # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from shuup.utils.excs import Problem
class ImmutabilityError(ValueError):
pass
class NoProductsToShipException(Exception):
pass
class NoPaymentToCreateException(Exception):
pass
class NoRefundToCreateException(Exception):
pass
class RefundExceedsAmountException(Exception):
pass
class InvalidRefundAmountException(Exception):
pass
class MissingSettingException(Exception):
pass
class ProductNotOrderableProblem(Problem):
pass
class ProductNotVisibleProblem(Problem):
pass
class ImpossibleProductModeException(ValueError):
def __init__(self, message, code=None):
super(ImpossibleProductModeException, self).__init__(message)
self.code = code
| # -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from shuup.utils.excs import Problem
class ImmutabilityError(ValueError):
pass
class NoProductsToShipException(Exception):
pass
class NoPaymentToCreateException(Exception):
pass
class NoRefundToCreateException(Exception):
pass
class RefundExceedsAmountException(Exception):
pass
class InvalidRefundAmountException(Exception):
pass
class ProductNotOrderableProblem(Problem):
pass
class ProductNotVisibleProblem(Problem):
pass
class ImpossibleProductModeException(ValueError):
def __init__(self, message, code=None):
super(ImpossibleProductModeException, self).__init__(message)
self.code = code
| agpl-3.0 | Python |
33da9e13ce941690f2a202c46254901ddbbd4155 | Add some debugging | ionrock/dadd,ionrock/dadd,ionrock/dadd,ionrock/dadd | dadd/worker/processes/__init__.py | dadd/worker/processes/__init__.py | import os
import shlex
from subprocess import call, STDOUT
from dadd.worker import app
from dadd.worker.utils import printf
from dadd import client
class WorkerProcess(object):
def __init__(self, spec, output, sess=None):
self.spec = spec
# TODO: Add auth from the global config
self.conn = client.connect(app, sess)
self.output = output
self.returncode = None
def setup(self):
self.download_files()
def log(self, msg):
print(msg)
printf(msg, self.output)
def print_env(self):
call(['ls', '-la'], stderr=STDOUT, stdout=self.output)
call(['printenv'], stderr=STDOUT, stdout=self.output)
def start(self):
if isinstance(self.spec['cmd'], basestring):
parts = shlex.split(self.spec['cmd'])
else:
parts = self.spec['cmd']
cmd = []
for part in parts:
if part == '$APP_SETTINGS':
part = os.environ['APP_SETTINGS_JSON']
cmd.append(part)
# self.log('Current Environment')
# self.print_env()
self.log('Running: %s' % ' '.join(cmd))
self.returncode = call(cmd, stdout=self.output, stderr=STDOUT)
def download_files(self):
self.log('Downloading: %s' % self.spec.get('download_urls'))
for filename, url in self.spec.get('download_urls', {}).iteritems():
resp = self.conn.sess.get(url, stream=True)
if not resp.ok:
resp.raise_for_status()
with open(filename, 'w+') as fh:
for chunk in resp:
fh.write(chunk)
self.log('Downloaded: %s to %s' % (url, filename))
def finish(self):
state = 'success'
if self.returncode:
state = 'failed'
client.set_process_state(self.conn, self.proc.pid, state)
@property
def code(self):
return self.proc.returncode
| import os
import shlex
from subprocess import call, STDOUT
from dadd.worker import app
from dadd.worker.utils import printf
from dadd import client
class WorkerProcess(object):
def __init__(self, spec, output, sess=None):
self.spec = spec
# TODO: Add auth from the global config
self.conn = client.connect(app, sess)
self.output = output
self.returncode = None
def setup(self):
self.download_files()
def log(self, msg):
printf(msg, self.output)
def start(self):
if isinstance(self.spec['cmd'], basestring):
parts = shlex.split(self.spec['cmd'])
else:
parts = self.spec['cmd']
cmd = []
for part in parts:
if part == '$APP_SETTINGS':
part = os.environ['APP_SETTINGS_JSON']
cmd.append(part)
self.log('Running: %s' % ' '.join(cmd))
self.returncode = call(cmd, stdout=self.output, stderr=STDOUT)
def download_files(self):
for filename, url in self.spec.get('download_urls', {}).iteritems():
resp = self.conn.sess.get(url, stream=True)
resp.raise_for_status()
with open(filename, 'w+') as fh:
for chunk in resp:
fh.write(chunk)
self.log('Downloaded: %s to %s' % (url, filename))
def finish(self):
state = 'success'
if self.returncode:
state = 'failed'
client.set_process_state(self.conn, self.proc.pid, state)
@property
def code(self):
return self.proc.returncode
| bsd-3-clause | Python |
1a6d947022b7e9c7a44939dc69b2ec4ed9e13a5b | Fix a typo in dispatcher.py (main -> ip_whitelist). | modulexcite/catapult,catapult-project/catapult,catapult-project/catapult-csm,zeptonaut/catapult,SummerLW/Perf-Insight-Report,sahiljain/catapult,catapult-project/catapult-csm,benschmaus/catapult,danbeam/catapult,catapult-project/catapult-csm,dstockwell/catapult,scottmcmaster/catapult,zeptonaut/catapult,catapult-project/catapult,0x90sled/catapult,SummerLW/Perf-Insight-Report,dstockwell/catapult,catapult-project/catapult,sahiljain/catapult,sahiljain/catapult,catapult-project/catapult,SummerLW/Perf-Insight-Report,danbeam/catapult,benschmaus/catapult,SummerLW/Perf-Insight-Report,catapult-project/catapult,zeptonaut/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,dstockwell/catapult,catapult-project/catapult-csm,danbeam/catapult,sahiljain/catapult,benschmaus/catapult,scottmcmaster/catapult,catapult-project/catapult,0x90sled/catapult,dstockwell/catapult,0x90sled/catapult,modulexcite/catapult,benschmaus/catapult,sahiljain/catapult,catapult-project/catapult-csm,SummerLW/Perf-Insight-Report,scottmcmaster/catapult,benschmaus/catapult,catapult-project/catapult,benschmaus/catapult,catapult-project/catapult-csm,sahiljain/catapult,danbeam/catapult,modulexcite/catapult,benschmaus/catapult | dashboard/dashboard/dispatcher.py | dashboard/dashboard/dispatcher.py | """Dispatches requests to RequestHandler classes."""
import webapp2
from dashboard import ip_whitelist
from dashboard import main
_ROUTING_TABLE = [
('/ip_whitelist', ip_whitelist.IpWhitelistHandler),
('/', main.MainHandler),
]
app = webapp2.WSGIApplication(_ROUTING_TABLE, debug=True)
| """Dispatches requests to RequestHandler classes."""
import webapp2
from dashboard import ip_whitelist
from dashboard import main
_ROUTING_TABLE = [
('/ip_whitelist', main.IpWhitelistHandler),
('/', main.MainHandler),
]
app = webapp2.WSGIApplication(_ROUTING_TABLE, debug=True)
| bsd-3-clause | Python |
764c2952ac1dd18a213348d3a07097fc40ea9493 | Refresh after drawing menu. | mharriger/reui | reui/Menu.py | reui/Menu.py | from reui import Box
'''
A Box containing a vertical-scrolling menu. Generates event when a menu item is selected. Supports submenus.
'''
class Menu(Box.Box):
'''
Initialize Menu object
'''
items = [("Item 1",),("Item 2",),("Item 3",)]
def __init__(self, width, height, border_flags = 0):
Box.Box.__init__(self, width, height, border_flags)
def draw(self):
pos = 1
for item in self.items:
self.draw_text(2, pos, item[0])
pos += 9
self.refresh()
| from reui import Box
'''
A Box containing a vertical-scrolling menu. Generates event when a menu item is selected. Supports submenus.
'''
class Menu(Box.Box):
'''
Initialize Menu object
'''
items = [("Item 1",),("Item 2",),("Item 3",)]
def __init__(self, width, height, border_flags = 0):
Box.Box.__init__(self, width, height, border_flags)
def draw(self):
pos = 1
for item in self.items:
self.draw_text(2, pos, item[0])
pos += 9
| mit | Python |
668a42a92aaa794a0fa2d0e269108217d7c32ab9 | fix example to be py3 compatible | mkomitee/wsgi-kerberos | example/example_application.py | example/example_application.py | #!/usr/bin/env python
import sys
def example(environ, start_response):
user = environ.get('REMOTE_USER', 'ANONYMOUS')
start_response('200 OK', [('Content-Type', 'text/plain')])
data = "Hello {}".format(user)
return [data.encode()]
if __name__ == '__main__':
from wsgiref.simple_server import make_server
from wsgi_kerberos import KerberosAuthMiddleware
from socket import gethostname
import logging
logging.basicConfig(level=logging.DEBUG)
application = KerberosAuthMiddleware(example)
server = make_server(gethostname(), 8080, application)
server.serve_forever()
| #!/usr/bin/env python
def example(environ, start_response):
user = environ.get('REMOTE_USER', 'ANONYMOUS')
start_response('200 OK', [('Content-Type', 'text/plain')])
return ["Hello, %s" % user]
if __name__ == '__main__':
from wsgiref.simple_server import make_server
from wsgi_kerberos import KerberosAuthMiddleware
from socket import gethostname
import logging
logging.basicConfig(level=logging.DEBUG)
application = KerberosAuthMiddleware(example)
server = make_server(gethostname(), 8080, application)
server.serve_forever()
| bsd-2-clause | Python |
c7036d4aa7b02bb7691327cbaf4b31c74bb19349 | Add blank values support | abakar/django-whatever,kmmbvnr/django-any,abakar/django-whatever | django_any/fields.py | django_any/fields.py | #-*- coding: utf-8 -*-
"""
Values generators for common Django Fields
"""
import random
from decimal import Decimal
from django.db import models
import xunit
from multimethod import multimethod, multimethod_decorator
@multimethod_decorator
def any(function):
"""
Selection from field.choices
"""
def wrapper(field, **kwargs):
if field.choices:
return random.choice([choice[0] for choice in field.choices])
return function(field, **kwargs)
return wrapper
@multimethod_decorator
def any(function):
"""
Sometimes return None if field could be blank
"""
def wrapper(field, **kwargs):
if field.blank and random.random < 0.1:
return None
return function(field, **kwargs)
return wrapper
@multimethod(models.BooleanField)
def any(field, **kwargs):
"""
Return random value for BooleanField
>>> result = any(models.BooleanField())
>>> type(result)
<type 'bool'>
"""
return xunit.any_boolean()
@multimethod(models.PositiveIntegerField)
def any(field, **kwargs):
xunit.any_int(min_value=0, max_value=9999)
@multimethod(models.DecimalField)
def any(field, **kwargs):
min_value = 0
max_value = Decimal('%s.%s' % ('9'*(field.max_digits-field.decimal_places),
'9'*field.decimal_places))
return xunit.any_decimal(min_value=min_value, max_value=max_value,
decimal_places = field.decimal_places)
@multimethod(models.CharField)
def any(field, **kwargs):
"""
Return random value for CharField
>>> result = any(models.CharField(max_length=10))
>>> type(result)
<type 'str'>
"""
return xunit.any_string(min_length=1, max_length=field.max_length)
@multimethod(models.DateField)
def any(field, **kwargs):
return xunit.any_date()
@multimethod(models.DateTimeField)
def any(field, **kwargs):
return xunit.any_datetime()
@multimethod(models.EmailField)
def any(field, **kwargs):
return "%s@%s.%s" % (xunit.any_string(max_length=10),
xunit.any_string(max_length=10),
xunit.any_string(min_length=2, max_length=3))
| #-*- coding: utf-8 -*-
"""
Values generators for common Django Fields
"""
import random
from decimal import Decimal
from django.db import models
import xunit
from multimethod import multimethod, multimethod_decorator
@multimethod_decorator
def any(function):
"""
Selection from field.choices
"""
def wrapper(field, **kwargs):
if field.choices:
return random.choice([choice[0] for choice in field.choices])
return function(field, **kwargs)
return wrapper
@multimethod(models.BooleanField)
def any(field, **kwargs):
"""
Return random value for BooleanField
>>> result = any(models.BooleanField())
>>> type(result)
<type 'bool'>
"""
return xunit.any_boolean()
@multimethod(models.PositiveIntegerField)
def any(field, **kwargs):
xunit.any_int(min_value=0, max_value=9999)
@multimethod(models.DecimalField)
def any(field, **kwargs):
min_value = 0
max_value = Decimal('%s.%s' % ('9'*(field.max_digits-field.decimal_places),
'9'*field.decimal_places))
return xunit.any_decimal(min_value=min_value, max_value=max_value,
decimal_places = field.decimal_places)
@multimethod(models.CharField)
def any(field, **kwargs):
"""
Return random value for CharField
>>> result = any(models.CharField(max_length=10))
>>> type(result)
<type 'str'>
"""
return xunit.any_string(min_length=1, max_length=field.max_length)
@multimethod(models.DateField)
def any(field, **kwargs):
return xunit.any_date()
@multimethod(models.DateTimeField)
def any(field, **kwargs):
return xunit.any_datetime()
@multimethod(models.EmailField)
def any(field, **kwargs):
return "%s@%s.%s" % (xunit.any_string(max_length=10),
xunit.any_string(max_length=10),
xunit.any_string(min_length=2, max_length=3))
| mit | Python |
cb97f453284658da56d12ab696ef6b7d7991c727 | TEST - add test for value | jyeatman/dipy,beni55/dipy,samuelstjean/dipy,FrancoisRheaultUS/dipy,demianw/dipy,demianw/dipy,nilgoyyou/dipy,jyeatman/dipy,Messaoud-Boudjada/dipy,maurozucchelli/dipy,Messaoud-Boudjada/dipy,StongeEtienne/dipy,villalonreina/dipy,JohnGriffiths/dipy,rfdougherty/dipy,villalonreina/dipy,sinkpoint/dipy,JohnGriffiths/dipy,FrancoisRheaultUS/dipy,maurozucchelli/dipy,oesteban/dipy,sinkpoint/dipy,samuelstjean/dipy,samuelstjean/dipy,StongeEtienne/dipy,mdesco/dipy,rfdougherty/dipy,oesteban/dipy,matthieudumont/dipy,matthieudumont/dipy,beni55/dipy,maurozucchelli/dipy,mdesco/dipy,nilgoyyou/dipy | dipy/io/tests/test_csareader.py | dipy/io/tests/test_csareader.py | """ Testing Siemens CSA header reader
"""
import os
from os.path import join as pjoin
import numpy as np
import dipy.io.csareader as csa
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
data_path = pjoin(os.path.dirname(__file__), 'data')
CSA2_B0 = open(pjoin(data_path, 'csa2_b0.bin')).read()
CSA2_B1000 = open(pjoin(data_path, 'csa2_b1000.bin')).read()
@parametric
def test_csa():
csa_info = csa.read(CSA2_B0)
yield assert_equal(csa_info['type'], 2)
yield assert_equal(csa_info['n_tags'], 83)
tags = csa_info['tags']
yield assert_equal(len(tags), 83)
yield assert_equal(tags['NumberOfImagesInMosaic']['value'],
'48')
| """ Testing Siemens CSA header reader
"""
import os
from os.path import join as pjoin
import numpy as np
import dipy.io.csareader as csa
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
from dipy.testing import parametric
data_path = pjoin(os.path.dirname(__file__), 'data')
CSA2_B0 = open(pjoin(data_path, 'csa2_b0.bin')).read()
CSA2_B1000 = open(pjoin(data_path, 'csa2_b1000.bin')).read()
@parametric
def test_csa():
csa_info = csa.read(CSA2_B0)
yield assert_equal(csa_info['type'], 2)
yield assert_equal(csa_info['n_tags'], 83)
tags = csa_info['tags']
yield assert_equal(len(tags), 83)
print csa_info
| bsd-3-clause | Python |
07374a55b70232120b92d92613b8bfede9631121 | Update blacklist | UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine,UPOLSearch/UPOL-Search-Engine | upol_search_engine/__main__.py | upol_search_engine/__main__.py | from datetime import datetime
from time import sleep
from upol_search_engine.upol_crawler import tasks
def main():
blacklist = """portal.upol.cz
stag.upol.cz
library.upol.cz
adfs.upol.cz
portalbeta.upol.cz
idp.upol.cz
famaplus.upol.cz
es.upol.cz
smlouvy.upol.cz
menza.upol.cz
edis.upol.cz
courseware.upol.cz
m.zurnal.upol.cz"""
crawler_settings = {'limit_domain': 'upol.cz',
'max_depth': 10,
'connect_max_timeout': 3.05,
'read_max_timeout': 10,
'frequency_per_server': 0.5,
'blacklist': blacklist}
seed = """https://www.upol.cz
https://www.cmtf.upol.cz
https://www.lf.upol.cz
https://www.ff.upol.cz
https://www.prf.upol.cz
https://www.pdf.upol.cz
https://ftk.upol.cz
https://www.pf.upol.cz
https://www.fzv.upol.cz"""
print("Launching crawler")
feeder = tasks.feeder_task.delay(
crawler_settings=crawler_settings,
seed=seed,
batch_size=300,
delay_between_feeding=30)
start_time = datetime.now()
while feeder.status != 'SUCCESS':
print(feeder.status)
print(feeder.info)
duration = datetime.now() - start_time
print(duration)
sleep(10)
print("Crawler done")
print("Launching pagerank calculation")
pagerank = tasks.calculate_pagerank_task.delay(crawler_settings)
while pagerank.status != 'SUCCESS':
print(pagerank.status)
sleep(5)
end_time = datetime.now()
duration = end_time - start_time
print(duration)
print("Pagerank done")
if __name__ == "__main__":
main()
| from datetime import datetime
from time import sleep
from upol_search_engine.upol_crawler import tasks
def main():
blacklist = """portal.upol.cz
stag.upol.cz
library.upol.cz
adfs.upol.cz
portalbeta.upol.cz
idp.upol.cz
famaplus.upol.cz
es.upol.cz
smlouvy.upol.cz
menza.upol.cz
edis.upol.cz
courseware.upol.cz"""
crawler_settings = {'limit_domain': 'upol.cz',
'max_depth': 10,
'connect_max_timeout': 3.05,
'read_max_timeout': 10,
'frequency_per_server': 0.5,
'blacklist': blacklist}
seed = """https://www.upol.cz
https://www.cmtf.upol.cz
https://www.lf.upol.cz
https://www.ff.upol.cz
https://www.prf.upol.cz
https://www.pdf.upol.cz
https://ftk.upol.cz
https://www.pf.upol.cz
https://www.fzv.upol.cz"""
print("Launching crawler")
feeder = tasks.feeder_task.delay(
crawler_settings=crawler_settings,
seed=seed,
batch_size=300,
delay_between_feeding=30)
start_time = datetime.now()
while feeder.status != 'SUCCESS':
print(feeder.status)
print(feeder.info)
duration = datetime.now() - start_time
print(duration)
sleep(10)
print("Crawler done")
print("Launching pagerank calculation")
pagerank = tasks.calculate_pagerank_task.delay(crawler_settings)
while pagerank.status != 'SUCCESS':
print(pagerank.status)
sleep(5)
end_time = datetime.now()
duration = end_time - start_time
print(duration)
print("Pagerank done")
if __name__ == "__main__":
main()
| mit | Python |
a625fe202134831e2cbb430e33669ab8b846bccc | Add remove tasks | Code4SA/mma-dexter,Code4SA/mma-dexter,Code4SA/mma-dexter | dexter/config/celeryconfig.py | dexter/config/celeryconfig.py | from celery.schedules import crontab
# uses AWS creds from the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env variables
BROKER_URL = 'sqs://'
BROKER_TRANSPORT_OPTIONS = {
'region': 'eu-west-1',
'polling_interval': 15 * 1,
'queue_name_prefix': 'mma-dexter-',
'visibility_timeout': 3600*12,
}
# all our tasks can by retried if the worker fails
CELERY_ACKS_LATE = True
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TIMEZONE = 'Africa/Johannesburg'
CELERY_ENABLE_UTC = True
CELERYBEAT_SCHEDULE = {
'fetch-yesterdays-feeds': {
'schedule': crontab(hour=15, minute=30),
'task': 'dexter.tasks.fetch_yesterdays_feeds',
},
# 'back-process-feeds': {
# 'schedule': crontab(hour=11, minute=0),
# 'task': 'dexter.tasks.back_process_feeds',
# },
# 'fetch_yesterdays_feeds_rerun': {
# 'schedule': crontab(hour=16, minute=30),
# 'task': 'dexter.tasks.fetch_yesterdays_feeds_rerun',
# },
# 'backfill-taxonomies': {
# 'schedule': crontab(hour=21, minute=0),
# 'task': 'dexter.tasks.backfill_taxonomies',
# },
}
| from celery.schedules import crontab
# uses AWS creds from the AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env variables
BROKER_URL = 'sqs://'
BROKER_TRANSPORT_OPTIONS = {
'region': 'eu-west-1',
'polling_interval': 15 * 1,
'queue_name_prefix': 'mma-dexter-',
'visibility_timeout': 3600*12,
}
# all our tasks can by retried if the worker fails
CELERY_ACKS_LATE = True
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TIMEZONE = 'Africa/Johannesburg'
CELERY_ENABLE_UTC = True
CELERYBEAT_SCHEDULE = {
'fetch-yesterdays-feeds': {
'schedule': crontab(hour=15, minute=0),
'task': 'dexter.tasks.fetch_yesterdays_feeds',
},
# 'back-process-feeds': {
# 'schedule': crontab(hour=11, minute=0),
# 'task': 'dexter.tasks.back_process_feeds',
# },
# 'fetch_yesterdays_feeds_rerun': {
# 'schedule': crontab(hour=16, minute=30),
# 'task': 'dexter.tasks.fetch_yesterdays_feeds_rerun',
# },
# 'backfill-taxonomies': {
# 'schedule': crontab(hour=21, minute=0),
# 'task': 'dexter.tasks.backfill_taxonomies',
# },
}
| apache-2.0 | Python |
bf8e5606dbcd0bf852205174099ae0d160ea7837 | Add a pause. | c00w/bitHopper,c00w/bitHopper | HTTPCloser.py | HTTPCloser.py | import time, gevent
seen = None
def used(url, http_pool):
__patch()
seen[(url, http_pool)] = time.time()
def __patch():
global seen
if seen is None:
seen = {}
gevent.spawn(clean)
def clean():
while True:
for k, last_seen in seen.items:
if time.time()-last_seen < 0.3:
continue
url, pool = k
pool.request(url, 'GET', headers = {'Connection':'close'})
gevent.sleep(0.3)
| import time, gevent
seen = None
def used(url, http_pool):
__patch()
seen[(url, http_pool)] = time.time()
def __patch():
global seen
if seen is None:
seen = {}
gevent.spawn(clean)
def clean():
while True:
for k, last_seen in seen.items:
if time.time()-last_seen < 0.3:
continue
url, pool = k
pool.request(url, 'GET', headers = {'Connection':'close'})
| mit | Python |
4303e9569456093b3f9674071e12a55b36c2a280 | improve memcpy sample to compare synchronous and asynchronous DtoH / HtoD copies | cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy | examples/stream/cupy_memcpy.py | examples/stream/cupy_memcpy.py | # nvprof --print-gpu-trace python examples/stream/cupy_memcpy.py
import cupy
import numpy
pinned_memory_pool = cupy.cuda.PinnedMemoryPool()
cupy.cuda.set_pinned_memory_allocator(pinned_memory_pool.malloc)
def _pin_memory(array):
mem = cupy.cuda.alloc_pinned_memory(array.nbytes)
ret = numpy.frombuffer(mem, array.dtype, array.size).reshape(array.shape)
ret[...] = array
return ret
SIZE = 1024 * 1024
x_cpu_src = numpy.arange(SIZE, dtype=numpy.float32)
x_gpu_src = cupy.arange(SIZE, dtype=numpy.float32)
# synchronous
stream = cupy.cuda.Stream.null
start = stream.record()
x_gpu_dst = cupy.empty(x_cpu_src.shape, x_cpu_src.dtype)
x_gpu_dst.set(x_cpu_src)
x_cpu_dst = x_gpu_src.get()
end = stream.record()
print('Synchronous Device to Host / Host to Device (ms)')
print(cupy.cuda.get_elapsed_time(start, end))
# asynchronous
x_gpu_dst = cupy.empty(x_cpu_src.shape, x_cpu_src.dtype)
x_cpu_dst = numpy.empty(x_gpu_src.shape, x_gpu_src.dtype)
x_pinned_cpu_src = _pin_memory(x_cpu_src)
x_pinned_cpu_dst = _pin_memory(x_cpu_dst)
with cupy.cuda.stream.Stream() as stream_htod:
start = stream_htod.record()
x_gpu_dst.set(x_pinned_cpu_src)
with cupy.cuda.stream.Stream() as stream_dtoh:
x_gpu_src.get(out=x_pinned_cpu_dst)
stream_dtoh.synchronize()
stream_htod.synchronize()
end = stream_htod.record()
print('Asynchronous Device to Host / Host to Device (ms)')
print(cupy.cuda.get_elapsed_time(start, end))
| # nvprof --print-gpu-trace python examples/stream/cupy_memcpy.py
import cupy
import numpy
pinned_memory_pool = cupy.cuda.PinnedMemoryPool()
cupy.cuda.set_pinned_memory_allocator(pinned_memory_pool.malloc)
def _pin_memory(array):
mem = cupy.cuda.alloc_pinned_memory(array.nbytes)
ret = numpy.frombuffer(mem, array.dtype, array.size).reshape(array.shape)
ret[...] = array
return ret
x_cpu = numpy.array([1, 2, 3], dtype=numpy.float32)
x_pinned_cpu = _pin_memory(x_cpu)
x_gpu = cupy.core.ndarray((3,), dtype=numpy.float32)
with cupy.cuda.stream.Stream():
x_gpu.set(x_pinned_cpu)
stream = cupy.cuda.stream.Stream()
stream.use()
x_pinned_cpu = x_gpu.get()
| mit | Python |
d395c0f895121e5c75820ff2b4fe502086b3fb01 | Bump version to 0.0.4 | Parsely/parsely_raw_data,Parsely/parsely_raw_data | parsely_raw_data/__init__.py | parsely_raw_data/__init__.py | __license__ = """
Copyright 2016 Parsely, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from .event import (Event, SlotInfo, SessionInfo, TimestampInfo, VisitorInfo,
DisplayInfo, SlotsMixin, Metadata)
__version__ = '0.0.4'
__all__ = ["Event", "SlotInfo", "SessionInfo", "TimestampInfo", "VisitorInfo",
"DisplayInfo", "SlotsMixin", "Metadata"]
| __license__ = """
Copyright 2016 Parsely, Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from .event import (Event, SlotInfo, SessionInfo, TimestampInfo, VisitorInfo,
DisplayInfo, SlotsMixin, Metadata)
__version__ = '0.0.3'
__all__ = ["Event", "SlotInfo", "SessionInfo", "TimestampInfo", "VisitorInfo",
"DisplayInfo", "SlotsMixin", "Metadata"]
| apache-2.0 | Python |
78b52553129cbcd94b3e288eed0d60fe91e537aa | Append to sys.path in tests/test.py to find dask_condor libraries | matyasselmeci/dask_condor,matyasselmeci/dask_condor | tests/test.py | tests/test.py | #!/usr/bin/env python
from __future__ import print_function
import logging
import os
import sys
import unittest
import dask.array
import distributed
if __name__ == "__main__" and __package__ is None:
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from tests import HTCondorClusterTestCase
from dask_condor import HTCondorCluster
class TestDaskCondor(HTCondorClusterTestCase):
def test_array(self):
self.cluster.start_workers(n=4)
x = dask.array.ones((500, 500), chunks=(10, 10))
future = self.client.compute(x.sum())
result = int(self.client.gather(future))
self.assertEqual(result, 500**2)
def test_simple(self):
self.cluster.start_workers(n=4)
future = self.client.map(chr, [72, 69, 76, 76, 79, 32, 67, 79, 78, 68, 79, 82])
self.assertEqual(''.join(self.client.gather(future)), 'HELLO CONDOR')
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('distributed.comm.tcp').setLevel(logging.ERROR)
logging.getLogger('tornado.application').setLevel(logging.CRITICAL)
unittest.main(verbosity=2)
| #!/usr/bin/env python
from __future__ import print_function
import logging
import sys
import unittest
import dask.array
import distributed
sys.path.insert(0, '.')
from dask_condor.tests import HTCondorClusterTestCase
from dask_condor import HTCondorCluster
class TestDaskCondor(HTCondorClusterTestCase):
def test_array(self):
self.cluster.start_workers(n=4)
x = dask.array.ones((500, 500), chunks=(10, 10))
future = self.client.compute(x.sum())
result = int(self.client.gather(future))
self.assertEqual(result, 500**2)
def test_simple(self):
self.cluster.start_workers(n=4)
future = self.client.map(chr, [72, 69, 76, 76, 79, 32, 67, 79, 78, 68, 79, 82])
self.assertEqual(''.join(self.client.gather(future)), 'HELLO CONDOR')
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG)
logging.getLogger('distributed.comm.tcp').setLevel(logging.ERROR)
logging.getLogger('tornado.application').setLevel(logging.CRITICAL)
unittest.main(verbosity=2)
| apache-2.0 | Python |
52d449592feeae436e1df85d9ddec7ef323647fa | Add some basic version information | sbaechler/feincms-elephantblog,michaelkuty/feincms-elephantblog,feincms/feincms-elephantblog,feincms/feincms-elephantblog,michaelkuty/feincms-elephantblog,joshuajonah/feincms-elephantblog,michaelkuty/feincms-elephantblog,matthiask/feincms-elephantblog,matthiask/feincms-elephantblog,matthiask/feincms-elephantblog,joshuajonah/feincms-elephantblog,sbaechler/feincms-elephantblog,joshuajonah/feincms-elephantblog,sbaechler/feincms-elephantblog | elephantblog/__init__.py | elephantblog/__init__.py | VERSION = (0, 1, 0)
__version__ = '.'.join(map(str, VERSION))
| bsd-3-clause | Python | |
e11e751dcb6ea967f44503aad8d1c67f3e12826c | test threads | jonathanunderwood/python-lz4,python-lz4/python-lz4,jonathanunderwood/python-lz4,python-lz4/python-lz4 | tests/test.py | tests/test.py | import lz4
import sys
from multiprocessing.pool import ThreadPool
import unittest
import os
class TestLZ4(unittest.TestCase):
def test_random(self):
DATA = os.urandom(128 * 1024) # Read 128kb
self.assertEqual(DATA, lz4.loads(lz4.dumps(DATA)))
def test_threads(self):
data = [os.urandom(128 * 1024) for i in range(100)]
def roundtrip(x):
return lz4.loads(lz4.dumps(x))
pool = ThreadPool(8)
out = pool.map(roundtrip, data)
assert data == out
pool.close()
if __name__ == '__main__':
unittest.main()
| import lz4
import sys
import unittest
import os
class TestLZ4(unittest.TestCase):
def test_random(self):
DATA = os.urandom(128 * 1024) # Read 128kb
self.assertEqual(DATA, lz4.loads(lz4.dumps(DATA)))
if __name__ == '__main__':
unittest.main()
| bsd-3-clause | Python |
64dc354022f7c9eef73c0218ddf8df79e499c219 | expand sitetree utils to allow updating of programs and program_family permissions | masschallenge/django-accelerator,masschallenge/django-accelerator | accelerator/sitetree_navigation/utils.py | accelerator/sitetree_navigation/utils.py | from accelerator.models import (
NavTreeItem,
UserRole,
Program,
ProgramFamily,
)
def create_items(tree, item_props_list, parent=None):
for item_props in item_props_list:
item_kwargs = dict(item_props)
item_kwargs.pop('user_roles', None)
NavTreeItem.objects.update_or_create(
tree=tree,
**item_kwargs
)
def _add_user_roles_to_item(item_props):
allowed_user_roles = item_props.get('user_roles', [])
if not allowed_user_roles:
return None
user_roles = UserRole.objects.filter(name__in=allowed_user_roles)
tree_item = NavTreeItem.objects.filter(alias=item_props["alias"]).first()
tree_item.user_role.clear()
for user_role in user_roles:
tree_item.user_role.add(user_role)
def _add_allowed_programs_to_item(item_props):
allowed_programs = item_props.get('programs', [])
if not allowed_programs:
return None
programs = Program.objects.filter(id__in=allowed_programs)
tree_item = NavTreeItem.objects.filter(alias=item_props["alias"]).first()
tree_item.program.clear()
for program in programs:
tree_item.program.add(program)
def _add_allowed_program_families_to_item(item_props):
allowed_program_families = item_props.get('program_families', [])
if not allowed_program_families:
return None
program_families = ProgramFamily.objects.filter(
id__in=allowed_program_families)
tree_item = NavTreeItem.objects.filter(alias=item_props["alias"]).first()
tree_item.program_family.clear()
for program_family in program_families:
tree_item.program_family.add(program_family)
def add_user_roles_to_nav_items(item_props_list):
for item_props in item_props_list:
_add_user_roles_to_item(item_props)
def add_user_roles_to_side_nav_items(item_props_list):
add_user_roles_to_nav_items(item_props_list)
| from accelerator.models import (
NavTreeItem,
UserRole,
)
def create_items(tree, item_props_list, parent=None):
for item_props in item_props_list:
item_kwargs = dict(item_props)
item_kwargs.pop('user_roles', None)
NavTreeItem.objects.update_or_create(
tree=tree,
**item_kwargs
)
def _add_user_roles_to_item(item_props):
allowed_user_roles = item_props.get('user_roles', [])
if not allowed_user_roles:
return None
user_roles = UserRole.objects.filter(name__in=allowed_user_roles)
tree_item = NavTreeItem.objects.filter(alias=item_props["alias"]).first()
tree_item.user_role.clear()
for user_role in user_roles:
tree_item.user_role.add(user_role)
def add_user_roles_to_nav_items(item_props_list):
for item_props in item_props_list:
_add_user_roles_to_item(item_props)
def add_user_roles_to_side_nav_items(item_props_list):
add_user_roles_to_nav_items(item_props_list)
| mit | Python |
63ca4ab4fc7237a9b32d82d73160b7f02c3ac133 | Add "config file was not fount" error handler | vv-p/jira-reports,vv-p/jira-reports | settings.py | settings.py | # coding: utf-8
import os.path
import yaml
import logging
CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'settings.yaml')
logger = logging.getLogger(__name__)
try:
with open(CONFIG_PATH, 'r') as fh:
config = yaml.load(fh)
except FileNotFoundError:
logging.error('Config file was not found: settings.yaml')
logging.error('You must create it first')
exit(1)
# Jira settings
JIRA_URL = config['jira']['url']
JIRA_USER = config['jira']['user']
JIRA_PASS = config['jira']['pass']
JIRA_PROJECT = config['jira']['project']
# SMTP settings
SMTP_HOST = config['smtp']['host']
SMTP_PORT = config['smtp']['port']
SMTP_USER = config['smtp']['user']
SMTP_PASS = config['smtp']['pass']
# Mail settings
EMAIL_FROM = config['email']['from']
EMAIL_TO = config['email']['to']
DAILY_SUBJECT = config['email']['daily_subject']
QUEUE_SUBJECT = config['email']['queue_subject']
AGES_SUBJECT = config['email']['ages_subject']
WEEKLY_SUBJECT = config['email']['weekly_subject']
# Team settings
TEAM = [x['mail'] for x in config['team']]
FUNC = [x['mail'] for x in config['team'] if x['role'] == 'manual']
AUTO = [x['mail'] for x in config['team'] if x['role'] == 'auto']
| # coding: utf-8
import os.path
import yaml
CONFIG_PATH = os.path.join(os.path.dirname(__file__), 'settings.yaml')
with open(CONFIG_PATH, 'r') as fh:
config = yaml.load(fh)
# Jira settings
JIRA_URL = config['jira']['url']
JIRA_USER = config['jira']['user']
JIRA_PASS = config['jira']['pass']
JIRA_PROJECT = config['jira']['project']
# SMTP settings
SMTP_HOST = config['smtp']['host']
SMTP_PORT = config['smtp']['port']
SMTP_USER = config['smtp']['user']
SMTP_PASS = config['smtp']['pass']
# Mail settings
EMAIL_FROM = config['email']['from']
EMAIL_TO = config['email']['to']
DAILY_SUBJECT = config['email']['daily_subject']
QUEUE_SUBJECT = config['email']['queue_subject']
AGES_SUBJECT = config['email']['ages_subject']
WEEKLY_SUBJECT = config['email']['weekly_subject']
# Team settings
TEAM = [x['mail'] for x in config['team']]
FUNC = [x['mail'] for x in config['team'] if x['role'] == 'manual']
AUTO = [x['mail'] for x in config['team'] if x['role'] == 'auto']
| mit | Python |
7db99f61668060c09f3d5c9b638578013869f464 | add bot.start() to tests to ensure the (now single) eventloop is running | Halibot/halibot,Halibot/halibot | tests/util.py | tests/util.py | # Utilities to assist in writing tests
import halibot
import unittest
import logging
import time
def waitOrTimeout(timeout, condition):
for i in range(timeout):
if condition():
break
time.sleep(0.1)
else:
print("warning: timeout reached") # pragma: no cover
# Provides a unique bot in self.bot for every test case
class HalibotTestCase(unittest.TestCase):
def setUp(self):
# Silence expected error messages
logging.basicConfig(level=logging.CRITICAL)
self.bot = halibot.Halibot(use_config=False)
self.bot.start(block=False)
def tearDown(self):
self.bot.shutdown()
| # Utilities to assist in writing tests
import halibot
import unittest
import logging
import time
def waitOrTimeout(timeout, condition):
for i in range(timeout):
if condition():
break
time.sleep(0.1)
else:
print("warning: timeout reached") # pragma: no cover
# Provides a unique bot in self.bot for every test case
class HalibotTestCase(unittest.TestCase):
def setUp(self):
# Silence expected error messages
logging.basicConfig(level=logging.CRITICAL)
self.bot = halibot.Halibot(use_config=False)
def tearDown(self):
self.bot.shutdown()
| bsd-3-clause | Python |
14c9151d211ec30dd5e00b604bf670a4b957e71a | revert fallback name | b-mueller/mythril,b-mueller/mythril,b-mueller/mythril,b-mueller/mythril | mythril/laser/ethereum/transaction.py | mythril/laser/ethereum/transaction.py | import logging
from mythril.laser.ethereum.state import GlobalState, Environment, CalldataType
from mythril.laser.ethereum.cfg import Node, Edge, JumpType
from z3 import BitVec
class MessageCall:
""" Represents a call value transaction """
def __init__(self, callee_address):
"""
Constructor for Call transaction, sets up all symbolic parameters
:param callee_address: Address of the contract that will be called
"""
self.callee_address = callee_address
self.caller = BitVec("caller", 256)
self.gas_price = BitVec("gasprice", 256)
self.call_value = BitVec("callvalue", 256)
self.origin = BitVec("origin", 256)
self.open_states = None
@property
def has_ran(self):
return self.open_states is not None
def run(self, open_world_states: list, evm):
""" Runs this transaction on the evm starting from the open world states """
# Consume the open states
open_states = open_world_states[:]
del open_world_states[:]
for open_world_state in open_states:
# Initialize the execution environment
environment = Environment(
open_world_state[self.callee_address],
self.caller,
[],
self.gas_price,
self.call_value,
self.origin,
calldata_type=CalldataType.SYMBOLIC,
)
new_node = Node(environment.active_account.contract_name)
evm.instructions_covered = [False for _ in environment.code.instruction_list]
evm.nodes[new_node.uid] = new_node
if open_world_state.node:
evm.edges.append(Edge(open_world_state.node.uid, new_node.uid, edge_type=JumpType.Transaction, condition=None))
global_state = GlobalState(open_world_state.accounts, environment, new_node)
global_state.environment.active_function_name = 'fallback'
new_node.states.append(global_state)
evm.work_list.append(global_state)
evm.exec()
logging.info("Execution complete")
logging.info("Achieved {0:.3g}% coverage".format(evm.coverage))
| import logging
from mythril.laser.ethereum.state import GlobalState, Environment, CalldataType
from mythril.laser.ethereum.cfg import Node, Edge, JumpType
from z3 import BitVec
class MessageCall:
""" Represents a call value transaction """
def __init__(self, callee_address):
"""
Constructor for Call transaction, sets up all symbolic parameters
:param callee_address: Address of the contract that will be called
"""
self.callee_address = callee_address
self.caller = BitVec("caller", 256)
self.gas_price = BitVec("gasprice", 256)
self.call_value = BitVec("callvalue", 256)
self.origin = BitVec("origin", 256)
self.open_states = None
@property
def has_ran(self):
return self.open_states is not None
def run(self, open_world_states: list, evm):
""" Runs this transaction on the evm starting from the open world states """
# Consume the open states
open_states = open_world_states[:]
del open_world_states[:]
for open_world_state in open_states:
# Initialize the execution environment
environment = Environment(
open_world_state[self.callee_address],
self.caller,
[],
self.gas_price,
self.call_value,
self.origin,
calldata_type=CalldataType.SYMBOLIC,
)
new_node = Node(environment.active_account.contract_name)
evm.instructions_covered = [False for _ in environment.code.instruction_list]
evm.nodes[new_node.uid] = new_node
if open_world_state.node:
evm.edges.append(Edge(open_world_state.node.uid, new_node.uid, edge_type=JumpType.Transaction, condition=None))
global_state = GlobalState(open_world_state.accounts, environment, new_node)
global_state.environment.active_function_name = 'unnamed fallback'
new_node.states.append(global_state)
evm.work_list.append(global_state)
evm.exec()
logging.info("Execution complete")
logging.info("Achieved {0:.3g}% coverage".format(evm.coverage))
| mit | Python |
edae444521312c3a90ae71896122f23a9d6d2e61 | make capfd workarounds more specific, always use capsys under idle | xflr6/graphviz | run-tests.py | run-tests.py | #!/usr/bin/env python
# run-tests.py
import sys
import pytest
ARGS = [
#'--exitfirst',
#'--pdb',
]
if 'idlelib' in sys.modules or 'thonny' in sys.modules:
ARGS.extend(['--capture=sys', '--color=no'])
elif sys.version_info[0] == 2 and 'win_unicode_console' in sys.modules:
ARGS.append('--capture=sys')
pytest.main(ARGS + sys.argv[1:])
| #!/usr/bin/env python
# run-tests.py
import sys
import platform
import pytest
ARGS = [
#'--exitfirst',
#'--pdb',
]
if 'idlelib' in sys.modules or 'thonny' in sys.modules:
ARGS.append('--color=no')
if platform.system().lower() == 'windows':
ARGS.append('--capture=sys')
pytest.main(ARGS + sys.argv[1:])
| mit | Python |
269a4062de8071551048a683fd05c63d31622aea | Update documentation | pa-pyrus/ircCommander | leader.py | leader.py | # vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
from database import Session
from database.models import LeaderBoardEntry, UberAccount
from twisted.internet.defer import Deferred, succeed
from twisted.python import log
class LeaderParser(object):
"""
Retrieves most recently cached rankings for the specified league.
Provides deferred functions that can be called from other Twisted
applications.
"""
def __init__(self):
"""Initialize a database session."""
log.msg("Initializing Ubernet Leaderboard parser.")
self.session = Session()
def startUpdate(self):
"""
Initiate an update using Twisted.
The request is handled asynchronously. It will call onUpdate if it's
successful and onError otherwise.
"""
deferred = succeed(None)
deferred.addCallbacks(self.onUpdate, self.onError)
return deferred
def onUpdate(self, value):
"""Patches are updated by a cronjob, no need to do it here."""
return None
def onError(self, error):
"""Error callback for retrieving Uberent API data."""
log.err("Encountered an error: {0}".format(
error.getErrorMessage()))
return error
def top(self, league):
"""Start an update and return a deferred containing the results."""
updateDeferred = self.startUpdate()
newDeferred = Deferred()
league = league.capitalize()
def updateDone(value):
"""Callback method for update."""
entries = (self.session.query(LeaderBoardEntry.uid,
UberAccount.dname)
.outerjoin(UberAccount,
UberAccount.uid == LeaderBoardEntry.uid)
.filter(LeaderBoardEntry.league == league)
.order_by(LeaderBoardEntry.rank))
newDeferred.callback([e[1] for e in entries])
self.session.close()
updateDeferred.addCallback(updateDone)
return newDeferred
| # vim:fileencoding=utf-8:ts=8:et:sw=4:sts=4:tw=79
from database import Session
from database.models import LeaderBoardEntry, UberAccount
from twisted.internet.defer import Deferred, succeed
from twisted.python import log
class LeaderParser(object):
"""
Retrieves most recently cached rankings for the specified league.
Provides deferred functions that can be called from other Twisted
applications.
"""
def __init__(self):
"""Read configuration from a file or store defaults."""
log.msg("Initializing Ubernet Leaderboard parser.")
self.session = Session()
def startUpdate(self):
"""
Initiate an update using Twisted.
The request is handled asynchronously. It will call onUpdate if it's
successful and onError otherwise.
"""
deferred = succeed(None)
deferred.addCallbacks(self.onUpdate, self.onError)
return deferred
def onUpdate(self, value):
"""Patches are updated by a cronjob, no need to do it here."""
return None
def onError(self, error):
"""Error callback for retrieving Uberent API data."""
log.err("Encountered an error: {0}".format(
error.getErrorMessage()))
return error
def top(self, league):
"""Start an update and return a deferred containing the results."""
updateDeferred = self.startUpdate()
newDeferred = Deferred()
league = league.capitalize()
def updateDone(value):
"""Callback method for update."""
entries = (self.session.query(LeaderBoardEntry.uid,
UberAccount.dname)
.outerjoin(UberAccount,
UberAccount.uid == LeaderBoardEntry.uid)
.filter(LeaderBoardEntry.league == league)
.order_by(LeaderBoardEntry.rank))
newDeferred.callback([e[1] for e in entries])
self.session.close()
updateDeferred.addCallback(updateDone)
return newDeferred
| mit | Python |
0ad6cb338bbf10c48049d5649b5cd41eab0ed8d1 | Add optional authorizer parameter to session class and function. | praw-dev/prawcore | prawcore/sessions.py | prawcore/sessions.py | """prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self, authorizer=None):
"""Preprare the connection to reddit's API.
:param authorizer: An instance of :class:`Authorizer`.
"""
self.authorizer = authorizer
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session(authorizer=None):
"""Return a :class:`Session` instance.
:param authorizer: An instance of :class:`Authorizer`.
"""
return Session(authorizer=authorizer)
| """prawcore.sessions: Provides prawcore.Session and prawcore.session."""
import requests
class Session(object):
"""The low-level connection interface to reddit's API."""
def __init__(self):
"""Preprare the connection to reddit's API."""
self._session = requests.Session()
def __enter__(self):
"""Allow this object to be used as a context manager."""
return self
def __exit__(self, *_args):
"""Allow this object to be used as a context manager."""
self.close()
def close(self):
"""Close the session and perform any clean up."""
self._session.close()
def session():
"""Return a :class:`Session` instance."""
return Session()
| bsd-2-clause | Python |
d6e2cf3354af9e4e7e15c486664f65fbd4525ce0 | test error handling a bit. | timo/zasim,timo/zasim | test/test_config.py | test/test_config.py | from __future__ import absolute_import
from zasim import config
import pytest
class TestConfig:
def test_random_1d(self):
a = config.RandomInitialConfiguration()
arr = a.generate((1000,))
assert not any(arr > 2)
assert not any(arr < 0)
assert any(arr == 0)
assert any(arr == 1)
assert len(arr) == 1000
b = config.RandomInitialConfiguration(base=3)
brr = b.generate((1000,))
assert not any(brr > 3)
assert not any(brr < 0)
assert any(brr == 0)
assert any(brr == 1)
assert any(brr == 2)
def test_random_1d_probabilities(self):
a = config.RandomInitialConfiguration(2, 0)
arr = a.generate((1000,))
assert not any(arr == 0)
assert all(arr == 1)
b = config.RandomInitialConfiguration(2, 1)
brr = b.generate((1000,))
assert not any(brr == 1)
assert all(brr == 0)
c = config.RandomInitialConfiguration(3, 0)
crr = c.generate((1000,))
assert not any(crr == 0)
assert any(crr == 1)
assert any(crr == 2)
def test_random_errors_1d(self):
with pytest.raises(ValueError):
a = config.RandomInitialConfiguration(2, 1.0, 1.0)
with pytest.raises(ValueError):
b = config.RandomInitialConfiguration(2, 0.1, 0.1, 0.8)
with pytest.raises(TypeError):
c = config.RandomInitialConfiguration(2, [0.1, 0.9])
with pytest.raises(ValueError):
d = config.RandomInitialConfiguration(2, 0.1, 0.8)
| from __future__ import absolute_import
from zasim import config
import pytest
class TestConfig:
def test_random_1d(self):
a = config.RandomInitialConfiguration()
arr = a.generate((1000,))
assert not any(arr > 2)
assert not any(arr < 0)
assert any(arr == 0)
assert any(arr == 1)
assert len(arr) == 1000
b = config.RandomInitialConfiguration(base=3)
brr = b.generate((1000,))
assert not any(brr > 3)
assert not any(brr < 0)
assert any(brr == 0)
assert any(brr == 1)
assert any(brr == 2)
def test_random_1d_probabilities(self):
a = config.RandomInitialConfiguration(2, 0)
arr = a.generate((1000,))
assert not any(arr == 0)
assert all(arr == 1)
b = config.RandomInitialConfiguration(2, 1)
brr = b.generate((1000,))
assert not any(brr == 1)
assert all(brr == 0)
c = config.RandomInitialConfiguration(3, 0)
crr = c.generate((1000,))
assert not any(crr == 0)
assert any(crr == 1)
assert any(crr == 2)
| bsd-3-clause | Python |
0be901aa2a73ba0eeaaebb7b5069c3a2756bdd7e | Put debug back | OzuYatamutsu/klima,OzuYatamutsu/klima,OzuYatamutsu/klima | test/test_influx.py | test/test_influx.py | from influx.influx_adapter import *
from influx.datapoint_utils import *
from influx.measurement_strings import *
from unittest import TestCase, main
from time import sleep
class TestInflux(TestCase):
def setUp(self):
self.db = get_client()
self.temp_measurement_test_str = 'klima-test_temperature'
self.humid_measurement_test_str = 'klima-test_humidity'
def test_if_db_initialized(self):
"""
Test whether we have a database object
"""
self.assertIsNotNone(self.db)
def test_can_write_data(self):
"""
Tests if we can construct and write a datapoint to the database
"""
influx_push_data(temp_val=10.0, humid_val=20.0, datapoint_type=DatapointType.SENSOR)
def test_can_read_data(self):
"""
Tests if we can read a previously written datapoint from the database
"""
influx_push_data(temp_val=10.0, humid_val=20.0, datapoint_type=DatapointType.SENSOR)
self.assertGreaterEqual(len(self.db.query("SELECT * FROM %s LIMIT 1" % temp_measurement_str)), 1)
self.assertGreaterEqual(len(self.db.query("SELECT * FROM %s LIMIT 1" % humidity_measurement_str)), 1)
def test_can_query_for_previous_timescale(self):
"""
Tests whether we can get a previous datapoint from a previous time
"""
influx_push_data(temp_val=10.0, humid_val=20.0, datapoint_type=DatapointType.SENSOR)
# Wait 2 seconds before query
sleep(2)
# DEBUG THE TEST
print(str(self.db.query("SELECT * FROM %s WHERE time >= now() - %s LIMIT 1" % (temp_measurement_str, '5s'))))
self.assertGreaterEqual(len(get_data_at_relative_time(temp_measurement_str, '5s')), 1)
self.assertGreaterEqual(len(get_data_at_relative_time(humidity_measurement_str, '5s')), 1)
if __name__ == '__main__':
main()
| from influx.influx_adapter import *
from influx.datapoint_utils import *
from influx.measurement_strings import *
from unittest import TestCase, main
from time import sleep
class TestInflux(TestCase):
def setUp(self):
self.db = get_client()
self.temp_measurement_test_str = 'klima-test_temperature'
self.humid_measurement_test_str = 'klima-test_humidity'
def test_if_db_initialized(self):
"""
Test whether we have a database object
"""
self.assertIsNotNone(self.db)
def test_can_write_data(self):
"""
Tests if we can construct and write a datapoint to the database
"""
influx_push_data(temp_val=10.0, humid_val=20.0, datapoint_type=DatapointType.SENSOR)
def test_can_read_data(self):
"""
Tests if we can read a previously written datapoint from the database
"""
influx_push_data(temp_val=10.0, humid_val=20.0, datapoint_type=DatapointType.SENSOR)
self.assertGreaterEqual(len(self.db.query("SELECT * FROM %s LIMIT 1" % temp_measurement_str)), 1)
self.assertGreaterEqual(len(self.db.query("SELECT * FROM %s LIMIT 1" % humidity_measurement_str)), 1)
def test_can_query_for_previous_timescale(self):
"""
Tests whether we can get a previous datapoint from a previous time
"""
influx_push_data(temp_val=10.0, humid_val=20.0, datapoint_type=DatapointType.SENSOR)
# Wait 2 seconds before query
sleep(2)
self.assertGreaterEqual(len(get_data_at_relative_time(temp_measurement_str, '5s')), 1)
self.assertGreaterEqual(len(get_data_at_relative_time(humidity_measurement_str, '5s')), 1)
if __name__ == '__main__':
main()
| mit | Python |
3f1fdf4223e1fcc324d23848e3927294516f287a | Remove extraneous whitespace. | microserv/frontend,microserv/frontend,microserv/frontend | editor_backend/editor_backend/views.py | editor_backend/editor_backend/views.py | from django.http import HttpResponse
from django.template.loader import get_template
from django.template import Context
from django.shortcuts import render
import json
import requests
NODE_ADDR = "http://127.0.0.1:9001"
publish_base_url = "http://despina.128.no/publish"
def get_publisher_url():
r = requests.get(NODE_ADDR + "/" + "publishing")
response_as_json = json.loads(r.text)
if response_as_json:
return response_as_json
else:
return None
def homepage(request):
return render(request, "homepage.html", {});
def editor(request):
return render(request, "editor_page.html", {});
def upload_article(request):
dict = request.POST.dict()
article = {"tags": dict["tags"], "description": dict["description"], "title": dict["title"]}
article["article"] = dict["article"].replace("src=\"//www.", "src=\"http://www.")
publisher_url = get_publisher_url()
if publisher_url:
r = requests.post("http://"+publisher_url+"/save_article", data = article)
else:
# Do some error handling here.
pass
return render(request, "editor_page.html", {});
def articles(request):
r = requests.get(publish_base_url + "/list")
d = r.json()
d["publisher_url"] = publish_base_url
return render(request, "articles.html", d);
def search(request):
return render(request, "search.html", {});
def about(request):
return render(request, "about.html", {});
| from django.http import HttpResponse
from django.template.loader import get_template
from django.template import Context
from django.shortcuts import render
import json
import requests
NODE_ADDR = "http://127.0.0.1:9001"
publish_base_url = "http://despina.128.no/publish"
def get_publisher_url():
r = requests.get(NODE_ADDR + "/" + "publishing")
response_as_json = json.loads(r.text)
if response_as_json:
return response_as_json
else:
return None
def homepage(request):
return render(request, "homepage.html", {});
def editor(request):
return render(request, "editor_page.html", {});
def upload_article(request):
dict = request.POST.dict()
article = {"tags": dict["tags"], "description": dict["description"], "title": dict["title"]}
article["article"] = dict["article"].replace("src=\"//www.", "src=\"http://www.")
publisher_url = get_publisher_url()
if publisher_url:
r = requests.post("http://"+publisher_url+"/save_article", data = article)
else:
# Do some error handling here.
pass
return render(request, "editor_page.html", {});
def articles(request):
r = requests.get(publish_base_url + "/list")
d = r.json()
d["publisher_url"] = publish_base_url
return render(request, "articles.html", d);
def search(request):
return render(request, "search.html", {});
def about(request):
return render(request, "about.html", {});
| mit | Python |
1dbacf5e964aab52278868fdb0050925f1b51d7e | bump version to 0.19.0-dev in develop after branching release/0.18.0 | dannyroberts/eulxml,emory-libraries/eulxml | eulxml/__init__.py | eulxml/__init__.py | # file eulxml/__init__.py
#
# Copyright 2010,2011 Emory University Libraries
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version_info__ = (0, 19, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([ str(i) for i in __version_info__[:-1] ])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
| # file eulxml/__init__.py
#
# Copyright 2010,2011 Emory University Libraries
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version_info__ = (0, 18, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([ str(i) for i in __version_info__[:-1] ])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
| apache-2.0 | Python |
e841662f28629a4ed841be99e82d8d92a42b1820 | Remove newline at EOF (flake8) | smanolloff/SublimeLinter-contrib-elixirc,doitian/SublimeLinter-contrib-elixirc | linter.py | linter.py | """This module exports the elixirc plugin class."""
import tempfile
import os
from SublimeLinter.lint import Linter
class Elixirc(Linter):
"""Provides an interface to elixirc."""
syntax = ("elixir")
executable = "elixirc"
tempfile_suffix = "-"
regex = (
r"(?:\*+\s\(.+\) )?(?P<filename>.+):(?P<line>\d+):"
r"(?:(?P<warning>\swarning:\s)|(?P<error>\s))"
r"(?P<message>.+)"
)
defaults = {
"include_dirs": [],
"pa": []
}
def cmd(self):
"""Override to accept options `include_dirs` and `pa`."""
tmpdir = os.path.join(tempfile.gettempdir(), 'SublimeLinter3')
command = [
self.executable_path,
'--warnings-as-errors',
'--ignore-module-conflict',
'-o', tmpdir
]
settings = self.get_view_settings()
dirs = settings.get('include_dirs', [])
paths = settings.get('pa', [])
for p in paths:
command.extend(["-pa", p])
for d in dirs:
command.extend(["-r", "%s/**/*.ex" % d])
return command
def split_match(self, match):
"""
Return the components of the match.
We override this because unrelated library files can throw errors,
and we only want errors from the linted file.
"""
if match:
# The linter seems to always change its working
# dir to that of the linted given file, so the
# reported error will contain a basename only.
if match.group('filename') != os.path.basename(self.filename):
match = None
return super().split_match(match)
| """This module exports the elixirc plugin class."""
import tempfile
import os
from SublimeLinter.lint import Linter
class Elixirc(Linter):
"""Provides an interface to elixirc."""
syntax = ("elixir")
executable = "elixirc"
tempfile_suffix = "-"
regex = (
r"(?:\*+\s\(.+\) )?(?P<filename>.+):(?P<line>\d+):"
r"(?:(?P<warning>\swarning:\s)|(?P<error>\s))"
r"(?P<message>.+)"
)
defaults = {
"include_dirs": [],
"pa": []
}
def cmd(self):
"""Override to accept options `include_dirs` and `pa`."""
tmpdir = os.path.join(tempfile.gettempdir(), 'SublimeLinter3')
command = [
self.executable_path,
'--warnings-as-errors',
'--ignore-module-conflict',
'-o', tmpdir
]
settings = self.get_view_settings()
dirs = settings.get('include_dirs', [])
paths = settings.get('pa', [])
for p in paths:
command.extend(["-pa", p])
for d in dirs:
command.extend(["-r", "%s/**/*.ex" % d])
return command
def split_match(self, match):
"""
Return the components of the match.
We override this because unrelated library files can throw errors,
and we only want errors from the linted file.
"""
if match:
# The linter seems to always change its working
# dir to that of the linted given file, so the
# reported error will contain a basename only.
if match.group('filename') != os.path.basename(self.filename):
match = None
return super().split_match(match)
| mit | Python |
4fa8f7cb8a0592ed1d37efa20fd4a23d12e88713 | Update regexp due to changes in stylint | jackbrewer/SublimeLinter-contrib-stylint | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 1.5.6, < 1.6.0'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# /path/to/file/example.styl
^.*$\s*
# 177:24 colors warning hexidecimal color should be a variable
(?P<line>\d+):(?P<near>\d+)\s*\w+\s*((?P<warning>warning)|(?P<error>error))\s*(?P<message>.+)$\s*
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jack Brewer
# Copyright (c) 2015 Jack Brewer
#
# License: MIT
#
"""This module exports the Stylint plugin class."""
from SublimeLinter.lint import NodeLinter, util
class Stylint(NodeLinter):
"""Provides an interface to stylint."""
npm_name = 'stylint'
syntax = 'stylus'
cmd = 'stylint @ *'
executable = 'stylint'
version_requirement = '>= 0.9.3'
regex = r'''(?xi)
# Comments show example output for each line of a Stylint warning
# 'Near' can contain trailing whitespace, which we avoid capturing
# Warning: commas must be followed by a space for readability
^((?P<warning>warning)|(?P<error>Error)):\s*(?P<message>.+)$\s*
# File: /path/to/file/example.styl
^.*$\s*
# Line: 46: color rgba(0,0,0,.5)
^Line:\s*(?P<line>\d+):\s*(?P<near>.*\S)
'''
multiline = True
error_stream = util.STREAM_STDOUT
tempfile_suffix = 'styl'
config_file = ('--config', '.stylintrc', '~')
| mit | Python |
982230c4d19bdda0b1e2f76cc731655e7fa4446f | Fix radar recruitment graph | renalreg/radar,renalreg/radar,renalreg/radar,renalreg/radar | api/radar_api/views/recruitment_stats.py | api/radar_api/views/recruitment_stats.py | from flask import request
from radar.cohorts import get_radar_cohort
from radar_api.serializers.recruitment_stats import DataPointsSerializer, CohortRecruitmentRequestSerializer, \
OrganisationRecruitmentRequestSerializer
from radar.models import CohortPatient, OrganisationPatient
from radar.recruitment_stats import recruitment_by_month
from radar.validation.core import ValidationError
from radar.views.core import response_json, ApiView
class CohortRecruitmentStatsView(ApiView):
    """API view returning per-month recruitment counts for one cohort."""

    @response_json(DataPointsSerializer)
    def get(self):
        # Parse and validate the query-string arguments.
        args = CohortRecruitmentRequestSerializer().args_to_value(request.args)
        cohort = args.get('cohort')
        if cohort is None:
            raise ValidationError({'cohort': 'This field is required.'})
        # Bucket cohort memberships by the month they were created.
        points = recruitment_by_month(
            CohortPatient.created_date,
            [CohortPatient.cohort == cohort],
        )
        return {'points': points}
class OrganisationRecruitmentStatsView(ApiView):
    """API view returning per-month recruitment counts for one organisation."""

    @response_json(DataPointsSerializer)
    def get(self):
        # Parse and validate the query-string arguments.
        args = OrganisationRecruitmentRequestSerializer().args_to_value(request.args)
        organisation = args.get('organisation')
        if organisation is None:
            raise ValidationError({'organisation': 'This field is required.'})
        # Bucket organisation memberships by the month they were created.
        points = recruitment_by_month(
            OrganisationPatient.created_date,
            [OrganisationPatient.organisation == organisation],
        )
        return {'points': points}
class PatientRecruitmentStatsView(ApiView):
    """Per-month recruitment counts for the RaDaR cohort as a whole."""

    @response_json(DataPointsSerializer)
    def get(self):
        # Buckets patients by the date they were recruited to the RaDaR
        # cohort (recruited_date, not created_date as the other views use).
        cohort = get_radar_cohort()
        points = recruitment_by_month(CohortPatient.recruited_date, [CohortPatient.cohort == cohort])
        return {'points': points}
def register_views(app):
    """Attach the recruitment-stats endpoints to the Flask app."""
    routes = [
        ('/cohort-recruitment-stats', CohortRecruitmentStatsView, 'cohort_recruitment_stats'),
        ('/organisation-recruitment-stats', OrganisationRecruitmentStatsView, 'organisation_recruitment_stats'),
        ('/patient-recruitment-stats', PatientRecruitmentStatsView, 'patient_recruitment_stats'),
    ]
    for url, view, endpoint in routes:
        app.add_url_rule(url, view_func=view.as_view(endpoint))
| from flask import request
from radar.organisations import get_radar_organisation
from radar_api.serializers.recruitment_stats import DataPointsSerializer, CohortRecruitmentRequestSerializer, \
OrganisationRecruitmentRequestSerializer
from radar.models import CohortPatient, OrganisationPatient
from radar.recruitment_stats import recruitment_by_month
from radar.validation.core import ValidationError
from radar.views.core import response_json, ApiView
class CohortRecruitmentStatsView(ApiView):
    """Per-month recruitment counts for a single cohort."""

    @response_json(DataPointsSerializer)
    def get(self):
        serializer = CohortRecruitmentRequestSerializer()
        args = serializer.args_to_value(request.args)
        cohort = args.get('cohort')
        if cohort is None:
            raise ValidationError({'cohort': 'This field is required.'})
        # Bucket cohort memberships by the month they were created.
        points = recruitment_by_month(CohortPatient.created_date, [CohortPatient.cohort == cohort])
        return {'points': points}
class OrganisationRecruitmentStatsView(ApiView):
    """Per-month recruitment counts for a single organisation."""

    @response_json(DataPointsSerializer)
    def get(self):
        serializer = OrganisationRecruitmentRequestSerializer()
        args = serializer.args_to_value(request.args)
        organisation = args.get('organisation')
        if organisation is None:
            raise ValidationError({'organisation': 'This field is required.'})
        # Bucket organisation memberships by the month they were created.
        points = recruitment_by_month(OrganisationPatient.created_date, [OrganisationPatient.organisation == organisation])
        return {'points': points}
class PatientRecruitmentStatsView(ApiView):
    """Per-month recruitment counts for the RaDaR organisation as a whole."""

    @response_json(DataPointsSerializer)
    def get(self):
        # NOTE(review): buckets by the date the organisation membership was
        # created — confirm this is the intended definition of "recruited".
        organisation = get_radar_organisation()
        points = recruitment_by_month(OrganisationPatient.created_date, [OrganisationPatient.organisation == organisation])
        return {'points': points}
def register_views(app):
    """Attach the recruitment-stats endpoints to the Flask app."""
    app.add_url_rule('/cohort-recruitment-stats', view_func=CohortRecruitmentStatsView.as_view('cohort_recruitment_stats'))
    app.add_url_rule('/organisation-recruitment-stats', view_func=OrganisationRecruitmentStatsView.as_view('organisation_recruitment_stats'))
    app.add_url_rule('/patient-recruitment-stats', view_func=PatientRecruitmentStatsView.as_view('patient_recruitment_stats'))
| agpl-3.0 | Python |
cb60f8968c4dc772c60ebf9c5ca77bc950feed29 | fix linting of unsaved files (fix #18) | nirm03/SublimeLinter-clang,Optiligence/SublimeLinter-clang | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by nirm03
# Copyright (c) 2013 nirm03
#
# License: MIT
#
"""This module exports the Clang plugin class."""
import shlex
from SublimeLinter.lint import Linter, persist
import sublime
import os
import string
def get_project_folder():
    """Return the folder of the current project file.

    Falls back to the active file's folder when no project is open, and
    to '.' when there is no saved file either (unsaved view).
    """
    window = sublime.active_window()
    path = window.project_file_name()
    if not path:
        # Use current file's folder when no project file is opened.
        path = window.active_view().file_name()
    return os.path.dirname(path) if path else '.'
def apply_template(s):
    """Expand template placeholders (currently ${project_folder}) in *s*."""
    # safe_substitute leaves unknown placeholders untouched instead of raising.
    return string.Template(s).safe_substitute(project_folder=get_project_folder())
class Clang(Linter):
    """Provides an interface to clang."""

    syntax = ('c', 'c improved', 'c++', 'c++11')
    executable = 'clang'
    regex = (r'<stdin>:(?P<line>\d+):'
             r'((?P<col>\d*): )?'  # column number, colon and space are only applicable for single line messages
             # several lines of anything followed by
             # either error/warning/note or newline (= irrelevant backtrace content)
             # (lazy quantifiers so we don’t skip what we seek)
             r'(.*?((?P<error>error)|(?P<warning>warning|note)|\r?\n))+?'
             r': (?P<message>.+)'  # match the remaining content of the current line for output
             )
    multiline = True
    defaults = {
        'include_dirs': [],
        'extra_flags': ""
    }
    # Syntax-only check, no caret diagnostics (the regex above expects
    # single-line messages), all warnings enabled.
    base_cmd = (
        'clang -fsyntax-only '
        '-fno-caret-diagnostics -Wall '
    )

    def cmd(self):
        """
        Return the command line to execute.

        We override this method, so we can add extra flags and include paths
        based on the 'include_dirs' and 'extra_flags' settings.
        """
        result = self.base_cmd
        # Select the language front end from the view's syntax.
        if persist.get_syntax(self.view) in ['c', 'c improved']:
            result += ' -x c '
        elif persist.get_syntax(self.view) in ['c++', 'c++11']:
            result += ' -x c++ '
        settings = self.get_view_settings()
        # ${project_folder} placeholders in the settings are expanded here.
        result += apply_template( settings.get('extra_flags', '') )
        include_dirs = settings.get('include_dirs', [])
        if include_dirs:
            # Quote each include path so paths containing spaces survive.
            result += apply_template( ' '.join([' -I ' + shlex.quote(include) for include in include_dirs]) )
        # Trailing '-' makes clang read the source from stdin, matching the
        # <stdin> prefix expected by the regex.
        return result + ' -'
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by nirm03
# Copyright (c) 2013 nirm03
#
# License: MIT
#
"""This module exports the Clang plugin class."""
import shlex
from SublimeLinter.lint import Linter, persist
import sublime
import os
import string
def get_project_folder():
    """Return the folder of the current project file.

    Falls back to the active file's folder when no project is open, and
    to '.' when no file is open either.  The fallback avoids passing None
    (returned by file_name() for unsaved views) to os.path.dirname, which
    would raise a TypeError and break linting of unsaved files.
    """
    proj_file = sublime.active_window().project_file_name()
    if proj_file:
        return os.path.dirname(proj_file)
    # Use current file's folder when no project file is opened.
    file_name = sublime.active_window().active_view().file_name()
    if file_name:
        return os.path.dirname(file_name)
    return '.'
def apply_template(s):
    """Expand template placeholders (currently ${project_folder}) in *s*."""
    mapping = {
        "project_folder": get_project_folder()
    }
    templ = string.Template(s)
    # safe_substitute leaves unknown placeholders untouched instead of raising.
    return templ.safe_substitute(mapping)
class Clang(Linter):
    """Provides an interface to clang."""

    syntax = ('c', 'c improved', 'c++', 'c++11')
    executable = 'clang'
    regex = (r'<stdin>:(?P<line>\d+):'
             r'((?P<col>\d*): )?'  # column number, colon and space are only applicable for single line messages
             # several lines of anything followed by
             # either error/warning/note or newline (= irrelevant backtrace content)
             # (lazy quantifiers so we don’t skip what we seek)
             r'(.*?((?P<error>error)|(?P<warning>warning|note)|\r?\n))+?'
             r': (?P<message>.+)'  # match the remaining content of the current line for output
             )
    multiline = True
    defaults = {
        'include_dirs': [],
        'extra_flags': ""
    }
    # Syntax-only check, no caret diagnostics (the regex above expects
    # single-line messages), all warnings enabled.
    base_cmd = (
        'clang -fsyntax-only '
        '-fno-caret-diagnostics -Wall '
    )

    def cmd(self):
        """
        Return the command line to execute.

        We override this method, so we can add extra flags and include paths
        based on the 'include_dirs' and 'extra_flags' settings.
        """
        result = self.base_cmd
        # Select the language front end from the view's syntax.
        if persist.get_syntax(self.view) in ['c', 'c improved']:
            result += ' -x c '
        elif persist.get_syntax(self.view) in ['c++', 'c++11']:
            result += ' -x c++ '
        settings = self.get_view_settings()
        # ${project_folder} placeholders in the settings are expanded here.
        result += apply_template( settings.get('extra_flags', '') )
        include_dirs = settings.get('include_dirs', [])
        if include_dirs:
            # Quote each include path so paths containing spaces survive.
            result += apply_template( ' '.join([' -I ' + shlex.quote(include) for include in include_dirs]) )
        # Trailing '-' makes clang read the source from stdin, matching the
        # <stdin> prefix expected by the regex.
        return result + ' -'
| mit | Python |
4e779e736f9a79298c884ab5c34fc9032d9cdb3e | Move form calls to views.py. | rockwolf/python,rockwolf/python,rockwolf/python,rockwolf/python,rockwolf/python,rockwolf/python | fade/fade/views.py | fade/fade/views.py | #!/usr/bin/env python
"""
See LICENSE.txt file for copyright and license details.
"""
from flask import render_template, session, request, abort
from forms import FormLeveragedContracts, FormTradingJournal
from ctypes import cdll
lcf = cdll.LoadLibrary('calculator_finance.so')
@app.route('/')
@app.route('/home')
@app.route('/home/')
def render_home():
    """Render the index page with the default user and welcome message."""
    return render_template('index.tpl', p_user = 'admin', p_message = 'Welcome!')
@app.route('/leverage', methods = ['GET', 'POST'])
@app.route('/leverage/', methods = ['GET', 'POST'])
def render_leverage():
    """
    Renders the leverage page.
    """
    l_form = FormLeveragedContracts()
    if l_form.validate_on_submit():
        # Delegate the calculation to the native calculator_finance library.
        # NOTE(review): ctypes defaults to a C int return type here — confirm
        # the shared library's signature matches.
        l_leveraged_contracts = lcf.calculate_leveraged_contracts(int(request.form['p_contracts']))
        return render_template('leverage.tpl', p_form = l_form, p_leveraged_contracts = l_leveraged_contracts)
    # GET request (or failed validation): show the empty form.
    return render_template('leverage.tpl', p_form = l_form)
@app.route('/tradingjournal/', methods = ['GET', 'POST'])
def render_tradingjournal():
    """
    Renders the trading journal page.

    Fixes: the submit branch previously referenced an undefined variable
    (l_leveraged_contracts, a NameError at runtime) and rendered the
    leverage template instead of the trading-journal one.
    """
    l_form = FormTradingJournal()
    if l_form.validate_on_submit():
        l_trade_id = request.form['p_trade_id']
        return render_template('tradingjournal.tpl', p_form = l_form, p_trade_id = l_trade_id)
    # GET request (or failed validation): show the empty form.
    return render_template('tradingjournal.tpl', p_form = l_form)
| bsd-3-clause | Python | |
266e0976ee41e4dd1a9c543c84d422a8fba61230 | Check if epages6 settings are configured | ePages-rnd/SublimeLinter-contrib-tlec | linter.py | linter.py | #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the Tlec plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class Tlec(Linter):
    """Provides an interface to tlec."""

    def cmd(self):
        """Build the lint command line, or disable linting for this view
        when the 'ep6vm' settings are not configured."""
        if self.view.settings().get('ep6vm'):
            return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', self.view.settings().get('ep6vm')['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@'];
        else:
            # No 'ep6vm' settings: return an empty command so nothing runs.
            return []

    executable = 'python3'
    syntax = ('html', 'tle')
    regex = r'(?P<message>.+?) at line (?P<line>\d+)(, near (?P<near>.+?))?'
    error_stream = util.STREAM_BOTH
    tempfile_suffix = 'html'
| #
# linter.py
# Linter for SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Jonas Gratz
# Copyright (c) 2015 Jonas Gratz
#
# License: MIT
#
"""This module exports the Tlec plugin class."""
import sublime
from SublimeLinter.lint import Linter, util
class Tlec(Linter):
    """Provides an interface to tlec."""

    def cmd(self):
        """Build the lint command line, or disable linting for this view
        when the 'ep6vm' settings are not configured.

        Fixes: subscripting settings().get('ep6vm') raised
        "TypeError: 'NoneType' object is not subscriptable" whenever the
        'ep6vm' view setting was absent.
        """
        ep6vm = self.view.settings().get('ep6vm')
        if not ep6vm:
            # No 'ep6vm' settings: return an empty command so nothing runs.
            return []
        return [self.executable_path, sublime.packages_path() + '/Epages6/ep6-tools.py', '--vm', ep6vm['vm'], '--lint', '--file', self.view.file_name(), '--user', 'root', '--password', 'qwert6', '--ignore-me', '@']

    executable = 'python3'
    syntax = ('html', 'tle')
    regex = r'(?P<message>.+?) at line (?P<line>\d+)(, near (?P<near>.+?))?'
    error_stream = util.STREAM_BOTH
    tempfile_suffix = 'html'
| mit | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.