commit stringlengths 40 40 | subject stringlengths 4 1.73k | repos stringlengths 5 127k | old_file stringlengths 2 751 | new_file stringlengths 2 751 | new_contents stringlengths 1 8.98k | old_contents stringlengths 0 6.59k | license stringclasses 13
values | lang stringclasses 23
values |
|---|---|---|---|---|---|---|---|---|
ad54fc8030c47dbacfd80665a6ae3b7ad6a0270d | Fix docstring for is_pyinstaller(). | angr/angr,angr/angr,angr/angr | angr/utils/env.py | angr/utils/env.py | import sys
def is_pyinstaller() -> bool:
"""
Detect if we are currently running as a PyInstaller-packaged program.
:return: True if we are running as a PyInstaller-packaged program. False if we are running in Python directly
(e.g., development mode).
"""
return getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS')
| import sys
def is_pyinstaller() -> bool:
"""
Detect if we are currently running as a PyInstaller-packaged program.
:return: True if we are running as a PyInstaller-packaged program. False if we are running in Python directly
(e.g., development mode).
"""
return getattr(sys, 'frozen', False) and hasattr(sys, '_MEIPASS')
| bsd-2-clause | Python |
6797c9b705b0cfd5c770f68695402baf044510ff | Change default value of checkpoint frequency | harpribot/representation-music,harpribot/representation-music | utils/training_utils/params.py | utils/training_utils/params.py | # Data set split
TRAIN_FRACTION = 0.75
VALIDATE_FRACTION = 0.1
TEST_FRACTION = 0.15
TOTAL_NUM_EXAMPLES = 147052 # Total number of tracks
assert(TRAIN_FRACTION + VALIDATE_FRACTION + TEST_FRACTION == 1.0)
# Validation and check pointing
EVALUATION_FREQ = 900 # Number of mini-batch SGD steps after which an evaluation is performed.
CHECKPOINT_FREQ = 900 # Number of mini-batch SGD steps after which a checkpoint is taken.
# File paths
EXPT_DIRECTORY_PATH = "./Experiments" # Path of the Experiments directory
| # Data set split
TRAIN_FRACTION = 0.75
VALIDATE_FRACTION = 0.1
TEST_FRACTION = 0.15
TOTAL_NUM_EXAMPLES = 147052 # Total number of tracks
assert(TRAIN_FRACTION + VALIDATE_FRACTION + TEST_FRACTION == 1.0)
# Validation and check pointing
EVALUATION_FREQ = 1000 # Number of mini-batch SGD steps after which an evaluation is performed.
CHECKPOINT_FREQ = 1000 # Number of mini-batch SGD steps after which a checkpoint is taken.
# File paths
EXPT_DIRECTORY_PATH = "./Experiments" # Path of the Experiments directory
| mit | Python |
7a60b7ba2141a94d2d7ad93a07039100f965bb17 | Update IrrigatorsModel.py | mikelambson/tcid,mikelambson/tcid,mikelambson/tcid,mikelambson/tcid | backend/IrrigatorsModel.py | backend/IrrigatorsModel.py | import datetime, re;
from sqlalchemy.orm import validates;
from server import DB, FlaskServer;
from components.validation import validate_word;
class Irrigators(DB.Model):
id = DB.Column(DB.Integer, primary_key=True, autoincrement=True);
name = DB.Column(DB.Varchar(40));
notation = DB.Column(DB.Text);
created_by = DB.Column(DB.Integer, DB.ForeignKey('users.id'));
created_at = DB.Column(DB.DateTime);
updated_by = DB.Column(DB.Integer, DB.ForeignKey('users.id'), nullable=True);
updated_at = DB.Column(DB.DateTime, nullable=True);
def __init__(self, name, notation, created_at, updated_at):
self.name = name;
self.notation = notation;
self.created_at = datetime.datetime.now();
self.updated_at = self.created_at;
| import datetime, re;
from sqlalchemy.orm import validates;
from server import DB, FlaskServer;
from components.validation import validate_word;
class Irrigators(DB.Model):
id = DB.Column(DB.Integer, primary_key=True, autoincrement=True);
name = DB.Column(DB.Varchar(40));
notation = DB.Column(DB.Text);
created_by = DB.Column(DB.Integer, DB.ForeignKey('users.id'));
created_at = DB.Column(DB.DateTime);
updated_by = DB.Column(DB.Integer, DB.ForeignKey('users.id'), nullable=True);
updated_at = DB.Column(DB.DateTime, nullable=True);
""" @validates('subject')
def validate_subject(self, key, subject):
return validate_word(subject, 2, 320, valid=re.compile('[a-zA-Z\s]+'))
"""
def __init__(self, name, notation, created_at, updated_at):
self.name = name;
self.notation = notation;
self.created_at = datetime.datetime.now();
self.updated_at = self.created_at;
""" def serialize(self, session):
if session.clearance:
return {"id": self.id, "subject": self.subject, "deleted_by": self.deleted_by, "created_by": self.created_by, "created_at": self.created_at.strftime("%B %d, %Y %H:%M:%S"), "updated_at": self.updated_at.strftime("%B %d, %Y %H:%M:%S"), "updated_by": self.updated_by};
else: #public or protected
return {"delete": self.id} if self.deleted_by else {"id": self.id, "subject": self.subject};
@staticmethod
def create(args, secure):
auth = DB.Models["sessions"].authenticate(secure, paranoid=True, role="admin");
if auth.success == False:
return auth.serialize();
else:
security_question = SecurityQuestions(args["subject"], auth.session.user);
DB.session.add(security_question);
DB.session.commit();
return {"success": True};
@staticmethod
def exists(prop, value):
if prop == "subject":
return True if SecurityQuestions.query.filter_by(subject=value).count() else False;
else:
return False;#invalid property
@staticmethod
def request(mode, secure):
auth = DB.Models["sessions"].authenticate(secure, bypass_activation=True);
if auth.success == False:
return auth.serialize();
cache = DB.Models["caches"].cache("security_questions", mode, auth.session, False);
if mode == "config":
data = [i.serialize(auth.session) for i in SecurityQuestions.query.filter(SecurityQuestions.updated_at > cache).all()] if cache else [i.serialize(auth.session) for i in SecurityQuestions.query.all()];
elif mode == "index": #public
data = [i.serialize(auth.session) for i in SecurityQuestions.query.filter(SecurityQuestions.updated_at > cache, SecurityQuestions.deleted_by == None).all()] if cache else [i.serialize(auth.session) for i in SecurityQuestions.query.all()];
else: #login
data = [i.serialize(auth.session) for i in SecurityQuestions.query.filter(SecurityQuestions.updated_at > cache, SecurityQuestions.deleted_by == None, SecurityQuestions.id == auth.session.user.security_question.id).all()] if cache else [i.serialize(auth.session) for i in SecurityQuestions.query.filter_by(id = auth.session.user.security_question.id).all()];
return {"success": True, "items": len(data), "data": data};
@staticmethod
def update(args, secure):
auth = DB.Models["sessions"].authenticate(secure, paranoid=True, role="admin");
if auth.success == False:
return auth.serialize();
else:
security_question = SecurityQuestions.query.get(args["id"]);
for param, value in args.iteritems():
if param == "subject":
security_question.subject = value;
elif param == "deleted_by":
security_question.deleted_by = auth.session.user if value == True else None;
security_question.updater = auth.session.user;
security_question.updated_at = datetime.datetime.now();
DB.session.commit();
return {"success": True};
"""
| bsd-3-clause | Python |
3bb167fdbf4e4e3f41a7dda6dbdb2005f3be6d52 | make assert python2.6 compatible | lamby/django-extensions,zefciu/django-extensions,helenst/django-extensions,mandx/django-extensions,maroux/django-extensions,barseghyanartur/django-extensions,dpetzold/django-extensions,mandx/django-extensions,helenst/django-extensions,JoseTomasTocino/django-extensions,gvangool/django-extensions,kevgathuku/django-extensions,haakenlid/django-extensions,nikolas/django-extensions,kevgathuku/django-extensions,kevgathuku/django-extensions,joeyespo/django-extensions,rodo/django-extensions,marctc/django-extensions,lamby/django-extensions,helenst/django-extensions,levic/django-extensions,zefciu/django-extensions,bionikspoon/django-extensions,haakenlid/django-extensions,linuxmaniac/django-extensions,VishvajitP/django-extensions,frewsxcv/django-extensions,ewjoachim/django-extensions,atchariya/django-extensions,artscoop/django-extensions,bionikspoon/django-extensions,frewsxcv/django-extensions,django-extensions/django-extensions,atchariya/django-extensions,fusionbox/django-extensions,django-extensions/django-extensions,Moulde/django-extensions,JoseTomasTocino/django-extensions,joeyespo/django-extensions,t1m0thy/django-extensions,maroux/django-extensions,rodo/django-extensions,jpadilla/django-extensions,nikolas/django-extensions,maroux/django-extensions,gvangool/django-extensions,bionikspoon/django-extensions,rodo/django-extensions,ewjoachim/django-extensions,JoseTomasTocino/django-extensions,linuxmaniac/django-extensions,levic/django-extensions,t1m0thy/django-extensions,joeyespo/django-extensions,levic/django-extensions,github-account-because-they-want-it/django-extensions,marctc/django-extensions,dpetzold/django-extensions,fusionbox/django-extensions,gvangool/django-extensions,lamby/django-extensions,Moulde/django-extensions,VishvajitP/django-extensions,django-extensions/django-extensions,ctrl-alt-d/django-extensions,barseghyanartur/django-extensions,VishvajitP/django-extensions,t1m0thy/django-extens
ions,github-account-because-they-want-it/django-extensions,nikolas/django-extensions,Moulde/django-extensions,marctc/django-extensions,ewjoachim/django-extensions,jpadilla/django-extensions,artscoop/django-extensions,barseghyanartur/django-extensions,artscoop/django-extensions,haakenlid/django-extensions,atchariya/django-extensions,jpadilla/django-extensions,ctrl-alt-d/django-extensions,linuxmaniac/django-extensions,ctrl-alt-d/django-extensions,frewsxcv/django-extensions,github-account-because-they-want-it/django-extensions,dpetzold/django-extensions,zefciu/django-extensions,mandx/django-extensions | django_extensions/tests/json_field.py | django_extensions/tests/json_field.py | import unittest
from django.db import connection
from django.conf import settings
from django.core.management import call_command
from django.db.models import loading
from django.db import models
from django_extensions.db.fields.json import JSONField
class TestModel(models.Model):
a = models.IntegerField()
j_field = JSONField()
class JsonFieldTest(unittest.TestCase):
def setUp(self):
self.old_installed_apps = settings.INSTALLED_APPS
settings.INSTALLED_APPS = list(settings.INSTALLED_APPS)
settings.INSTALLED_APPS.append('django_extensions.tests')
loading.cache.loaded = False
call_command('syncdb', verbosity=0)
def tearDown(self):
settings.INSTALLED_APPS = self.old_installed_apps
def testCharFieldCreate(self):
j = TestModel.objects.create(a=6, j_field=dict(foo='bar'))
def testEmptyList(self):
j = TestModel.objects.create(a=6, j_field=[])
self.assertTrue(isinstance(j.j_field, list))
self.assertEquals(j.j_field, [])
| import unittest
from django.db import connection
from django.conf import settings
from django.core.management import call_command
from django.db.models import loading
from django.db import models
from django_extensions.db.fields.json import JSONField
class TestModel(models.Model):
a = models.IntegerField()
j_field = JSONField()
class JsonFieldTest(unittest.TestCase):
def setUp(self):
self.old_installed_apps = settings.INSTALLED_APPS
settings.INSTALLED_APPS = list(settings.INSTALLED_APPS)
settings.INSTALLED_APPS.append('django_extensions.tests')
loading.cache.loaded = False
call_command('syncdb', verbosity=0)
def tearDown(self):
settings.INSTALLED_APPS = self.old_installed_apps
def testCharFieldCreate(self):
j = TestModel.objects.create(a=6, j_field=dict(foo='bar'))
def testEmptyList(self):
j = TestModel.objects.create(a=6, j_field=[])
self.assertIsInstance(j.j_field, list)
self.assertEquals(j.j_field, [])
| mit | Python |
42c0925b875858752dd86130c42818c43af28872 | change poke command to accept ping as well | mikevb1/discordbot,mikevb1/lagbot | cogs/meta.py | cogs/meta.py | from collections import OrderedDict
import datetime
from discord.ext import commands
import discord
class Meta:
def __init__(self, bot):
self.bot = bot
@commands.command()
async def info(self):
"""Display bot information."""
source_link = 'https://github.com/mikevb1/discordbot'
message = 'The source code can be found at {}.'.format(source_link)
if self.bot.owner_name:
message += '\nThe developer is {}.'.format(self.bot.owner_name)
await self.bot.say(message)
@commands.command()
async def uptime(self):
"""Display bot uptime."""
now = datetime.datetime.utcnow()
delta = now - self.bot.uptime
hours, remainder = divmod(int(delta.total_seconds()), 3600)
minutes, seconds = divmod(remainder, 60)
days, hours = divmod(hours, 24)
if days:
fmt = '{d} day{dp}, {h} hour{hp}, {m} minute{mp}, {s} second{sp}'
elif hours:
fmt = '{h} hour{hp}, {m} minute{mp}, {s} second{sp}'
elif minutes:
fmt = '{m} minute{mp}, {s} second{sp}'
else:
fmt = '{s} second{sp}'
def plural(num):
return 's' if num != 1 else ''
up = fmt.format(
d=days, dp=plural(days),
h=hours, hp=plural(hours),
m=minutes, mp=plural(minutes),
s=seconds, sp=plural(seconds))
await self.bot.say('Uptime: **{}**'.format(up))
@commands.command(pass_context=True, aliases=['ping'])
async def poke(self, ctx):
"""Make sure bot is working."""
if ctx.invoked_with == 'poke':
reply = 'I need an adult!'
else:
reply = 'Pong!'
await self.bot.say(reply)
def setup(bot):
"""Magic function to set up cog."""
bot.add_cog(Meta(bot))
| from collections import OrderedDict
import datetime
import random
from discord.ext import commands
import discord
class Meta:
def __init__(self, bot):
self.bot = bot
@commands.command()
async def info(self):
"""Display bot information."""
source_link = 'https://github.com/mikevb1/discordbot'
message = 'The source code can be found at {}.'.format(source_link)
if self.bot.owner_name:
message += '\nThe developer is {}.'.format(self.bot.owner_name)
await self.bot.say(message)
@commands.command()
async def uptime(self):
"""Display bot uptime."""
now = datetime.datetime.utcnow()
delta = now - self.bot.uptime
hours, remainder = divmod(int(delta.total_seconds()), 3600)
minutes, seconds = divmod(remainder, 60)
days, hours = divmod(hours, 24)
if days:
fmt = '{d} day{dp}, {h} hour{hp}, {m} minute{mp}, {s} second{sp}'
elif hours:
fmt = '{h} hour{hp}, {m} minute{mp}, {s} second{sp}'
elif minutes:
fmt = '{m} minute{mp}, {s} second{sp}'
else:
fmt = '{s} second{sp}'
def plural(num):
return 's' if num != 1 else ''
up = fmt.format(
d=days, dp=plural(days),
h=hours, hp=plural(hours),
m=minutes, mp=plural(minutes),
s=seconds, sp=plural(seconds))
await self.bot.say('Uptime: **{}**'.format(up))
@commands.command()
async def poke(self):
"""Make sure bot is working."""
replies = [
'Hey!', 'Ow!', 'Stop that!', "I'm here!", 'I need an adult!']
await self.bot.say(random.choice(replies))
def setup(bot):
"""Magic function to set up cog."""
bot.add_cog(Meta(bot))
| mit | Python |
507acdd38ea3333c0b2794264a464678d1898d97 | fix issue with smoothing test: the smoothing region was too wide relative to the overall data length. | lzkelley/zcode,lzkelley/zcode | zcode/math/tests/test_numeric.py | zcode/math/tests/test_numeric.py | """Test methods for `zcode/math/math_core.py`.
Can be run with:
$ nosetests math/tests/test_math_core.py
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
from numpy.testing import run_module_suite
from nose.tools import assert_true
import zcode
import zcode.plot
from zcode.math import numeric
class TestMathCore(object):
@classmethod
def setup_class(cls):
np.random.seed(9865)
cls.SIZE = 1000
cls.r2 = np.random.uniform(-1.0, 1.0, size=cls.SIZE)
def test_smooth(self):
r2 = self.r2
ARR_SIZE = r2.size
AMP = 10.0
NOISE = 1.4
SMOOTH_LENGTHS = [1, 4, 10]
NUM = len(SMOOTH_LENGTHS)
xx = np.linspace(-np.pi/4.0, 3.0*np.pi, num=ARR_SIZE)
arrs = [AMP*np.sin(xx) + NOISE*r2
for ii in range(len(SMOOTH_LENGTHS))]
sm_arrs = [numeric.smooth(arr, smlen)
for (arr, smlen) in zip(arrs, SMOOTH_LENGTHS)]
# from matplotlib import pyplot as plt
# fig, ax = plt.subplots()
# colors = zcode.plot.plot_core.color_cycle(NUM)
# for ii, (ar, sm) in enumerate(zip(arrs, sm_arrs)):
# ax.plot(ar, color=colors[ii], alpha=0.5)
# ax.plot(sm, color=colors[ii], alpha=0.5, ls='--')
#
# fig.savefig('test.pdf')
# plt.close('all')
# average derivative should be progressively smaller
stds = [np.mean(np.diff(sm[10:-10])) for sm in sm_arrs]
print("stds = {}".format(stds))
assert_true(stds[0] > stds[1] > stds[2])
# Smoothing length 1 should have no effect
assert_true(np.all(sm_arrs[0] == arrs[0]))
return
# Run all methods as if with `nosetests ...`
if __name__ == "__main__":
run_module_suite()
| """Test methods for `zcode/math/math_core.py`.
Can be run with:
$ nosetests math/tests/test_math_core.py
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
from numpy.testing import run_module_suite
from nose.tools import assert_true
from zcode.math import numeric
class TestMathCore(object):
@classmethod
def setup_class(cls):
np.random.seed(9865)
cls.SIZE = 100
cls.r2 = np.random.uniform(-1.0, 1.0, size=cls.SIZE)
def test_smooth(self):
r2 = self.r2
ARR_SIZE = r2.size
AMP = 10.0
NOISE = 1.4
SMOOTH_LENGTHS = [1, 4, 16]
xx = np.linspace(-np.pi/4.0, 3.0*np.pi, num=ARR_SIZE)
arrs = [AMP*np.sin(xx) + NOISE*r2
for ii in range(len(SMOOTH_LENGTHS))]
smArrs = [numeric.smooth(arr, smlen)
for (arr, smlen) in zip(arrs, SMOOTH_LENGTHS)]
# average derivative should be progressively smaller
stdDiffs = [np.mean(np.diff(sm)) for sm in smArrs]
assert_true(stdDiffs[0] > stdDiffs[1] > stdDiffs[2])
# Smoothing length 1 should have no effect
assert_true(np.all(smArrs[0] == arrs[0]))
# Run all methods as if with `nosetests ...`
if __name__ == "__main__":
run_module_suite()
| mit | Python |
f311dd7c560c3a86fa92a75aeb73a892a3be40ba | set maturity to Beta | OCA/account-fiscal-rule,OCA/account-fiscal-rule | account_avatax_sale/__manifest__.py | account_avatax_sale/__manifest__.py | {
"name": "Taxes on Sales Orders using Avalara Avatax API",
"version": "13.0.1.0.0",
"author": "Open Source Integrators, Fabrice Henrion, Odoo SA,"
" Odoo Community Association (OCA)",
"summary": "Sales Orders with automatic Tax application using Avatax",
"license": "AGPL-3",
"category": "Accounting",
"depends": ["account_avatax", "sale"],
"data": ["views/sale_order_view.xml"],
"auto_install": True,
"development_status": "Beta",
}
| {
"name": "Taxes on Sales Orders using Avalara Avatax API",
"version": "13.0.1.0.0",
"author": "Open Source Integrators, Fabrice Henrion, Odoo SA,"
" Odoo Community Association (OCA)",
"summary": "Sales Orders with automatic Tax application using Avatax",
"license": "AGPL-3",
"category": "Accounting",
"depends": ["account_avatax", "sale"],
"data": ["views/sale_order_view.xml"],
"auto_install": True,
}
| agpl-3.0 | Python |
beff28a4d695b6448c3a1f81aa09bfc685a77e60 | Update kafka start/stop script | wangyangjun/StreamBench,wangyangjun/StreamBench,wangyangjun/StreamBench,wangyangjun/RealtimeStreamBenchmark,wangyangjun/StreamBench,wangyangjun/RealtimeStreamBenchmark,wangyangjun/RealtimeStreamBenchmark,wangyangjun/RealtimeStreamBenchmark,wangyangjun/RealtimeStreamBenchmark,wangyangjun/StreamBench,wangyangjun/StreamBench,wangyangjun/RealtimeStreamBenchmark,wangyangjun/RealtimeStreamBenchmark,wangyangjun/StreamBench,wangyangjun/StreamBench,wangyangjun/RealtimeStreamBenchmark,wangyangjun/RealtimeStreamBenchmark,wangyangjun/StreamBench | script/kafkaServer.py | script/kafkaServer.py | #!/bin/python
from __future__ import print_function
import subprocess
import sys
import json
from util import appendline, get_ip_address
if __name__ == "__main__":
# start server one by one
if len(sys.argv) < 2 or sys.argv[1] not in ['start', 'stop']:
sys.stderr.write("Usage: python %s start or stop\n" % (sys.argv[0]))
sys.exit(1)
else:
config = json.load(open('cluster-config.json'))
if sys.argv[1] == 'start':
for node in config['nodes']:
subprocess.call(['ssh', 'cloud-user@'+node['ip'], 'nohup bash /usr/local/kafka/bin/kafka-server-start.sh /usr/local/kafka/config/server.properties&'])
else:
for node in config['nodes']:
subprocess.call(['ssh', 'cloud-user@'+node['ip'], 'bash /usr/local/kafka/bin/kafka-server-stop.sh'])
| #!/bin/python
from __future__ import print_function
import subprocess
import sys
import json
from util import appendline, get_ip_address
if __name__ == "__main__":
# start server one by one
if len(sys.argv) < 2 or sys.argv[1] not in ['start', 'stop']:
sys.stderr.write("Usage: python %s start or stop\n" % (sys.argv[0]))
sys.exit(1)
else:
config = json.load(open('cluster-config.json'))
if sys.argv[1] == 'start':
for node in config['nodes']:
subprocess.call(['ssh', 'cloud-user@'+node['ip'], 'bash /usr/local/kafka/bin/kafka-server-start.sh'])
else:
for node in config['nodes']:
subprocess.call(['ssh', 'cloud-user@'+node['ip'], 'bash /usr/local/kafka/bin/kafka-server-stop.sh'])
| apache-2.0 | Python |
582f08f1d7bd52076832cc6031473c4363277cf6 | set the default flavor | sassoftware/conary,sassoftware/conary,sassoftware/conary,sassoftware/conary,sassoftware/conary | conarycfg.py | conarycfg.py | #
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import deps
import deps.arch
import deps.deps
import os
import versions
import sys
class SrsConfiguration:
def read(self, file):
if os.path.exists(file):
f = open(file, "r")
for line in f:
self.configLine(line)
f.close()
def configLine(self, line):
line = line.strip()
if not line or line[0] == '#':
return
(key, val) = line.split()
if not self.__dict__.has_key(key):
raise ParseError, ("configuration value %s unknown" % key)
self.__dict__[key] = val
try:
if key == "defaultbranch":
self.defaultbranch = \
versions.VersionFromString(self.defaultbranch)
if self.defaultbranch.isVersion():
sys.stderr.write("The configured default branch %s " +
"specifies version, not a branch.\n" %
self.defaultbranch.asString())
elif key == "installbranch":
self.installbranch = versions.BranchName(self.installbranch)
except versions.ParseError, e:
raise ParseError, str(e)
def display(self):
keys = self.__dict__.keys()
keys.sort()
for item in keys:
if type(self.__dict__[item]) is str:
print "%-20s %s" % (item, self.__dict__[item])
elif isinstance(self.__dict__[item], versions.Version):
print "%-20s %s" % (item, self.__dict__[item].asString())
elif isinstance(self.__dict__[item], versions.BranchName):
print "%-20s %s" % (item, self.__dict__[item])
else:
print "%-20s (unknown type)" % (item)
def __init__(self):
self.reppath = "/var/lib/srsrep"
self.root = "/"
self.sourcepath = "/usr/src/srs/sources"
self.buildpath = "/usr/src/srs/builds"
self.defaultbranch = None
self.installbranch = None
self.lookaside = "/var/cache/srs"
self.dbpath = "/var/lib/srsdb"
self.tmpdir = "/var/tmp/"
self.instructionSet = deps.arch.current()
self.flavor = deps.deps.DependencySet()
self.flavor.addDep(deps.deps.InstructionSetDependency,
self.instructionSet)
self.defaultbranch = versions.VersionFromString("/localhost@local:head")
self.read("/etc/srsrc")
self.read(os.environ["HOME"] + "/" + ".srsrc")
class SrsCfgError(Exception):
"""
Ancestor for all exceptions raised by the srscfg module.
"""
pass
class ParseError(SrsCfgError):
"""
Indicates that an error occured parsing the config file.
"""
def __str__(self):
return self.str
def __init__(self, str):
self.str = str
| #
# Copyright (c) 2004 Specifix, Inc.
# All rights reserved
#
import os
import versions
import sys
class SrsConfiguration:
def read(self, file):
if os.path.exists(file):
f = open(file, "r")
for line in f:
self.configLine(line)
f.close()
def configLine(self, line):
line = line.strip()
if not line or line[0] == '#':
return
(key, val) = line.split()
if not self.__dict__.has_key(key):
raise ParseError, ("configuration value %s unknown" % key)
self.__dict__[key] = val
try:
if key == "defaultbranch":
self.defaultbranch = \
versions.VersionFromString(self.defaultbranch)
if self.defaultbranch.isVersion():
sys.stderr.write("The configured default branch %s " +
"specifies version, not a branch.\n" %
self.defaultbranch.asString())
elif key == "installbranch":
self.installbranch = versions.BranchName(self.installbranch)
except versions.ParseError, e:
raise ParseError, str(e)
def display(self):
keys = self.__dict__.keys()
keys.sort()
for item in keys:
if type(self.__dict__[item]) is str:
print "%-20s %s" % (item, self.__dict__[item])
elif isinstance(self.__dict__[item], versions.Version):
print "%-20s %s" % (item, self.__dict__[item].asString())
elif isinstance(self.__dict__[item], versions.BranchName):
print "%-20s %s" % (item, self.__dict__[item])
else:
print "%-20s (unknown type)" % (item)
def __init__(self):
self.reppath = "/var/lib/srsrep"
self.root = "/"
self.sourcepath = "/usr/src/srs/sources"
self.buildpath = "/usr/src/srs/builds"
self.defaultbranch = None
self.installbranch = None
self.lookaside = "/var/cache/srs"
self.dbpath = "/var/lib/srsdb"
self.tmpdir = "/var/tmp/"
self.defaultbranch = versions.VersionFromString("/localhost@local:head")
self.read("/etc/srsrc")
self.read(os.environ["HOME"] + "/" + ".srsrc")
class SrsCfgError(Exception):
"""
Ancestor for all exceptions raised by the srscfg module.
"""
pass
class ParseError(SrsCfgError):
"""
Indicates that an error occured parsing the config file.
"""
def __str__(self):
return self.str
def __init__(self, str):
self.str = str
| apache-2.0 | Python |
f195c01ad6a98b3454880106e95fe40bbeb14da0 | Fix unit test for essentia dissonance | Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide,Parisson/TimeSide | tests/test_essentia_dissonance.py | tests/test_essentia_dissonance.py | #! /usr/bin/env python
from unit_timeside import unittest, TestRunner
from timeside.plugins.decoder.file import FileDecoder
from timeside.core import get_processor
from timeside.core.tools.test_samples import samples
class TestEssentiaDissonance(unittest.TestCase):
def setUp(self):
self.analyzer = get_processor('essentia_dissonance')()
def testOnC4Scale(self):
"runs on C4 scale"
self.source = samples["C4_scale.wav"]
def tearDown(self):
decoder = FileDecoder(self.source)
(decoder | self.analyzer).run()
self.assertAlmostEqual(self.analyzer.results['essentia_dissonance'].data_object.value.mean(), 0.16, places=2)
if __name__ == '__main__':
unittest.main(testRunner=TestRunner())
| #! /usr/bin/env python
from unit_timeside import unittest, TestRunner
from timeside.plugins.decoder.file import FileDecoder
from timeside.core import get_processor
from timeside.core.tools.test_samples import samples
class TestEssentiaDissonance(unittest.TestCase):
def setUp(self):
self.analyzer = get_processor('essentia_dissonance')()
def testOnC4Scale(self):
"runs on C4 scale"
self.source = samples["C4_scale.wav"]
def tearDown(self):
decoder = FileDecoder(self.source)
(decoder | self.analyzer).run()
self.assertAlmostEqual(self.analyzer.results['essentia_dissonance'].data_object.value.mean(), 0.18, places=2)
if __name__ == '__main__':
unittest.main(testRunner=TestRunner())
| agpl-3.0 | Python |
385d8a073d2996d639e8bc2af7d11658f28575d2 | add more tests | cglewis/vent,CyberReboot/vent,CyberReboot/vent,cglewis/vent,cglewis/vent,CyberReboot/vent | tests/unit/test_api_repository.py | tests/unit/test_api_repository.py | from vent.api.repository import Repository
from vent.api.system import System
def test_add():
""" Test the add function """
repo = Repository(System().manifest)
repo.add('https://github.com/cyberreboot/poseidon')
def test_update():
""" Test the update function """
repo = Repository(System().manifest)
repo.update('foo')
| from vent.api.repository import Repository
from vent.api.system import System
def test_update():
""" Test the update class """
repo = Repository(System().manifest)
repo.update('foo')
| apache-2.0 | Python |
02a4e17ca4f370a6525fa71dd6e33b9802ff99ff | Add urls only if topology module is active | nemesisdesign/ansible-openwisp2,openwisp/ansible-openwisp2 | templates/openwisp2/urls.py | templates/openwisp2/urls.py | from django.conf.urls import include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.core.urlresolvers import reverse_lazy
from django.views.generic import RedirectView
from openwisp_utils.admin_theme.admin import admin, openwisp_admin
openwisp_admin()
redirect_view = RedirectView.as_view(url=reverse_lazy('admin:index'))
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'', include('openwisp_controller.urls')),
{% if openwisp2_network_topology %}
url(r'^network_topology/', include('openwisp_network_topology.urls')),
{% endif %}
url(r'^$', redirect_view, name='index')
]
urlpatterns += staticfiles_urlpatterns()
| from django.conf.urls import include, url
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.core.urlresolvers import reverse_lazy
from django.views.generic import RedirectView
from openwisp_utils.admin_theme.admin import admin, openwisp_admin
openwisp_admin()
redirect_view = RedirectView.as_view(url=reverse_lazy('admin:index'))
urlpatterns = [
url(r'^admin/', include(admin.site.urls)),
url(r'', include('openwisp_controller.urls')),
url(r'^network_topology/', include('openwisp_network_topology.urls')),
url(r'^$', redirect_view, name='index')
]
urlpatterns += staticfiles_urlpatterns()
| bsd-3-clause | Python |
40bcf570afbc8b990efd0c55c5d306fa19845e04 | add constant for reduced length | vipul-sharma20/compressr | constants.py | constants.py | """
constants for util and compressr script
"""
REDUCED = 'reduced'
SUFFIX_FLAG = 'suffix_flag'
PREFIX_FLAG = 'prefix_flag'
LENGTH = 'length'
REDUCED_LENGTH = 'reduced_length'
| """
constants for util and compressr script
"""
REDUCED = 'reduced'
SUFFIX_FLAG = 'suffix_flag'
PREFIX_FLAG = 'prefix_flag'
LENGTH = 'length'
| mit | Python |
a10b4d98e801fe4f23d8077a74fbbe19e8d10480 | Update poll.py | JeffreyPowell/pi-heating-hub,JeffreyPowell/pi-heating-hub,JeffreyPowell/pi-heating-hub | cron/poll.py | cron/poll.py | ERROR: type should be string, got "\nhttps://dev.mysql.com/doc/connector-python/en/connector-python-example-connecting.html\n\n#SQL Connection Test\nimport MySQLdb\ndb = MySQLdb.connect(host=\"localhost\", user=\"pi\", passwd=\"password\", db=\"pi-heating-hub\")\n\ncur = db.cursor()\n\n" | apache-2.0 | Python | |
6b2efe0c5b323f650dc1aea217ea9d7270889bc7 | normalize tag names | MuckRock/muckrock,MuckRock/muckrock,MuckRock/muckrock,MuckRock/muckrock | src/muckrock/tags/models.py | src/muckrock/tags/models.py | """
Models for the tags application
"""
from django.contrib.auth.models import User
from django.db import models
from taggit.models import Tag as TaggitTag, GenericTaggedItemBase
class Tag(TaggitTag):
"""Custom Tag Class"""
user = models.ForeignKey(User, null=True, blank=True)
def save(self, *args, **kwargs):
"""Normalize name before saving"""
self.name = self.name.strip().lower()
super(Tag, self).save(*args, **kwargs)
class Meta:
# pylint: disable-msg=R0903
ordering = ['name']
class TaggedItemBase(GenericTaggedItemBase):
"""Custom Tagged Item Base Class"""
tag = models.ForeignKey(Tag, related_name="%(app_label)s_%(class)s_items")
| """
Models for the tags application
"""
from django.contrib.auth.models import User
from django.db import models
from taggit.models import Tag as TaggitTag, GenericTaggedItemBase
class Tag(TaggitTag):
"""Custom Tag Class"""
user = models.ForeignKey(User, null=True, blank=True)
class Meta:
# pylint: disable-msg=R0903
ordering = ['name']
class TaggedItemBase(GenericTaggedItemBase):
"""Custom Tagged Item Base Class"""
tag = models.ForeignKey(Tag, related_name="%(app_label)s_%(class)s_items")
| agpl-3.0 | Python |
960934fb4c9ead99091d32617d98c8d9c4f0d8a5 | include absolute urls in api responses | labkaxita/lakaxita,labkaxita/lakaxita,labkaxita/lakaxita | lakaxita/api.py | lakaxita/api.py | from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.api import Api
from lakaxita.news.models import News
from lakaxita.attachments.models import Attachment
from lakaxita.groups.models import Group
from lakaxita.gallery.models import Category
from lakaxita.lost_found.models import Item
class ItemResource(ModelResource):
class Meta:
queryset = Item.objects.all()
resource_name = 'lost_found'
include_absolute_url = True
fields = ['name', 'description', 'image', 'lost', 'found', 'slug']
class AttachmentResource(ModelResource):
class Meta:
queryset = Attachment.objects.all()
resource_name = 'attachments'
include_absolute_url = True
class CategoryResource(ModelResource):
class Meta:
queryset = Category.objects.root_nodes()
resource_name = 'gallery'
include_absolute_url = True
children = fields.ToManyField('self', 'children')
parent = fields.ForeignKey('self', 'parent', null=True)
attachments = fields.ToManyField(AttachmentResource, 'attachments')
class GroupResource(ModelResource):
class Meta:
queryset = Group.objects.all()
resource_name = 'groups'
include_absolute_url = True
class NewsResource(ModelResource):
group = fields.ForeignKey(GroupResource, 'group', null=True)
class Meta:
queryset = News.objects.published()
resource_name = 'news'
include_absolute_url = True
api = Api(api_name='api')
for resource in (ItemResource, CategoryResource, AttachmentResource,
GroupResource, NewsResource):
api.register(resource())
| from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.api import Api
from lakaxita.news.models import News
from lakaxita.attachments.models import Attachment
from lakaxita.groups.models import Group
from lakaxita.gallery.models import Category
from lakaxita.lost_found.models import Item
class ItemResource(ModelResource):
class Meta:
queryset = Item.objects.all()
resource_name = 'lost_found'
returned = fields.BooleanField()
def dehydrate(self, bundle):
bundle.data['returned'] = bundle.obj.found is not None
return bundle
class AttachmentResource(ModelResource):
class Meta:
queryset = Attachment.objects.all()
resource_name = 'attachments'
class CategoryResource(ModelResource):
class Meta:
queryset = Category.objects.root_nodes()
resource_name = 'gallery'
children = fields.ToManyField('self', 'children')
parent = fields.ForeignKey('self', 'parent', null=True)
attachments = fields.ToManyField(AttachmentResource, 'attachments')
class GroupResource(ModelResource):
class Meta:
queryset = Group.objects.all()
resource_name = 'groups'
class NewsResource(ModelResource):
group = fields.ForeignKey(GroupResource, 'group', null=True)
class Meta:
queryset = News.objects.published()
resource_name = 'news'
api = Api(api_name='api')
for resource in (ItemResource, CategoryResource, AttachmentResource,
GroupResource, NewsResource):
api.register(resource())
| agpl-3.0 | Python |
590e9a6c97710097b2ab93fa194d7fa3cce9cb6c | add debug toolbar middleware to codeship settings | MuckRock/muckrock,MuckRock/muckrock,MuckRock/muckrock,MuckRock/muckrock | muckrock/settings/codeship.py | muckrock/settings/codeship.py | """
Settings used during testing of the application on codeship
Import from test settings
"""
# pylint: disable=wildcard-import
# pylint: disable=unused-wildcard-import
from muckrock.settings.test import *
MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
DATABASES['default'] = {
'NAME': 'test',
'USER': os.environ.get('PG_USER'),
'PASSWORD': os.environ.get('PG_PASSWORD'),
'HOST': '127.0.0.1',
'ENGINE': 'django.db.backends.postgresql_psycopg2',
}
| """
Settings used during testing of the application on codeship
Import from test settings
"""
# pylint: disable=wildcard-import
# pylint: disable=unused-wildcard-import
from muckrock.settings.test import *
DATABASES['default'] = {
'NAME': 'test',
'USER': os.environ.get('PG_USER'),
'PASSWORD': os.environ.get('PG_PASSWORD'),
'HOST': '127.0.0.1',
'ENGINE': 'django.db.backends.postgresql_psycopg2',
}
| agpl-3.0 | Python |
853c727c472efc09df90cb016bd05f81d4cf5e8e | Print a line telling where the site is available at | cknv/beetle-preview | beetle_preview/__init__.py | beetle_preview/__init__.py | from http import server
from socketserver import TCPServer
import os
class Server:
def __init__(self, own_config, config, builder):
self.directory = config.folders['output']
self.port = own_config.get('port', 5000)
self.builder = builder
def serve(self):
os.chdir(self.directory)
request_handler = server.SimpleHTTPRequestHandler
httpd = TCPServer(('', self.port), request_handler)
try:
print('Preview available at http://0.0.0.0:{}/'.format(self.port))
httpd.serve_forever()
except KeyboardInterrupt:
httpd.shutdown()
def register(plugin_config, config, commander, builder, content_renderer):
server = Server(plugin_config, config, builder)
commander.add('preview', server.serve, 'Serve the rendered site')
| from http import server
from socketserver import TCPServer
import os
class Server:
def __init__(self, own_config, config, builder):
self.directory = config.folders['output']
self.port = own_config.get('port', 5000)
self.builder = builder
def serve(self):
os.chdir(self.directory)
request_handler = server.SimpleHTTPRequestHandler
httpd = TCPServer(('', self.port), request_handler)
try:
httpd.serve_forever()
except KeyboardInterrupt:
httpd.shutdown()
def register(plugin_config, config, commander, builder, content_renderer):
server = Server(plugin_config, config, builder)
commander.add('preview', server.serve, 'Serve the rendered site')
| mit | Python |
2bf01da9e1f19dbbead83eaad69f264198900ac2 | allow I;16B as uint16 datatype | michigraber/neuralyzer,michigraber/neuralyzer | neuralyzer/io/plugins/tiff.py | neuralyzer/io/plugins/tiff.py |
from __future__ import print_function
from neuralyzer.io.loader import LoaderTemplate
FILE_EXTENSIONS = ('tiff', 'tif', )
DEFAULT_LIBRARY = 'PIL'
class Loader(LoaderTemplate):
@staticmethod
def get_data(filepath, library=DEFAULT_LIBRARY):
if library == 'PIL':
try:
return get_data_PIL(filepath)
except:
raise
elif library == 'tifffile':
try:
return get_data_tifffile(filepath)
except:
raise
else:
raise ValueError('No load method implemented for library %s' % library)
# reading files using PIL
# -----------------------------------------------------------------------------
PILMode2NPdtype = {
'I;16': 'uint16',
'I;16B': 'uint16',
}
def get_data_PIL(filepath):
import numpy as np
from PIL import Image, ImageSequence
img = Image.open(filepath)
try:
dtype = PILMode2NPdtype[img.mode]
except KeyError:
raise NotImplementedError(('The handling of tif files with PIL'
' mode "%s" is currently not supported.') % img.mode)
framedata = [np.array(frame)
for frame in ImageSequence.Iterator(img)]
image = np.array(framedata)
img.close()
return image
# reading files using tifffile
# -----------------------------------------------------------------------------
def get_data_tifffile(filepath):
''' reads a tifffile using the tifffile library '''
from tifffile import TiffFile
mytiff = TiffFile(filepath)
return mytiff.asarray()
|
from __future__ import print_function
from neuralyzer.io.loader import LoaderTemplate
FILE_EXTENSIONS = ('tiff', 'tif', )
DEFAULT_LIBRARY = 'PIL'
class Loader(LoaderTemplate):
@staticmethod
def get_data(filepath, library=DEFAULT_LIBRARY):
if library == 'PIL':
try:
return get_data_PIL(filepath)
except:
raise
elif library == 'tifffile':
try:
return get_data_tifffile(filepath)
except:
raise
else:
raise ValueError('No load method implemented for library %s' % library)
# reading files using PIL
# -----------------------------------------------------------------------------
PILMode2NPdtype = {
'I;16': 'uint16',
}
def get_data_PIL(filepath):
import numpy as np
from PIL import Image, ImageSequence
img = Image.open(filepath)
try:
dtype = PILMode2NPdtype[img.mode]
except KeyError:
raise NotImplementedError(('The handling of tif files with PIL'
' mode "%s" is currently not supported.') % img.mode)
framedata = [np.array(frame)
for frame in ImageSequence.Iterator(img)]
image = np.array(framedata)
img.close()
return image
# reading files using tifffile
# -----------------------------------------------------------------------------
def get_data_tifffile(filepath):
''' reads a tifffile using the tifffile library '''
from tifffile import TiffFile
mytiff = TiffFile(filepath)
return mytiff.asarray()
| mit | Python |
9b2b91d50edfd07f36bae9d154128602bd855f8e | Update fusebmc.py | dbeyer/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,dbeyer/benchexec | benchexec/tools/fusebmc.py | benchexec/tools/fusebmc.py | # This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import benchexec.tools.esbmc as esbmc
class Tool(esbmc.Tool):
"""
This class serves as tool adaptor for FuSeBMC (https://github.com/kaled-alshmrany/FuSeBMC)
"""
REQUIRED_PATHS = ["esbmc", "fusebmc.py", "FuSeBMC_inustrment/FuSeBMC_inustrment", "fusebmc_output"]
def name(self):
return "FuSeBMC"
def executable(self, tool_locator):
try:
return tool_locator.find_executable("fusebmc.py")
except ToolNotFoundException:
return super().executable(tool_locator)
| # This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import benchexec.tools.esbmc as esbmc
class Tool(esbmc.Tool):
"""
This class serves as tool adaptor for FuSeBMC (https://github.com/kaled-alshmrany/FuSeBMC)
"""
REQUIRED_PATHS = ["esbmc", "fusebmc.py", "FuSeBMC_inustrment/FuSeBMC_inustrment", "fusebmc_output"]
def name(self):
return "FuSeBMC"
def executable(self, tool_locator):
return tool_locator.find_executable("fusebmc.py")
| apache-2.0 | Python |
bd7ccfd86af79c8ad2b1358b3a3e682e7ac88507 | update cache visualisation | tbicr/map-trends,tbicr/map-trends | vis_cache.py | vis_cache.py | import colorsys
import json
import string
import mercantile
import shapely.geometry
saturations = [0.45, 0.5, 0.55, 0.6, 0.65, 0.7, 0.75]
brightness = [0.5, 0.65, 0.8, 0.95]
base = len(saturations)
offset = 1
sv = {i: (saturations[(i + offset) % base], brightness[(i + offset) // base])
for i, _ in enumerate(string.ascii_uppercase)}
color_base = 255
hue_stretch = 0.5
color_map = {
'??': '#00F',
'AQ': '#000',
}
def get_color(*countries):
rr, rg, rb = 0, 0, 0
for country in countries:
if '+' in country:
r, g, b = get_color(*country.split('+'))
elif country in color_map:
r, g, b = [int(c, 16) * 16 + int(c, 16) for c in color_map[country][1:]]
else:
h = string.ascii_uppercase.index(country[0]) / len(string.ascii_uppercase) * hue_stretch
s, v = sv[string.ascii_uppercase.index(country[1])]
r, g, b = colorsys.hsv_to_rgb(h, s, v)
r, g, b = r * color_base, g * color_base, b * color_base
rr += r
rg += g
rb += b
return rr // len(countries), rg // len(countries), rb // len(countries)
def tile_to_rect(zoom, x, y, v):
zoom = int(zoom)
x = int(x)
y = int(y)
box = mercantile.bounds(x, y, zoom)
return {
'type': 'Feature',
'properties': {
't': '%s/%s/%s' % (zoom, x, y),
'c': v,
'k': '#%02X%02X%02X' % get_color(v),
},
'geometry': shapely.geometry.mapping(shapely.geometry.box(*box)),
}
def generate_geojson(zoom):
cache_zoom, cache = json.load(open('cache_tile.json'))
cache_trim = {
'type': 'FeatureCollection',
'features': [tile_to_rect(*k.split('/'), v=v)
for k, v in cache.items()
if int(k.split('/')[0]) <= zoom and len(v.split('|')) == 1],
}
json.dump(cache_trim, open('cache_tile_%s.geojson' % zoom, 'w'),
ensure_ascii=False, sort_keys=True)
if __name__ == '__main__':
generate_geojson(9)
| import json
import itertools
import collections
import mercantile
import shapely.geometry
color = itertools.cycle([
'#%X%X%X' % (r, g, b)
for r in range(14, 4, -1)
for g in range(14, 4, -1)
for b in range(14, 4, -1)
if 20 < r + g + b < 36
])
def get_color():
for i in range(3):
r = next(color)
return r
color_map = collections.defaultdict(get_color)
color_map.update({
'??': '#00F',
'AQ': '#000',
})
duplicates = {
# 'EH|MA',
# 'FR|FX',
# 'FR|GF',
# 'NO|SJ',
# 'RU|UA',
}
def tile_to_rect(zoom, x, y, v):
zoom = int(zoom)
x = int(x)
y = int(y)
box = mercantile.bounds(x, y, zoom)
return {
'type': 'Feature',
'properties': {
't': '%s/%s/%s' % (zoom, x, y),
'c': v,
'k': color_map[v]
},
'geometry': shapely.geometry.mapping(shapely.geometry.box(*box)),
}
def generate_geojson(zoom):
cache = json.load(open('cache_tile.json'))
cache_trim = [tile_to_rect(*k.split('/'), v=v)
for k, v in cache[1].items()
if int(k.split('/')[0]) <= zoom
and (len(v.split('|')) == 1 or (v in duplicates and int(k.split('/')[0]) == zoom))]
cache_trim = {
'type': 'FeatureCollection',
'features': cache_trim,
}
json.dump(cache_trim, open('cache_tile_%s.geojson' % zoom, 'w'), ensure_ascii=False, sort_keys=True)
if __name__ == '__main__':
generate_geojson(9)
| mit | Python |
1d3ab147de66ab7b684f6f2f7b4c0f6db9ff3f6b | Fix Atom syndication to aggregate syndicated sources individually. | chromakode/wake | wake/base.py | wake/base.py | from datetime import datetime
from urlparse import urljoin
from flask import Blueprint, render_template, abort, request, url_for
from werkzeug.contrib.atom import AtomFeed
from been import Been
blueprint = Blueprint('wake', __name__)
been = blueprint.been = Been()
store = blueprint.store = been.store
@blueprint.app_template_filter('relativetime')
def relative_time(timestamp):
delta = (datetime.now() - datetime.fromtimestamp(timestamp))
delta_s = delta.days * 86400 + delta.seconds
if delta_s < 60:
return "less than a minute ago"
elif delta_s < 120:
return "about a minute ago"
elif delta_s < (60 * 60):
return str(delta_s / 60) + " minutes ago"
elif delta_s < (120 * 60):
return "about an hour ago"
elif delta_s < (24 * 60 * 60):
return "about " + str(delta_s / 3600) + " hours ago"
elif delta_s < (48 * 60 * 60):
return "1 day ago"
else:
return str(delta_s / 86400) + " days ago"
@blueprint.route('/')
def index():
return render_template('stream.html', events=store.collapsed_events())
@blueprint.route('/post/<slug>')
def by_slug(slug):
events = list(store.events_by_slug(slug))
if not events:
abort(404)
return render_template('stream.html', events=events)
@blueprint.route('/recent.atom')
def recent_feed():
entries = []
for source_id, source in store.get_sources().iteritems():
if not source.get('syndicate'):
continue
for event in store.events(source=source_id):
entries.append({
'title': event['title'],
'content': unicode(event['content']),
'content_type': 'html',
'author': event.get('author', ''),
'url': urljoin(request.url_root,
url_for('by_slug', slug=event.get('slug', ''))),
'updated': datetime.fromtimestamp(event['timestamp']),
'published': datetime.fromtimestamp(event['timestamp']),
})
entries.sort(key=lambda e: e['published'], reverse=True)
feed = AtomFeed('Recent Posts', feed_url=request.url, url=request.url_root,
generator=('Wake', None, None))
for entry in entries:
feed.add(**entry)
return feed.get_response()
| from datetime import datetime
from urlparse import urljoin
from flask import Blueprint, render_template, abort, request, url_for
from werkzeug.contrib.atom import AtomFeed
from been import Been
blueprint = Blueprint('wake', __name__)
been = blueprint.been = Been()
store = blueprint.store = been.store
@blueprint.app_template_filter('relativetime')
def relative_time(timestamp):
delta = (datetime.now() - datetime.fromtimestamp(timestamp))
delta_s = delta.days * 86400 + delta.seconds
if delta_s < 60:
return "less than a minute ago"
elif delta_s < 120:
return "about a minute ago"
elif delta_s < (60 * 60):
return str(delta_s / 60) + " minutes ago"
elif delta_s < (120 * 60):
return "about an hour ago"
elif delta_s < (24 * 60 * 60):
return "about " + str(delta_s / 3600) + " hours ago"
elif delta_s < (48 * 60 * 60):
return "1 day ago"
else:
return str(delta_s / 86400) + " days ago"
@blueprint.route('/')
def index():
return render_template('stream.html', events=store.collapsed_events())
@blueprint.route('/post/<slug>')
def by_slug(slug):
events = list(store.events_by_slug(slug))
if not events:
abort(404)
return render_template('stream.html', events=events)
@blueprint.route('/recent.atom')
def recent_feed():
feed = AtomFeed('Recent Posts', feed_url=request.url, url=request.url_root,
generator=('Wake', None, None))
sources = store.get_sources()
for event in store.events():
if sources[event['source']].get('syndicate'):
feed.add(event['title'],
unicode(event['content']),
content_type='html',
author=event.get('author', ''),
url=urljoin(request.url_root, url_for('by_slug', slug=event.get('slug', ''))),
updated=datetime.fromtimestamp(event['timestamp']),
published=datetime.fromtimestamp(event['timestamp']))
return feed.get_response()
| bsd-3-clause | Python |
4b17332254541a348fc69857ea4ca78539f4b422 | call write_ditz after writing templates | GISAElkartea/django-project | django_project/pastertemplates.py | django_project/pastertemplates.py | from os import getlogin, path
from socket import gethostname
from datetime import datetime
from paste.script.templates import Template, var
class DjangoProjectTemplate(Template):
summary = 'Django project template'
use_cheetah = True
_template_dir = 'project_template'
vars = (
var('username', 'Enter your name',
default=getlogin()),
var('email_address', 'Enter your email address',
default='{}@{}'.format(getlogin(), gethostname())),
)
def run(self, command, output_dir, vars):
now = datetime.now()
vars['creation_date'] = now.strftime('%Y-%m-%d %H:%M:%S.%f Z')
super(DjangoProjectTemplate, self).run(command, output_dir, vars)
self.write_ditz_config(output_dir, vars)
def write_ditz_config(self, output_dir, vars):
config_path = path.join(output_dir, '.ditz-config')
config = ('--- !ditz.rubyforge.org,2008-03-06/config\n'
'name: {username}\n'
'email: {email_address}\n'
'issue_dir: bugs')
config = config.format(
username=vars['username'],
email_address=vars['email_address'],
)
with open(config_path, 'w') as config_file:
config_file.write(config)
| from os import getlogin, path
from socket import gethostname
from datetime import datetime
from paste.script.templates import Template, var
class DjangoProjectTemplate(Template):
summary = 'Django project template'
use_cheetah = True
_template_dir = 'project_template'
vars = (
var('username', 'Enter your name',
default=getlogin()),
var('email_address', 'Enter your email address',
default='{}@{}'.format(getlogin(), gethostname())),
)
def run(self, command, output_dir, vars):
now = datetime.now()
vars['creation_date'] = now.strftime('%Y-%m-%d %H:%M:%S.%f Z')
self.write_ditz_config(vars)
return super(DjangoProjectTemplate, self).run(command, output_dir, vars, asdasd, sad)
def write_ditz_config(self, vars):
config_path = path.join(__file__, self._template_dir, '.ditz-config')
config = ('--- !ditz.rubyforge.org,2008-03-06/config\n'
'name: {username}\n'
'email: {email_address}\n'
'issue_dir: bugs')
config = config.format(
username=vars['username'],
email_address=vars['email_address'],
)
with open(config_path, 'w') as config_file:
config_file.write(config)
| agpl-3.0 | Python |
38359dc8c96eb0a305e57a3f3028ab9f4d73f1e2 | Fix Attr error message | laurent-george/weboob,willprice/weboob,frankrousseau/weboob,RouxRC/weboob,sputnick-dev/weboob,nojhan/weboob-devel,Konubinix/weboob,laurent-george/weboob,laurent-george/weboob,sputnick-dev/weboob,frankrousseau/weboob,willprice/weboob,RouxRC/weboob,Konubinix/weboob,Konubinix/weboob,nojhan/weboob-devel,sputnick-dev/weboob,nojhan/weboob-devel,willprice/weboob,frankrousseau/weboob,RouxRC/weboob | weboob/browser/filters/html.py | weboob/browser/filters/html.py | # -*- coding: utf-8 -*-
# Copyright(C) 2014 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import lxml.html as html
from .standard import _Selector, _NO_DEFAULT, Filter, FilterError
from weboob.tools.html import html2text
__all__ = ['CSS', 'XPath', 'XPathNotFound', 'AttributeNotFound',
'Attr', 'Link', 'CleanHTML']
class XPathNotFound(FilterError):
pass
class AttributeNotFound(FilterError):
pass
class CSS(_Selector):
@classmethod
def select(cls, selector, item, obj=None, key=None):
return item.cssselect(selector)
class XPath(_Selector):
pass
class Attr(Filter):
def __init__(self, selector, attr, default=_NO_DEFAULT):
super(Attr, self).__init__(selector, default=default)
self.attr = attr
def filter(self, el):
try:
return u'%s' % el[0].attrib[self.attr]
except IndexError:
return self.default_or_raise(XPathNotFound('Unable to find link %s' % self.selector))
except KeyError:
return self.default_or_raise(AttributeNotFound('Element %s does not have attribute %s' % (el[0], self.attr)))
class Link(Attr):
"""
Get the link uri of an element.
If the <a> tag is not found, an exception IndexError is raised.
"""
def __init__(self, selector=None, default=_NO_DEFAULT):
super(Link, self).__init__(selector, 'href', default=default)
class CleanHTML(Filter):
def filter(self, txt):
if isinstance(txt, (tuple, list)):
return u' '.join([self.clean(item) for item in txt])
return self.clean(txt)
@classmethod
def clean(cls, txt):
if not isinstance(txt, basestring):
txt = html.tostring(txt, encoding=unicode)
return html2text(txt)
| # -*- coding: utf-8 -*-
# Copyright(C) 2014 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import lxml.html as html
from .standard import _Selector, _NO_DEFAULT, Filter, FilterError
from weboob.tools.html import html2text
__all__ = ['CSS', 'XPath', 'XPathNotFound', 'AttributeNotFound',
'Attr', 'Link', 'CleanHTML']
class XPathNotFound(FilterError):
pass
class AttributeNotFound(FilterError):
pass
class CSS(_Selector):
@classmethod
def select(cls, selector, item, obj=None, key=None):
return item.cssselect(selector)
class XPath(_Selector):
pass
class Attr(Filter):
def __init__(self, selector, attr, default=_NO_DEFAULT):
super(Attr, self).__init__(selector, default=default)
self.attr = attr
def filter(self, el):
try:
return u'%s' % el[0].attrib[self.attr]
except IndexError:
return self.default_or_raise(XPathNotFound('Unable to find link %s' % self.selector))
except KeyError:
return self.default_or_raise(AttributeNotFound('Link %s does not has attribute %s' % (el[0], self.attr)))
class Link(Attr):
"""
Get the link uri of an element.
If the <a> tag is not found, an exception IndexError is raised.
"""
def __init__(self, selector=None, default=_NO_DEFAULT):
super(Link, self).__init__(selector, 'href', default=default)
class CleanHTML(Filter):
def filter(self, txt):
if isinstance(txt, (tuple, list)):
return u' '.join([self.clean(item) for item in txt])
return self.clean(txt)
@classmethod
def clean(cls, txt):
if not isinstance(txt, basestring):
txt = html.tostring(txt, encoding=unicode)
return html2text(txt)
| agpl-3.0 | Python |
389edb64e3bc2827ad6ed313de9656205843ddd1 | Mark all the tests on ASan bots non-critical due to a regression in the ASan runtime. | eunchong/build,eunchong/build,eunchong/build,eunchong/build | masters/master.chromium.memory/master_gatekeeper_cfg.py | masters/master.chromium.memory/master_gatekeeper_cfg.py | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master import gatekeeper
from master import master_utils
# This is the list of the builder categories and the corresponding critical
# steps. If one critical step fails, gatekeeper will close the tree
# automatically.
# Note: don't include 'update scripts' since we can't do much about it when
# it's failing and the tree is still technically fine.
# TODO(glider): browser_tests and content_browsertests timeouts have become
# annoying since the number of bots increased. Disable them until the failure
# rate drops.
# TODO(glider): disabled all the tests because of the ASan regression. Need to
# re-enable them after next Clang roll. See http://crbug.com/177235.
categories_steps = {
'': ['update'],
'testers': [
#'base_unittests',
#'browser_tests',
#'cacheinvalidation_unittests',
#'content_browsertests',
#'content_unittests',
#'courgette_unittests',
#'crypto_unittests',
#'device_unittests',
#'googleurl_unittests',
#'ipc_tests',
#'installer_util_unittests',
#'jingle_unittests',
#'media_unittests',
#'mini_installer_test',
#'nacl_integration',
#'net_unittests',
#'ppapi_unittests',
#'printing_unittests',
#'remoting_unittests',
#'sql_unittests',
#'test_shell_tests',
#'unit_tests',
],
'compile': ['compile'],
# Usually we don't close based on archive failures, but ASAN bots
# detect memory failures in the logs during the Archive stage.
'crosasantest': ['Archive'],
}
exclusions = {
}
forgiving_steps = ['update_scripts', 'update']
def Update(config, active_master, c):
c['status'].append(gatekeeper.GateKeeper(
fromaddr=active_master.from_address,
categories_steps=categories_steps,
exclusions=exclusions,
relayhost=config.Master.smtp,
subject='buildbot %(result)s in %(projectName)s on %(builder)s, '
'revision %(revision)s',
extraRecipients=active_master.tree_closing_notification_recipients,
lookup=master_utils.FilterDomain(),
forgiving_steps=forgiving_steps,
public_html='../master.chromium/public_html',
sheriffs=['sheriff'],
tree_status_url=active_master.tree_status_url,
use_getname=True))
| # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master import gatekeeper
from master import master_utils
# This is the list of the builder categories and the corresponding critical
# steps. If one critical step fails, gatekeeper will close the tree
# automatically.
# Note: don't include 'update scripts' since we can't do much about it when
# it's failing and the tree is still technically fine.
# TODO(glider): browser_tests and content_browsertests timeouts have become
# annoying since the number of bots increased. Disable them until the failure
# rate drops.
categories_steps = {
'': ['update'],
'testers': [
'base_unittests',
#'browser_tests',
'cacheinvalidation_unittests',
#'content_browsertests',
'content_unittests',
'courgette_unittests',
'crypto_unittests',
'device_unittests',
'googleurl_unittests',
'ipc_tests',
'installer_util_unittests',
'jingle_unittests',
'media_unittests',
'mini_installer_test',
'nacl_integration',
'net_unittests',
'ppapi_unittests',
'printing_unittests',
'remoting_unittests',
'sql_unittests',
'test_shell_tests',
'unit_tests',
],
'compile': ['compile'],
# Usually we don't close based on archive failures, but ASAN bots
# detect memory failures in the logs during the Archive stage.
'crosasantest': ['Archive'],
}
exclusions = {
}
forgiving_steps = ['update_scripts', 'update']
def Update(config, active_master, c):
c['status'].append(gatekeeper.GateKeeper(
fromaddr=active_master.from_address,
categories_steps=categories_steps,
exclusions=exclusions,
relayhost=config.Master.smtp,
subject='buildbot %(result)s in %(projectName)s on %(builder)s, '
'revision %(revision)s',
extraRecipients=active_master.tree_closing_notification_recipients,
lookup=master_utils.FilterDomain(),
forgiving_steps=forgiving_steps,
public_html='../master.chromium/public_html',
sheriffs=['sheriff'],
tree_status_url=active_master.tree_status_url,
use_getname=True))
| bsd-3-clause | Python |
00726313128334e825ab7ff8bcdd1244ceca111b | Add import | Pylons/kai,Pylons/kai | kai/controllers/articles.py | kai/controllers/articles.py | import logging
import re
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect_to
from pylons.decorators import rest
from tw.mods.pylonshf import validate
from kai.lib.base import BaseController, render
from kai.lib.decorators import in_group
from kai.lib.helpers import failure_flash, success_flash
from kai.model.forms import new_article_form
from kai.model import Article
log = logging.getLogger(__name__)
class ArticlesController(BaseController):
def __before__(self):
c.active_tab = 'Community'
c.active_sub = 'Blog'
def index(self):
start = request.GET.get('start', '1')
startkey = request.GET.get('startkey')
prevkey = request.GET.get('prevkey')
if startkey:
c.articles = Article.by_time(self.db, descending=True, startkey=startkey, count=11)
elif prevkey:
c.articles = Article.by_time(self.db, startkey=prevkey, count=11)
c.reverse = True
else:
c.articles = Article.by_time(self.db, descending=True, count=11)
c.start = start
return render('/articles/index.mako')
def archives(self, year, month, slug):
articles = list(Article.by_slug(c.db, include_docs=True)[(int(year), int(month), slug)]) or abort(404)
c.article = articles[0]
return render('/articles/show.mako')
@in_group('admin')
def new(self):
return render('/articles/new.mako')
@in_group('admin')
@validate(form=new_article_form, error_handler='new')
def create(self):
result = self.form_result
article = Article(title=result['title'], summary=result['summary'],
body=result['body'], published=result['publish_date'],
human_id=c.user.id, author=c.user.displayname)
## generate the slug
slug = result['title'].replace(" ", "_")
slug = slug.lower()
slug = re.sub('[^A-Za-z0-9_]+', '', slug)
article.slug = slug
article.store(self.db)
success_flash('Article saved and published')
redirect_to('articles')
| import logging
from pylons import request, response, session, tmpl_context as c, url
from pylons.controllers.util import abort, redirect_to
from pylons.decorators import rest
from tw.mods.pylonshf import validate
from kai.lib.base import BaseController, render
from kai.lib.decorators import in_group
from kai.lib.helpers import failure_flash, success_flash
from kai.model.forms import new_article_form
from kai.model import Article
log = logging.getLogger(__name__)
class ArticlesController(BaseController):
def __before__(self):
c.active_tab = 'Community'
c.active_sub = 'Blog'
def index(self):
start = request.GET.get('start', '1')
startkey = request.GET.get('startkey')
prevkey = request.GET.get('prevkey')
if startkey:
c.articles = Article.by_time(self.db, descending=True, startkey=startkey, count=11)
elif prevkey:
c.articles = Article.by_time(self.db, startkey=prevkey, count=11)
c.reverse = True
else:
c.articles = Article.by_time(self.db, descending=True, count=11)
c.start = start
return render('/articles/index.mako')
def archives(self, year, month, slug):
articles = list(Article.by_slug(c.db, include_docs=True)[(int(year), int(month), slug)]) or abort(404)
c.article = articles[0]
return render('/articles/show.mako')
@in_group('admin')
def new(self):
return render('/articles/new.mako')
@in_group('admin')
@validate(form=new_article_form, error_handler='new')
def create(self):
result = self.form_result
article = Article(title=result['title'], summary=result['summary'],
body=result['body'], published=result['publish_date'],
human_id=c.user.id, author=c.user.displayname)
## generate the slug
slug = result['title'].replace(" ", "_")
slug = slug.lower()
slug = re.sub('[^A-Za-z0-9_]+', '', slug)
article.slug = slug
article.store(self.db)
success_flash('Article saved and published')
redirect_to('articles')
| bsd-3-clause | Python |
1ceafadc042b9efc77d3d9d5fca9aff367369151 | Add multi_set action | Brickstertwo/git-commands | bin/utils/parse_actions.py | bin/utils/parse_actions.py | import argparse
def flag_as_value(value):
class FlagAsInt(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, value)
return FlagAsInt
def multi_set(dest1, value1):
class MultiSet(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, dest1, value1)
setattr(namespace, self.dest, values)
return MultiSet
| import argparse
def flag_as_value(value):
class FlagAsInt(argparse.Action):
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, value)
return FlagAsInt
| mit | Python |
6fc26b08f6a49fc3d745b790f17f05392d2324d9 | fix deploy.py bug | coderzh/gohugo.org,coderzh/gohugo.org,coderzh/gohugo.org,coderzh/gohugo.org | deploy.py | deploy.py | #!/usr/bin/env python
# coding:utf-8
import os
import sys
import glob
import shutil
import subprocess
__author__ = 'coderzh'
class ChDir:
"""Context manager for changing the current working directory"""
def __init__(self, new_path):
self.newPath = os.path.expanduser(new_path)
def __enter__(self):
self.savedPath = os.getcwd()
os.chdir(self.newPath)
def __exit__(self, exception_type, exception_value, traceback):
os.chdir(self.savedPath)
def deploy():
current_dir = os.path.dirname(os.path.abspath(__file__))
parent_dir = os.path.abspath(os.path.join(current_dir, '..'))
with ChDir(current_dir):
# step1 clean
os.system('git fetch origin')
os.system('git checkout master')
os.system('git reset --hard origin/master')
os.system('git clean -fdx')
# step2 build
os.system('hugo -v --cacheDir="./cache"')
deploy_dir = os.path.join(parent_dir, 'deploy')
# step3 create if not exists
if not os.path.exists(deploy_dir):
os.makedirs(deploy_dir)
with ChDir(deploy_dir):
# step4 remove all files
for f in os.listdir('.'):
if f != 'index.html':
if os.path.isfile(f):
os.remove(f)
elif os.path.isdir(f):
shutil.rmtree(f)
# step5 copy new files
from_dir = os.path.join(current_dir, 'public')
for f in os.listdir(from_dir):
file_path = os.path.join(from_dir, f)
if os.path.isfile(file_path):
shutil.copy(file_path, '.')
elif os.path.isdir(file_path):
shutil.copytree(file_path, f)
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == '--auto':
deploy()
else:
print 'please use --auto flag.'
| #!/usr/bin/env python
# coding:utf-8
import os
import sys
import glob
import shutil
import subprocess
__author__ = 'coderzh'
class ChDir:
"""Context manager for changing the current working directory"""
def __init__(self, new_path):
self.newPath = os.path.expanduser(new_path)
def __enter__(self):
self.savedPath = os.getcwd()
os.chdir(self.newPath)
def __exit__(self, exception_type, exception_value, traceback):
os.chdir(self.savedPath)
def deploy():
current_dir = os.path.dirname(os.path.abspath(__file__))
parent_dir = os.path.abspath(os.path.join(current_dir, '..'))
with ChDir(current_dir):
# step1 clean
os.system('git fetch origin')
os.system('git checkout master')
os.system('git reset --hard origin/master')
os.system('git clean -fdx')
# step2 build
os.system('hugo -v -cacheDir="./cache"')
deploy_dir = os.path.join(parent_dir, 'deploy')
# step3 create if not exists
if not os.path.exists(deploy_dir):
os.makedirs(deploy_dir)
with ChDir(deploy_dir):
# step4 remove all files
for f in os.listdir('.'):
if f != 'index.html':
if os.path.isfile(f):
os.remove(f)
elif os.path.isdir(f):
shutil.rmtree(f)
# step5 copy new files
from_dir = os.path.join(current_dir, 'public')
for f in os.listdir(from_dir):
file_path = os.path.join(from_dir, f)
if os.path.isfile(file_path):
shutil.copy(file_path, '.')
elif os.path.isdir(file_path):
shutil.copytree(file_path, f)
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == '--auto':
deploy()
else:
print 'please use --auto flag.'
| mit | Python |
bb519d85faa4d2177f411eaebb96a827fb942cc5 | Add files via upload | enadol/fbjsonrobot | daterobot.py | daterobot.py | # !python3
import re
# import time
from fbjson import lstdate
# from jsonbuilderbis import match
lstdatenew = []
lstnuevafecha = []
for item in lstdate:
#regex para extraer p.e.
corte = re.compile('.*\s(.*)]')
#print(lstdate)
nada, fecha, nada2 = corte.split(item)
#print(fecha)
#para fecha sin . p. ej [Fr ]
#partida = fecha[0].split(' ')
#print(partida)
#dia = partida[1]
#para fecha con . p. ej [Fr. ]
partida = fecha.split('.')
dia = partida[0].zfill(2)
#print(dia)
mes = partida[1].zfill(2)
#print(mes)
if int(mes) >= 8:
partida[2] = "2019"
#print mes
else:
partida[2] = "2020"
#print mes, fecha[2]
separator="-"
ffecha = separator.join([partida[2], mes, dia])
#print(ffecha)
#ddia = nuevafecha1[0]
#mmes = nuevafecha1[1]
#yyear = nuevafecha1[2]
#ffecha = yyear + "-" + mmes + "-" + ddia
#nuevafecha2=ddia+"-"+mmes+"-"+yyear
#nuevafecha3=datetime.datetime.strptime(nuevafecha2, "%d-%m-%Y").strftime("%Y-%m-%d")
lstnuevafecha.append(ffecha)
#print(lstnuevafecha)
| # !python3
# import datetime
# import time
from fbjson import lstdate
# from jsonbuilderbis import match
lstdatenew = []
lstnuevafecha = []
for item in lstdate:
#print(lstdate)
fecha = item.split('.')
print(fecha)
#para fecha sin . p. ej [Fr ]
#partida = fecha[0].split(' ')
#print(partida)
#dia = partida[1]
#para fecha con . p. ej [Fr. ]
partida = fecha[1].split(' ')
dia = partida[1].zfill(2)
print(dia)
mes = fecha[2].zfill(2)
#print int(fecha[1])
if int(mes) >= 8:
fecha[2] = "2019"
#print mes
else:
fecha[2] = "2020"
#print mes, fecha[2]
nuevafecha1 = dia, mes, fecha[2]
#print nuevafecha1
ddia = nuevafecha1[0]
mmes = nuevafecha1[1]
yyear = nuevafecha1[2]
ffecha = yyear + "-" + mmes + "-" + ddia
#nuevafecha2=ddia+"-"+mmes+"-"+yyear
#nuevafecha3=datetime.datetime.strptime(nuevafecha2, "%d-%m-%Y").strftime("%Y-%m-%d")
lstnuevafecha.append(ffecha)
#print(lstnuevafecha)
| mit | Python |
300dddc30eb9f44cc864a4214f24d3e478da1a52 | Add achievement deleting. | fi-ksi/web-backend,fi-ksi/web-backend | endpoint/achievement.py | endpoint/achievement.py | from db import session
import model
import util
import falcon
class Achievement(object):
def on_get(self, req, resp, id):
achievement = session.query(model.Achievement).get(id)
if achievement is None:
resp.status = falcon.HTTP_404
return
req.context['result'] = { 'achievement': util.achievement.to_json(achievement) }
def on_delete(self, req, resp, id):
user = req.context['user']
achievement = session.query(model.Achievement).get(id)
if (not user.is_logged_in()) or (not user.is_admin()):
resp.status = falcon.HTTP_400
return
if not achievement:
resp.status = falcon.HTTP_404
return
# Ziskame vsechna prideleni daneho achievementu
user_achs = session.query(model.UserAchievement).\
filter(model.UserAchievement.achievement_id == id).all()
try:
for user_ach in user_achs:
session.delete(user_ach)
session.delete(achievement)
session.commit()
except:
session.rollback()
raise
finally:
session.close()
req.context['result'] = {}
class Achievements(object):
def on_get(self, req, resp):
achievements = session.query(model.Achievement).\
filter(model.Achievement.year == req.context['year']).all()
req.context['result'] = { 'achievements': [ util.achievement.to_json(achievement) for achievement in achievements ] }
| from db import session
import model
import util
import falcon
class Achievement(object):
def on_get(self, req, resp, id):
achievement = session.query(model.Achievement).get(id)
if achievement is None:
resp.status = falcon.HTTP_404
return
req.context['result'] = { 'achievement': util.achievement.to_json(achievement) }
class Achievements(object):
def on_get(self, req, resp):
achievements = session.query(model.Achievement).\
filter(model.Achievement.year == req.context['year']).all()
req.context['result'] = { 'achievements': [ util.achievement.to_json(achievement) for achievement in achievements ] }
| mit | Python |
81835f5c79fc0f2b014f1082c3e49ff141638309 | change middleware name | altaurog/django-shoogie | shoogie/middleware.py | shoogie/middleware.py | import sys
import traceback
from django import http
from django.conf import settings
from django.views import debug
from shoogie import models
class ExceptionLoggingMiddleware(object):
def process_exception(self, request, exception):
if settings.DEBUG:
return
exc_type, exc_val, tb = sys.exc_info()
if issubclass(exc_type, http.Http404):
return
reporter = debug.ExceptionReporter(request, exc_type, exc_val, tb)
user = request.user
if user.is_anonymous():
user = None
tb_desc = traceback.extract_tb(tb, 1)
tb_file, tb_line_num, tb_function, tb_text = tb_desc[0]
models.ServerError.objects.create(
hostname = request.get_host(),
request_method = request.method,
request_path = request.path,
query_string = request.META.get('QUERY_STRING',''),
post_data = request.raw_post_data,
cookie_data = repr(request.COOKIES),
session_id = request.session.session_key,
session_data = repr(dict(request.session.iteritems())),
user = user,
exception_type = exc_type.__name__,
exception_str = str(exc_val),
source_file = tb_file,
source_line_num = tb_line_num,
source_function = tb_function,
source_text = tb_text,
issue = '',
technical_response = reporter.get_traceback_html(),
)
| import sys
import traceback
from django import http
from django.conf import settings
from django.views import debug
from shoogie import models
class DBExceptionMiddleware(object):
def process_exception(self, request, exception):
if settings.DEBUG:
return
exc_type, exc_val, tb = sys.exc_info()
if issubclass(exc_type, http.Http404):
return
reporter = debug.ExceptionReporter(request, exc_type, exc_val, tb)
user = request.user
if user.is_anonymous():
user = None
tb_desc = traceback.extract_tb(tb, 1)
tb_file, tb_line_num, tb_function, tb_text = tb_desc[0]
models.ServerError.objects.create(
hostname = request.get_host(),
request_method = request.method,
request_path = request.path,
query_string = request.META.get('QUERY_STRING',''),
post_data = request.raw_post_data,
cookie_data = repr(request.COOKIES),
session_id = request.session.session_key,
session_data = repr(dict(request.session.iteritems())),
user = user,
exception_type = exc_type.__name__,
exception_str = str(exc_val),
source_file = tb_file,
source_line_num = tb_line_num,
source_function = tb_function,
source_text = tb_text,
issue = '',
technical_response = reporter.get_traceback_html(),
)
| mit | Python |
c977e0a6696d9fbe119fa22a33cca4ecf85aff8f | Update LoggerRateLimiter_001_space.py | Chasego/cod,cc13ny/algo,cc13ny/algo,Chasego/cod,Chasego/cod,Chasego/codirit,cc13ny/algo,Chasego/cod,cc13ny/algo,cc13ny/Allin,Chasego/cod,cc13ny/Allin,Chasego/codirit,cc13ny/Allin,cc13ny/algo,Chasego/codi,Chasego/codi,Chasego/codi,Chasego/codi,cc13ny/Allin,Chasego/codirit,Chasego/codirit,Chasego/codi,cc13ny/Allin,Chasego/codirit | leetcode/359-Logger-Rate-Limiter/LoggerRateLimiter_001_space.py | leetcode/359-Logger-Rate-Limiter/LoggerRateLimiter_001_space.py | class Logger(object):
def __init__(self):
"""
Initialize your data structure here.
"""
self.log_hash = {}
def shouldPrintMessage(self, timestamp, message):
"""
Returns true if the message should be printed in the given timestamp, otherwise returns false.
If this method returns false, the message will not be printed.
The timestamp is in seconds granularity.
:type timestamp: int
:type message: str
:rtype: bool
"""
hs = self.log_hash
if message in hs and timestamp - hs[message] < 10:
return False
else:
hs[message] = timestamp
return True
# Your Logger object will be instantiated and called as such:
# obj = Logger()
# param_1 = obj.shouldPrintMessage(timestamp,message)
| class Logger(object):
def __init__(self):
"""
Initialize your data structure here.
"""
self.log_set = {}
def shouldPrintMessage(self, timestamp, message):
"""
Returns true if the message should be printed in the given timestamp, otherwise returns false.
If this method returns false, the message will not be printed.
The timestamp is in seconds granularity.
:type timestamp: int
:type message: str
:rtype: bool
"""
st = self.log_set
if message in st and timestamp - st[message] < 10:
return False
else:
st[message] = timestamp
return True
# Your Logger object will be instantiated and called as such:
# obj = Logger()
# param_1 = obj.shouldPrintMessage(timestamp,message)
| mit | Python |
642518d196f0bd9695426fd52bfb58adfd80d23d | Implement tests for change_password | Elections-R-Us/Elections-R-Us,Elections-R-Us/Elections-R-Us,Elections-R-Us/Elections-R-Us,Elections-R-Us/Elections-R-Us | elections_r_us/tests/test_security.py | elections_r_us/tests/test_security.py | from __future__ import unicode_literals
from ..models import User
def test_user_gets_created(new_session):
"""Test that after create_user adds to the database."""
from ..security import create_user
create_user(new_session, 'username', 'password')
assert len(new_session.query(User).all()) == 1
def test_user_can_login(session_with_user):
"""Test that check_login returns True with the correct login."""
from ..security import check_login
assert check_login(*session_with_user)
def test_nonexistent_user_login_fails(new_session):
"""Test check_login returns False when the username isn't present."""
from ..security import check_login
assert not check_login(new_session, 'hello', 'world')
def test_bad_password_login_fails(session_with_user):
"""Test check_login returns False when the password doesn't match."""
from ..security import check_login
session, username, password = session_with_user
assert not check_login(session, username, password + 'not!')
def test_change_password(session_with_user):
from ..security import check_login, change_password
session, username, password = session_with_user
new_password = password + '!'
change_password(session, username, new_password)
assert check_login(session, username, new_password)
def test_change_password_new_overwrites(session_with_user):
from ..security import check_login, change_password
session, username, password = session_with_user
new_password = password + '!'
change_password(session, username, new_password)
assert not check_login(session, username, password)
| from __future__ import unicode_literals
from ..models import User
def test_user_gets_created(new_session):
"""Test that after create_user adds to the database."""
from ..security import create_user
create_user(new_session, 'username', 'password')
assert len(new_session.query(User).all()) == 1
def test_user_can_login(session_with_user):
"""Test that check_login returns True with the correct login."""
from ..security import check_login
assert check_login(*session_with_user)
def test_nonexistent_user_login_fails(new_session):
"""Test check_login returns False when the username isn't present."""
from ..security import check_login
assert not check_login(new_session, 'hello', 'world')
def test_bad_password_login_fails(session_with_user):
"""Test check_login returns False when the password doesn't match."""
from ..security import check_login
session, username, password = session_with_user
assert not check_login(session, username, password + 'not!')
| mit | Python |
5e837ddeca7f749206b76eb568e663e368973f02 | Add summary in l10n_ch_bank | BT-csanchez/l10n-switzerland,CompassionCH/l10n-switzerland,michl/l10n-switzerland,ndtran/l10n-switzerland,BT-ojossen/l10n-switzerland,CompassionCH/l10n-switzerland,eLBati/l10n-switzerland,cyp-opennet/ons_cyp_github,BT-fgarbely/l10n-switzerland,BT-ojossen/l10n-switzerland,cyp-opennet/ons_cyp_github,cgaspoz/l10n-switzerland,guewen/l10n-switzerland,open-net-sarl/l10n-switzerland,BT-aestebanez/l10n-switzerland,open-net-sarl/l10n-switzerland,BT-fgarbely/l10n-switzerland | l10n_ch_bank/__openerp__.py | l10n_ch_bank/__openerp__.py | # -*- coding: utf-8 -*-
##############################################################################
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
#
# Created by Nicolas Bessi
#
# Copyright (c) 2010 CamptoCamp. All rights reserved.
{"name": "Switzerland - Bank list",
"summary": "Banks names, addresses and BIC codes",
"description": """
Swiss bank list
===============
This module will load all Swiss banks in OpenERP with their name, address and BIC code to
ease the input of bank account.
It is not mandatory to use OpenERP in Switzerland, but can improve the user experience.
""",
"version": "7.0",
"author": "Camptocamp",
"category": "Localisation",
"website": "http://www.camptocamp.com",
"depends": ["l10n_ch"],
"data": ["bank.xml"],
"update_xml": [],
"active": False,
"installable": True,
}
| # -*- coding: utf-8 -*-
##############################################################################
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
#
##############################################################################
#
# Created by Nicolas Bessi
#
# Copyright (c) 2010 CamptoCamp. All rights reserved.
{"name": "Switzerland - Bank list",
"description": """
Swiss bank list
===============
This module will load all Swiss banks in OpenERP with their name, address and BIC code to
ease the input of bank account.
It is not mandatory to use OpenERP in Switzerland, but can improve the user experience.
""",
"version": "7.0",
"author": "Camptocamp",
"category": "Localisation",
"website": "http://www.camptocamp.com",
"depends": ["l10n_ch"],
"data": ["bank.xml"],
"update_xml": [],
"active": False,
"installable": True,
}
| agpl-3.0 | Python |
7f735d75ea071c2544949ec4cadfa23fb4140e01 | Document the sandbox urls.py | thechampanurag/django-oscar,michaelkuty/django-oscar,Idematica/django-oscar,thechampanurag/django-oscar,monikasulik/django-oscar,QLGu/django-oscar,nickpack/django-oscar,makielab/django-oscar,jmt4/django-oscar,sasha0/django-oscar,ahmetdaglarbas/e-commerce,MatthewWilkes/django-oscar,bnprk/django-oscar,QLGu/django-oscar,jinnykoo/christmas,manevant/django-oscar,DrOctogon/unwash_ecom,MatthewWilkes/django-oscar,jlmadurga/django-oscar,amirrpp/django-oscar,manevant/django-oscar,spartonia/django-oscar,saadatqadri/django-oscar,kapari/django-oscar,solarissmoke/django-oscar,makielab/django-oscar,django-oscar/django-oscar,WillisXChen/django-oscar,nfletton/django-oscar,taedori81/django-oscar,rocopartners/django-oscar,rocopartners/django-oscar,jmt4/django-oscar,jinnykoo/wuyisj.com,mexeniz/django-oscar,Jannes123/django-oscar,jinnykoo/wuyisj,nickpack/django-oscar,marcoantoniooliveira/labweb,binarydud/django-oscar,anentropic/django-oscar,sasha0/django-oscar,john-parton/django-oscar,WadeYuChen/django-oscar,monikasulik/django-oscar,jlmadurga/django-oscar,Bogh/django-oscar,saadatqadri/django-oscar,sasha0/django-oscar,eddiep1101/django-oscar,jinnykoo/christmas,lijoantony/django-oscar,saadatqadri/django-oscar,faratro/django-oscar,WillisXChen/django-oscar,saadatqadri/django-oscar,ademuk/django-oscar,WadeYuChen/django-oscar,itbabu/django-oscar,jinnykoo/wuyisj,WillisXChen/django-oscar,pdonadeo/django-oscar,faratro/django-oscar,manevant/django-oscar,okfish/django-oscar,faratro/django-oscar,itbabu/django-oscar,nickpack/django-oscar,thechampanurag/django-oscar,jlmadurga/django-oscar,spartonia/django-oscar,Idematica/django-oscar,michaelkuty/django-oscar,sasha0/django-oscar,WillisXChen/django-oscar,dongguangming/django-oscar,itbabu/django-oscar,nickpack/django-oscar,john-parton/django-oscar,vovanbo/django-oscar,mexeniz/django-oscar,adamend/django-oscar,lijoantony/django-oscar,bschuon/django-oscar,marcoantoniooliveira/labweb
,mexeniz/django-oscar,itbabu/django-oscar,john-parton/django-oscar,faratro/django-oscar,sonofatailor/django-oscar,ahmetdaglarbas/e-commerce,MatthewWilkes/django-oscar,MatthewWilkes/django-oscar,Bogh/django-oscar,kapt/django-oscar,machtfit/django-oscar,adamend/django-oscar,solarissmoke/django-oscar,vovanbo/django-oscar,amirrpp/django-oscar,machtfit/django-oscar,spartonia/django-oscar,jinnykoo/christmas,kapt/django-oscar,makielab/django-oscar,nfletton/django-oscar,eddiep1101/django-oscar,okfish/django-oscar,django-oscar/django-oscar,dongguangming/django-oscar,monikasulik/django-oscar,spartonia/django-oscar,binarydud/django-oscar,manevant/django-oscar,django-oscar/django-oscar,binarydud/django-oscar,pasqualguerrero/django-oscar,QLGu/django-oscar,okfish/django-oscar,bnprk/django-oscar,nfletton/django-oscar,bschuon/django-oscar,Jannes123/django-oscar,adamend/django-oscar,WillisXChen/django-oscar,dongguangming/django-oscar,anentropic/django-oscar,anentropic/django-oscar,anentropic/django-oscar,taedori81/django-oscar,kapari/django-oscar,ka7eh/django-oscar,bschuon/django-oscar,john-parton/django-oscar,WadeYuChen/django-oscar,jmt4/django-oscar,eddiep1101/django-oscar,jinnykoo/wuyisj.com,marcoantoniooliveira/labweb,ka7eh/django-oscar,pdonadeo/django-oscar,kapt/django-oscar,lijoantony/django-oscar,vovanbo/django-oscar,binarydud/django-oscar,sonofatailor/django-oscar,Idematica/django-oscar,bnprk/django-oscar,josesanch/django-oscar,WillisXChen/django-oscar,taedori81/django-oscar,QLGu/django-oscar,solarissmoke/django-oscar,pdonadeo/django-oscar,marcoantoniooliveira/labweb,solarissmoke/django-oscar,lijoantony/django-oscar,nfletton/django-oscar,mexeniz/django-oscar,sonofatailor/django-oscar,DrOctogon/unwash_ecom,amirrpp/django-oscar,jlmadurga/django-oscar,kapari/django-oscar,dongguangming/django-oscar,pdonadeo/django-oscar,bschuon/django-oscar,josesanch/django-oscar,thechampanurag/django-oscar,WadeYuChen/django-oscar,Jannes123/django-oscar,pasqualguerrero/django-oscar,amirrpp/djang
o-oscar,michaelkuty/django-oscar,machtfit/django-oscar,eddiep1101/django-oscar,ahmetdaglarbas/e-commerce,ka7eh/django-oscar,michaelkuty/django-oscar,DrOctogon/unwash_ecom,kapari/django-oscar,monikasulik/django-oscar,adamend/django-oscar,ademuk/django-oscar,pasqualguerrero/django-oscar,vovanbo/django-oscar,elliotthill/django-oscar,jinnykoo/wuyisj.com,ademuk/django-oscar,pasqualguerrero/django-oscar,Bogh/django-oscar,makielab/django-oscar,rocopartners/django-oscar,jmt4/django-oscar,josesanch/django-oscar,bnprk/django-oscar,jinnykoo/wuyisj.com,jinnykoo/wuyisj,Jannes123/django-oscar,sonofatailor/django-oscar,ahmetdaglarbas/e-commerce,Bogh/django-oscar,okfish/django-oscar,rocopartners/django-oscar,elliotthill/django-oscar,ka7eh/django-oscar,django-oscar/django-oscar,ademuk/django-oscar,elliotthill/django-oscar,jinnykoo/wuyisj,taedori81/django-oscar | sites/sandbox/urls.py | sites/sandbox/urls.py | from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from oscar.app import shop
# These simply need to be imported into this namespace. Ignore the PEP8
# warning that they aren't used.
from oscar.views import handler500, handler404, handler403
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
# Custom functionality to allow dashboard users to be created
(r'^gateway/', include('apps.gateway.urls')),
(r'', include(shop.urls)),
)
# Allow rosetta to be used to add translations
if 'rosetta' in settings.INSTALLED_APPS:
urlpatterns += patterns('',
(r'^rosetta/', include('rosetta.urls')),
)
if settings.DEBUG:
# Server statics and uploaded media
urlpatterns += staticfiles_urlpatterns()
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
# Allow error pages to be tested
urlpatterns += patterns('',
url(r'^403$', handler403),
url(r'^404$', handler404),
url(r'^500$', handler500)
)
| from django.conf.urls import patterns, include, url
from django.conf import settings
from django.contrib import admin
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.conf.urls.static import static
from django.views.generic import TemplateView
from oscar.app import shop
from oscar.views import handler500, handler404, handler403
admin.autodiscover()
urlpatterns = patterns('',
(r'^admin/', include(admin.site.urls)),
(r'^gateway/', include('apps.gateway.urls')),
(r'', include(shop.urls)),
)
# Allow rosetta to be used to add translations
if 'rosetta' in settings.INSTALLED_APPS:
urlpatterns += patterns('',
(r'^rosetta/', include('rosetta.urls')),
)
if settings.DEBUG:
# Server statics
urlpatterns += staticfiles_urlpatterns()
# Serve uploaded media
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
# Test error pages
urlpatterns += patterns('',
url(r'^403$', TemplateView.as_view(template_name='403.html')),
url(r'^404$', TemplateView.as_view(template_name='404.html')),
url(r'^500$', TemplateView.as_view(template_name='500.html')),
)
| bsd-3-clause | Python |
7cda27f088487fb8d3e7f09596bee72a7f88aebd | update decorator | congminghaoxue/learn_python | decorator.py | decorator.py | #!/usr/bin/env python
# encoding: utf-8
# 函数装饰器
def memp(func):
cache = {}
def wrap(*args):
if args not in cache:
cache[args] = func(*args)
return cache[args]
return wrap
# 第一题
@memp
def fibonacci(n):
if n <= 1:
return 1
return fibonacci(n - 1) + fibonacci(n - 2)
print(fibonacci(35))
# 第二题
@memp
def climb(n, steps):
count = 0
if n == 0:
count = 1
elif n > 0:
for step in steps:
count += climb(n - step, steps)
return count
#print(climb(10, (1, 2, 3)))
| #!/usr/bin/env python
# encoding: utf-8
# 函数装饰器
def memp(func):
cache = {}
def wrap(*args):
if args not in cache:
cache[args] = func(*args)
return cache[args]
return wrap
# 第一题
# @memp
def fibonacci(n):
if n <= 1:
return 1
return fibonacci(n - 1) + fibonacci(n - 2)
print(fibonacci(50))
# 第二题
# @memp
def climb(n, steps):
count = 0
if n == 0:
count = 1
elif n > 0:
for step in steps:
count += climb(n - step, steps)
return count
print(climb(10, (1, 2, 3)))
| apache-2.0 | Python |
d96e51d60d78b25f6a23938246c7b166ed1ef238 | fix peering | guthemberg/yanoama,guthemberg/yanoama | yanoama/system/peering.py | yanoama/system/peering.py | #!/usr/bin/env python
try:
import json
except ImportError:
import simplejson as json
#looking for yanoama module
def get_install_path():
try:
config_file = file('/etc/yanoama.conf').read()
config = json.loads(config_file)
except Exception, e:
print "There was an error in your configuration file (/etc/yanoama.conf)"
raise e
_ple_deployment = config.get('ple_deployment', {"path":"/home/upmc_aren/yanoama"})
return (_ple_deployment['path'])
import sys
try:
from yanoama.core.essential import Essential, \
get_hostname, log
from yanoama.backend.mongo import Mongo
except ImportError:
sys.path.append(get_install_path())
#import yanoama modules alternatively
from yanoama.core.essential import Essential, \
get_hostname, log
from yanoama.backend.mongo import Mongo
if __name__ == '__main__':
kernel=Essential()
db = Mongo()
local_peers=db.get_peers()
for coordinator in kernel.get_coordinators(get_hostname()):
peers=db.get_peers(coordinator)
to_be_removed=[]
for hostname in peers.keys():
if local_peers.has_key(hostname):
if local_peers[hostname]>peers[hostname]:
to_be_removed.append(hostname)
for hostname in to_be_removed:
del local_peers[hostname]
db.save_peers(local_peers)
#log/print out to the standard output
log('current number of members:'+str(len(local_peers.keys()))+', done.')
| #!/usr/bin/env python
try:
import json
except ImportError:
import simplejson as json
#looking for yanoama module
def get_install_path():
try:
config_file = file('/etc/yanoama.conf').read()
config = json.loads(config_file)
except Exception, e:
print "There was an error in your configuration file (/etc/yanoama.conf)"
raise e
_ple_deployment = config.get('ple_deployment', {"path":"/home/upmc_aren/yanoama"})
return (_ple_deployment['path'])
import sys
try:
from yanoama.core.essential import Essential, \
get_hostname, log
from yanoama.backend.mongo import Mongo
except ImportError:
sys.path.append(get_install_path())
#import yanoama modules alternatively
from yanoama.core.essential import Essential, \
get_hostname, log
from yanoama.backend.mongo import Mongo
if __name__ == '__main__':
kernel=Essential()
db = Mongo()
local_peers=db.get_peers()
for coordinator in kernel.get_coordinators(get_hostname()):
peers=db.get_peers(coordinator)
to_be_removed=[]
for hostname in peers.keys():
if local_peers.has_key(hostname):
if local_peers[hostname]>peers[hostname]:
to_be_removed.append(hostname)
for hostname in to_be_removed:
del local_peers[hostname]
db.save_peers(local_peers)
#log/print out to the standard output
log('current number of members:'+len(local_peers.keys())+', done.')
| bsd-3-clause | Python |
b2703e712c1a392940b5ab63dd88cf73c5b6db0b | set LOCAL_T_MAX to 5 | miyosuda/async_deep_reinforce | constants.py | constants.py | # -*- coding: utf-8 -*-
LOCAL_T_MAX = 5 # repeat step size
RMSP_EPSILON = 1e-10 # epsilon parameter for RMSProp
CHECKPOINT_DIR = 'checkpoints'
LOG_FILE = 'tmp/a3c_log'
INITIAL_ALPHA_LOW = 1e-4 # log_uniform low limit for learning rate
INITIAL_ALPHA_HIGH = 1e-2 # log_uniform high limit for learning rate
PARALLEL_SIZE = 8 # parallel thread size
ROM = "pong.bin" # action size = 3
#ROM = "breakout.bin" # action size = 4
ACTION_SIZE = 3 # action size
INITIAL_ALPHA_LOG_RATE = 0.4226 # log_uniform interpolate rate for learning rate (around 7 * 10^-4)
GAMMA = 0.99 # discount factor for rewards
ENTROPY_BETA = 0.1 # entropy regurarlization constant
MAX_TIME_STEP = 6 * 10**7
GRAD_NORM_CLIP = 40.0 # gradient norm clipping
| # -*- coding: utf-8 -*-
LOCAL_T_MAX = 20 # repeat step size
RMSP_EPSILON = 1e-10 # epsilon parameter for RMSProp
CHECKPOINT_DIR = 'checkpoints'
LOG_FILE = 'tmp/a3c_log'
INITIAL_ALPHA_LOW = 1e-4 # log_uniform low limit for learning rate
INITIAL_ALPHA_HIGH = 1e-2 # log_uniform high limit for learning rate
PARALLEL_SIZE = 8 # parallel thread size
ROM = "pong.bin" # action size = 3
#ROM = "breakout.bin" # action size = 4
ACTION_SIZE = 3 # action size
INITIAL_ALPHA_LOG_RATE = 0.4226 # log_uniform interpolate rate for learning rate (around 7 * 10^-4)
GAMMA = 0.99 # discount factor for rewards
ENTROPY_BETA = 0.1 # entropy regurarlization constant
MAX_TIME_STEP = 6 * 10**7
GRAD_NORM_CLIP = 40.0 # gradient norm clipping
| apache-2.0 | Python |
c9ae61bd0aba076c3b6ae937672339bdd90877da | Bump version to 1.1.0.dev1 | gradel/django-sortedm2m,gregmuellegger/django-sortedm2m,gradel/django-sortedm2m,gregmuellegger/django-sortedm2m,gradel/django-sortedm2m,gregmuellegger/django-sortedm2m | sortedm2m/__init__.py | sortedm2m/__init__.py | # -*- coding: utf-8 -*-
__version__ = '1.1.0.dev1'
| # -*- coding: utf-8 -*-
__version__ = '1.1.0'
| bsd-3-clause | Python |
3970106ae0244c869cab790029664522a1e8910c | Fix migration 166 | wakermahmud/sync-engine,wakermahmud/sync-engine,wakermahmud/sync-engine,nylas/sync-engine,closeio/nylas,jobscore/sync-engine,PriviPK/privipk-sync-engine,Eagles2F/sync-engine,PriviPK/privipk-sync-engine,nylas/sync-engine,ErinCall/sync-engine,wakermahmud/sync-engine,Eagles2F/sync-engine,ErinCall/sync-engine,closeio/nylas,gale320/sync-engine,jobscore/sync-engine,gale320/sync-engine,wakermahmud/sync-engine,closeio/nylas,PriviPK/privipk-sync-engine,PriviPK/privipk-sync-engine,nylas/sync-engine,closeio/nylas,gale320/sync-engine,Eagles2F/sync-engine,PriviPK/privipk-sync-engine,ErinCall/sync-engine,gale320/sync-engine,ErinCall/sync-engine,Eagles2F/sync-engine,nylas/sync-engine,Eagles2F/sync-engine,jobscore/sync-engine,jobscore/sync-engine,gale320/sync-engine,ErinCall/sync-engine | migrations/versions/166_migrate_body_format.py | migrations/versions/166_migrate_body_format.py | """migrate body format
Revision ID: 3d4f5741e1d7
Revises: 29698176aa8d
Create Date: 2015-05-10 03:16:04.846781
"""
# revision identifiers, used by Alembic.
revision = '3d4f5741e1d7'
down_revision = '29698176aa8d'
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import load_only
CHUNK_SIZE = 1000
def upgrade():
from inbox.ignition import main_engine
from inbox.models.session import session_scope
from inbox.security.blobstorage import encode_blob
engine = main_engine(pool_size=1, max_overflow=0)
Base = declarative_base()
Base.metadata.reflect(engine)
class Message(Base):
__table__ = Base.metadata.tables['message']
with session_scope(versioned=False) as db_session:
max_id, = db_session.query(sa.func.max(Message.id)).one()
if max_id is None:
max_id = 0
for i in range(0, max_id, CHUNK_SIZE):
messages = db_session.query(Message). \
filter(Message.id > i, Message.id <= i + CHUNK_SIZE). \
options(load_only('_compacted_body', 'sanitized_body'))
for message in messages:
if message._compacted_body is None:
message._compacted_body = encode_blob(
message.sanitized_body.encode('utf-8'))
db_session.commit()
def downgrade():
pass
| """migrate body format
Revision ID: 3d4f5741e1d7
Revises: 29698176aa8d
Create Date: 2015-05-10 03:16:04.846781
"""
# revision identifiers, used by Alembic.
revision = '3d4f5741e1d7'
down_revision = '29698176aa8d'
import sqlalchemy as sa
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import load_only
CHUNK_SIZE = 1000
def upgrade():
from inbox.ignition import main_engine
from inbox.models.session import session_scope
from inbox.security.blobstorage import encode_blob
engine = main_engine(pool_size=1, max_overflow=0)
Base = declarative_base()
Base.metadata.reflect(engine)
class Message(Base):
__table__ = Base.metadata.tables['message']
with session_scope(versioned=False) as db_session:
max_id, = db_session.query(sa.func.max(Message.id)).one()
for i in range(0, max_id, CHUNK_SIZE):
messages = db_session.query(Message). \
filter(Message.id > i, Message.id <= i + CHUNK_SIZE). \
options(load_only('_compacted_body', 'sanitized_body'))
for message in messages:
if message._compacted_body is None:
message._compacted_body = encode_blob(
message.sanitized_body.encode('utf-8'))
db_session.commit()
def downgrade():
pass
| agpl-3.0 | Python |
933c138cb1a7b7e5afc7405a45a8d6f8865d4670 | fix bug: argument encoding | hack4code/BlogSpider,hack4code/BlogSpider,wartalker/BlogSpider,wartalker/BlogSpider,hack4code/BlogSpider,alone-walker/BlogSpider,alone-walker/BlogSpider,wartalker/BlogSpider,wartalker/BlogSpider,alone-walker/BlogSpider,alone-walker/BlogSpider,hack4code/BlogSpider | spider/mydm/ai/tag.py | spider/mydm/ai/tag.py | # -*- coding: utf-8 -*-
import re
def verify_tags(tags):
return True if all(len(tag) < 32 for tag in tags) else False
class ReExtractor:
PATTERN = re.compile(r'tags?\s*:.*')
def extract(self, s):
tags = [tag.strip() for tag in s[s.find(':')+1:-1].split(',')]
return tags
def __call__(self, doc, encoding='UTF-8'):
from lxml.html import fromstring, HTMLParser
doc = fromstring(bytes(bytearray(doc,
encoding=encoding)),
parser=HTMLParser(encoding=encoding))
txt = doc.text_content()
matches = re.findall(self.PATTERN,
txt,
re.IGNORECASE)
if len(matches) == 1:
stag = matches[0]
tags = self.extract(stag)
if verify_tags(tags):
return tags
elif len(matches) == 2:
for stag in matches:
tags = self.extract(stag)
if verify_tags(tags):
return tags
elif len(matches) > 2:
stag = matches[0]
tags = self.extract(stag)
if verify_tags(tags):
return tags
stag = matches[-1]
tags = self.extract(stag)
if verify_tags(tags):
return tags
return None
class TagExtractor:
EXTRACTORS = (ReExtractor,)
def __call__(self, doc, encoding='UTF-8'):
for cls in self.EXTRACTORS:
match = cls()
tags = match(doc,
encoding=encoding)
if tags is not None:
return tags
return None
| # -*- coding: utf-8 -*-
import re
def verify_tags(tags):
return True if all(len(tag) < 32 for tag in tags) else False
class ReExtractor:
PATTERN = re.compile(r'tags?\s*:.*')
def extract(self, s):
tags = [tag.strip() for tag in s[s.find(':')+1:-1].split(',')]
return tags
def __call__(self, doc, encoding='UTF-8'):
from lxml.html import fromstring, HTMLParser
doc = fromstring(bytes(bytearray(doc,
encoding=encoding)),
parser=HTMLParser(encoding=encoding))
txt = doc.text_content()
matches = re.findall(self.PATTERN,
txt,
re.IGNORECASE)
if len(matches) == 1:
stag = matches[0]
tags = self.extract(stag)
if verify_tags(tags):
return tags
elif len(matches) == 2:
for stag in matches:
tags = self.extract(stag)
if verify_tags(tags):
return tags
elif len(matches) > 2:
stag = matches[0]
tags = self.extract(stag)
if verify_tags(tags):
return tags
stag = matches[-1]
tags = self.extract(stag)
if verify_tags(tags):
return tags
return None
class TagExtractor:
EXTRACTORS = (ReExtractor,)
def __call__(self, doc):
for cls in self.EXTRACTORS:
match = cls()
tags = match(doc)
if tags is not None:
return tags
return None
| mit | Python |
b02094fe025dca9ff64969a3141e87cba3bb82e1 | remove ts2dt | osoken/sqlite-tensor | sqlite_tensor/util.py | sqlite_tensor/util.py | # -*- coding: utf-8 -*-
import time
from datetime import datetime
import shortuuid
shortuuid.set_alphabet(
'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
)
def gen_id():
"""Return generated short UUID.
"""
return shortuuid.uuid()
def dt2ts(dt):
return int(time.mktime(dt.timetuple()) * 1000) + (dt.microsecond // 1000)
def now():
return dt2ts(datetime.now())
| # -*- coding: utf-8 -*-
import time
from datetime import datetime
import shortuuid
shortuuid.set_alphabet(
'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'
)
def gen_id():
"""Return generated short UUID.
"""
return shortuuid.uuid()
def dt2ts(dt):
return int(time.mktime(dt.timetuple()) * 1000) + (dt.microsecond // 1000)
def ts2dt(ts):
return datetime.fromtimestamp(
int(ts) // 1000
).replace(microsecond=(int(ts) % 1000 * 1000))
def now():
return dt2ts(datetime.now())
| mit | Python |
33020a8beafd64eb666e479897f9bab8606a463e | Change to relative import | drnlm/sqlobject,sqlobject/sqlobject,drnlm/sqlobject,sqlobject/sqlobject | sqlobject/__init__.py | sqlobject/__init__.py | """SQLObject"""
# Do import for namespace
# noqa is a directive for flake8 to ignore seemingly unused imports
from .__version__ import version, version_info # noqa
from .col import * # noqa
from .index import * # noqa
from .joins import * # noqa
from .main import * # noqa
from .sqlbuilder import AND, OR, NOT, IN, LIKE, RLIKE, DESC, CONTAINSSTRING, const, func # noqa
from .styles import * # noqa
from .dbconnection import connectionForURI # noqa
from . import dberrors # noqa
| """SQLObject"""
# Do import for namespace
# noqa is a directive for flake8 to ignore seemingly unused imports
from __version__ import version, version_info # noqa
from col import * # noqa
from index import * # noqa
from joins import * # noqa
from main import * # noqa
from sqlbuilder import AND, OR, NOT, IN, LIKE, RLIKE, DESC, CONTAINSSTRING, const, func # noqa
from styles import * # noqa
from dbconnection import connectionForURI # noqa
import dberrors # noqa
| lgpl-2.1 | Python |
c4c200db7feedd6aa8babd6c5ec5b54ff368febe | Update convert simple | acuestap/smarttools_test,acuestap/smarttools_test,acuestap/smarttools_test | web/views.py | web/views.py | import datetime
from celery import chain
from .tasks import *
from django.http import HttpResponse
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth import logout
from .serializers import VideoSerializer
from django.shortcuts import render
from rest_framework.authtoken import serializers
from web.business_logic import login_request_from_model, tareas, get_videos_from_model, validateConvert
# Create your views here.
from web.models import Competition, Video
from rest_framework.generics import ListAPIView, RetrieveAPIView
from rest_framework import status
from rest_framework.response import Response
def index(request):
return render(request, 'inicio.html')
'''
REST Service performing login
'''
@csrf_exempt
def login_request(request):
if request.method == 'POST':
response = login_request_from_model(request)
print("Exito")
print(response)
else:
response = {
'username': '',
'status': 'NO POST',
'message': 'Error de metodo.',
}
return JsonResponse(response)
'''
Check if user is logged
'''
@csrf_exempt
def is_logged_user(request):
if request.user.is_authenticated():
logged = True
else:
logged = False
print("ENTRO.")
return JsonResponse({'logged': logged})
'''
Logout user
'''
@csrf_exempt
def logout_user(request):
logout(request)
print("Cerrando sesión....")
return JsonResponse({'logout': True})
'''
Add video to competition
'''
@csrf_exempt
def add_video(request):
if request.method == 'POST':
print("Llego al servicio")
print(request)
new_video = Video(
name=request.POST.get('name'),
state='En proceso',
user_email=request.POST.get('user_email'),
message=request.POST.get('message'),
original_video=request.FILES['original_video'],
uploadDate=datetime.datetime.now(),
competition=Competition.objects.filter(id=1).get()
)
new_video.save()
# data for video convert
#validateConvert(new_video.user_email,new_video.original_video)
return JsonResponse({'ok': 'video guardado'}, status=200)
class VideosListView(ListAPIView):
serializer_class = VideoSerializer
queryset = Video.objects.all()
| import datetime
from celery import chain
from .tasks import *
from django.http import HttpResponse
from django.http import JsonResponse
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth import logout
from .serializers import VideoSerializer
from django.shortcuts import render
from rest_framework.authtoken import serializers
from web.business_logic import login_request_from_model, tareas, get_videos_from_model, validateConvert
# Create your views here.
from web.models import Competition, Video
from rest_framework.generics import ListAPIView, RetrieveAPIView
from rest_framework import status
from rest_framework.response import Response
def index(request):
return render(request, 'inicio.html')
'''
REST Service performing login
'''
@csrf_exempt
def login_request(request):
if request.method == 'POST':
response = login_request_from_model(request)
print("Exito")
print(response)
else:
response = {
'username': '',
'status': 'NO POST',
'message': 'Error de metodo.',
}
return JsonResponse(response)
'''
Check if user is logged
'''
@csrf_exempt
def is_logged_user(request):
if request.user.is_authenticated():
logged = True
else:
logged = False
print("ENTRO.")
return JsonResponse({'logged': logged})
'''
Logout user
'''
@csrf_exempt
def logout_user(request):
logout(request)
print("Cerrando sesión....")
return JsonResponse({'logout': True})
'''
Add video to competition
'''
@csrf_exempt
def add_video(request):
if request.method == 'POST':
print("Llego al servicio")
print(request)
new_video = Video(
name=request.POST.get('name'),
state='En proceso',
user_email=request.POST.get('user_email'),
message=request.POST.get('message'),
original_video=request.FILES['original_video'],
uploadDate=datetime.datetime.now(),
competition=Competition.objects.filter(id=1).get()
)
new_video.save()
# data for video convert
return JsonResponse({'ok': 'video guardado'}, status=200)
class VideosListView(ListAPIView):
serializer_class = VideoSerializer
queryset = Video.objects.all()
| mit | Python |
9ac55bb9615bdda38df5d4cdc4357158e3b63f08 | Improve memoize | anaruse/chainer,wkentaro/chainer,cemoody/chainer,niboshi/chainer,hvy/chainer,muupan/chainer,chainer/chainer,cupy/cupy,ktnyt/chainer,chainer/chainer,laysakura/chainer,kiyukuta/chainer,truongdq/chainer,hvy/chainer,sinhrks/chainer,tscohen/chainer,sinhrks/chainer,sou81821/chainer,minhpqn/chainer,wkentaro/chainer,keisuke-umezawa/chainer,t-abe/chainer,ronekko/chainer,okuta/chainer,rezoo/chainer,ktnyt/chainer,hvy/chainer,1986ks/chainer,tigerneil/chainer,okuta/chainer,benob/chainer,jnishi/chainer,cupy/cupy,AlpacaDB/chainer,ktnyt/chainer,tkerola/chainer,wkentaro/chainer,niboshi/chainer,AlpacaDB/chainer,jnishi/chainer,niboshi/chainer,keisuke-umezawa/chainer,hvy/chainer,jnishi/chainer,pfnet/chainer,muupan/chainer,jnishi/chainer,kikusu/chainer,keisuke-umezawa/chainer,kikusu/chainer,Kaisuke5/chainer,kashif/chainer,okuta/chainer,chainer/chainer,okuta/chainer,benob/chainer,aonotas/chainer,delta2323/chainer,chainer/chainer,ktnyt/chainer,ytoyama/yans_chainer_hackathon,keisuke-umezawa/chainer,niboshi/chainer,t-abe/chainer,wkentaro/chainer,cupy/cupy,cupy/cupy,truongdq/chainer,ysekky/chainer | cupy/util.py | cupy/util.py | import atexit
import functools
from cupy import cuda
_memoized_funcs = []
def memoize(for_each_device=False):
"""Makes a function memoizing the result for each argument and device.
This decorator provides automatic memoization of the function result.
Args:
for_each_device (bool): If True, it memoizes the results for each
device. Otherwise, it memoizes the results only based on the
arguments.
"""
def decorator(f):
global _memoized_funcs
f._cupy_memo = {}
_memoized_funcs.append(f)
@functools.wraps(f)
def ret(*args, **kwargs):
arg_key = (args, frozenset(kwargs.items()))
if for_each_device:
arg_key = (cuda.Device().id, arg_key)
memo = f._cupy_memo
result = memo.get(arg_key, None)
if result is None:
result = f(*args, **kwargs)
memo[arg_key] = result
return result
return ret
return decorator
@atexit.register
def clear_memo():
"""Clears the memoized results for all functions decorated by memoize."""
global _memoized_funcs
for f in _memoized_funcs:
del f._cupy_memo
_memoized_funcs = []
| import atexit
import functools
from cupy import cuda
_memoized_funcs = []
def memoize(for_each_device=False):
"""Makes a function memoizing the result for each argument and device.
This decorator provides automatic memoization of the function result.
Args:
for_each_device (bool): If True, it memoizes the results for each
device. Otherwise, it memoizes the results only based on the
arguments.
"""
def decorator(f):
@functools.wraps(f)
def ret(*args, **kwargs):
global _memoized_funcs
arg_key = (args, frozenset(kwargs.items()))
if for_each_device:
arg_key = (cuda.Device().id, arg_key)
memo = getattr(f, '_cupy_memo', None)
if memo is None:
memo = f._cupy_memo = {}
_memoized_funcs.append(f)
result = memo.get(arg_key, None)
if result is None:
result = f(*args, **kwargs)
memo[arg_key] = result
return result
return ret
return decorator
@atexit.register
def clear_memo():
"""Clears the memoized results for all functions decorated by memoize."""
global _memoized_funcs
for f in _memoized_funcs:
del f._cupy_memo
_memoized_funcs = []
| mit | Python |
a0862fbd206b8ffe46db98f7c9a04211552b5a9d | Add blank functions | import/component.py | component.py | component.py | # -*- coding: utf-8 -*-
"""
component
~~~~~~~~~
A module that implements various helper functions to assist in using
component(1) with a python workflow.
:copyright: (c) 2013 by Daniel Chatfield
:license: MIT, see LICENSE for details.
"""
def require(component_name):
pass
def export(object):
pass | # -*- coding: utf-8 -*-
"""
component
~~~~~~~~~
A module that implements various helper functions to assist in using
component(1) with a python workflow.
:copyright: (c) 2013 by Daniel Chatfield
:license: MIT, see LICENSE for details.
"""
| mit | Python |
c8753f7aafecac17d500ecf24488e885f7a97d31 | Update serializer -fix typo | icereval/osf.io,amyshi188/osf.io,mluo613/osf.io,cwisecarver/osf.io,Johnetordoff/osf.io,cslzchen/osf.io,abought/osf.io,hmoco/osf.io,amyshi188/osf.io,chrisseto/osf.io,kwierman/osf.io,mluke93/osf.io,sloria/osf.io,cwisecarver/osf.io,TomHeatwole/osf.io,aaxelb/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,pattisdr/osf.io,alexschiller/osf.io,kch8qx/osf.io,caneruguz/osf.io,acshi/osf.io,jnayak1/osf.io,icereval/osf.io,Nesiehr/osf.io,RomanZWang/osf.io,acshi/osf.io,jnayak1/osf.io,leb2dg/osf.io,amyshi188/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,acshi/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,DanielSBrown/osf.io,mluo613/osf.io,brianjgeiger/osf.io,erinspace/osf.io,sloria/osf.io,erinspace/osf.io,CenterForOpenScience/osf.io,kwierman/osf.io,mattclark/osf.io,zachjanicki/osf.io,RomanZWang/osf.io,kch8qx/osf.io,caseyrollins/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,mattclark/osf.io,alexschiller/osf.io,cslzchen/osf.io,samchrisinger/osf.io,Johnetordoff/osf.io,felliott/osf.io,RomanZWang/osf.io,SSJohns/osf.io,monikagrabowska/osf.io,mfraezz/osf.io,Nesiehr/osf.io,mluke93/osf.io,hmoco/osf.io,binoculars/osf.io,zamattiac/osf.io,monikagrabowska/osf.io,chennan47/osf.io,felliott/osf.io,brianjgeiger/osf.io,felliott/osf.io,jnayak1/osf.io,zachjanicki/osf.io,zachjanicki/osf.io,chennan47/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,aaxelb/osf.io,mluke93/osf.io,hmoco/osf.io,erinspace/osf.io,cwisecarver/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,TomHeatwole/osf.io,leb2dg/osf.io,caneruguz/osf.io,cwisecarver/osf.io,monikagrabowska/osf.io,saradbowman/osf.io,doublebits/osf.io,asanfilippo7/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,laurenrevere/osf.io,adlius/osf.io,kwierman/osf.io,brianjgeiger/osf.io,SSJohns/osf.io,caneruguz/osf.io,DanielSBrown/osf.io,zachjanicki/osf.io,mluo613/osf.io,zamattiac/osf.io,doublebits/osf.io,saradbowman/osf.io,jnayak1/osf.io,wearpants/osf.io,asa
nfilippo7/osf.io,baylee-d/osf.io,kch8qx/osf.io,samchrisinger/osf.io,aaxelb/osf.io,abought/osf.io,asanfilippo7/osf.io,CenterForOpenScience/osf.io,RomanZWang/osf.io,laurenrevere/osf.io,cslzchen/osf.io,samchrisinger/osf.io,rdhyee/osf.io,hmoco/osf.io,binoculars/osf.io,Nesiehr/osf.io,leb2dg/osf.io,wearpants/osf.io,adlius/osf.io,wearpants/osf.io,SSJohns/osf.io,TomBaxter/osf.io,pattisdr/osf.io,abought/osf.io,crcresearch/osf.io,acshi/osf.io,RomanZWang/osf.io,sloria/osf.io,TomHeatwole/osf.io,wearpants/osf.io,emetsger/osf.io,chrisseto/osf.io,chennan47/osf.io,cslzchen/osf.io,mfraezz/osf.io,crcresearch/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,SSJohns/osf.io,zamattiac/osf.io,DanielSBrown/osf.io,asanfilippo7/osf.io,kch8qx/osf.io,samchrisinger/osf.io,acshi/osf.io,mluo613/osf.io,zamattiac/osf.io,emetsger/osf.io,amyshi188/osf.io,abought/osf.io,TomBaxter/osf.io,icereval/osf.io,mfraezz/osf.io,alexschiller/osf.io,DanielSBrown/osf.io,mfraezz/osf.io,Nesiehr/osf.io,rdhyee/osf.io,TomHeatwole/osf.io,TomBaxter/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,adlius/osf.io,adlius/osf.io,crcresearch/osf.io,caneruguz/osf.io,pattisdr/osf.io,mluke93/osf.io,alexschiller/osf.io,mluo613/osf.io,chrisseto/osf.io,aaxelb/osf.io,laurenrevere/osf.io,binoculars/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,alexschiller/osf.io,doublebits/osf.io,kch8qx/osf.io,doublebits/osf.io,rdhyee/osf.io,kwierman/osf.io,doublebits/osf.io | website/addons/s3/serializer.py | website/addons/s3/serializer.py | from website.addons.base.serializer import OAuthAddonSerializer
from website.addons.s3 import utils
class S3Serializer(OAuthAddonSerializer):
addon_short_name = 's3'
REQUIRED_URLS = []
@property
def addon_serialized_urls(self):
node = self.node_settings.owner
user_settings = self.node_settings.user_settings or self.user_settings
result = {
'accounts': node.api_url_for('s3_account_list'),
'create_bucket': node.api_url_for('create_bucket'),
'import_auth': node.api_url_for('s3_import_auth'),
'create': node.api_url_for('s3_add_user_account'),
'deauthorize': node.api_url_for('s3_deauthorize_node'),
'bucket_list': node.api_url_for('s3_folder_list'),
'set_bucket': node.api_url_for('s3_get_config'),
'files': node.web_url_for('collect_file_trees'),
}
if user_settings:
result['owner'] = user_settings.owner._id
return result
def credentials_are_valid(self, user_settings):
if user_settings:
if len(user_settings.external_accounts) < 1:
return False
return any([utils.can_list(account.oauth_key, account.oauth_secret)
for account in user_settings.external_accounts])
return False
def serialize_settings(self, node_settings, current_user):
self.user_settings = node_settings.user_settings
self.node_settings = node_settings
ret = self.node_settings.to_json(current_user)
current_user_settings = current_user.get_addon('s3')
ret.update({
'bucket': self.node_settings.bucket or '',
'encrypt_uploads': self.node_settings.encrypt_uploads,
'has_bucket': self.node_settings.bucket is not None,
'user_is_owner': (
self.user_settings and self.user_settings.owner == current_user
),
'user_has_auth': bool(current_user_settings) and current_user_settings.has_auth,
'node_has_auth': self.node_settings.has_auth,
'owner': None,
'bucket_list': None,
'valid_credentials': bool(current_user_settings) and self.credentials_are_valid(current_user_settings),
})
if node_settings.has_auth:
ret['owner'] = self.user_settings.owner.fullname
ret['owner_url'] = self.user_settings.owner.url
ret['node_has_auth'] = True
ret['urls'] = self.serialized_urls
return ret
| from website.addons.base.serializer import OAuthAddonSerializer
from webs.addons.s3 import utils
class S3Serializer(OAuthAddonSerializer):
addon_short_name = 's3'
REQUIRED_URLS = []
@property
def addon_serialized_urls(self):
node = self.node_settings.owner
user_settings = self.node_settings.user_settings or self.user_settings
result = {
'create_bucket': node.api_url_for('create_bucket'),
'import_auth': node.api_url_for('s3_node_import_auth'),
'create_auth': node.api_url_for('s3_authorize_node'),
'deauthorize': node.api_url_for('s3_delete_node_settings'),
'bucket_list': node.api_url_for('s3_get_bucket_list'),
'set_bucket': node.api_url_for('s3_get_node_settings'),
'files': node.web_url_for('collect_file_trees'),
}
if user_settings:
result['owner'] = user_settings.owner._id
return result
def credentials_are_valid(self, user_settings, client):
if user_settings:
if len(user_settings.external_accounts) < 1:
return False
return any([utils.can_list(account.oauth_key, account.oauth_secret)
for account in user_settings.external_accounts])
return False
| apache-2.0 | Python |
c33c527d08b5a7fbe414c07819c69bb884a2e977 | use my version, cit compiles on every system | jgsogo/queryset-cpp,jgsogo/queryset-cpp,jgsogo/queryset-cpp | conanfile.py | conanfile.py |
import os
import fnmatch
from conans import ConanFile, CMake
class QuerysetCPP(ConanFile):
name = "queryset-cpp"
version = "0.4"
generators = "cmake"
settings = "os", "compiler", "build_type", "arch"
exports = "conanfile.py", "CMakeLists.txt", "queryset/*", "tests/*"
url = "https://github.com/jgsogo/queryset-cpp"
def requirements(self):
self.requires.add("Boost/1.60.0@lasote/stable")
self.requires.add("spdlog/0.9.0@memsharded/stable")
self.requires.add("sqlite3cc/0.1.1@jgsogo/stable")
def imports(self):
self.copy("*.dll", dst="bin", src="bin") # From bin to bin
self.copy("*.dylib*", dst="bin", src="lib") # From lib to bin
def build(self):
cmake = CMake(self.settings)
flag_build_tests = "-DBUILD_TEST:BOOL=ON" if self.scope.BUILD_TEST else ""
if flag_build_tests:
self.run('cmake "%s" %s %s' % (self.conanfile_directory, cmake.command_line, flag_build_tests))
self.run("cmake --build . %s" % cmake.build_config)
self.run("ctest -C {}".format(self.settings.build_type))
def package(self):
self.copy("*.h", dst="include")
# self.copy("*.lib", dst="lib", src="lib")
# self.copy("*.a", dst="lib", src="lib")
def package_info(self):
#self.cpp_info.libs = ["queryset-cpp"] # Do not generates .lib
pass
|
import os
import fnmatch
from conans import ConanFile, CMake
class QuerysetCPP(ConanFile):
name = "queryset-cpp"
version = "0.4"
generators = "cmake"
settings = "os", "compiler", "build_type", "arch"
exports = "conanfile.py", "CMakeLists.txt", "queryset/*", "tests/*"
url = "https://github.com/jgsogo/queryset-cpp"
def requirements(self):
self.requires.add("Boost/1.60.0@lasote/stable")
self.requires.add("spdlog/0.9.0@memsharded/stable")
self.requires.add("SQLite3cc/0.1.1@monsdar/testing")
self.requires.add("sqlite3/3.15.2@jgsogo/stable") # This one is needed because sqlite3 dep in SQLite3cc/0.1.1@monsdar/testing does not copy libraries for debug version.
def imports(self):
self.copy("*.dll", dst="bin", src="bin") # From bin to bin
self.copy("*.dylib*", dst="bin", src="lib") # From lib to bin
def build(self):
cmake = CMake(self.settings)
flag_build_tests = "-DBUILD_TEST:BOOL=ON" if self.scope.BUILD_TEST else ""
if flag_build_tests:
self.run('cmake "%s" %s %s' % (self.conanfile_directory, cmake.command_line, flag_build_tests))
self.run("cmake --build . %s" % cmake.build_config)
self.run("ctest -C {}".format(self.settings.build_type))
def package(self):
self.copy("*.h", dst="include")
# self.copy("*.lib", dst="lib", src="lib")
# self.copy("*.a", dst="lib", src="lib")
def package_info(self):
#self.cpp_info.libs = ["queryset-cpp"] # Do not generates .lib
pass
| mit | Python |
446d4919c5664ff27fc3bae09439000a1a46866b | Bump version: 0.0.7 -> 0.0.8 | polysquare/cmake-module-common | conanfile.py | conanfile.py | from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.8"
class CMakeModuleCommonConan(ConanFile):
name = "cmake-module-common"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
url = "http://github.com/polysquare/cmake-module-common"
license = "MIT"
requires = ("cmake-unit/master@smspillaz/cmake-unit",
"cmake-linter-cmake/master@smspillaz/cmake-linter-cmake",
"style-linter-cmake/master@smspillaz/style-linter-cmake")
def source(self):
zip_name = "cmake-module-common.zip"
download("https://github.com/polysquare/"
"cmake-module-common/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="cmake-module-common-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/cmake-module-common",
src="cmake-module-common-" + VERSION,
keep_path=True)
| from conans import ConanFile
from conans.tools import download, unzip
import os
VERSION = "0.0.7"
class CMakeModuleCommonConan(ConanFile):
name = "cmake-module-common"
version = os.environ.get("CONAN_VERSION_OVERRIDE", VERSION)
generators = "cmake"
url = "http://github.com/polysquare/cmake-module-common"
license = "MIT"
requires = ("cmake-unit/master@smspillaz/cmake-unit",
"cmake-linter-cmake/master@smspillaz/cmake-linter-cmake",
"style-linter-cmake/master@smspillaz/style-linter-cmake")
def source(self):
zip_name = "cmake-module-common.zip"
download("https://github.com/polysquare/"
"cmake-module-common/archive/{version}.zip"
"".format(version="v" + VERSION),
zip_name)
unzip(zip_name)
os.unlink(zip_name)
def package(self):
self.copy(pattern="Find*.cmake",
dst="",
src="cmake-module-common-" + VERSION,
keep_path=True)
self.copy(pattern="*.cmake",
dst="cmake/cmake-module-common",
src="cmake-module-common-" + VERSION,
keep_path=True)
| mit | Python |
df3f94e96e5c28f8eff503b3ce264b2cf0cda209 | Make lint | ASCIT/donut,ASCIT/donut-python,ASCIT/donut-python,ASCIT/donut,ASCIT/donut | donut/modules/groups/constants.py | donut/modules/groups/constants.py | from enum import Enum
# Enum for group types in the groups table
class GroupTypes(Enum):
HOUSE = 'house'
COMMITTEE = 'committee' # i.e IHC, BOC, CRC
ASCIT = 'ascit' # For all groups that are involved with ascit
PUBLICATION = 'publication' # i.e The Tech
UGCURRENT = 'ugcurrent' # current student groups i.e ug2020
UGALUMN = 'ugalumn' # alumn groups, i.e ug2010
| from enum import Enum
# Enum for group types in the groups table
class GroupTypes(Enum):
HOUSE = 'house'
COMMITTEE = 'committee' # i.e IHC, BOC, CRC
ASCIT = 'ascit' # For all groups that are involved with ascit
PUBLICATION = 'publication' # i.e The Tech
UGCURRENT = 'ugcurrent' # current student groups i.e ug2020
UGALUMN = 'ugalumn' # alumn groups, i.e ug2010
| mit | Python |
22549161a50b26c21343b9bec8a9e075dd46b223 | fix unittest to filter a request with same url as start_requests. rel r868. ref #49 | liyy7/scrapy,jeffreyjinfeng/scrapy,zhangtao11/scrapy,pablohoffman/scrapy,cyberplant/scrapy,famorted/scrapy,nfunato/scrapy,moraesnicol/scrapy,nikgr95/scrapy,elacuesta/scrapy,emschorsch/scrapy,Lucifer-Kim/scrapy,jamesblunt/scrapy,pranjalpatil/scrapy,Parlin-Galanodel/scrapy,eliasdorneles/scrapy,AaronTao1990/scrapy,scorphus/scrapy,pawelmhm/scrapy,songfj/scrapy,scorphus/scrapy,CodeJuan/scrapy,github-account-because-they-want-it/scrapy,Chenmxs/scrapy,hyrole/scrapy,dangra/scrapy,Chenmxs/scrapy,xiao26/scrapy,nguyenhongson03/scrapy,jc0n/scrapy,hansenDise/scrapy,aivarsk/scrapy,elacuesta/scrapy,zorojean/scrapy,nguyenhongson03/scrapy,pfctdayelise/scrapy,fpy171/scrapy,Slater-Victoroff/scrapy,kmike/scrapy,joshlk/scrapy,AaronTao1990/scrapy,carlosp420/scrapy,kazitanvirahsan/scrapy,foromer4/scrapy,Djlavoy/scrapy,ENjOyAbLE1991/scrapy,huoxudong125/scrapy,mgedmin/scrapy,pombredanne/scrapy,gnemoug/scrapy,tntC4stl3/scrapy,umrashrf/scrapy,ashishnerkar1/scrapy,lacrazyboy/scrapy,jiezhu2007/scrapy,cleydson/scrapy,ssteo/scrapy,tliber/scrapy,jiezhu2007/scrapy,nikgr95/scrapy,Adai0808/scrapy-1,curita/scrapy,fontenele/scrapy,kazitanvirahsan/scrapy,taito/scrapy,dhenyjarasandy/scrapy,webmakin/scrapy,Bourneer/scrapy,hbwzhsh/scrapy,carlosp420/scrapy,foromer4/scrapy,Djlavoy/scrapy,johnardavies/scrapy,dracony/scrapy,nfunato/scrapy,CodeJuan/scrapy,haiiiiiyun/scrapy,emschorsch/scrapy,haiiiiiyun/scrapy,webmakin/scrapy,Geeglee/scrapy,redapple/scrapy,starrify/scrapy,GregoryVigoTorres/scrapy,taito/scrapy,umrashrf/scrapy,farhan0581/scrapy,xiao26/scrapy,stenskjaer/scrapy,fpy171/scrapy,shaform/scrapy,barraponto/scrapy,lacrazyboy/scrapy,chekunkov/scrapy,KublaikhanGeek/scrapy,legendtkl/scrapy,pablohoffman/scrapy,rolando-contrib/scrapy,haiiiiiyun/scrapy,olafdietsche/scrapy,profjrr/scrapy,moraesnicol/scrapy,kashyap32/scrapy,bmess/scrapy,WilliamKinaan/scrapy,yarikoptic/scrapy,Slater-Victor
off/scrapy,hbwzhsh/scrapy,hwsyy/scrapy,wzyuliyang/scrapy,github-account-because-they-want-it/scrapy,amboxer21/scrapy,w495/scrapy,ArturGaspar/scrapy,godfreyy/scrapy,aivarsk/scrapy,dacjames/scrapy,sardok/scrapy,fqul/scrapy,tagatac/scrapy,tntC4stl3/scrapy,Geeglee/scrapy,bmess/scrapy,crasker/scrapy,KublaikhanGeek/scrapy,crasker/scrapy,joshlk/scrapy,beni55/scrapy,URXtech/scrapy,scrapy/scrapy,profjrr/scrapy,nett55/scrapy,shaform/scrapy,nowopen/scrapy,fafaman/scrapy,CodeJuan/scrapy,wujuguang/scrapy,OpenWhere/scrapy,mgedmin/scrapy,w495/scrapy,yarikoptic/scrapy,JacobStevenR/scrapy,JacobStevenR/scrapy,Chenmxs/scrapy,Digenis/scrapy,ylcolala/scrapy,Preetwinder/scrapy,ssh-odoo/scrapy,livepy/scrapy,Partoo/scrapy,OpenWhere/scrapy,z-fork/scrapy,moraesnicol/scrapy,aivarsk/scrapy,agreen/scrapy,darkrho/scrapy-scrapy,curita/scrapy,IvanGavran/scrapy,livepy/scrapy,zackslash/scrapy,olorz/scrapy,tagatac/scrapy,codebhendi/scrapy,devGregA/scrapy,pfctdayelise/scrapy,YeelerG/scrapy,farhan0581/scrapy,URXtech/scrapy,Timeship/scrapy,yidongliu/scrapy,coderabhishek/scrapy,CENDARI/scrapy,pawelmhm/scrapy,pablohoffman/scrapy,legendtkl/scrapy,pawelmhm/scrapy,pranjalpatil/scrapy,redapple/scrapy,cyrixhero/scrapy,finfish/scrapy,dhenyjarasandy/scrapy,ssh-odoo/scrapy,jdemaeyer/scrapy,mouadino/scrapy,cyrixhero/scrapy,cyberplant/scrapy,Partoo/scrapy,eLRuLL/scrapy,csalazar/scrapy,GregoryVigoTorres/scrapy,wenyu1001/scrapy,beni55/scrapy,godfreyy/scrapy,dhenyjarasandy/scrapy,kimimj/scrapy,yidongliu/scrapy,taito/scrapy,csalazar/scrapy,rolando/scrapy,rdowinton/scrapy,CENDARI/scrapy,darkrho/scrapy-scrapy,YeelerG/scrapy,olorz/scrapy,cleydson/scrapy,avtoritet/scrapy,scrapy/scrapy,smaty1/scrapy,hectoruelo/scrapy,snowdream1314/scrapy,godfreyy/scrapy,URXtech/scrapy,cyrixhero/scrapy,ENjOyAbLE1991/scrapy,raphaelfruneaux/scrapy,wangjun/scrapy,tliber/scrapy,eliasdorneles/scrapy,elijah513/scrapy,ramiro/scrapy,Zephor5/scrapy,Allianzcortex/scrapy,ArturGaspar/scrapy,eliasdorneles/scrapy,olafdietsche/scrapy,IvanGavran/scrapy,Dige
nis/scrapy,arush0311/scrapy,zhangtao11/scrapy,finfish/scrapy,zorojean/scrapy,Ryezhang/scrapy,arush0311/scrapy,smaty1/scrapy,livepy/scrapy,fqul/scrapy,kalessin/scrapy,avtoritet/scrapy,fafaman/scrapy,bmess/scrapy,rolando/scrapy,zackslash/scrapy,lacrazyboy/scrapy,ssh-odoo/scrapy,Adai0808/scrapy-1,mlyundin/scrapy,emschorsch/scrapy,wujuguang/scrapy,rklabs/scrapy,starrify/scrapy,crasker/scrapy,hyrole/scrapy,Cnfc19932/scrapy,rahul-c1/scrapy,Zephor5/scrapy,kimimj/scrapy,IvanGavran/scrapy,jeffreyjinfeng/scrapy,TarasRudnyk/scrapy,wenyu1001/scrapy,hectoruelo/scrapy,tagatac/scrapy,ENjOyAbLE1991/scrapy,csalazar/scrapy,pranjalpatil/scrapy,cleydson/scrapy,cursesun/scrapy,finfish/scrapy,dgillis/scrapy,coderabhishek/scrapy,tliber/scrapy,ylcolala/scrapy,agusc/scrapy,pombredanne/scrapy,YeelerG/scrapy,1yvT0s/scrapy,sigma-random/scrapy,chekunkov/scrapy,dgillis/scrapy,famorted/scrapy,agusc/scrapy,Parlin-Galanodel/scrapy,rolando/scrapy,nguyenhongson03/scrapy,nfunato/scrapy,fontenele/scrapy,dangra/scrapy,coderabhishek/scrapy,jorik041/scrapy,wangjun/scrapy,redapple/scrapy,eLRuLL/scrapy,hansenDise/scrapy,TarasRudnyk/scrapy,johnardavies/scrapy,chekunkov/scrapy,mouadino/scrapy,Bourneer/scrapy,pombredanne/scrapy,agusc/scrapy,ssteo/scrapy,CENDARI/scrapy,dacjames/scrapy,jiezhu2007/scrapy,amboxer21/scrapy,stenskjaer/scrapy,irwinlove/scrapy,starrify/scrapy,jc0n/scrapy,kmike/scrapy,ylcolala/scrapy,kazitanvirahsan/scrapy,wzyuliyang/scrapy,scrapy/scrapy,jdemaeyer/scrapy,mgedmin/scrapy,famorted/scrapy,rahul-c1/scrapy,kalessin/scrapy,songfj/scrapy,fqul/scrapy,yusofm/scrapy,Allianzcortex/scrapy,webmakin/scrapy,ndemir/scrapy,WilliamKinaan/scrapy,dracony/scrapy,tntC4stl3/scrapy,joshlk/scrapy,elacuesta/scrapy,wzyuliyang/scrapy,Preetwinder/scrapy,ArturGaspar/scrapy,fontenele/scrapy,fpy171/scrapy,rootAvish/scrapy,foromer4/scrapy,Zephor5/scrapy,jorik041/scrapy,barraponto/scrapy,JacobStevenR/scrapy,liyy7/scrapy,rahulsharma1991/scrapy,arush0311/scrapy,jeffreyjinfeng/scrapy,KublaikhanGeek/scrapy,z-fork/scrapy,Djl
avoy/scrapy,rklabs/scrapy,snowdream1314/scrapy,Timeship/scrapy,rdowinton/scrapy,cyberplant/scrapy,profjrr/scrapy,rolando-contrib/scrapy,Parlin-Galanodel/scrapy,legendtkl/scrapy,zorojean/scrapy,nikgr95/scrapy,rolando-contrib/scrapy,farhan0581/scrapy,heamon7/scrapy,rahulsharma1991/scrapy,nett55/scrapy,avtoritet/scrapy,cursesun/scrapy,yarikoptic/scrapy,darkrho/scrapy-scrapy,agreen/scrapy,Ryezhang/scrapy,Lucifer-Kim/scrapy,huoxudong125/scrapy,AaronTao1990/scrapy,barraponto/scrapy,olafdietsche/scrapy,mlyundin/scrapy,irwinlove/scrapy,z-fork/scrapy,rahulsharma1991/scrapy,hwsyy/scrapy,rklabs/scrapy,shaform/scrapy,beni55/scrapy,GregoryVigoTorres/scrapy,gnemoug/scrapy,w495/scrapy,yidongliu/scrapy,zjuwangg/scrapy,zackslash/scrapy,elijah513/scrapy,hwsyy/scrapy,mlyundin/scrapy,Geeglee/scrapy,github-account-because-they-want-it/scrapy,zjuwangg/scrapy,gbirke/scrapy,zhangtao11/scrapy,1yvT0s/scrapy,sardok/scrapy,zjuwangg/scrapy,rdowinton/scrapy,snowdream1314/scrapy,wujuguang/scrapy,xiao26/scrapy,stenskjaer/scrapy,rootAvish/scrapy,ramiro/scrapy,Slater-Victoroff/scrapy,jorik041/scrapy,wangjun/scrapy,hectoruelo/scrapy,rootAvish/scrapy,umrashrf/scrapy,Adai0808/scrapy-1,songfj/scrapy,curita/scrapy,carlosp420/scrapy,wenyu1001/scrapy,raphaelfruneaux/scrapy,Cnfc19932/scrapy,eLRuLL/scrapy,ndemir/scrapy,codebhendi/scrapy,pfctdayelise/scrapy,Bourneer/scrapy,olorz/scrapy,yusofm/scrapy,heamon7/scrapy,WilliamKinaan/scrapy,dangra/scrapy,irwinlove/scrapy,jc0n/scrapy,kmike/scrapy,huoxudong125/scrapy,kashyap32/scrapy,kimimj/scrapy,Timeship/scrapy,ssteo/scrapy,nett55/scrapy,dracony/scrapy,OpenWhere/scrapy,sigma-random/scrapy,nowopen/scrapy,dacjames/scrapy,Partoo/scrapy,liyy7/scrapy,Ryezhang/scrapy,jdemaeyer/scrapy,devGregA/scrapy,Digenis/scrapy,raphaelfruneaux/scrapy,codebhendi/scrapy,johnardavies/scrapy,agreen/scrapy,nowopen/scrapy,devGregA/scrapy,heamon7/scrapy,ramiro/scrapy,smaty1/scrapy,Cnfc19932/scrapy,Preetwinder/scrapy,hansenDise/scrapy,cursesun/scrapy,gbirke/scrapy,1yvT0s/scrapy,kalessin/scrap
y,fafaman/scrapy,Allianzcortex/scrapy,kashyap32/scrapy,hbwzhsh/scrapy,scorphus/scrapy,hyrole/scrapy,jamesblunt/scrapy,TarasRudnyk/scrapy,rahul-c1/scrapy,ashishnerkar1/scrapy,yusofm/scrapy,amboxer21/scrapy,elijah513/scrapy,Lucifer-Kim/scrapy,dgillis/scrapy | scrapy/trunk/scrapy/tests/test_spidermiddleware_duplicatesfilter.py | scrapy/trunk/scrapy/tests/test_spidermiddleware_duplicatesfilter.py | import unittest
from scrapy.spider import spiders
from scrapy.http import Request, Response
from scrapy.contrib.spidermiddleware.duplicatesfilter import DuplicatesFilterMiddleware, SimplePerDomainFilter
class DuplicatesFilterMiddlewareTest(unittest.TestCase):
    """Exercises DuplicatesFilterMiddleware request de-duplication."""

    def setUp(self):
        # Point the spider registry at the bundled test spiders and grab
        # the scrapytest.org spider used throughout this test case.
        spiders.spider_modules = ['scrapy.tests.test_spiders']
        spiders.reload()
        self.spider = spiders.fromdomain('scrapytest.org')

    def test_process_spider_output(self):
        middleware = DuplicatesFilterMiddleware()
        middleware.filter.open('scrapytest.org')

        response = Response('http://scrapytest.org/')
        response.request = Request('http://scrapytest.org/')

        dupe_of_seed = Request('http://scrapytest.org/')
        unique_one = Request('http://scrapytest.org/1')
        unique_two = Request('http://scrapytest.org/2')
        dupe_of_two = Request('http://scrapytest.org/2')

        candidates = [dupe_of_seed, unique_one, unique_two, dupe_of_two]
        filtered = list(middleware.process_spider_output(response, candidates, self.spider))

        # Only the first occurrence of each URL may survive filtering.
        assert dupe_of_seed not in filtered
        assert unique_one in filtered
        assert unique_two in filtered
        assert dupe_of_two not in filtered

        middleware.filter.close('scrapytest.org')
class SimplePerDomainFilterTest(unittest.TestCase):
    """Exercises the SimplePerDomainFilter open/add/close lifecycle."""

    def test_filter(self):
        domain = 'scrapytest.org'
        dupe_filter = SimplePerDomainFilter()

        dupe_filter.open(domain)
        assert domain in dupe_filter

        first = Request('http://scrapytest.org/1')
        second = Request('http://scrapytest.org/2')
        second_again = Request('http://scrapytest.org/2')

        # add() returns True for unseen requests, False for repeats.
        assert dupe_filter.add(domain, first)
        assert dupe_filter.add(domain, second)
        assert not dupe_filter.add(domain, second_again)

        dupe_filter.close(domain)
        assert domain not in dupe_filter
| import unittest
from scrapy.spider import spiders
from scrapy.http import Request, Response
from scrapy.contrib.spidermiddleware.duplicatesfilter import DuplicatesFilterMiddleware, SimplePerDomainFilter
class DuplicatesFilterMiddlewareTest(unittest.TestCase):
    """Tests for DuplicatesFilterMiddleware (unittest-assertion style)."""

    def setUp(self):
        # Load the bundled test spiders and fetch the scrapytest.org spider.
        spiders.spider_modules = ['scrapy.tests.test_spiders']
        spiders.reload()
        self.spider = spiders.fromdomain('scrapytest.org')

    def test_process_spider_output(self):
        mw = DuplicatesFilterMiddleware()
        mw.filter.open('scrapytest.org')
        rq = Request('http://scrapytest.org/')
        response = Response('http://scrapytest.org/')
        response.request = rq
        r1 = Request('http://scrapytest.org/1')
        r2 = Request('http://scrapytest.org/2')
        r3 = Request('http://scrapytest.org/2')
        filtered = list(mw.process_spider_output(response, [r1, r2, r3], self.spider))
        # rq is not part of the input and must not appear in the output;
        # r3 repeats r2's URL so only r1 and r2 may survive filtering.
        self.assertFalse(rq in filtered)
        self.assertTrue(r1 in filtered)
        self.assertTrue(r2 in filtered)
        self.assertFalse(r3 in filtered)
        mw.filter.close('scrapytest.org')
class SimplePerDomainFilterTest(unittest.TestCase):
    """Tests the SimplePerDomainFilter open/add/close lifecycle."""

    def test_filter(self):
        domain = 'scrapytest.org'
        # NOTE: 'filter' shadows the builtin of the same name; kept as-is.
        filter = SimplePerDomainFilter()
        filter.open(domain)
        self.assertTrue(domain in filter)
        r1 = Request('http://scrapytest.org/1')
        r2 = Request('http://scrapytest.org/2')
        r3 = Request('http://scrapytest.org/2')
        # add() returns True for unseen requests, False for repeats
        # (r3 duplicates r2's URL).
        self.assertTrue(filter.add(domain, r1))
        self.assertTrue(filter.add(domain, r2))
        self.assertFalse(filter.add(domain, r3))
        filter.close(domain)
        self.assertFalse(domain in filter)
| bsd-3-clause | Python |
2af01c0293db53dc80c552df3986d0e088b65b76 | improve player params extraction(closes #22638) | Orochimarufan/youtube-dl,rg3/youtube-dl,remitamine/youtube-dl,remitamine/youtube-dl,rg3/youtube-dl,Tatsh/youtube-dl,yan12125/youtube-dl,ozburo/youtube-dl,Tatsh/youtube-dl,Orochimarufan/youtube-dl,erikdejonge/youtube-dl,erikdejonge/youtube-dl,nyuszika7h/youtube-dl,yan12125/youtube-dl,spvkgn/youtube-dl,spvkgn/youtube-dl,nyuszika7h/youtube-dl,vinegret/youtube-dl,ozburo/youtube-dl,vinegret/youtube-dl | youtube_dl/extractor/bokecc.py | youtube_dl/extractor/bokecc.py | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_parse_qs
from ..utils import ExtractorError
class BokeCCBaseIE(InfoExtractor):
    """Shared format-extraction logic for BokeCC (CC视频) embeds."""

    def _extract_bokecc_formats(self, webpage, video_id, format_id=None):
        # Pull the query string out of the embedded player URL; it carries
        # the site id and video id needed by the playinfo endpoint.
        query = self._html_search_regex(
            r'<(?:script|embed)[^>]+src=(?P<q>["\'])(?:https?:)?//p\.bokecc\.com/(?:player|flash/player\.swf)\?(?P<query>.+?)(?P=q)',
            webpage, 'player params', group='query')
        params = compat_parse_qs(query)

        playinfo_url = 'http://p.bokecc.com/servlet/playinfo?uid=%s&vid=%s&m=1' % (
            params['siteid'][0], params['vid'][0])
        info_xml = self._download_xml(playinfo_url, video_id)

        formats = []
        for quality in info_xml.findall('./video/quality'):
            formats.append({
                'format_id': format_id,
                'url': quality.find('./copy').attrib['playurl'],
                'preference': int(quality.attrib['value']),
            })

        self._sort_formats(formats)
        return formats
class BokeCCIE(BokeCCBaseIE):
    """Extractor for union.bokecc.com playvideo.bo pages."""

    _IE_DESC = 'CC视频'
    _VALID_URL = r'https?://union\.bokecc\.com/playvideo\.bo\?(?P<query>.*)'

    _TESTS = [{
        'url': 'http://union.bokecc.com/playvideo.bo?vid=E0ABAE9D4F509B189C33DC5901307461&uid=FE644790DE9D154A',
        'info_dict': {
            'id': 'FE644790DE9D154A_E0ABAE9D4F509B189C33DC5901307461',
            'ext': 'flv',
            'title': 'BokeCC Video',
        },
    }]

    def _real_extract(self, url):
        query = re.match(self._VALID_URL, url).group('query')
        qs = compat_parse_qs(query)
        uid = qs.get('uid')
        vid = qs.get('vid')
        # Both identifiers are mandatory to build the video id.
        if not vid or not uid:
            raise ExtractorError('Invalid URL', expected=True)

        video_id = '%s_%s' % (uid[0], vid[0])
        webpage = self._download_webpage(url, video_id)

        return {
            'id': video_id,
            # The page itself exposes no usable title.
            'title': 'BokeCC Video',
            'formats': self._extract_bokecc_formats(webpage, video_id),
        }
| # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import compat_parse_qs
from ..utils import ExtractorError
class BokeCCBaseIE(InfoExtractor):
    """Shared format-extraction logic for BokeCC (CC视频) embeds."""

    def _extract_bokecc_formats(self, webpage, video_id, format_id=None):
        # Accept http/https/protocol-relative player URLs, either quote
        # style, and both the HTML5 player and the legacy Flash player.
        # The previous pattern only matched double-quoted http:// URLs of
        # the form //p.bokecc.com/player?..., so https and Flash embeds
        # failed extraction.
        player_params_str = self._html_search_regex(
            r'<(?:script|embed)[^>]+src=(?P<q>["\'])(?:https?:)?//p\.bokecc\.com/(?:player|flash/player\.swf)\?(?P<query>.+?)(?P=q)',
            webpage, 'player params', group='query')

        player_params = compat_parse_qs(player_params_str)

        # siteid/vid from the player query identify the video for the
        # playinfo service, which returns the per-quality download URLs.
        info_xml = self._download_xml(
            'http://p.bokecc.com/servlet/playinfo?uid=%s&vid=%s&m=1' % (
                player_params['siteid'][0], player_params['vid'][0]), video_id)

        formats = [{
            'format_id': format_id,
            'url': quality.find('./copy').attrib['playurl'],
            'preference': int(quality.attrib['value']),
        } for quality in info_xml.findall('./video/quality')]

        self._sort_formats(formats)

        return formats
class BokeCCIE(BokeCCBaseIE):
    """Extractor for union.bokecc.com playvideo.bo pages."""

    _IE_DESC = 'CC视频'
    _VALID_URL = r'https?://union\.bokecc\.com/playvideo\.bo\?(?P<query>.*)'

    _TESTS = [{
        'url': 'http://union.bokecc.com/playvideo.bo?vid=E44D40C15E65EA30&uid=CD0C5D3C8614B28B',
        'info_dict': {
            'id': 'CD0C5D3C8614B28B_E44D40C15E65EA30',
            'ext': 'flv',
            'title': 'BokeCC Video',
        },
    }]

    def _real_extract(self, url):
        # uid and vid from the page URL's query string form the video id.
        qs = compat_parse_qs(re.match(self._VALID_URL, url).group('query'))
        if not qs.get('vid') or not qs.get('uid'):
            raise ExtractorError('Invalid URL', expected=True)

        video_id = '%s_%s' % (qs['uid'][0], qs['vid'][0])

        webpage = self._download_webpage(url, video_id)

        return {
            'id': video_id,
            'title': 'BokeCC Video',  # no title provided in the webpage
            'formats': self._extract_bokecc_formats(webpage, video_id),
        }
| unlicense | Python |
d72aea25f2a6d18c7f6d213144e0861fac65a4f9 | Use named match for Gravatar email | Cheppers/zulip,eeshangarg/zulip,jimmy54/zulip,stamhe/zulip,jeffcao/zulip,ufosky-server/zulip,yuvipanda/zulip,kokoar/zulip,zachallaun/zulip,rht/zulip,suxinde2009/zulip,easyfmxu/zulip,timabbott/zulip,sup95/zulip,seapasulli/zulip,yocome/zulip,bowlofstew/zulip,LAndreas/zulip,samatdav/zulip,littledogboy/zulip,niftynei/zulip,Suninus/zulip,jonesgithub/zulip,jeffcao/zulip,cosmicAsymmetry/zulip,Batterfii/zulip,kaiyuanheshang/zulip,themass/zulip,willingc/zulip,susansls/zulip,armooo/zulip,dnmfarrell/zulip,cosmicAsymmetry/zulip,rishig/zulip,Qgap/zulip,umkay/zulip,adnanh/zulip,TigorC/zulip,Batterfii/zulip,Cheppers/zulip,dawran6/zulip,praveenaki/zulip,mahim97/zulip,MayB/zulip,qq1012803704/zulip,KingxBanana/zulip,ipernet/zulip,kokoar/zulip,bssrdf/zulip,johnnygaddarr/zulip,akuseru/zulip,krtkmj/zulip,KJin99/zulip,seapasulli/zulip,bowlofstew/zulip,ApsOps/zulip,amyliu345/zulip,schatt/zulip,umkay/zulip,armooo/zulip,hj3938/zulip,isht3/zulip,blaze225/zulip,SmartPeople/zulip,noroot/zulip,fw1121/zulip,grave-w-grave/zulip,sharmaeklavya2/zulip,jainayush975/zulip,wweiradio/zulip,zorojean/zulip,zofuthan/zulip,ahmadassaf/zulip,zacps/zulip,littledogboy/zulip,gkotian/zulip,rishig/zulip,aliceriot/zulip,isht3/zulip,Juanvulcano/zulip,shrikrishnaholla/zulip,Batterfii/zulip,MariaFaBella85/zulip,bastianh/zulip,jainayush975/zulip,eastlhu/zulip,Gabriel0402/zulip,wweiradio/zulip,atomic-labs/zulip,timabbott/zulip,mdavid/zulip,jessedhillon/zulip,JPJPJPOPOP/zulip,isht3/zulip,jonesgithub/zulip,shubhamdhama/zulip,mansilladev/zulip,esander91/zulip,technicalpickles/zulip,calvinleenyc/zulip,vakila/zulip,zachallaun/zulip,JanzTam/zulip,wdaher/zulip,sonali0901/zulip,arpith/zulip,armooo/zulip,vaidap/zulip,tdr130/zulip,peguin40/zulip,he15his/zulip,voidException/zulip,calvinleenyc/zulip,kou/zulip,johnny9/zulip,codeKonami/zulip,deer-hope/zulip,isht3/zulip,amallia/zulip,hayderimran7/zulip,vaidap/zulip,karamcnair/zulip,vakila/zulip,hustlzp/zul
ip,MariaFaBella85/zulip,m1ssou/zulip,ryanbackman/zulip,vabs22/zulip,hafeez3000/zulip,jrowan/zulip,JPJPJPOPOP/zulip,adnanh/zulip,joshisa/zulip,zachallaun/zulip,KingxBanana/zulip,esander91/zulip,developerfm/zulip,developerfm/zulip,easyfmxu/zulip,wdaher/zulip,zorojean/zulip,vakila/zulip,peguin40/zulip,jainayush975/zulip,dotcool/zulip,Diptanshu8/zulip,KJin99/zulip,hayderimran7/zulip,wweiradio/zulip,mahim97/zulip,wavelets/zulip,guiquanz/zulip,johnny9/zulip,schatt/zulip,hafeez3000/zulip,johnnygaddarr/zulip,amanharitsh123/zulip,firstblade/zulip,hengqujushi/zulip,esander91/zulip,brainwane/zulip,vakila/zulip,johnny9/zulip,aakash-cr7/zulip,grave-w-grave/zulip,amanharitsh123/zulip,MayB/zulip,aakash-cr7/zulip,susansls/zulip,vikas-parashar/zulip,moria/zulip,bitemyapp/zulip,adnanh/zulip,technicalpickles/zulip,hayderimran7/zulip,DazWorrall/zulip,stamhe/zulip,johnny9/zulip,glovebx/zulip,hayderimran7/zulip,bssrdf/zulip,peguin40/zulip,hengqujushi/zulip,ufosky-server/zulip,firstblade/zulip,tbutter/zulip,zachallaun/zulip,bastianh/zulip,brainwane/zulip,souravbadami/zulip,aps-sids/zulip,pradiptad/zulip,ahmadassaf/zulip,blaze225/zulip,dxq-git/zulip,niftynei/zulip,j831/zulip,brainwane/zulip,guiquanz/zulip,xuanhan863/zulip,jackrzhang/zulip,proliming/zulip,bastianh/zulip,gkotian/zulip,xuanhan863/zulip,rht/zulip,qq1012803704/zulip,tiansiyuan/zulip,joyhchen/zulip,saitodisse/zulip,bssrdf/zulip,ashwinirudrappa/zulip,m1ssou/zulip,vabs22/zulip,ryanbackman/zulip,amyliu345/zulip,kaiyuanheshang/zulip,vabs22/zulip,xuxiao/zulip,rishig/zulip,codeKonami/zulip,mohsenSy/zulip,aliceriot/zulip,jessedhillon/zulip,luyifan/zulip,blaze225/zulip,j831/zulip,Gabriel0402/zulip,lfranchi/zulip,christi3k/zulip,joshisa/zulip,cosmicAsymmetry/zulip,karamcnair/zulip,he15his/zulip,Gabriel0402/zulip,mansilladev/zulip,peiwei/zulip,KJin99/zulip,jonesgithub/zulip,SmartPeople/zulip,tiansiyuan/zulip,xuanhan863/zulip,AZtheAsian/zulip,developerfm/zulip,gkotian/zulip,brockwhittaker/zulip,jerryge/zulip,showell/zulip,m1ssou/zulip,hafe
ez3000/zulip,RobotCaleb/zulip,peguin40/zulip,paxapy/zulip,bluesea/zulip,yocome/zulip,sonali0901/zulip,hengqujushi/zulip,jackrzhang/zulip,luyifan/zulip,tbutter/zulip,hj3938/zulip,eastlhu/zulip,MayB/zulip,tbutter/zulip,brockwhittaker/zulip,fw1121/zulip,pradiptad/zulip,MariaFaBella85/zulip,showell/zulip,synicalsyntax/zulip,wangdeshui/zulip,PhilSk/zulip,Vallher/zulip,ikasumiwt/zulip,ahmadassaf/zulip,proliming/zulip,synicalsyntax/zulip,dxq-git/zulip,Batterfii/zulip,MayB/zulip,eeshangarg/zulip,brainwane/zulip,xuxiao/zulip,m1ssou/zulip,so0k/zulip,akuseru/zulip,PaulPetring/zulip,johnnygaddarr/zulip,zwily/zulip,hj3938/zulip,suxinde2009/zulip,wavelets/zulip,jimmy54/zulip,zorojean/zulip,levixie/zulip,natanovia/zulip,yuvipanda/zulip,KJin99/zulip,jrowan/zulip,Drooids/zulip,zofuthan/zulip,saitodisse/zulip,wweiradio/zulip,stamhe/zulip,jackrzhang/zulip,zhaoweigg/zulip,LeeRisk/zulip,huangkebo/zulip,natanovia/zulip,RobotCaleb/zulip,DazWorrall/zulip,zofuthan/zulip,developerfm/zulip,AZtheAsian/zulip,jeffcao/zulip,saitodisse/zulip,nicholasbs/zulip,swinghu/zulip,so0k/zulip,Juanvulcano/zulip,udxxabp/zulip,mdavid/zulip,jrowan/zulip,so0k/zulip,umkay/zulip,synicalsyntax/zulip,Juanvulcano/zulip,aliceriot/zulip,huangkebo/zulip,luyifan/zulip,jonesgithub/zulip,avastu/zulip,ashwinirudrappa/zulip,verma-varsha/zulip,akuseru/zulip,susansls/zulip,schatt/zulip,EasonYi/zulip,KingxBanana/zulip,ufosky-server/zulip,moria/zulip,suxinde2009/zulip,wangdeshui/zulip,JanzTam/zulip,punchagan/zulip,jphilipsen05/zulip,bowlofstew/zulip,zulip/zulip,jessedhillon/zulip,synicalsyntax/zulip,ApsOps/zulip,amyliu345/zulip,sup95/zulip,ipernet/zulip,eastlhu/zulip,natanovia/zulip,themass/zulip,eastlhu/zulip,bastianh/zulip,dattatreya303/zulip,kaiyuanheshang/zulip,hengqujushi/zulip,timabbott/zulip,tommyip/zulip,LeeRisk/zulip,technicalpickles/zulip,willingc/zulip,codeKonami/zulip,Frouk/zulip,wangdeshui/zulip,mohsenSy/zulip,nicholasbs/zulip,jonesgithub/zulip,bowlofstew/zulip,Frouk/zulip,timabbott/zulip,so0k/zulip,bssrdf/zulip,zha
oweigg/zulip,jphilipsen05/zulip,EasonYi/zulip,AZtheAsian/zulip,tommyip/zulip,peguin40/zulip,seapasulli/zulip,pradiptad/zulip,willingc/zulip,jphilipsen05/zulip,moria/zulip,kaiyuanheshang/zulip,schatt/zulip,peiwei/zulip,LeeRisk/zulip,punchagan/zulip,itnihao/zulip,bluesea/zulip,peiwei/zulip,kou/zulip,johnny9/zulip,eastlhu/zulip,bastianh/zulip,JPJPJPOPOP/zulip,Suninus/zulip,isht3/zulip,christi3k/zulip,jessedhillon/zulip,brockwhittaker/zulip,hj3938/zulip,praveenaki/zulip,Jianchun1/zulip,Vallher/zulip,JPJPJPOPOP/zulip,avastu/zulip,Batterfii/zulip,tommyip/zulip,ufosky-server/zulip,voidException/zulip,zhaoweigg/zulip,xuanhan863/zulip,Galexrt/zulip,Jianchun1/zulip,zulip/zulip,Galexrt/zulip,bssrdf/zulip,qq1012803704/zulip,bluesea/zulip,paxapy/zulip,sharmaeklavya2/zulip,ApsOps/zulip,JanzTam/zulip,ryanbackman/zulip,kokoar/zulip,themass/zulip,Qgap/zulip,MariaFaBella85/zulip,Batterfii/zulip,ipernet/zulip,Gabriel0402/zulip,itnihao/zulip,guiquanz/zulip,kaiyuanheshang/zulip,ryansnowboarder/zulip,firstblade/zulip,shaunstanislaus/zulip,JanzTam/zulip,lfranchi/zulip,levixie/zulip,dhcrzf/zulip,verma-varsha/zulip,mansilladev/zulip,yocome/zulip,PaulPetring/zulip,natanovia/zulip,Diptanshu8/zulip,Juanvulcano/zulip,tdr130/zulip,atomic-labs/zulip,wweiradio/zulip,hengqujushi/zulip,reyha/zulip,Galexrt/zulip,hackerkid/zulip,proliming/zulip,dattatreya303/zulip,punchagan/zulip,showell/zulip,praveenaki/zulip,dnmfarrell/zulip,christi3k/zulip,paxapy/zulip,calvinleenyc/zulip,zhaoweigg/zulip,moria/zulip,hackerkid/zulip,saitodisse/zulip,dotcool/zulip,zwily/zulip,atomic-labs/zulip,nicholasbs/zulip,Drooids/zulip,themass/zulip,suxinde2009/zulip,natanovia/zulip,ryansnowboarder/zulip,eastlhu/zulip,dattatreya303/zulip,avastu/zulip,jeffcao/zulip,zulip/zulip,zulip/zulip,TigorC/zulip,xuxiao/zulip,shrikrishnaholla/zulip,ashwinirudrappa/zulip,dwrpayne/zulip,jeffcao/zulip,umkay/zulip,paxapy/zulip,zacps/zulip,ufosky-server/zulip,hackerkid/zulip,codeKonami/zulip,amallia/zulip,wavelets/zulip,hustlzp/zulip,hafeez3000/zu
lip,souravbadami/zulip,arpith/zulip,dnmfarrell/zulip,mohsenSy/zulip,reyha/zulip,amanharitsh123/zulip,jimmy54/zulip,swinghu/zulip,bastianh/zulip,lfranchi/zulip,esander91/zulip,LAndreas/zulip,verma-varsha/zulip,jainayush975/zulip,natanovia/zulip,aliceriot/zulip,hengqujushi/zulip,alliejones/zulip,babbage/zulip,proliming/zulip,technicalpickles/zulip,aps-sids/zulip,arpitpanwar/zulip,ericzhou2008/zulip,Galexrt/zulip,arpitpanwar/zulip,rht/zulip,Vallher/zulip,mahim97/zulip,vaidap/zulip,Suninus/zulip,hj3938/zulip,jessedhillon/zulip,shubhamdhama/zulip,umkay/zulip,alliejones/zulip,umkay/zulip,SmartPeople/zulip,j831/zulip,christi3k/zulip,esander91/zulip,dnmfarrell/zulip,yocome/zulip,hustlzp/zulip,thomasboyt/zulip,kou/zulip,bowlofstew/zulip,Suninus/zulip,ericzhou2008/zulip,Drooids/zulip,mansilladev/zulip,eastlhu/zulip,RobotCaleb/zulip,arpith/zulip,hackerkid/zulip,themass/zulip,he15his/zulip,mohsenSy/zulip,KJin99/zulip,susansls/zulip,MayB/zulip,wangdeshui/zulip,eeshangarg/zulip,vabs22/zulip,zofuthan/zulip,he15his/zulip,showell/zulip,zachallaun/zulip,vabs22/zulip,wweiradio/zulip,aakash-cr7/zulip,PaulPetring/zulip,jerryge/zulip,huangkebo/zulip,easyfmxu/zulip,codeKonami/zulip,tdr130/zulip,dhcrzf/zulip,sup95/zulip,levixie/zulip,seapasulli/zulip,RobotCaleb/zulip,qq1012803704/zulip,shrikrishnaholla/zulip,tiansiyuan/zulip,wdaher/zulip,aps-sids/zulip,shaunstanislaus/zulip,amallia/zulip,amallia/zulip,kaiyuanheshang/zulip,zulip/zulip,voidException/zulip,hengqujushi/zulip,easyfmxu/zulip,tiansiyuan/zulip,Vallher/zulip,zhaoweigg/zulip,aps-sids/zulip,jeffcao/zulip,showell/zulip,adnanh/zulip,akuseru/zulip,armooo/zulip,KingxBanana/zulip,johnnygaddarr/zulip,yuvipanda/zulip,thomasboyt/zulip,joyhchen/zulip,dotcool/zulip,Frouk/zulip,stamhe/zulip,Diptanshu8/zulip,dotcool/zulip,Qgap/zulip,gigawhitlocks/zulip,samatdav/zulip,guiquanz/zulip,amyliu345/zulip,eeshangarg/zulip,shaunstanislaus/zulip,SmartPeople/zulip,niftynei/zulip,xuanhan863/zulip,ericzhou2008/zulip,zachallaun/zulip,littledogboy/zulip,willin
gc/zulip,timabbott/zulip,jackrzhang/zulip,zwily/zulip,udxxabp/zulip,DazWorrall/zulip,EasonYi/zulip,rishig/zulip,nicholasbs/zulip,shubhamdhama/zulip,arpitpanwar/zulip,yuvipanda/zulip,kou/zulip,xuxiao/zulip,alliejones/zulip,codeKonami/zulip,amallia/zulip,gkotian/zulip,wangdeshui/zulip,kou/zulip,dhcrzf/zulip,bitemyapp/zulip,j831/zulip,rishig/zulip,akuseru/zulip,dhcrzf/zulip,RobotCaleb/zulip,adnanh/zulip,kaiyuanheshang/zulip,qq1012803704/zulip,johnnygaddarr/zulip,hafeez3000/zulip,Juanvulcano/zulip,dwrpayne/zulip,deer-hope/zulip,guiquanz/zulip,Galexrt/zulip,atomic-labs/zulip,nicholasbs/zulip,shaunstanislaus/zulip,huangkebo/zulip,ashwinirudrappa/zulip,sharmaeklavya2/zulip,gigawhitlocks/zulip,amanharitsh123/zulip,TigorC/zulip,firstblade/zulip,tbutter/zulip,noroot/zulip,avastu/zulip,zacps/zulip,tbutter/zulip,calvinleenyc/zulip,PhilSk/zulip,themass/zulip,dattatreya303/zulip,easyfmxu/zulip,souravbadami/zulip,calvinleenyc/zulip,armooo/zulip,peiwei/zulip,itnihao/zulip,dnmfarrell/zulip,showell/zulip,arpitpanwar/zulip,mahim97/zulip,Cheppers/zulip,arpith/zulip,littledogboy/zulip,willingc/zulip,Cheppers/zulip,bastianh/zulip,johnny9/zulip,dhcrzf/zulip,lfranchi/zulip,Cheppers/zulip,ahmadassaf/zulip,babbage/zulip,ufosky-server/zulip,andersk/zulip,huangkebo/zulip,stamhe/zulip,deer-hope/zulip,deer-hope/zulip,cosmicAsymmetry/zulip,jphilipsen05/zulip,saitodisse/zulip,bluesea/zulip,jessedhillon/zulip,eeshangarg/zulip,joyhchen/zulip,krtkmj/zulip,udxxabp/zulip,kou/zulip,punchagan/zulip,voidException/zulip,Qgap/zulip,hustlzp/zulip,johnny9/zulip,joshisa/zulip,praveenaki/zulip,voidException/zulip,gigawhitlocks/zulip,rishig/zulip,souravbadami/zulip,Jianchun1/zulip,xuxiao/zulip,proliming/zulip,yuvipanda/zulip,technicalpickles/zulip,nicholasbs/zulip,xuanhan863/zulip,joshisa/zulip,bssrdf/zulip,luyifan/zulip,Jianchun1/zulip,zofuthan/zulip,gkotian/zulip,Frouk/zulip,swinghu/zulip,seapasulli/zulip,vakila/zulip,johnnygaddarr/zulip,mahim97/zulip,glovebx/zulip,avastu/zulip,grave-w-grave/zulip,MayB/zulip,j
immy54/zulip,gkotian/zulip,christi3k/zulip,dxq-git/zulip,KingxBanana/zulip,aliceriot/zulip,amallia/zulip,jimmy54/zulip,grave-w-grave/zulip,mahim97/zulip,noroot/zulip,LeeRisk/zulip,hj3938/zulip,krtkmj/zulip,glovebx/zulip,praveenaki/zulip,bluesea/zulip,aliceriot/zulip,hayderimran7/zulip,lfranchi/zulip,PaulPetring/zulip,bitemyapp/zulip,EasonYi/zulip,EasonYi/zulip,esander91/zulip,zulip/zulip,bitemyapp/zulip,cosmicAsymmetry/zulip,thomasboyt/zulip,kokoar/zulip,dhcrzf/zulip,gigawhitlocks/zulip,dawran6/zulip,samatdav/zulip,MariaFaBella85/zulip,wangdeshui/zulip,udxxabp/zulip,udxxabp/zulip,dxq-git/zulip,sonali0901/zulip,jerryge/zulip,ashwinirudrappa/zulip,AZtheAsian/zulip,arpitpanwar/zulip,ApsOps/zulip,tbutter/zulip,schatt/zulip,luyifan/zulip,zwily/zulip,zorojean/zulip,vakila/zulip,aps-sids/zulip,developerfm/zulip,suxinde2009/zulip,karamcnair/zulip,amanharitsh123/zulip,aliceriot/zulip,firstblade/zulip,sonali0901/zulip,wavelets/zulip,aakash-cr7/zulip,rht/zulip,Vallher/zulip,joyhchen/zulip,esander91/zulip,pradiptad/zulip,peiwei/zulip,littledogboy/zulip,hustlzp/zulip,sharmaeklavya2/zulip,shrikrishnaholla/zulip,MayB/zulip,arpith/zulip,shrikrishnaholla/zulip,verma-varsha/zulip,verma-varsha/zulip,jackrzhang/zulip,Frouk/zulip,swinghu/zulip,itnihao/zulip,aakash-cr7/zulip,jrowan/zulip,udxxabp/zulip,jackrzhang/zulip,shubhamdhama/zulip,stamhe/zulip,yuvipanda/zulip,vikas-parashar/zulip,natanovia/zulip,atomic-labs/zulip,mansilladev/zulip,Vallher/zulip,zorojean/zulip,Diptanshu8/zulip,proliming/zulip,shubhamdhama/zulip,mdavid/zulip,schatt/zulip,tdr130/zulip,levixie/zulip,Qgap/zulip,Qgap/zulip,thomasboyt/zulip,developerfm/zulip,rht/zulip,suxinde2009/zulip,sonali0901/zulip,Jianchun1/zulip,pradiptad/zulip,developerfm/zulip,joyhchen/zulip,MariaFaBella85/zulip,arpitpanwar/zulip,zachallaun/zulip,Drooids/zulip,dwrpayne/zulip,firstblade/zulip,dawran6/zulip,dwrpayne/zulip,KJin99/zulip,krtkmj/zulip,easyfmxu/zulip,pradiptad/zulip,ApsOps/zulip,karamcnair/zulip,proliming/zulip,tiansiyuan/zulip,gkotian/z
ulip,brainwane/zulip,praveenaki/zulip,TigorC/zulip,bssrdf/zulip,vaidap/zulip,gigawhitlocks/zulip,levixie/zulip,codeKonami/zulip,tdr130/zulip,dwrpayne/zulip,shrikrishnaholla/zulip,hj3938/zulip,LeeRisk/zulip,zorojean/zulip,guiquanz/zulip,moria/zulip,noroot/zulip,dotcool/zulip,ryanbackman/zulip,dhcrzf/zulip,andersk/zulip,moria/zulip,isht3/zulip,alliejones/zulip,adnanh/zulip,calvinleenyc/zulip,andersk/zulip,jeffcao/zulip,he15his/zulip,arpitpanwar/zulip,andersk/zulip,dawran6/zulip,LeeRisk/zulip,sup95/zulip,showell/zulip,sharmaeklavya2/zulip,wweiradio/zulip,ericzhou2008/zulip,Drooids/zulip,voidException/zulip,ericzhou2008/zulip,fw1121/zulip,mansilladev/zulip,DazWorrall/zulip,kokoar/zulip,thomasboyt/zulip,niftynei/zulip,xuxiao/zulip,easyfmxu/zulip,KingxBanana/zulip,jimmy54/zulip,PaulPetring/zulip,swinghu/zulip,aakash-cr7/zulip,DazWorrall/zulip,DazWorrall/zulip,dawran6/zulip,EasonYi/zulip,brockwhittaker/zulip,huangkebo/zulip,itnihao/zulip,itnihao/zulip,TigorC/zulip,amanharitsh123/zulip,jainayush975/zulip,ahmadassaf/zulip,alliejones/zulip,noroot/zulip,reyha/zulip,vikas-parashar/zulip,zacps/zulip,amyliu345/zulip,dattatreya303/zulip,cosmicAsymmetry/zulip,akuseru/zulip,littledogboy/zulip,vaidap/zulip,grave-w-grave/zulip,schatt/zulip,zwily/zulip,fw1121/zulip,hustlzp/zulip,gigawhitlocks/zulip,Qgap/zulip,ufosky-server/zulip,fw1121/zulip,Batterfii/zulip,PaulPetring/zulip,bitemyapp/zulip,PhilSk/zulip,dxq-git/zulip,Drooids/zulip,sup95/zulip,ikasumiwt/zulip,DazWorrall/zulip,atomic-labs/zulip,rht/zulip,LeeRisk/zulip,hafeez3000/zulip,sup95/zulip,vikas-parashar/zulip,punchagan/zulip,technicalpickles/zulip,kou/zulip,jphilipsen05/zulip,tdr130/zulip,ryansnowboarder/zulip,qq1012803704/zulip,itnihao/zulip,pradiptad/zulip,vikas-parashar/zulip,niftynei/zulip,m1ssou/zulip,Vallher/zulip,swinghu/zulip,AZtheAsian/zulip,jonesgithub/zulip,fw1121/zulip,LAndreas/zulip,zorojean/zulip,karamcnair/zulip,brainwane/zulip,luyifan/zulip,firstblade/zulip,eeshangarg/zulip,joyhchen/zulip,arpith/zulip,levixie/zuli
p,paxapy/zulip,PaulPetring/zulip,xuanhan863/zulip,deer-hope/zulip,sonali0901/zulip,mdavid/zulip,ashwinirudrappa/zulip,reyha/zulip,armooo/zulip,hustlzp/zulip,zacps/zulip,tommyip/zulip,grave-w-grave/zulip,lfranchi/zulip,wdaher/zulip,Juanvulcano/zulip,jimmy54/zulip,jerryge/zulip,dnmfarrell/zulip,Galexrt/zulip,shaunstanislaus/zulip,praveenaki/zulip,jonesgithub/zulip,LAndreas/zulip,tiansiyuan/zulip,xuxiao/zulip,shubhamdhama/zulip,hayderimran7/zulip,bluesea/zulip,babbage/zulip,christi3k/zulip,saitodisse/zulip,samatdav/zulip,deer-hope/zulip,babbage/zulip,samatdav/zulip,synicalsyntax/zulip,punchagan/zulip,yocome/zulip,mdavid/zulip,souravbadami/zulip,babbage/zulip,timabbott/zulip,thomasboyt/zulip,Galexrt/zulip,luyifan/zulip,andersk/zulip,bowlofstew/zulip,ipernet/zulip,brockwhittaker/zulip,dotcool/zulip,jessedhillon/zulip,punchagan/zulip,technicalpickles/zulip,RobotCaleb/zulip,blaze225/zulip,ikasumiwt/zulip,glovebx/zulip,ApsOps/zulip,ipernet/zulip,tommyip/zulip,joshisa/zulip,andersk/zulip,ikasumiwt/zulip,mdavid/zulip,m1ssou/zulip,wavelets/zulip,thomasboyt/zulip,Cheppers/zulip,SmartPeople/zulip,umkay/zulip,tbutter/zulip,brainwane/zulip,hafeez3000/zulip,bitemyapp/zulip,joshisa/zulip,lfranchi/zulip,PhilSk/zulip,udxxabp/zulip,AZtheAsian/zulip,bowlofstew/zulip,wdaher/zulip,ericzhou2008/zulip,verma-varsha/zulip,JPJPJPOPOP/zulip,Frouk/zulip,bitemyapp/zulip,noroot/zulip,avastu/zulip,littledogboy/zulip,dxq-git/zulip,ikasumiwt/zulip,tommyip/zulip,stamhe/zulip,gigawhitlocks/zulip,jrowan/zulip,Frouk/zulip,PhilSk/zulip,glovebx/zulip,zacps/zulip,moria/zulip,peiwei/zulip,Diptanshu8/zulip,JanzTam/zulip,Jianchun1/zulip,Suninus/zulip,KJin99/zulip,timabbott/zulip,ryansnowboarder/zulip,dnmfarrell/zulip,dotcool/zulip,suxinde2009/zulip,seapasulli/zulip,he15his/zulip,LAndreas/zulip,shrikrishnaholla/zulip,nicholasbs/zulip,ApsOps/zulip,niftynei/zulip,Diptanshu8/zulip,mdavid/zulip,deer-hope/zulip,sharmaeklavya2/zulip,dattatreya303/zulip,zulip/zulip,vakila/zulip,LAndreas/zulip,zofuthan/zulip,babbage/zu
lip,shaunstanislaus/zulip,wavelets/zulip,mohsenSy/zulip,he15his/zulip,atomic-labs/zulip,avastu/zulip,reyha/zulip,SmartPeople/zulip,hayderimran7/zulip,ahmadassaf/zulip,tdr130/zulip,qq1012803704/zulip,mansilladev/zulip,tiansiyuan/zulip,alliejones/zulip,kokoar/zulip,bluesea/zulip,krtkmj/zulip,wdaher/zulip,Gabriel0402/zulip,andersk/zulip,ahmadassaf/zulip,amyliu345/zulip,rht/zulip,yocome/zulip,saitodisse/zulip,ryanbackman/zulip,noroot/zulip,samatdav/zulip,wavelets/zulip,jackrzhang/zulip,shaunstanislaus/zulip,jainayush975/zulip,jerryge/zulip,yuvipanda/zulip,dwrpayne/zulip,so0k/zulip,so0k/zulip,zwily/zulip,aps-sids/zulip,hackerkid/zulip,vabs22/zulip,wdaher/zulip,dxq-git/zulip,swinghu/zulip,ipernet/zulip,Suninus/zulip,susansls/zulip,dawran6/zulip,reyha/zulip,Cheppers/zulip,alliejones/zulip,babbage/zulip,kokoar/zulip,wangdeshui/zulip,j831/zulip,TigorC/zulip,j831/zulip,JanzTam/zulip,karamcnair/zulip,huangkebo/zulip,MariaFaBella85/zulip,krtkmj/zulip,karamcnair/zulip,vikas-parashar/zulip,krtkmj/zulip,jphilipsen05/zulip,hackerkid/zulip,tommyip/zulip,hackerkid/zulip,zhaoweigg/zulip,johnnygaddarr/zulip,armooo/zulip,paxapy/zulip,blaze225/zulip,m1ssou/zulip,ashwinirudrappa/zulip,RobotCaleb/zulip,fw1121/zulip,ipernet/zulip,ericzhou2008/zulip,ryanbackman/zulip,voidException/zulip,synicalsyntax/zulip,themass/zulip,zwily/zulip,so0k/zulip,seapasulli/zulip,jerryge/zulip,PhilSk/zulip,susansls/zulip,ikasumiwt/zulip,jrowan/zulip,JanzTam/zulip,zofuthan/zulip,LAndreas/zulip,peiwei/zulip,brockwhittaker/zulip,Suninus/zulip,shubhamdhama/zulip,dwrpayne/zulip,levixie/zulip,EasonYi/zulip,ryansnowboarder/zulip,synicalsyntax/zulip,ryansnowboarder/zulip,blaze225/zulip,ryansnowboarder/zulip,adnanh/zulip,willingc/zulip,rishig/zulip,guiquanz/zulip,aps-sids/zulip,JPJPJPOPOP/zulip,amallia/zulip,joshisa/zulip,peguin40/zulip,ikasumiwt/zulip,eeshangarg/zulip,Gabriel0402/zulip,yocome/zulip,Drooids/zulip,glovebx/zulip,akuseru/zulip,Gabriel0402/zulip,zhaoweigg/zulip,vaidap/zulip,mohsenSy/zulip,willingc/zulip,sour
avbadami/zulip,jerryge/zulip,glovebx/zulip | zephyr/lib/bugdown/__init__.py | zephyr/lib/bugdown/__init__.py | import re
import markdown
from zephyr.lib.avatar import gravatar_hash
from zephyr.lib.bugdown import codehilite
class Gravatar(markdown.inlinepatterns.Pattern):
    """Renders a matched !gravatar(email) span as a Gravatar <img> tag."""

    def handleMatch(self, match):
        email = match.group('email')
        element = markdown.util.etree.Element('img')
        element.set('class', 'message_body_gravatar img-rounded')
        element.set(
            'src',
            'https://secure.gravatar.com/avatar/%s?d=identicon&s=30'
            % (gravatar_hash(email),))
        return element
class Bugdown(markdown.Extension):
    """Humbug-specific Markdown tweaks: strips image and header syntax
    and adds the !gravatar(email) inline pattern."""

    def extendMarkdown(self, md, md_globals):
        # Disable inline/reference images and both header styles so user
        # messages can't render arbitrary images or heading markup.
        del md.inlinePatterns['image_link']
        del md.inlinePatterns['image_reference']
        del md.parser.blockprocessors['hashheader']
        del md.parser.blockprocessors['setextheader']

        md.inlinePatterns.add('gravatar', Gravatar(r'!gravatar\((?P<email>[^)]*)\)'), '_begin')
# We need to re-initialize the markdown engine every 30 messages
# due to some sort of performance leak in the markdown library.
MAX_MD_ENGINE_USES = 30
_md_engine = None
_use_count = 0
# A link starts after whitespace, and cannot contain spaces,
# end parentheses, or end brackets (which would confuse Markdown).
# FIXME: Use one of the actual linkification extensions.
_link_regex = re.compile(r'(\s|\A)(?P<url>https?://[^\s\])]+)')
def _linkify(match):
url = match.group('url')
return ' [%s](%s) ' % (url, url)
def convert(md):
"""Convert Markdown to HTML, with Humbug-specific settings and hacks."""
global _md_engine, _use_count
if _md_engine is None:
_md_engine = markdown.Markdown(
safe_mode = 'escape',
output_format = 'xhtml',
extensions = ['fenced_code', 'nl2br',
codehilite.makeExtension(configs=[('force_linenos', False)]),
Bugdown()])
md = _link_regex.sub(_linkify, md)
try:
html = _md_engine.convert(md)
except:
# FIXME: Do something more reasonable here!
html = '<p>[Humbug note: Sorry, we could not understand the formatting of your message]</p>'
_use_count += 1
if _use_count >= MAX_MD_ENGINE_USES:
_md_engine = None
_use_count = 0
return html
| import re
import markdown
from zephyr.lib.avatar import gravatar_hash
from zephyr.lib.bugdown import codehilite
class Gravatar(markdown.inlinepatterns.Pattern):
def handleMatch(self, match):
# NB: the first match of our regex is match.group(2) due to
# markdown internal matches
img = markdown.util.etree.Element('img')
img.set('class', 'message_body_gravatar img-rounded')
img.set('src', 'https://secure.gravatar.com/avatar/%s?d=identicon&s=30'
% (gravatar_hash(match.group(2)),))
return img
class Bugdown(markdown.Extension):
def extendMarkdown(self, md, md_globals):
del md.inlinePatterns['image_link']
del md.inlinePatterns['image_reference']
del md.parser.blockprocessors['hashheader']
del md.parser.blockprocessors['setextheader']
md.inlinePatterns.add('gravatar', Gravatar(r'!gravatar\(([^)]*)\)'), '_begin')
# We need to re-initialize the markdown engine every 30 messages
# due to some sort of performance leak in the markdown library.
MAX_MD_ENGINE_USES = 30
_md_engine = None
_use_count = 0
# A link starts after whitespace, and cannot contain spaces,
# end parentheses, or end brackets (which would confuse Markdown).
# FIXME: Use one of the actual linkification extensions.
_link_regex = re.compile(r'(\s|\A)(?P<url>https?://[^\s\])]+)')
def _linkify(match):
url = match.group('url')
return ' [%s](%s) ' % (url, url)
def convert(md):
"""Convert Markdown to HTML, with Humbug-specific settings and hacks."""
global _md_engine, _use_count
if _md_engine is None:
_md_engine = markdown.Markdown(
safe_mode = 'escape',
output_format = 'xhtml',
extensions = ['fenced_code', 'nl2br',
codehilite.makeExtension(configs=[('force_linenos', False)]),
Bugdown()])
md = _link_regex.sub(_linkify, md)
try:
html = _md_engine.convert(md)
except:
# FIXME: Do something more reasonable here!
html = '<p>[Humbug note: Sorry, we could not understand the formatting of your message]</p>'
_use_count += 1
if _use_count >= MAX_MD_ENGINE_USES:
_md_engine = None
_use_count = 0
return html
| apache-2.0 | Python |
af16a55be73fd88c7a07faee13917092109ecb72 | Add test for iCal export for workouts | wger-project/wger,wger-project/wger,petervanderdoes/wger,petervanderdoes/wger,wger-project/wger,DeveloperMal/wger,kjagoo/wger_stark,kjagoo/wger_stark,kjagoo/wger_stark,rolandgeider/wger,rolandgeider/wger,DeveloperMal/wger,kjagoo/wger_stark,petervanderdoes/wger,wger-project/wger,rolandgeider/wger,DeveloperMal/wger,DeveloperMal/wger,petervanderdoes/wger,rolandgeider/wger | wger/manager/tests/test_ical.py | wger/manager/tests/test_ical.py | # This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
import datetime
from django.core.urlresolvers import reverse
from wger.manager.tests.testcase import WorkoutManagerTestCase
from wger.utils.helpers import next_weekday
class IcalToolsTestCase(WorkoutManagerTestCase):
'''
Tests some tools used for iCal generation
'''
def test_next_weekday(self):
'''
Test the next weekday function
'''
start_date = datetime.date(2013, 12, 5)
# Find next monday
self.assertEqual(next_weekday(start_date, 0), datetime.date(2013, 12, 9))
# Find next wednesday
self.assertEqual(next_weekday(start_date, 2), datetime.date(2013, 12, 11))
# Find next saturday
self.assertEqual(next_weekday(start_date, 5), datetime.date(2013, 12, 7))
class WorkoutICalExportTestCase(WorkoutManagerTestCase):
'''
Tests exporting the ical file for a workout
'''
def export_ical(self, fail=False):
'''
Helper function
'''
response = self.client.get(reverse('workout-ical', kwargs={'pk': 3}))
if fail:
self.assertIn(response.status_code, (404, 302))
else:
self.assertEqual(response.status_code, 200)
self.assertEqual(response['Content-Type'], 'text/calendar')
self.assertEqual(response['Content-Disposition'], 'attachment; filename=calendar-3.ics')
# Approximate size
self.assertGreater(len(response.content), 550)
self.assertLess(len(response.content), 560)
def test_export_ical_anonymous(self):
'''
Tests exporting a workout as an ical file as an anonymous user
'''
self.export_ical(fail=True)
def test_export_ical_owner(self):
'''
Tests exporting a workout as an ical file as the owner user
'''
self.user_login('test')
self.export_ical(fail=False)
def test_export_ical_other(self):
'''
Tests exporting a workout as an ical file as a logged user not owning the data
'''
self.user_login('admin')
self.export_ical(fail=True)
| # This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
import datetime
from wger.manager.tests.testcase import WorkoutManagerTestCase
from wger.utils.helpers import next_weekday
class IcalToolsTestCase(WorkoutManagerTestCase):
'''
Tests some tools used for iCal generation
'''
def test_next_weekday(self):
'''
Test the next weekday function
'''
start_date = datetime.date(2013, 12, 5)
# Find next monday
self.assertEqual(next_weekday(start_date, 0), datetime.date(2013, 12, 9))
# Find next wednesday
self.assertEqual(next_weekday(start_date, 2), datetime.date(2013, 12, 11))
# Find next saturday
self.assertEqual(next_weekday(start_date, 5), datetime.date(2013, 12, 7))
| agpl-3.0 | Python |
6fdb04e47bdfc80e17a8c34cebabec98c3c5ee0e | Increase filters | israelg99/eva | eva/models/pixelcnn.py | eva/models/pixelcnn.py | from keras.models import Model
from keras.layers import Input, Convolution2D, Activation, Flatten, Dense
from keras.layers.advanced_activations import PReLU
from keras.optimizers import Nadam
from eva.layers.residual_block import ResidualBlockList
from eva.layers.masked_convolution2d import MaskedConvolution2D
def PixelCNN(input_shape, filters, blocks, softmax=False, build=True):
input_map = Input(shape=input_shape)
model = MaskedConvolution2D(filters, 7, 7, mask='A', border_mode='same')(input_map)
model = PReLU()(model)
model = ResidualBlockList(model, filters, blocks)
model = Convolution2D(filters//2, 1, 1)(model)
model = PReLU()(model)
model = Convolution2D(filters//2, 1, 1)(model)
model = PReLU()(model)
model = Convolution2D(1, 1, 1)(model)
if not softmax:
model = Activation('sigmoid')(model)
else:
raise NotImplementedError()
if build:
# (Potentially) Against paper, loss and optimizers are different.
model = Model(input=input_map, output=model)
model.compile(loss='binary_crossentropy',
optimizer=Nadam(),
metrics=['accuracy'])
return model
| from keras.models import Model
from keras.layers import Input, Convolution2D, Activation, Flatten, Dense
from keras.layers.advanced_activations import PReLU
from keras.optimizers import Nadam
from eva.layers.residual_block import ResidualBlockList
from eva.layers.masked_convolution2d import MaskedConvolution2D
def PixelCNN(input_shape, filters, blocks, softmax=False, build=True):
input_map = Input(shape=input_shape)
model = MaskedConvolution2D(filters, 7, 7, mask='A', border_mode='same')(input_map)
model = PReLU()(model)
model = ResidualBlockList(model, filters, blocks)
model = Convolution2D(1, 1, 1)(model)
model = PReLU()(model)
model = Convolution2D(1, 1, 1)(model)
model = PReLU()(model)
model = Convolution2D(1, 1, 1)(model)
if not softmax:
model = Activation('sigmoid')(model)
else:
raise NotImplementedError()
if build:
# (Potentially) Against paper, loss and optimizers are different.
model = Model(input=input_map, output=model)
model.compile(loss='binary_crossentropy',
optimizer=Nadam(),
metrics=['accuracy'])
return model
| apache-2.0 | Python |
ed1de617b5fcca499f9ca620ea0d16641e113b30 | Fix wiimoteac | legonigel/wii-drone-on | wiimoteAC.py | wiimoteAC.py | #! /usr/bin/python
import cwiid
import time
class InputWiimoteAC(object):
def makeConnection(self):
print "Press 1 + 2 on the wiimote for Acceleration"
try:
wm = cwiid.Wiimote()
except RuntimeError:
wm = None
else:
wm.led = 2
#wm.enable(cwiid.FLAG_MOTIONPLUS)
wm.rpt_mode = cwiid.RPT_BTN | cwiid.RPT_ACC
print "you be connected"
self.wm = wm
return wm
def determine(self):
#print "x: " + str(float((wm.state['acc'][0]-125))/30)
#print "y: " + str(float((wm.state['acc'][1]-125))/30)
directions = "lr " + str(float((self.wm.state['acc'][0]-125))/30) + " fb " + str(float((self.wm.state['acc'][1]-125))/30)
time.sleep(0.1)
return directions
def main():
myInputAC = InputWiimoteAC()
wiimote = myInputAC.makeConnection()
while True:
wiimote.led = (wiimote.state['led'] + 1) % 16
print myInputAC.determine()
if __name__ == "__main__":
main()
| #! /usr/bin/python
import cwiid
import time
class InputWiimoteAC(object):
def makeConnection(self):
print "Press 1 + 2 on the wiimote for Acceleration"
try:
wm = cwiid.Wiimote()
except RuntimeError:
wm = None
else:
wm.led = 2
#wm.enable(cwiid.FLAG_MOTIONPLUS)
wm.rpt_mode = cwiid.RPT_BTN | cwiid.RPT_ACC
print "you be connected"
self.wm = wm
return wm
def determine(self):
#print "x: " + str(float((wm.state['acc'][0]-125))/30)
#print "y: " + str(float((wm.state['acc'][1]-125))/30)
directions = "lr " + str(float((self.wm.state['acc'][0]-125))/30) + " fb " + str(float((self.wm.state['acc'][1]-125))/30)
time.sleep(0.1)
return directions
def main():
myInputAC = InputWiimoteAC()
wiimote = myInputAC.makeConnection()
while True:
wiimote.led = (wiimote.state['led'] + 1) % 16
print myInputAC.determine(wiimote)
if __name__ == "__main__":
main()
| mit | Python |
0866b795d3da3f0e3b20759ad597b3b2ca0be126 | enable `/gettext/{aoid}` route | jar3b/py-phias,jar3b/py-phias,jar3b/py-phias,jar3b/py-phias | aore/app.py | aore/app.py | import asyncpg
from aiohttp import web
from aiohttp_pydantic import oas
from settings import AppConfig
from . import log
from .search import FiasFactory
from .views import NormalizeAoidView, error_middleware, ExpandAoidView, ConvertAoidView, FindAoView, cors_middleware, \
GetAoidTextView
async def init_fias(app: web.Application) -> None:
app['ff'] = FiasFactory(app)
async def shutdown_fias(app: web.Application) -> None:
pass
async def init_pg(app: web.Application) -> None:
conf: AppConfig.PG = app['config'].pg
dsn = f'postgres://{conf.user}:{conf.password}@{conf.host}:{conf.port}/{conf.name}'
log.info(f'Connecting to pg_main ({conf.host}:{conf.port})')
app['pg'] = await asyncpg.create_pool(dsn, max_inactive_connection_lifetime=conf.pool_recycle)
async def shutdown_pg(app: web.Application) -> None:
await app['pg'].close()
async def root_handler(request: web.Request) -> web.StreamResponse:
return web.FileResponse('./aore/static/index.html')
def run(port: int, config: AppConfig) -> None:
# create web app and set config
app = web.Application(middlewares=[
error_middleware,
cors_middleware
])
app['config'] = config
app['name'] = 'fias-api'
app.on_startup.append(init_pg)
app.on_cleanup.append(shutdown_pg)
app.on_startup.append(init_fias)
app.on_cleanup.append(shutdown_fias)
app.router.add_view('/normalize/{aoid}', NormalizeAoidView)
app.router.add_view('/expand/{aoid}', ExpandAoidView)
app.router.add_view('/aoid2aoguid/{aoid}', ConvertAoidView)
app.router.add_view('/gettext/{aoid}', GetAoidTextView)
app.router.add_view('/find/{text}', FindAoView)
# --
# ** OAS (OpenAPI Swagger docs) **
# --
oas.setup(app, title_spec="Py-Phias API", url_prefix='/docs', raise_validation_errors=True)
# static
app.router.add_route('*', '/', root_handler)
app.router.add_static('/', './aore/static')
# now run_app using default asyncio loop
web.run_app(app, port=port)
| import asyncpg
from aiohttp import web
from aiohttp_pydantic import oas
from settings import AppConfig
from . import log
from .search import FiasFactory
from .views import NormalizeAoidView, error_middleware, ExpandAoidView, ConvertAoidView, FindAoView, cors_middleware
async def init_fias(app: web.Application) -> None:
app['ff'] = FiasFactory(app)
async def shutdown_fias(app: web.Application) -> None:
pass
async def init_pg(app: web.Application) -> None:
conf: AppConfig.PG = app['config'].pg
dsn = f'postgres://{conf.user}:{conf.password}@{conf.host}:{conf.port}/{conf.name}'
log.info(f'Connecting to pg_main ({conf.host}:{conf.port})')
app['pg'] = await asyncpg.create_pool(dsn, max_inactive_connection_lifetime=conf.pool_recycle)
async def shutdown_pg(app: web.Application) -> None:
await app['pg'].close()
async def root_handler(request: web.Request) -> web.StreamResponse:
return web.FileResponse('./aore/static/index.html')
def run(port: int, config: AppConfig) -> None:
# create web app and set config
app = web.Application(middlewares=[
error_middleware,
cors_middleware
])
app['config'] = config
app['name'] = 'fias-api'
app.on_startup.append(init_pg)
app.on_cleanup.append(shutdown_pg)
app.on_startup.append(init_fias)
app.on_cleanup.append(shutdown_fias)
app.router.add_view('/normalize/{aoid}', NormalizeAoidView)
app.router.add_view('/expand/{aoid}', ExpandAoidView)
app.router.add_view('/aoid2aoguid/{aoid}', ConvertAoidView)
app.router.add_view('/find/{text}', FindAoView)
# --
# ** OAS (OpenAPI Swagger docs) **
# --
oas.setup(app, title_spec="Py-Phias API", url_prefix='/docs', raise_validation_errors=True)
# static
app.router.add_route('*', '/', root_handler)
app.router.add_static('/', './aore/static')
# now run_app using default asyncio loop
web.run_app(app, port=port)
| bsd-3-clause | Python |
44bfdc7f81183c3a4aa79b0075c2ad2288026e14 | update backend interface | bedaes/burp-ui,bedaes/burp-ui,bedaes/burp-ui,bedaes/burp-ui | burpui/misc/backend/interface.py | burpui/misc/backend/interface.py | # -*- coding: utf8 -*-
class BUIbackend:
def __init__(self, app=None, host='127.0.0.1', port=4972):
self.app = app
self.host = host
self.port = port
def status(self, query='\n', agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def get_backup_logs(self, n, c, forward=False, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def get_counters(self, name=None, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def is_backup_running(self, name=None, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def is_one_backup_running(self, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def get_all_clients(self, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def get_client(self, name=None, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def get_tree(self, name=None, backup=None, root=None, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def restore_files(self, name=None, backup=None, files=None, strip=None, archive='zip', agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def read_conf_srv(self, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def read_conf_cli(self, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def store_conf_srv(self, data, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def store_conf_cli(self, data, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def get_parser_attr(self, attr=None, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
class BUIserverException(Exception):
pass
| # -*- coding: utf8 -*-
class BUIbackend:
def __init__(self, app=None, host='127.0.0.1', port=4972):
self.app = app
self.host = host
self.port = port
def status(self, query='\n', agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def get_backup_logs(self, n, c, forward=False, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def get_counters(self, name=None, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def is_backup_running(self, name=None, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def is_one_backup_running(self, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def get_all_clients(self, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def get_client(self, name=None, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def get_tree(self, name=None, backup=None, root=None, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def restore_files(self, name=None, backup=None, files=None, strip=None, archive='zip', agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def read_conf(self, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def store_conf(self, data, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
def get_parser_attr(self, attr=None, agent=None):
raise NotImplementedError("Sorry, the current Backend does not implement this method!")
class BUIserverException(Exception):
pass
class BUIserverException(Exception):
pass
| bsd-3-clause | Python |
059d445f40d7c829b2d5888e1c0e700e03552d47 | Fix tag params in user directory | IndiciumSRL/wirecurly | wirecurly/directory/__init__.py | wirecurly/directory/__init__.py | import logging
log = logging.getLogger(__name__)
__all__ = ['User']
class User(object):
"""A user object for the directory"""
def __init__(self, user_id, password=None):
super(User, self).__init__()
self.user_id = user_id
self.variables = []
self.parameters = []
if password:
self.addParameter('password', password)
def addVariable(self, var, val):
'''
Set an extra variable for an user
'''
try:
self.getVariable(var)
except ValueError:
self.variables.append({'name': var, 'value': val})
return
log.warning('Cannot replace existing variable.')
raise ValueError
def addParameter(self, param, val):
'''
Set an extra parameter for an user
'''
try:
self.getParameter(param)
except ValueError:
self.parameters.append({'name': param, 'value': val})
return
log.warning('Cannot replace existing parameter.')
raise ValueError
def getParameter(self, param):
'''
Retrieve the value of a parameter by its name
'''
for p in self.parameters:
if p.get('name') == param:
return p.get('value')
raise ValueError
def getVariable(self, var):
'''
Retrieve the value of a variable by its name
'''
for v in self.variables:
if v.get('name') == var:
return v.get('value')
raise ValueError
def todict(self):
'''
Create a dict so it can be converted/serialized
'''
children = [{'tag': 'params', 'children': [
{'tag': 'param', 'attrs': p} for p in self.parameters
]}]
if self.variables:
children.append({'tag': 'variables', 'children': [
{'tag': 'variable', 'attrs': v} for v in self.variables
]})
return {'tag': 'user', 'children': children, 'attrs': {'id': self.user_id}} | import logging
log = logging.getLogger(__name__)
__all__ = ['User']
class User(object):
"""A user object for the directory"""
def __init__(self, user_id, password=None):
super(User, self).__init__()
self.user_id = user_id
self.variables = []
self.parameters = []
if password:
self.addParameter('password', password)
def addVariable(self, var, val):
'''
Set an extra variable for an user
'''
try:
self.getVariable(var)
except ValueError:
self.variables.append({'name': var, 'value': val})
return
log.warning('Cannot replace existing variable.')
raise ValueError
def addParameter(self, param, val):
'''
Set an extra parameter for an user
'''
try:
self.getParameter(param)
except ValueError:
self.parameters.append({'name': param, 'value': val})
return
log.warning('Cannot replace existing parameter.')
raise ValueError
def getParameter(self, param):
'''
Retrieve the value of a parameter by its name
'''
for p in self.parameters:
if p.get('name') == param:
return p.get('value')
raise ValueError
def getVariable(self, var):
'''
Retrieve the value of a variable by its name
'''
for v in self.variables:
if v.get('name') == var:
return v.get('value')
raise ValueError
def todict(self):
'''
Create a dict so it can be converted/serialized
'''
children = [{'tag': 'parameters', 'children': [
{'tag': 'param', 'attrs': p} for p in self.parameters
]}]
if self.variables:
children.append({'tag': 'variables', 'children': [
{'tag': 'variable', 'attrs': v} for v in self.variables
]})
return {'tag': 'user', 'children': children, 'attrs': {'id': self.user_id}} | mpl-2.0 | Python |
dbe28da1be7415ad5f496b983c0345c5e9714fb9 | Add fake devices for non-existant serial ports. | zlfben/gem5,joerocklin/gem5,sobercoder/gem5,HwisooSo/gemV-update,kaiyuanl/gem5,TUD-OS/gem5-dtu,TUD-OS/gem5-dtu,SanchayanMaity/gem5,KuroeKurose/gem5,samueldotj/TeeRISC-Simulator,austinharris/gem5-riscv,samueldotj/TeeRISC-Simulator,markoshorro/gem5,gedare/gem5,powerjg/gem5-ci-test,rallylee/gem5,markoshorro/gem5,gedare/gem5,rjschof/gem5,zlfben/gem5,yb-kim/gemV,kaiyuanl/gem5,austinharris/gem5-riscv,rjschof/gem5,qizenguf/MLC-STT,aclifton/cpeg853-gem5,briancoutinho0905/2dsampling,cancro7/gem5,cancro7/gem5,zlfben/gem5,joerocklin/gem5,HwisooSo/gemV-update,rallylee/gem5,briancoutinho0905/2dsampling,sobercoder/gem5,KuroeKurose/gem5,briancoutinho0905/2dsampling,sobercoder/gem5,yb-kim/gemV,sobercoder/gem5,qizenguf/MLC-STT,joerocklin/gem5,joerocklin/gem5,TUD-OS/gem5-dtu,samueldotj/TeeRISC-Simulator,powerjg/gem5-ci-test,sobercoder/gem5,briancoutinho0905/2dsampling,gem5/gem5,markoshorro/gem5,austinharris/gem5-riscv,markoshorro/gem5,powerjg/gem5-ci-test,aclifton/cpeg853-gem5,Weil0ng/gem5,gem5/gem5,austinharris/gem5-riscv,rallylee/gem5,HwisooSo/gemV-update,cancro7/gem5,briancoutinho0905/2dsampling,aclifton/cpeg853-gem5,KuroeKurose/gem5,zlfben/gem5,powerjg/gem5-ci-test,rallylee/gem5,rjschof/gem5,aclifton/cpeg853-gem5,rjschof/gem5,Weil0ng/gem5,Weil0ng/gem5,yb-kim/gemV,gedare/gem5,gedare/gem5,aclifton/cpeg853-gem5,SanchayanMaity/gem5,SanchayanMaity/gem5,markoshorro/gem5,gedare/gem5,kaiyuanl/gem5,HwisooSo/gemV-update,cancro7/gem5,rjschof/gem5,KuroeKurose/gem5,kaiyuanl/gem5,KuroeKurose/gem5,HwisooSo/gemV-update,HwisooSo/gemV-update,austinharris/gem5-riscv,TUD-OS/gem5-dtu,markoshorro/gem5,gem5/gem5,joerocklin/gem5,joerocklin/gem5,samueldotj/TeeRISC-Simulator,powerjg/gem5-ci-test,yb-kim/gemV,rallylee/gem5,Weil0ng/gem5,yb-kim/gemV,powerjg/gem5-ci-test,SanchayanMaity/gem5,aclifton/cpeg853-gem5,KuroeKurose/gem5,SanchayanMaity/gem5,HwisooSo/gemV-update,zlfben/gem5,gem5/gem5,briancoutinho0905/2dsampling
,qizenguf/MLC-STT,samueldotj/TeeRISC-Simulator,rallylee/gem5,austinharris/gem5-riscv,TUD-OS/gem5-dtu,yb-kim/gemV,zlfben/gem5,rjschof/gem5,qizenguf/MLC-STT,gem5/gem5,gedare/gem5,austinharris/gem5-riscv,cancro7/gem5,qizenguf/MLC-STT,gedare/gem5,gem5/gem5,qizenguf/MLC-STT,joerocklin/gem5,samueldotj/TeeRISC-Simulator,Weil0ng/gem5,yb-kim/gemV,kaiyuanl/gem5,rallylee/gem5,TUD-OS/gem5-dtu,cancro7/gem5,SanchayanMaity/gem5,markoshorro/gem5,kaiyuanl/gem5,zlfben/gem5,powerjg/gem5-ci-test,samueldotj/TeeRISC-Simulator,SanchayanMaity/gem5,KuroeKurose/gem5,briancoutinho0905/2dsampling,Weil0ng/gem5,sobercoder/gem5,cancro7/gem5,gem5/gem5,qizenguf/MLC-STT,TUD-OS/gem5-dtu,yb-kim/gemV,kaiyuanl/gem5,sobercoder/gem5,Weil0ng/gem5,rjschof/gem5,aclifton/cpeg853-gem5,joerocklin/gem5 | src/dev/x86/Pc.py | src/dev/x86/Pc.py | # Copyright (c) 2008 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
from m5.params import *
from m5.proxy import *
from Device import IsaFake
from Pci import PciConfigAll
from Platform import Platform
from SouthBridge import SouthBridge
from Terminal import Terminal
from Uart import Uart8250
def x86IOAddress(port):
IO_address_space_base = 0x8000000000000000
return IO_address_space_base + port;
class Pc(Platform):
type = 'Pc'
system = Param.System(Parent.any, "system")
pciconfig = PciConfigAll()
south_bridge = SouthBridge()
# "Non-existant" port used for timing purposes by the linux kernel
i_dont_exist = IsaFake(pio_addr=x86IOAddress(0x80), pio_size=1)
# Ports behind the pci config and data regsiters. These don't do anything,
# but the linux kernel fiddles with them anway.
behind_pci = IsaFake(pio_addr=x86IOAddress(0xcf8), pio_size=8)
# Serial port and terminal
terminal = Terminal()
com_1 = Uart8250()
com_1.pio_addr = x86IOAddress(0x3f8)
com_1.terminal = terminal
# Devices to catch access to non-existant serial ports.
fake_com_2 = IsaFake(pio_addr=x86IOAddress(0x2f8), pio_size=8)
fake_com_3 = IsaFake(pio_addr=x86IOAddress(0x3e8), pio_size=8)
fake_com_4 = IsaFake(pio_addr=x86IOAddress(0x2e8), pio_size=8)
def attachIO(self, bus):
self.south_bridge.attachIO(bus)
self.i_dont_exist.pio = bus.port
self.behind_pci.pio = bus.port
self.com_1.pio = bus.port
self.fake_com_2.pio = bus.port
self.fake_com_3.pio = bus.port
self.fake_com_4.pio = bus.port
self.pciconfig.pio = bus.default
bus.responder_set = True
bus.responder = self.pciconfig
| # Copyright (c) 2008 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
from m5.params import *
from m5.proxy import *
from Device import IsaFake
from Pci import PciConfigAll
from Platform import Platform
from SouthBridge import SouthBridge
from Terminal import Terminal
from Uart import Uart8250
def x86IOAddress(port):
IO_address_space_base = 0x8000000000000000
return IO_address_space_base + port;
class Pc(Platform):
type = 'Pc'
system = Param.System(Parent.any, "system")
pciconfig = PciConfigAll()
south_bridge = SouthBridge()
# "Non-existant" port used for timing purposes by the linux kernel
i_dont_exist = IsaFake(pio_addr=x86IOAddress(0x80), pio_size=1)
# Ports behind the pci config and data regsiters. These don't do anything,
# but the linux kernel fiddles with them anway.
behind_pci = IsaFake(pio_addr=x86IOAddress(0xcf8), pio_size=8)
# Serial port and terminal
terminal = Terminal()
com_1 = Uart8250()
com_1.pio_addr = x86IOAddress(0x3f8)
com_1.terminal = terminal
def attachIO(self, bus):
self.south_bridge.attachIO(bus)
self.i_dont_exist.pio = bus.port
self.behind_pci.pio = bus.port
self.com_1.pio = bus.port
self.pciconfig.pio = bus.default
bus.responder_set = True
bus.responder = self.pciconfig
| bsd-3-clause | Python |
b474c7368f3a8152296acf9cad7459510b71ada5 | Fix SSHOpener to use the new ClosingSubFS | althonos/fs.sshfs | fs/opener/sshfs.py | fs/opener/sshfs.py | from ._base import Opener
from ._registry import registry
from ..subfs import ClosingSubFS
@registry.install
class SSHOpener(Opener):
protocols = ['ssh']
@staticmethod
def open_fs(fs_url, parse_result, writeable, create, cwd):
from ..sshfs import SSHFS
ssh_host, _, dir_path = parse_result.resource.partition('/')
ssh_host, _, ssh_port = ssh_host.partition(':')
ssh_port = int(ssh_port) if ssh_port.isdigit() else 22
ssh_fs = SSHFS(
ssh_host,
port=ssh_port,
user=parse_result.username,
passwd=parse_result.password,
)
if dir_path:
return ssh_fs.opendir(dir_path, factory=ClosingSubFS)
else:
return ssh_fs
| from ._base import Opener
from ._registry import registry
@registry.install
class SSHOpener(Opener):
protocols = ['ssh']
@staticmethod
def open_fs(fs_url, parse_result, writeable, create, cwd):
from ..sshfs import SSHFS
ssh_host, _, dir_path = parse_result.resource.partition('/')
ssh_host, _, ssh_port = ssh_host.partition(':')
ssh_port = int(ssh_port) if ssh_port.isdigit() else 22
ssh_fs = SSHFS(
ssh_host,
port=ssh_port,
user=parse_result.username,
passwd=parse_result.password,
)
return ssh_fs.opendir(dir_path) if dir_path else ssh_fs
| lgpl-2.1 | Python |
f4c9b193bda5836888e8b90c907bcf5d695401b0 | update git push | munisisazade/developer_portal,munisisazade/developer_portal,munisisazade/developer_portal | api/urls.py | api/urls.py | from django.conf.urls import url, include
from api.views import UserList,BrandDetail,NewsList,ArticleDetail
urlpatterns = [
url(r'^user/(?P<pk>[0-9]+)/$', BrandDetail.as_view(), name='user-detail'),
url(r'^user-list/$', UserList.as_view(), name='user-list'),
url(r'^news-list/$', NewsList.as_view(), name='news-list'),
url(r'^article/(?P<pk>[0-9]+)$', ArticleDetail.as_view(), name='article-detail'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
] | from django.conf.urls import url, include
from rest_framework_jwt.views import obtain_jwt_token
from api.views import UserList,BrandDetail,NewsList,ArticleDetail
urlpatterns = [
url(r'^user/(?P<pk>[0-9]+)/$', BrandDetail.as_view(), name='user-detail'),
url(r'^user-list/$', UserList.as_view(), name='user-list'),
url(r'^news-list/$', NewsList.as_view(), name='news-list'),
url(r'^article/(?P<pk>[0-9]+)$', ArticleDetail.as_view(), name='article-detail'),
url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')),
url(r'^api-token-auth/', obtain_jwt_token),
] | mit | Python |
ad331ef64849ec568fc406183c06a1c9c6a34d44 | Change constants | anishathalye/evolution-chamber,techx/hackmit-evolution-chamber,anishathalye/evolution-chamber,techx/hackmit-evolution-chamber,anishathalye/evolution-chamber,techx/hackmit-evolution-chamber,techx/hackmit-evolution-chamber,anishathalye/evolution-chamber | constants.py | constants.py | class Constants:
APP_NAME = "EVOLUTION_CHAMBER"
COMPARISONS_PER_GENERATION = 100
POPULATION_SIZE = 20
KILL_SIZE = 10
| class Constants:
APP_NAME = "EVOLUTION_CHAMBER"
COMPARISONS_PER_GENERATION = 50
POPULATION_SIZE = 15
KILL_SIZE = 8
| mit | Python |
647e831864ce932278d8c705626f07a96fc56491 | Fix publisher method is published, set tzinfo none | williamroot/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,williamroot/opps,opps/opps,williamroot/opps,YACOWS/opps,jeanmask/opps,YACOWS/opps,jeanmask/opps,opps/opps,opps/opps,opps/opps,jeanmask/opps | opps/core/models/publisher.py | opps/core/models/publisher.py | #!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublisherMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Publisher(models.Model):
date_insert = models.DateTimeField(_(u"Date insert"), auto_now_add=True)
date_update = models.DateTimeField(_(u"Date update"), auto_now=True)
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublisherMnager()
kero = models.Manager()
class Meta:
abstract = True
def is_published(self):
return self.published and \
self.date_available.replace(tzinfo=None) <= datetime.now()
def save(self, *args, **kwargs):
self.date_update = datetime.now()
super(Publisher, self).save(*args, **kwargs)
| #!/usr/bin/env python
from django.db import models
from django.utils.translation import ugettext_lazy as _
from datetime import datetime
class PublisherMnager(models.Manager):
def all_published(self):
return super(PublisherMnager, self).get_query_set().filter(
date_available__lte=datetime.now(), published=True)
class Publisher(models.Model):
date_insert = models.DateTimeField(_(u"Date insert"), auto_now_add=True)
date_update = models.DateTimeField(_(u"Date update"), auto_now=True)
date_available = models.DateTimeField(_(u"Date available"),
default=datetime.now, null=True)
published = models.BooleanField(_(u"Published"), default=False)
objects = PublisherMnager()
kero = models.Manager()
class Meta:
abstract = True
def is_published(self):
return self.published and self.date_available <= datetime.now()
def save(self, *args, **kwargs):
self.date_update = datetime.now()
super(Publisher, self).save(*args, **kwargs)
| mit | Python |
80f3d630673ccd126b3e7cc64970ce3f7feac4d0 | Add flask command to change user password | openmaraude/APITaxi,openmaraude/APITaxi | APITaxi2/commands/users.py | APITaxi2/commands/users.py | import uuid
import click
from flask import Blueprint, current_app
from flask.cli import with_appcontext
from flask_security.utils import hash_password
from APITaxi_models2 import db, Role, User
blueprint = Blueprint('commands_users', __name__, cli_group=None)
@with_appcontext
def valid_role(value):
roles = [role.name for role in Role.query.with_entities(Role.name).all()]
if value not in roles:
raise click.BadParameter('Valid values are: %s' % ', '.join(roles))
return value
@blueprint.cli.command('create_user')
@click.argument('email')
@click.option('--password', prompt=True, hide_input=True, confirmation_prompt=True)
@click.argument('roles', nargs=-1, required=True, type=valid_role)
def create_user(email, password, roles):
user = User.query.filter_by(email=email).one_or_none()
if user:
current_app.logger.error('User already exists')
return
hashed_password = hash_password(password)
user = User(
email=email,
password=hashed_password,
commercial_name=email,
apikey=str(uuid.uuid4()),
active=True
)
db.session.add(user)
db.session.flush()
# Create a set, which removes duplicates.
roles = set(roles)
# Administrators should have the explicit permissions moteur and operateur.
if 'admin' in roles:
roles = roles.union({'moteur', 'operateur'})
for rolename in roles:
role = Role.query.filter_by(name=rolename).one()
user.roles.append(role)
db.session.commit()
@blueprint.cli.command('update_password', help='Update user password')
@click.argument('email')
@click.option('--password', prompt=True, hide_input=True, confirmation_prompt=True)
def update_password(email, password):
user = User.query.filter_by(email=email).one_or_none()
if not user:
current_app.logger.error('User does not exist')
return
user.password = hash_password(password)
db.session.commit()
| import uuid
import click
from flask import Blueprint, current_app
from flask.cli import with_appcontext
from flask_security.utils import hash_password
from APITaxi_models2 import db, Role, User
blueprint = Blueprint('commands_users', __name__, cli_group=None)
@with_appcontext
def valid_role(value):
roles = [role.name for role in Role.query.with_entities(Role.name).all()]
if value not in roles:
raise click.BadParameter('Valid values are: %s' % ', '.join(roles))
return value
@blueprint.cli.command('create_user')
@click.argument('email')
@click.option('--password', prompt=True, hide_input=True, confirmation_prompt=True)
@click.argument('roles', nargs=-1, required=True, type=valid_role)
def create_user(email, password, roles):
user = User.query.filter_by(email=email).one_or_none()
if user:
current_app.logger.error('User already exists')
return
hashed_password = hash_password(password)
user = User(
email=email,
password=hashed_password,
commercial_name=email,
apikey=str(uuid.uuid4()),
active=True
)
db.session.add(user)
db.session.flush()
# Create a set, which removes duplicates.
roles = set(roles)
# Administrators should have the explicit permissions moteur and operateur.
if 'admin' in roles:
roles = roles.union({'moteur', 'operateur'})
for rolename in roles:
role = Role.query.filter_by(name=rolename).one()
user.roles.append(role)
db.session.commit()
| agpl-3.0 | Python |
0ed979ea3e25661c4fab254c758a3c2db1f38d8a | Fix PytzUsageWarning for Python versions >= 3.6 (#1062) | scrapinghub/dateparser | dateparser/date_parser.py | dateparser/date_parser.py | import sys
from tzlocal import get_localzone
from .timezone_parser import pop_tz_offset_from_string
from .utils import strip_braces, apply_timezone, localize_timezone
from .conf import apply_settings
class DateParser:
@apply_settings
def parse(self, date_string, parse_method, settings=None):
date_string = str(date_string)
if not date_string.strip():
raise ValueError("Empty string")
date_string = strip_braces(date_string)
date_string, ptz = pop_tz_offset_from_string(date_string)
date_obj, period = parse_method(date_string, settings=settings)
_settings_tz = settings.TIMEZONE.lower()
if ptz:
if hasattr(ptz, 'localize'):
date_obj = ptz.localize(date_obj)
else:
date_obj = date_obj.replace(tzinfo=ptz)
if 'local' not in _settings_tz:
date_obj = apply_timezone(date_obj, settings.TIMEZONE)
else:
if 'local' in _settings_tz:
stz = get_localzone()
if hasattr(stz, 'localize') and sys.version_info < (3, 6):
date_obj = stz.localize(date_obj)
else:
date_obj = date_obj.replace(tzinfo=stz)
else:
date_obj = localize_timezone(date_obj, settings.TIMEZONE)
if settings.TO_TIMEZONE:
date_obj = apply_timezone(date_obj, settings.TO_TIMEZONE)
if (
not settings.RETURN_AS_TIMEZONE_AWARE
or (settings.RETURN_AS_TIMEZONE_AWARE
and 'default' == settings.RETURN_AS_TIMEZONE_AWARE and not ptz)
):
date_obj = date_obj.replace(tzinfo=None)
return date_obj, period
date_parser = DateParser()
| from tzlocal import get_localzone
from .timezone_parser import pop_tz_offset_from_string
from .utils import strip_braces, apply_timezone, localize_timezone
from .conf import apply_settings
class DateParser:
@apply_settings
def parse(self, date_string, parse_method, settings=None):
date_string = str(date_string)
if not date_string.strip():
raise ValueError("Empty string")
date_string = strip_braces(date_string)
date_string, ptz = pop_tz_offset_from_string(date_string)
date_obj, period = parse_method(date_string, settings=settings)
_settings_tz = settings.TIMEZONE.lower()
if ptz:
if hasattr(ptz, 'localize'):
date_obj = ptz.localize(date_obj)
else:
date_obj = date_obj.replace(tzinfo=ptz)
if 'local' not in _settings_tz:
date_obj = apply_timezone(date_obj, settings.TIMEZONE)
else:
if 'local' in _settings_tz:
stz = get_localzone()
if hasattr(stz, 'localize'):
date_obj = stz.localize(date_obj)
else:
date_obj = date_obj.replace(tzinfo=stz)
else:
date_obj = localize_timezone(date_obj, settings.TIMEZONE)
if settings.TO_TIMEZONE:
date_obj = apply_timezone(date_obj, settings.TO_TIMEZONE)
if (
not settings.RETURN_AS_TIMEZONE_AWARE
or (settings.RETURN_AS_TIMEZONE_AWARE
and 'default' == settings.RETURN_AS_TIMEZONE_AWARE and not ptz)
):
date_obj = date_obj.replace(tzinfo=None)
return date_obj, period
date_parser = DateParser()
| bsd-3-clause | Python |
0c75a5922b506ea12c9bdb69178df91f967eb756 | Update CLI example mgp25/Instagram-API@d7eda0d105b2fb6ff15e757323fa146628361275 | danleyb2/Instagram-API | examples/checkpoint.py | examples/checkpoint.py | from InstagramAPI import Checkpoint
debug = True
print("####################")
print("# #")
print("# CHECKPOINT #")
print("# #")
print("####################")
username = raw_input("\n\nYour username: ").strip()
if username == '':
print("\n\nYou have to set your username\n")
exit()
settingsPath = raw_input("\n\nYour settings path folder ([ENTER] if dedault): ").strip()
print settingsPath
if settingsPath == '':
settingsPath = None
c = Checkpoint(username, settingsPath, debug)
token = c.doCheckpoint()
code = raw_input("\n\nCode you have received via mail: ").strip()
c.checkpointThird(code, token)
print("\n\nDone")
| from InstagramAPI import Checkpoint
username = '' # // Your username
settingsPath = None
debug = False
c = Checkpoint(username, settingsPath, debug)
print("####################")
print("# #")
print("# CHECKPOINT #")
print("# #")
print("####################")
if username == '':
print("\n\nYou have to set your username\n")
exit()
token = c.doCheckpoint()
code = raw_input("\n\nCode you have received via mail: ").strip()
c.checkpointThird(code, token)
print("\n\nDone")
| mit | Python |
9245dc994735e7060414eaa4b3df1b6f2447414f | Add some docstrings | louisswarren/hieretikz | hierarchy.py | hierarchy.py | '''Reason about a directed graph in which the (non-)existance of some edges
must be inferred by the disconnectedness of certain vertices'''
def transitive_closure_set(vertices, edges):
'''Find the transitive closure of a set of vertices.'''
neighbours = {b for a, b in edges if a in vertices}
if neighbours.issubset(vertices):
return vertices
return transitive_closure_set(vertices | neighbours, edges)
def downward_closure(vertex, edges):
'''Find the downward closure of a vertex.'''
return transitive_closure_set({vertex}, edges)
def upward_closure(vertex, edges):
'''Find the upward closure of a vertex.'''
return transitive_closure_set({vertex}, {(b, a) for a, b in edges})
def is_connected(a, b, edges):
'''Check if there is a path from a to b.'''
return b in downward_closure(a, edges)
def is_separated(a, b, edges, disconnections, a_checked=None, b_checked=None):
'''Checks that a and b will remain not connected even if edges are added to
the graph, as long as the vertex pairs listed in disconnections remain
disconected.'''
if (a, b) in disconnections:
return True
a_checked = a_checked or set()
b_checked = b_checked or set()
above_a = upward_closure(a, edges) - a_checked
below_b = downward_closure(b, edges) - b_checked
new_a_checked = a_checked | {a}
new_b_checked = b_checked | {b}
for p in above_a:
for q in below_b:
if is_separated(p, q, edges, disconnections, new_a_checked, new_b_checked):
return True
return False
def find_possible_connections(vertices, edges, disconnections):
'''Find which edges can be added to create new connections, without
connecting any pairs in disconnections.'''
return {(a, b) for a in vertices for b in vertices if
not is_connected(a, b, edges) and
not is_separated(a, b, edges, disconnections)}
def is_isthmus(edge, edges):
a, _ = edge
return not downward_closure(a, edges - {edge}) == downward_closure(a, edges)
def spanning_tree(edges):
for edge in edges:
if not is_isthmus(edge, edges):
return spanning_tree(edges - {edge})
return edges
def rank_possible_edge(edge, vertices, edges, disconnections):
truev = len(find_possible_connections(vertices, edges | {edge}, disconnections))
falsev = len(find_possible_connections(vertices, edges, disconnections | {edge}))
return truev * falsev
def most_valuable_edge(vertices, edges, disconnections):
ranker = lambda e: rank_possible_edge(e, vertices, edges, disconnections)
return max(find_possible_connections(vertices, edges, disconnections), key=ranker)
| """Reason about a directed graph in which the (non-)existance of some edges
must be inferred by the disconnectedness of certain vertices"""
def transitive_closure_set(vertices, edges):
neighbours = {b for a, b in edges if a in vertices}
if neighbours.issubset(vertices):
return vertices
return transitive_closure_set(vertices | neighbours, edges)
def downward_closure(vertex, edges):
return transitive_closure_set({vertex}, edges)
def upward_closure(vertex, edges):
return transitive_closure_set({vertex}, {(b, a) for a, b in edges})
def is_connected(a, b, edges):
return b in downward_closure(a, edges)
def is_separated(a, b, edges, disconnections, a_checked=None, b_checked=None):
"""Checks that a and b will remain not connected, even if edges are added
to the graph, as long as the vertex pairs listed in disconnections remain
disconected."""
if (a, b) in disconnections:
return True
a_checked = a_checked or set()
b_checked = b_checked or set()
above_a = upward_closure(a, edges) - a_checked
below_b = downward_closure(b, edges) - b_checked
new_a_checked = a_checked | {a}
new_b_checked = b_checked | {b}
for p in above_a:
for q in below_b:
if is_separated(p, q, edges, disconnections, new_a_checked, new_b_checked):
return True
return False
def find_possible_connections(vertices, edges, disconnections):
"""Find which edges can be added to create new connections, without
connecting any pairs in disconnections."""
return {(a, b) for a in vertices for b in vertices if
not is_connected(a, b, edges) and
not is_separated(a, b, edges, disconnections)}
def is_isthmus(edge, edges):
a, _ = edge
return not downward_closure(a, edges - {edge}) == downward_closure(a, edges)
def spanning_tree(edges):
for edge in edges:
if not is_isthmus(edge, edges):
return spanning_tree(edges - {edge})
return edges
def rank_possible_edge(edge, vertices, edges, disconnections):
truev = len(find_possible_connections(vertices, edges | {edge}, disconnections))
falsev = len(find_possible_connections(vertices, edges, disconnections | {edge}))
return truev * falsev
def most_valuable_edge(vertices, edges, disconnections):
ranker = lambda e: rank_possible_edge(e, vertices, edges, disconnections)
return max(find_possible_connections(vertices, edges, disconnections), key=ranker)
| mit | Python |
c97e2a9f0bac1c0d15d5e4aea1a420df79dab689 | Add credit API support added | chargebee/chargebee-python | chargebee/models/subscription.py | chargebee/models/subscription.py | import json
from chargebee.model import Model
from chargebee import request
from chargebee import APIError
class Subscription(Model):
class Addon(Model):
fields = ["id", "quantity"]
pass
class Coupon(Model):
fields = ["coupon_id", "apply_till", "applied_count"]
pass
fields = ["id", "plan_id", "plan_quantity", "status", "trial_start", "trial_end", "current_term_start", \
"current_term_end", "remaining_billing_cycles", "created_at", "activated_at", "cancelled_at", \
"cancel_reason", "due_invoices_count", "due_since", "total_dues", "addons", "coupon", "coupons" ]
@staticmethod
def create(params, env=None):
return request.send('post', '/subscriptions', params, env)
@staticmethod
def create_for_customer(id, params, env=None):
return request.send('post', '/customers/%s/subscriptions' % id, params, env)
@staticmethod
def list(params=None, env=None):
return request.send('get', '/subscriptions', params, env)
@staticmethod
def retrieve(id, env=None):
return request.send('get', '/subscriptions/%s' % id, None, env)
@staticmethod
def update(id, params=None, env=None):
return request.send('post', '/subscriptions/%s' % id, params, env)
@staticmethod
def change_term_end(id, params, env=None):
return request.send('post', '/subscriptions/%s/change_term_end' % id, params, env)
@staticmethod
def cancel(id, params=None, env=None):
return request.send('post', '/subscriptions/%s/cancel' % id, params, env)
@staticmethod
def reactivate(id, params=None, env=None):
return request.send('post', '/subscriptions/%s/reactivate' % id, params, env)
@staticmethod
def add_credit(id, params, env=None):
return request.send('post', '/subscriptions/%s/add_credit' % id, params, env)
| import json
from chargebee.model import Model
from chargebee import request
from chargebee import APIError
class Subscription(Model):
class Addon(Model):
fields = ["id", "quantity"]
pass
class Coupon(Model):
fields = ["coupon_id", "apply_till", "applied_count"]
pass
fields = ["id", "plan_id", "plan_quantity", "status", "trial_start", "trial_end", "current_term_start", \
"current_term_end", "remaining_billing_cycles", "created_at", "activated_at", "cancelled_at", \
"cancel_reason", "due_invoices_count", "due_since", "total_dues", "addons", "coupon", "coupons" ]
@staticmethod
def create(params, env=None):
return request.send('post', '/subscriptions', params, env)
@staticmethod
def create_for_customer(id, params, env=None):
return request.send('post', '/customers/%s/subscriptions' % id, params, env)
@staticmethod
def list(params=None, env=None):
return request.send('get', '/subscriptions', params, env)
@staticmethod
def retrieve(id, env=None):
return request.send('get', '/subscriptions/%s' % id, None, env)
@staticmethod
def update(id, params=None, env=None):
return request.send('post', '/subscriptions/%s' % id, params, env)
@staticmethod
def change_term_end(id, params, env=None):
return request.send('post', '/subscriptions/%s/change_term_end' % id, params, env)
@staticmethod
def cancel(id, params=None, env=None):
return request.send('post', '/subscriptions/%s/cancel' % id, params, env)
@staticmethod
def reactivate(id, params=None, env=None):
return request.send('post', '/subscriptions/%s/reactivate' % id, params, env)
| mit | Python |
9c008e02214cb3f126c6e7fd02b7ffcdebf916eb | Fix test imports | choderalab/yank,choderalab/yank | Yank/tests/test_pipeline.py | Yank/tests/test_pipeline.py | #!/usr/bin/env python
# =============================================================================
# MODULE DOCSTRING
# =============================================================================
"""
Test pipeline functions in pipeline.py.
"""
# =============================================================================
# GLOBAL IMPORTS
# =============================================================================
import os
from simtk import openmm
from yank import utils
from yank.pipeline import find_components
# =============================================================================
# TESTS
# =============================================================================
def test_method_find_components():
"""Test find_components() function."""
data_dir = utils.get_data_filename(os.path.join('tests', 'data'))
ben_tol_dir = os.path.join(data_dir, 'benzene-toluene-explicit')
ben_tol_complex_path = os.path.join(ben_tol_dir, 'complex.prmtop')
ben_tol_prmtop = openmm.app.AmberPrmtopFile(ben_tol_complex_path)
topology = ben_tol_prmtop.topology
system = ben_tol_prmtop.createSystem(nonbondedMethod=openmm.app.PME)
n_atoms = system.getNumParticles()
# Standard selection.
atom_indices = find_components(system, topology, ligand_dsl='resname BEN')
assert len(atom_indices['ligand']) == 12
assert len(atom_indices['receptor']) == 15
assert len(atom_indices['solvent']) == n_atoms - 12 - 15
# Select toluene as solvent to test solvent_dsl.
atom_indices = find_components(system, topology, ligand_dsl='resname BEN',
solvent_dsl='resname TOL')
assert len(atom_indices['ligand']) == 12
assert len(atom_indices['solvent']) == 15
assert len(atom_indices['receptor']) == n_atoms - 12 - 15
| #!/usr/bin/env python
# =============================================================================
# MODULE DOCSTRING
# =============================================================================
"""
Test pipeline functions in pipeline.py.
"""
# =============================================================================
# GLOBAL IMPORTS
# =============================================================================
from yank.pipeline import find_components
# =============================================================================
# TESTS
# =============================================================================
def test_method_find_components():
"""Test find_components() function."""
data_dir = utils.get_data_filename(os.path.join('tests', 'data'))
ben_tol_dir = os.path.join(data_dir, 'benzene-toluene-explicit')
ben_tol_complex_path = os.path.join(ben_tol_dir, 'complex.prmtop')
ben_tol_prmtop = openmm.app.AmberPrmtopFile(ben_tol_complex_path)
topology = ben_tol_prmtop.topology
system = ben_tol_prmtop.createSystem(nonbondedMethod=openmm.app.PME)
n_atoms = system.getNumParticles()
# Standard selection.
atom_indices = find_components(system, topology, ligand_dsl='resname BEN')
assert len(atom_indices['ligand']) == 12
assert len(atom_indices['receptor']) == 15
assert len(atom_indices['solvent']) == n_atoms - 12 - 15
# Select toluene as solvent to test solvent_dsl.
atom_indices = find_components(system, topology, ligand_dsl='resname BEN',
solvent_dsl='resname TOL')
assert len(atom_indices['ligand']) == 12
assert len(atom_indices['solvent']) == 15
assert len(atom_indices['receptor']) == n_atoms - 12 - 15
| mit | Python |
311833200540fe1f52345a9e229755e7b523341b | Use str(blah) instead of blah.__str__() as its more idiomatic | florianm/datacats,datacats/datacats,datawagovau/datacats,dborzov/datacats,datacats/datacats,JackMc/datacats,florianm/datacats,poguez/datacats,wardi/datacats,JackMc/datacats,poguez/datacats,wardi/datacats,JJediny/datacats,reneenoble/datacats,reneenoble/datacats,dborzov/datacats,JJediny/datacats,deniszgonjanin/datacats,datawagovau/datacats,deniszgonjanin/datacats | datacats/error.py | datacats/error.py | from clint.textui import colored
class DatacatsError(Exception):
def __init__(self, message, format_args=(), parent_exception=None):
self.message = message
if parent_exception and hasattr(parent_exception, 'user_description'):
vals = {
"original": self.message,
"type_description": parent_exception.user_description,
"message": str(parent_exception),
}
self.message = "".join([str(colored.blue("{original}\n\n")),
"~" * 30,
"\n{type_description}:\n",
str(colored.yellow("{message}\n"))]
).format(**vals)
self.format_args = format_args
super(DatacatsError, self).__init__(message, format_args)
def __str__(self):
return self.message.format(*self.format_args)
def pretty_print(self):
"""
Print the error message to stdout with colors and borders
"""
print colored.blue("-" * 40)
print colored.red("datacats: problem was encountered:")
print self.message.format(*self.format_args)
print colored.blue("-" * 40)
class WebCommandError(Exception):
user_description = "Docker container \"/web\" command failed"
def __init__(self, command, logs):
super(WebCommandError, self).__init__()
self.command = command
self.logs = logs
def __str__(self):
return (' Command: {0}\n'
' Docker Error Log:\n'
' {1}\n'
).format(" ".join(self.command), self.logs)
class PortAllocatedError(Exception):
user_description = "Unable to allocate port"
class UndocumentedError(Exception):
user_description = "Please quote this traceback when reporting this error"
| from clint.textui import colored
class DatacatsError(Exception):
def __init__(self, message, format_args=(), parent_exception=None):
self.message = message
if parent_exception and hasattr(parent_exception, 'user_description'):
vals = {
"original": self.message,
"type_description": parent_exception.user_description,
"message": parent_exception.__str__(),
}
self.message = "".join([colored.blue("{original}\n\n").__str__(),
"~" * 30,
"\n{type_description}:\n",
colored.yellow("{message}\n").__str__()]
).format(**vals)
self.format_args = format_args
super(DatacatsError, self).__init__(message, format_args)
def __str__(self):
return self.message.format(*self.format_args)
def pretty_print(self):
"""
Print the error message to stdout with colors and borders
"""
print colored.blue("-" * 40)
print colored.red("datacats: problem was encountered:")
print self.message.format(*self.format_args)
print colored.blue("-" * 40)
class WebCommandError(Exception):
user_description = "Docker container \"/web\" command failed"
def __init__(self, command, logs):
super(WebCommandError, self).__init__()
self.command = command
self.logs = logs
def __str__(self):
return (' Command: {0}\n'
' Docker Error Log:\n'
' {1}\n'
).format(" ".join(self.command), self.logs)
class PortAllocatedError(Exception):
user_description = "Unable to allocate port"
class UndocumentedError(Exception):
user_description = "Please quote this traceback when reporting this error"
| agpl-3.0 | Python |
3c9b61c4ef302cf3463f8d82b7976be7e3400147 | Add a note to compat.py. | Julian/jsonschema,Julian/jsonschema,python-jsonschema/jsonschema | jsonschema/compat.py | jsonschema/compat.py | """
Python 2/3 compatibility helpers.
Note: This module is *not* public API.
"""
import contextlib
import operator
import sys
try:
from collections.abc import MutableMapping, Sequence # noqa
except ImportError:
from collections import MutableMapping, Sequence # noqa
PY3 = sys.version_info[0] >= 3
if PY3:
zip = zip
from functools import lru_cache
from io import StringIO as NativeIO
from urllib.parse import (
unquote, urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit
)
from urllib.request import urlopen
str_types = str,
int_types = int,
iteritems = operator.methodcaller("items")
else:
from itertools import izip as zip # noqa
from io import BytesIO as NativeIO
from urlparse import (
urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit # noqa
)
from urllib import unquote # noqa
import urllib2 # noqa
def urlopen(*args, **kwargs):
return contextlib.closing(urllib2.urlopen(*args, **kwargs))
str_types = basestring
int_types = int, long
iteritems = operator.methodcaller("iteritems")
from functools32 import lru_cache
# On python < 3.3 fragments are not handled properly with unknown schemes
def urlsplit(url):
scheme, netloc, path, query, fragment = _urlsplit(url)
if "#" in path:
path, fragment = path.split("#", 1)
return SplitResult(scheme, netloc, path, query, fragment)
def urldefrag(url):
if "#" in url:
s, n, p, q, frag = urlsplit(url)
defrag = urlunsplit((s, n, p, q, ''))
else:
defrag = url
frag = ''
return defrag, frag
# flake8: noqa
| import contextlib
import operator
import sys
try:
from collections.abc import MutableMapping, Sequence # noqa
except ImportError:
from collections import MutableMapping, Sequence # noqa
PY3 = sys.version_info[0] >= 3
if PY3:
zip = zip
from functools import lru_cache
from io import StringIO as NativeIO
from urllib.parse import (
unquote, urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit
)
from urllib.request import urlopen
str_types = str,
int_types = int,
iteritems = operator.methodcaller("items")
else:
from itertools import izip as zip # noqa
from io import BytesIO as NativeIO
from urlparse import (
urljoin, urlunsplit, SplitResult, urlsplit as _urlsplit # noqa
)
from urllib import unquote # noqa
import urllib2 # noqa
def urlopen(*args, **kwargs):
return contextlib.closing(urllib2.urlopen(*args, **kwargs))
str_types = basestring
int_types = int, long
iteritems = operator.methodcaller("iteritems")
from functools32 import lru_cache
# On python < 3.3 fragments are not handled properly with unknown schemes
def urlsplit(url):
scheme, netloc, path, query, fragment = _urlsplit(url)
if "#" in path:
path, fragment = path.split("#", 1)
return SplitResult(scheme, netloc, path, query, fragment)
def urldefrag(url):
if "#" in url:
s, n, p, q, frag = urlsplit(url)
defrag = urlunsplit((s, n, p, q, ''))
else:
defrag = url
frag = ''
return defrag, frag
# flake8: noqa
| mit | Python |
834c451641dfbaf8b275fbb63351b10d34796cfd | Update config | Osirium/vcdriver,Lantero/vcenter-driver,Lantero/vcdriver | vcdriver/config.py | vcdriver/config.py | import os
# Session config
HOST = os.getenv('VCDRIVER_HOST')
PORT = os.getenv('VCDRIVER_PORT')
USERNAME = os.getenv('VCDRIVER_USERNAME')
PASSWORD = os.getenv('VCDRIVER_PASSWORD')
# Virtual machine config
RESOURCE_POOL = os.getenv('VCDRIVER_RESOURCE_POOL')
DATA_STORE = os.getenv('VCDRIVER_DATA_STORE')
FOLDER = os.getenv('VCDRIVER_FOLDER')
VM_SSH_USERNAME = os.getenv('VCDRIVER_VM_SSH_USERNAME')
VM_SSH_PASSWORD = os.getenv('VCDRIVER_VM_SSH_PASSWORD')
VM_WINRM_USERNAME = os.getenv('VCDRIVER_VM_WINRM_USERNAME')
VM_WINRM_PASSWORD = os.getenv('VCDRIVER_VM_WINRM_PASSWORD')
| from getpass import getpass
import os

# Session config -- all values come from the environment.
# (`getpass` is imported at the top of this file.)
HOST = os.getenv('VCDRIVER_HOST')
PORT = os.getenv('VCDRIVER_PORT')
USERNAME = os.getenv('VCDRIVER_USERNAME')
# NOTE(review): prompts interactively at *import time* whenever the variable
# is unset or empty -- confirm this is acceptable for non-interactive runs.
PASSWORD = os.getenv('VCDRIVER_PASSWORD') or getpass('Vcenter password: ')

# Virtual machine config
RESOURCE_POOL = os.getenv('VCDRIVER_RESOURCE_POOL')
DATA_STORE = os.getenv('VCDRIVER_DATA_STORE')
FOLDER = os.getenv('VCDRIVER_FOLDER')
VM_SSH_USERNAME = os.getenv('VCDRIVER_VM_SSH_USERNAME')
VM_SSH_PASSWORD = os.getenv('VCDRIVER_VM_SSH_PASSWORD')
VM_WINRM_USERNAME = os.getenv('VCDRIVER_VM_WINRM_USERNAME')
VM_WINRM_PASSWORD = os.getenv('VCDRIVER_VM_WINRM_PASSWORD')
| mit | Python |
b7ff4eae91b010e29f03a68e842bb342fd65480c | simplify formular | voc/voctomix,voc/voctomix,h01ger/voctomix,h01ger/voctomix | voctogui/lib/audioleveldisplay.py | voctogui/lib/audioleveldisplay.py | import logging, math
from gi.repository import Gst, Gtk


class AudioLevelDisplay(object):
    """ Displays a Level-Meter of another VideoDisplay into a GtkWidget """

    def __init__(self, drawing_area):
        # One logger per widget instance, tagged with the widget's name.
        self.log = logging.getLogger('AudioLevelDisplay[%s]' % drawing_area.get_name())

        self.drawing_area = drawing_area

        # Most recent per-channel levels in dB, updated via level_callback().
        self.levelrms = []
        self.levelpeak = []
        self.leveldecay = []

        # Repaint whenever GTK asks the widget to draw itself.
        self.drawing_area.connect('draw', self.on_draw)

    def on_draw(self, widget, cr):
        # Cairo draw handler: paint one vertical level bar per audio channel.
        channels = len(self.levelrms)

        # Nothing to draw before the first level_callback().
        if channels == 0:
            return

        width = self.drawing_area.get_allocated_width()
        height = self.drawing_area.get_allocated_height()

        # Split the width into equal channel bars separated by 2px gaps.
        margin = 2  # px
        channel_width = int((width - (margin * (channels - 1))) / channels)

        # self.log.debug(
        #     'width: %upx filled with %u channels of each %upx '
        #     'and %ux margin of %upx',
        #     width, channels, channel_width, channels-1, margin)

        # Convert the dB levels to bar heights in pixels.
        rms_px = [self.normalize_db(db) * height for db in self.levelrms]
        peak_px = [self.normalize_db(db) * height for db in self.levelpeak]
        decay_px = [self.normalize_db(db) * height for db in self.leveldecay]

        cr.set_line_width(channel_width)

        # Scan the bar bottom-to-top (drawn at height-y): each 1px slice is
        # brightest below the RMS level, dimmed below the decay level, and
        # darkest above both.
        for y in range(0, height):
            pct = y / height
            for channel in range(0, channels):
                x = (channel * channel_width) + (channel * margin)

                bright = 0.25
                if y < rms_px[channel]:
                    bright = 1
                # elif abs(y - peak_px[channel]) < 3:
                #     bright = 1.5
                elif y < decay_px[channel]:
                    bright = 0.75

                # Hue fades from green (bottom) to red (top).
                cr.set_source_rgb(pct * bright, (1 - pct) * bright, 0 * bright)
                cr.move_to(x, height - y)
                cr.line_to(x + channel_width, height - y)
                cr.stroke()

        return True

    def normalize_db(self, db):
        # Logarithmic mapping from a dB level to a 0..1 fill fraction.
        # NOTE(review): the table below is inverted relative to what the code
        # computes: -0.15*db+1 is 10 at -60dB (log10 -> 1, result 0.0) and 1
        # at 0dB (log10 -> 0, result 1.0), i.e. louder -> larger fraction.
        # -60db -> 1.00 (very quiet)
        # -30db -> 0.75
        # -15db -> 0.50
        # -5db -> 0.25
        # -0db -> 0.00 (very loud)
        logscale = 1 - math.log10(-0.15 * db + 1)
        normalized = self.clamp(logscale, 0, 1)
        return normalized

    def clamp(self, value, min_value, max_value):
        # Constrain value to the inclusive range [min_value, max_value].
        return max(min(value, max_value), min_value)

    def level_callback(self, rms, peak, decay):
        # Store the latest per-channel levels and schedule a redraw.
        self.levelrms = rms
        self.levelpeak = peak
        self.leveldecay = decay
        self.drawing_area.queue_draw()
| import logging, math
from gi.repository import Gst, Gtk


class AudioLevelDisplay(object):
    """ Displays a Level-Meter of another VideoDisplay into a GtkWidget """

    def __init__(self, drawing_area):
        # Per-widget logger; level data arrives via level_callback().
        self.log = logging.getLogger('AudioLevelDisplay[%s]' % drawing_area.get_name())
        self.drawing_area = drawing_area
        self.levelrms = []
        self.levelpeak = []
        self.leveldecay = []
        self.drawing_area.connect('draw', self.on_draw)

    def on_draw(self, widget, cr):
        # Paint one vertical level bar per audio channel.
        channels = len(self.levelrms)
        if channels == 0:
            # No level data received yet.
            return
        width = self.drawing_area.get_allocated_width()
        height = self.drawing_area.get_allocated_height()
        margin = 2  # px gap between channel bars
        channel_width = int((width - (margin * (channels - 1))) / channels)
        # self.log.debug(
        #     'width: %upx filled with %u channels of each %upx '
        #     'and %ux margin of %upx',
        #     width, channels, channel_width, channels-1, margin)
        # dB levels converted to bar heights in pixels.
        rms_px = [self.normalize_db(db) * height for db in self.levelrms]
        peak_px = [self.normalize_db(db) * height for db in self.levelpeak]
        decay_px = [self.normalize_db(db) * height for db in self.leveldecay]
        cr.set_line_width(channel_width)
        for y in range(0, height):
            pct = y / height
            for channel in range(0, channels):
                x = (channel * channel_width) + (channel * margin)
                # Brightness: full below the RMS level, dimmed below the
                # decay level, darkest above both; hue fades green (bottom)
                # to red (top).
                bright = 0.25
                if y < rms_px[channel]:
                    bright = 1
                # elif abs(y - peak_px[channel]) < 3:
                #     bright = 1.5
                elif y < decay_px[channel]:
                    bright = 0.75
                cr.set_source_rgb(pct * bright, (1 - pct) * bright, 0 * bright)
                cr.move_to(x, height - y)
                cr.line_to(x + channel_width, height - y)
                cr.stroke()
        return True

    def normalize_db(self, db):
        # NOTE(review): the table below does not match the computed values:
        # this maps -60 dB -> 0.0 and 0 dB -> 1.0 (louder means larger).
        # -60db -> 1.00 (very quiet)
        # -30db -> 0.75
        # -15db -> 0.50
        # -5db -> 0.25
        # -0db -> 0.00 (very loud)
        logscale = math.log10(-0.15* (db) +1)
        normalized = 1 - self.clamp(logscale, 0, 1)
        return normalized

    def clamp(self, value, min_value, max_value):
        # Constrain value to [min_value, max_value].
        return max(min(value, max_value), min_value)

    def level_callback(self, rms, peak, decay):
        # Store the latest per-channel dB levels and request a repaint.
        self.levelrms = rms
        self.levelpeak = peak
        self.leveldecay = decay
        self.drawing_area.queue_draw()
| mit | Python |
1941b0bbdc356245eff4d66cc44d18a2396a5493 | fix test | syscoin/syscoin,syscoin/syscoin,syscoin/syscoin,syscoin/syscoin,syscoin/syscoin,syscoin/syscoin,syscoin/syscoin,syscoin/syscoin | test/functional/feature_asset.py | test/functional/feature_asset.py | #!/usr/bin/env python3
# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Functional test: create an asset and look it up via the asset index."""
from test_framework.test_framework import SyscoinTestFramework
from test_framework.util import assert_equal


class AssetTest(SyscoinTestFramework):
    def set_test_params(self):
        # Fresh chain, two nodes, asset index enabled on both so that
        # assetinfo lookups work.
        self.setup_clean_chain = True
        self.num_nodes = 2
        self.rpc_timeout = 240
        self.extra_args = [['-assetindex=1'], ['-assetindex=1']]

    def run_test(self):
        # Mine spendable coins on node 0, then create a new asset
        # (positional args as expected by the assetnew RPC).
        self.nodes[0].generate(200)
        asset = self.nodes[0].assetnew('1', 'TST', 'asset description', '0x', 8, '1000', '10000', 31, {})['asset_guid']
        self.sync_all()
        # Confirm the creation in blocks and let node 1 catch up.
        self.nodes[1].generate(3)
        self.sync_all()
        # Both nodes' asset index must resolve the new GUID.
        assetInfo = self.nodes[0].assetinfo(asset)
        assert_equal(assetInfo['asset_guid'], asset)
        assetInfo = self.nodes[1].assetinfo(asset)
        assert_equal(assetInfo['asset_guid'], asset)


if __name__ == '__main__':
    AssetTest().main()
| #!/usr/bin/env python3
# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Functional test: create an asset and look it up via the asset index."""
from test_framework.test_framework import SyscoinTestFramework
# Fix: assert_equal is used in run_test() but was missing from this import,
# so the test died with a NameError at the first assertion.
from test_framework.util import wait_until, get_datadir_path, connect_nodes, assert_equal
import os


class AssetTest(SyscoinTestFramework):
    def set_test_params(self):
        # Fresh chain, two nodes, asset index enabled on both so that
        # assetinfo lookups work.
        self.setup_clean_chain = True
        self.num_nodes = 2
        self.rpc_timeout = 240
        self.extra_args = [['-assetindex=1'],['-assetindex=1']]

    def run_test(self):
        # Mine spendable coins on node 0 before creating the asset.
        self.nodes[0].generate(200)
        self.sync_all()
        asset = self.nodes[0].assetnew('1', 'TST', 'asset description', '0x', 8, '1000', '10000', 31, {})['asset_guid']
        self.sync_all()
        # Confirm the creation in blocks and let node 1 catch up.
        self.nodes[1].generate(3)
        self.sync_all()
        # Both nodes' asset index must resolve the new GUID.
        assetInfo = self.nodes[0].assetinfo(asset)
        assert_equal(assetInfo['asset_guid'], asset)
        assetInfo = self.nodes[1].assetinfo(asset)
        assert_equal(assetInfo['asset_guid'], asset)


if __name__ == '__main__':
    AssetTest().main()
| mit | Python |
7b4fb97fde74d7d4e4d441900c66f6eb8c04dc13 | create Jupyterhub log file at /var/log | felipenoris/math-server-docker,felipenoris/math-server-docker,felipenoris/AWSFinance,felipenoris/AWSFinance | jupyterhub_config.py | jupyterhub_config.py |
# JupyterHub configuration (the traitlets config object `c` is provided by
# JupyterHub when this file is loaded).

# Whitelist of environment variables for the subprocess to inherit
# c.Spawner.env_keep = ['PATH', 'PYTHONPATH', 'CONDA_ROOT', 'CONDA_DEFAULT_ENV', 'VIRTUAL_ENV', 'LANG', 'LC_ALL']
c.Spawner.env_keep = [ 'PATH', 'LD_LIBRARY_PATH', 'JAVA_HOME', 'CPATH', 'CMAKE_ROOT', 'GOROOT' ]

# set of usernames of admin users
#
# If unspecified, only the user that launches the server will be admin.
#c.Authenticator.admin_users = set(['admin'])

# Write the hub's log to this file as well (JupyterHub's extra_log_file trait).
c.JupyterHub.extra_log_file = '/var/log/jupyterhub.log'
|
# JupyterHub configuration (the traitlets config object `c` is provided by
# JupyterHub when this file is loaded).

# Whitelist of environment variables for the subprocess to inherit
# c.Spawner.env_keep = ['PATH', 'PYTHONPATH', 'CONDA_ROOT', 'CONDA_DEFAULT_ENV', 'VIRTUAL_ENV', 'LANG', 'LC_ALL']
c.Spawner.env_keep = [ 'PATH', 'LD_LIBRARY_PATH', 'JAVA_HOME', 'CPATH', 'CMAKE_ROOT', 'GOROOT' ]

# set of usernames of admin users
#
# If unspecified, only the user that launches the server will be admin.
#c.Authenticator.admin_users = set(['admin'])
| mit | Python |
e3822dee395a2cfa7d3b98e84f01a09c66309364 | fix pep8 errors | pydanny/django-admin2,pydanny/django-admin2 | example/example/urls.py | example/example/urls.py | from __future__ import unicode_literals
from blog.views import BlogListView, BlogDetailView
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from djadmin2.site import djadmin2_site

# Register the app's admin classes with the djadmin2 site.
djadmin2_site.autodiscover()

# URL routes: djadmin2 at /admin2/, stock admin at /admin/, blog list/detail
# views, and the blog list as the home page; media files are served from
# MEDIA_ROOT (development-style static serving).
urlpatterns = [
    url(r'^admin2/', include(djadmin2_site.urls)),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^blog/', BlogListView.as_view(template_name="blog/blog_list.html"),
        name='blog_list'),
    url(r'^blog/detail(?P<pk>\d+)/$',
        BlogDetailView.as_view(template_name="blog/blog_detail.html"),
        name='blog_detail'),
    url(r'^$', BlogListView.as_view(template_name="blog/home.html"),
        name='home'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| from __future__ import unicode_literals
from blog.views import BlogListView, BlogDetailView
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from djadmin2.site import djadmin2_site

# Register admin classes with both the stock django admin and djadmin2.
admin.autodiscover()
djadmin2_site.autodiscover()

# Routes: both admin UIs, blog list/detail, and the blog list as home page;
# media served from MEDIA_ROOT (development-style static serving).
urlpatterns = [
    url(r'^admin2/', include(djadmin2_site.urls)),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^blog/', BlogListView.as_view(template_name="blog/blog_list.html"), name='blog_list'),
    url(r'^blog/detail(?P<pk>\d+)/$', BlogDetailView.as_view(template_name="blog/blog_detail.html"), name='blog_detail'),
    url(r'^$', BlogListView.as_view(template_name="blog/home.html"), name='home'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| bsd-3-clause | Python |
6adcc0875218a7e00e260885c2b3caf0fc179122 | Fix #24 | NLeSC/ODEX-FAIRDataPoint,NLeSC/ODEX-FAIRDataPoint,NLeSC/ODEX-FAIRDataPoint,NLeSC/ODEX-FAIRDataPoint | fdp-api/python/tests/dump_metadata.py | fdp-api/python/tests/dump_metadata.py | #
# This script creates dump files of metadata in different formats upon requests to FDP.
#
import os

import six
from rdflib import Graph  # NOTE(review): Graph appears unused in this script
from logging import getLogger, StreamHandler, INFO

# BASE_URL, URL_PATHS, MIME_TYPES (format -> file extension) and DUMP_DIR are
# presumably supplied by myglobals -- confirm.
from myglobals import *

# Python 2/3 compatible urllib names.
if six.PY2:
    from urllib2 import (urlopen, urlparse, Request)
    urljoin = urlparse.urljoin
    urlparse = urlparse.urlparse
else:
    from urllib.request import (Request, urlopen, urlparse)
    from urllib.parse import urljoin

# Progress logging at INFO level to stderr.
logger = getLogger(__name__)
logger.setLevel(INFO)
ch = StreamHandler()
ch.setLevel(INFO)
logger.addHandler(ch)


def dump():
    # One output directory per MIME type; one dump file per metadata URL.
    for fmt, fxt in MIME_TYPES.items():
        dump_path = os.path.join(DUMP_DIR, os.path.basename(fmt))
        os.makedirs(dump_path)
        for url in [urljoin(BASE_URL, p) for p in URL_PATHS]:
            logger.info("Request metadata in '%s' from %s\n" % (fmt, url))
            # Content negotiation: ask the FDP for this specific format.
            req = Request(url)
            req.add_header('Accept', fmt)
            res = urlopen(req)
            # File name: last URL path segment + format-specific extension.
            fname = '%s.%s' % (os.path.basename(urlparse(url).path), fxt)
            fname = os.path.join(dump_path, fname)
            logger.info("Write metadata into file './%s'\n" % fname)
            with open(fname, 'wb') as fout:
                fout.write(res.read())


dump()
| #
# This script creates dump files of metadata in different formats upon requests to FDP.
# (Python 2 only: uses urllib2 and dict.iteritems.)
#
from os import path, makedirs
from urllib2 import urlopen, urlparse, Request
from rdflib import Graph  # NOTE(review): Graph appears unused in this script
from logging import getLogger, StreamHandler, INFO

# BASE_URL, URL_PATHS, MIME_TYPES and DUMP_DIR are presumably supplied by
# myglobals -- confirm.
from myglobals import *

logger = getLogger(__name__)
logger.setLevel(INFO)
ch = StreamHandler()
ch.setLevel(INFO)
logger.addHandler(ch)


def dump():
    # One output directory per MIME type; one dump file per metadata URL.
    for fmt, fxt in MIME_TYPES.iteritems():
        dump_path = path.join(DUMP_DIR, path.basename(fmt))
        makedirs(dump_path)
        for url in [ urlparse.urljoin(BASE_URL, p) for p in URL_PATHS ]:
            logger.info("Request metadata in '%s' from\n %s\n" % (fmt, url))
            req = Request(url)
            req.add_header('Accept', fmt)
            res = urlopen(req)
            fname = '%s.%s' % (path.basename(urlparse.urlparse(url).path), fxt)
            fname = path.join(dump_path, fname)
            logger.info("Write metadata into file './%s'\n" % fname)
            with open(fname, 'w') as fout:
                fout.write(res.read())


dump()
| apache-2.0 | Python |
b6ef71dd8754bf33672f9954cd1d41cce1773599 | fix import statement | e2dmax/Python_SI1145,THP-JOE/Python_SI1145 | examples/simpletest.py | examples/simpletest.py | #!/usr/bin/python
# Author: Joe Gutting
# With use of Adafruit SI1145 library for Arduino, Adafruit_GPIO.I2C & BMP Library by Tony DiCola
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Can enable debug output by uncommenting:
#import logging
#logging.basicConfig(level=logging.DEBUG)
import time
import SI1145.SI1145 as SI1145

# Default constructor will pick a default I2C bus.
#
# For the Raspberry Pi this means you should hook up to the only exposed I2C bus
# from the main GPIO header and the library will figure out the bus number based
# on the Pi's revision.
#
# For the Beaglebone Black the library will assume bus 1 by default, which is
# exposed with SCL = P9_19 and SDA = P9_20.
sensor = SI1145.SI1145()

print 'Press Cntrl + Z to cancel'

# Python 2 script: poll the sensor every 3 seconds and report readings.
while True:
    vis = sensor.readVisible()
    IR = sensor.readIR()
    UV = sensor.readUV()
    # Raw UV reading divided by 100 gives the UV index (driver convention
    # -- confirm).
    uvIndex = UV / 100.0
    print 'Vis: ' + str(vis)
    print 'IR: ' + str(IR)
    print 'UV Index: ' + str(uvIndex)
    time.sleep(3)
| #!/usr/bin/python
# Author: Joe Gutting
# With use of Adafruit SI1145 library for Arduino, Adafruit_GPIO.I2C & BMP Library by Tony DiCola
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Can enable debug output by uncommenting:
#import logging
#logging.basicConfig(level=logging.DEBUG)
import time
import SI1145 as SI1145
# Default constructor will pick a default I2C bus.
#
# For the Raspberry Pi this means you should hook up to the only exposed I2C bus
# from the main GPIO header and the library will figure out the bus number based
# on the Pi's revision.
#
# For the Beaglebone Black the library will assume bus 1 by default, which is
# exposed with SCL = P9_19 and SDA = P9_20.
sensor = SI1145.SI1145()
print 'Press Cntrl + Z to cancel'
while True:
vis = sensor.readVisible()
IR = sensor.readIR()
UV = sensor.readUV()
uvIndex = UV / 100.0
print 'Vis: ' + str(vis)
print 'IR: ' + str(IR)
print 'UV Index: ' + str(uvIndex)
time.sleep(3)
| mit | Python |
1c35f0c68502ab4a41fec2ae9393066a8195b3f8 | Update simpletest.py | adafruit/Adafruit_Python_BMP,DirkUK/Adafruit_Python_BMP,campenberger/Adafruit_Python_BMP | examples/simpletest.py | examples/simpletest.py | # Copyright (c) 2014 Adafruit Industries
# Author: Tony DiCola
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Can enable debug output by uncommenting:
#import logging
#logging.basicConfig(level=logging.DEBUG)
import Adafruit_BMP.BMP085 as BMP085

# Default constructor will pick a default I2C bus.
#
# For the Raspberry Pi this means you should hook up to the only exposed I2C bus
# from the main GPIO header and the library will figure out the bus number based
# on the Pi's revision.
#
# For the Beaglebone Black the library will assume bus 1 by default, which is
# exposed with SCL = P9_19 and SDA = P9_20.
sensor = BMP085.BMP085()

# Optionally you can override the bus number:
#sensor = BMP085.BMP085(busnum=2)

# You can also optionally change the BMP085 mode to one of BMP085_ULTRALOWPOWER,
# BMP085_STANDARD, BMP085_HIGHRES, or BMP085_ULTRAHIGHRES.  See the BMP085
# datasheet for more details on the meanings of each mode (accuracy and power
# consumption are primarily the differences).  The default mode is STANDARD.
#sensor = BMP085.BMP085(mode=BMP085.BMP085_ULTRAHIGHRES)

# Python 2 script: read and report one set of sensor values.
print 'Temp = {0:0.2f} *C'.format(sensor.read_temperature())
print 'Pressure = {0:0.2f} Pa'.format(sensor.read_pressure())
print 'Altitude = {0:0.2f} m'.format(sensor.read_altitude())
print 'Sealevel Pressure = {0:0.2f} Pa'.format(sensor.read_sealevel_pressure())
| # Can enable debug output by uncommenting:
#import logging
#logging.basicConfig(level=logging.DEBUG)
import Adafruit_BMP.BMP085 as BMP085

# Default constructor will pick a default I2C bus.
#
# For the Raspberry Pi this means you should hook up to the only exposed I2C bus
# from the main GPIO header and the library will figure out the bus number based
# on the Pi's revision.
#
# For the Beaglebone Black the library will assume bus 1 by default, which is
# exposed with SCL = P9_19 and SDA = P9_20.
sensor = BMP085.BMP085()

# Optionally you can override the bus number:
#sensor = BMP085.BMP085(busnum=2)

# You can also optionally change the BMP085 mode to one of BMP085_ULTRALOWPOWER,
# BMP085_STANDARD, BMP085_HIGHRES, or BMP085_ULTRAHIGHRES.  See the BMP085
# datasheet for more details on the meanings of each mode (accuracy and power
# consumption are primarily the differences).  The default mode is STANDARD.
#sensor = BMP085.BMP085(mode=BMP085.BMP085_ULTRAHIGHRES)

# Python 2 script: read and report one set of sensor values.
print 'Temp = {0:0.2f} *C'.format(sensor.read_temperature())
print 'Pressure = {0:0.2f} Pa'.format(sensor.read_pressure())
print 'Altitude = {0:0.2f} m'.format(sensor.read_altitude())
print 'Sealevel Pressure = {0:0.2f} Pa'.format(sensor.read_sealevel_pressure())
| mit | Python |
d21648ab963e5ebdf2729e528fc31c8a9ce93772 | update speed_test | Koheron/lase | examples/speed_test.py | examples/speed_test.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Benchmark the round-trip latency of instrument commands against one or
# more oscillo instruments (Python 2 script).
import initExample

import os
import time

import numpy as np
import matplotlib.pyplot as plt

from utilities import load_instrument
from ldk.drivers import Oscillo

# Command to benchmark, selectable via the CMD environment variable.
cmd = os.getenv('CMD', 'get_adc')


def speed_test(host, n_pts=1000):
    # Time n_pts consecutive command calls on the instrument at `host` and
    # plot/report the per-call latencies.
    time_array = np.zeros(n_pts)
    client = load_instrument(host, instrument='oscillo')
    driver = Oscillo(client)
    driver.set_averaging(False)
    t0 = time.time()
    t_prev = t0
    for i in range(n_pts):
        if cmd == 'get_adc':
            driver.get_adc()
        elif cmd == 'get_num_average':
            driver.get_num_average()
        t = time.time()
        time_array[i] = t - t_prev  # latency of this iteration
        print host, i, time_array[i]
        t_prev = t
    print np.median(time_array)  # median per-call latency
    plt.plot(time_array)
    driver.close()


hosts = ['192.168.1.{0}'.format(i) for i in [12]]
for host in hosts:
    speed_test(host, n_pts=10000)
plt.ylabel('Time (s)')
plt.show()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Benchmark the per-call latency of get_adc() against oscillo instruments
# (Python 2 script).
import initExample

import os
import time

import numpy as np
import matplotlib.pyplot as plt

from utilities import load_instrument
from ldk.drivers import Oscillo


def speed_test(host, n_pts=1000):
    # Time n_pts consecutive get_adc() calls on the instrument at `host`.
    time_array = np.zeros(n_pts)
    client = load_instrument(host, instrument='oscillo')
    driver = Oscillo(client)
    driver.set_averaging(True)
    t0 = time.time()
    t_prev = t0
    for i in range(n_pts):
        driver.get_adc()
        t = time.time()
        time_array[i] = t - t_prev  # latency of this iteration
        print host, i, time_array[i]
        t_prev = t
    print np.median(time_array)  # median per-call latency
    plt.plot(time_array)
    driver.close()


hosts = ['192.168.1.{0}'.format(i) for i in [12]]
for host in hosts:
    speed_test(host, n_pts=10000)
plt.ylabel('Time (s)')
plt.show()
| mit | Python |
a603e70d4e955c0bd19630a61ba0bd3db5575d6d | fix _cost_func test | benvanwerkhoven/kernel_tuner | test/strategies/test_minimize.py | test/strategies/test_minimize.py | from __future__ import print_function
from collections import OrderedDict
from kernel_tuner.strategies import minimize
from kernel_tuner.interface import Options
try:
from mock import Mock
except ImportError:
from unittest.mock import Mock
def fake_runner():
    """Build a Mock runner whose run() always reports a single 5-unit time.

    run(...) yields [[{'time': 5}], None]: a one-element result list plus a
    None placeholder for the environment.
    """
    stub = Mock()
    stub.run.return_value = [[{'time': 5}], None]
    return stub
# Shared 3x3 tunable-parameter space used by all tests below.
tune_params = OrderedDict([("x", [1, 2, 3]), ("y", [4, 5, 6])])


def test__cost_func():
    """_cost_func should run once, serve repeats from the cache, and honour restrictions."""
    x = [1, 4]
    kernel_options = None
    tuning_options = Options(scaling=False, snap=False, tune_params=tune_params, restrictions=None, strategy_options={}, cache={})
    runner = fake_runner()
    results = []
    time = minimize._cost_func(x, kernel_options, tuning_options, runner, results)
    assert time == 5
    tuning_options.cache["1,4"] = OrderedDict([("x", 1), ("y", 4), ("time", 5)])
    time = minimize._cost_func(x, kernel_options, tuning_options, runner, results)
    assert time == 5
    # check if 1st run is properly cached and runner is only called once
    assert runner.run.call_count == 1
    # check if restrictions are properly handled
    restrictions = ["False"]
    tuning_options = Options(scaling=False,
                             snap=False,
                             tune_params=tune_params,
                             restrictions=restrictions,
                             strategy_options={},
                             verbose=True,
                             cache={})
    time = minimize._cost_func(x, kernel_options, tuning_options, runner, results)
    # An always-false restriction yields the 1e20 penalty value.
    assert time == 1e20


def test_setup_method_arguments():
    # check if returns a dict, the specific options depend on scipy
    assert isinstance(minimize.setup_method_arguments("bla", 5), dict)


def test_setup_method_options():
    """eps is forwarded to the scipy method options; maxfun and disp are populated too."""
    tuning_options = Options(eps=1e-5, tune_params=tune_params, strategy_options={}, verbose=True)
    method_options = minimize.setup_method_options("L-BFGS-B", tuning_options)
    assert isinstance(method_options, dict)
    assert method_options["eps"] == 1e-5
    assert method_options["maxfun"] == 100
    assert method_options["disp"] is True
| from __future__ import print_function
from collections import OrderedDict
from kernel_tuner.strategies import minimize
from kernel_tuner.interface import Options
try:
from mock import Mock
except ImportError:
from unittest.mock import Mock
def fake_runner():
    # Stub runner whose run() always reports one result with time == 5.
    fake_result = {'time': 5}
    runner = Mock()
    runner.run.return_value = [[fake_result], None]
    return runner


# Shared 3x3 tunable-parameter space used by all tests below.
tune_params = OrderedDict([("x", [1, 2, 3]), ("y", [4, 5, 6])])


def test__cost_func():
    """_cost_func should run once, serve repeats from the cache, and honour restrictions."""
    x = [1, 4]
    kernel_options = None
    tuning_options = Options(scaling=False, tune_params=tune_params, restrictions=None, strategy_options={}, cache={})
    runner = fake_runner()
    results = []
    time = minimize._cost_func(x, kernel_options, tuning_options, runner, results)
    assert time == 5
    tuning_options.cache["1,4"] = OrderedDict([("x", 1), ("y", 4), ("time", 5)])
    time = minimize._cost_func(x, kernel_options, tuning_options, runner, results)
    assert time == 5
    # check if 1st run is properly cached and runner is only called once
    assert runner.run.call_count == 1
    # check if restrictions are properly handled
    restrictions = ["False"]
    tuning_options = Options(scaling=False,
                             tune_params=tune_params,
                             restrictions=restrictions,
                             strategy_options={},
                             verbose=True,
                             cache={})
    time = minimize._cost_func(x, kernel_options, tuning_options, runner, results)
    # An always-false restriction yields the 1e20 penalty value.
    assert time == 1e20


def test_setup_method_arguments():
    # check if returns a dict, the specific options depend on scipy
    assert isinstance(minimize.setup_method_arguments("bla", 5), dict)


def test_setup_method_options():
    """eps is forwarded to the scipy method options; maxfun and disp are populated too."""
    tuning_options = Options(eps=1e-5, tune_params=tune_params, strategy_options={}, verbose=True)
    method_options = minimize.setup_method_options("L-BFGS-B", tuning_options)
    assert isinstance(method_options, dict)
    assert method_options["eps"] == 1e-5
    assert method_options["maxfun"] == 100
    assert method_options["disp"] is True
| apache-2.0 | Python |
34d0248510b536483b97e5786ef1a81c2d67dbe1 | add MWPotential | followthesheep/galpy,followthesheep/galpy,jobovy/galpy,followthesheep/galpy,jobovy/galpy,jobovy/galpy,followthesheep/galpy,jobovy/galpy | galpy/potential.py | galpy/potential.py | from galpy.potential_src import Potential
# Flatten the potential_src submodules into the public galpy.potential
# namespace.  (The Potential submodule itself is imported just above this
# excerpt.)  NOTE: the class aliases below deliberately rebind the submodule
# names (e.g. Potential, planarPotential), shadowing the modules afterwards.
from galpy.potential_src import planarPotential
from galpy.potential_src import linearPotential
from galpy.potential_src import verticalPotential
from galpy.potential_src import MiyamotoNagaiPotential
from galpy.potential_src import LogarithmicHaloPotential
from galpy.potential_src import DoubleExponentialDiskPotential
from galpy.potential_src import PowerSphericalPotential
from galpy.potential_src import TwoPowerSphericalPotential
from galpy.potential_src import plotRotcurve
from galpy.potential_src import plotEscapecurve
from galpy.potential_src import KGPotential
from galpy.potential_src import interpRZPotential
from galpy.potential_src import DehnenBarPotential
from galpy.potential_src import SteadyLogSpiralPotential
from galpy.potential_src import TransientLogSpiralPotential
from galpy.potential_src import MovingObjectPotential
from galpy.potential_src import ForceSoftening
#
# Functions
#
evaluatePotentials= Potential.evaluatePotentials
evaluateDensities= Potential.evaluateDensities
evaluateRforces= Potential.evaluateRforces
evaluatephiforces= Potential.evaluatephiforces
evaluatezforces= Potential.evaluatezforces
RZToplanarPotential= planarPotential.RZToplanarPotential
RZToverticalPotential= verticalPotential.RZToverticalPotential
plotPotentials= Potential.plotPotentials
plotplanarPotentials= planarPotential.plotplanarPotentials
plotlinearPotentials= linearPotential.plotlinearPotentials
calcRotcurve= plotRotcurve.calcRotcurve
plotRotcurve= plotRotcurve.plotRotcurve
calcEscapecurve= plotEscapecurve.calcEscapecurve
plotEscapecurve= plotEscapecurve.plotEscapecurve
evaluateplanarPotentials= planarPotential.evaluateplanarPotentials
evaluateplanarRforces= planarPotential.evaluateplanarRforces
evaluateplanarphiforces= planarPotential.evaluateplanarphiforces
evaluatelinearPotentials= linearPotential.evaluatelinearPotentials
evaluatelinearForces= linearPotential.evaluatelinearForces
#
# Classes
#
Potential= Potential.Potential
planarAxiPotential= planarPotential.planarAxiPotential
planarPotential= planarPotential.planarPotential
linearPotential= linearPotential.linearPotential
MiyamotoNagaiPotential= MiyamotoNagaiPotential.MiyamotoNagaiPotential
DoubleExponentialDiskPotential= DoubleExponentialDiskPotential.DoubleExponentialDiskPotential
LogarithmicHaloPotential= LogarithmicHaloPotential.LogarithmicHaloPotential
KeplerPotential= PowerSphericalPotential.KeplerPotential
PowerSphericalPotential= PowerSphericalPotential.PowerSphericalPotential
NFWPotential= TwoPowerSphericalPotential.NFWPotential
JaffePotential= TwoPowerSphericalPotential.JaffePotential
HernquistPotential= TwoPowerSphericalPotential.HernquistPotential
TwoPowerSphericalPotential= TwoPowerSphericalPotential.TwoPowerSphericalPotential
KGPotential= KGPotential.KGPotential
interpRZPotential= interpRZPotential.interpRZPotential
DehnenBarPotential= DehnenBarPotential.DehnenBarPotential
SteadyLogSpiralPotential= SteadyLogSpiralPotential.SteadyLogSpiralPotential
TransientLogSpiralPotential= TransientLogSpiralPotential.TransientLogSpiralPotential
MovingObjectPotential= MovingObjectPotential.MovingObjectPotential
#Softenings
PlummerSoftening= ForceSoftening.PlummerSoftening
#
# Constants
#
# MWPotential: three-component Milky-Way-like model (Miyamoto-Nagai disk +
# NFW halo + Hernquist bulge); the normalize fractions .6 + .35 + .05 sum
# to 1.
MWPotential= [MiyamotoNagaiPotential(a=0.5,b=0.0375,normalize=.6),
              NFWPotential(a=4.5,normalize=.35),
              HernquistPotential(a=0.6/8,normalize=0.05)]
| from galpy.potential_src import Potential
# Flatten the potential_src submodules into the public galpy.potential
# namespace.  (The Potential submodule itself is imported just above this
# excerpt.)  NOTE: the class aliases below rebind the submodule names.
from galpy.potential_src import planarPotential
from galpy.potential_src import linearPotential
from galpy.potential_src import verticalPotential
from galpy.potential_src import MiyamotoNagaiPotential
from galpy.potential_src import LogarithmicHaloPotential
from galpy.potential_src import DoubleExponentialDiskPotential
from galpy.potential_src import PowerSphericalPotential
from galpy.potential_src import TwoPowerSphericalPotential
from galpy.potential_src import plotRotcurve
from galpy.potential_src import plotEscapecurve
from galpy.potential_src import KGPotential
from galpy.potential_src import interpRZPotential
from galpy.potential_src import DehnenBarPotential
from galpy.potential_src import SteadyLogSpiralPotential
from galpy.potential_src import TransientLogSpiralPotential
from galpy.potential_src import MovingObjectPotential
from galpy.potential_src import ForceSoftening
#
# Functions
#
evaluatePotentials= Potential.evaluatePotentials
evaluateDensities= Potential.evaluateDensities
evaluateRforces= Potential.evaluateRforces
evaluatephiforces= Potential.evaluatephiforces
evaluatezforces= Potential.evaluatezforces
RZToplanarPotential= planarPotential.RZToplanarPotential
RZToverticalPotential= verticalPotential.RZToverticalPotential
plotPotentials= Potential.plotPotentials
plotplanarPotentials= planarPotential.plotplanarPotentials
plotlinearPotentials= linearPotential.plotlinearPotentials
calcRotcurve= plotRotcurve.calcRotcurve
plotRotcurve= plotRotcurve.plotRotcurve
calcEscapecurve= plotEscapecurve.calcEscapecurve
plotEscapecurve= plotEscapecurve.plotEscapecurve
evaluateplanarPotentials= planarPotential.evaluateplanarPotentials
evaluateplanarRforces= planarPotential.evaluateplanarRforces
evaluateplanarphiforces= planarPotential.evaluateplanarphiforces
evaluatelinearPotentials= linearPotential.evaluatelinearPotentials
evaluatelinearForces= linearPotential.evaluatelinearForces
#
# Classes
#
Potential= Potential.Potential
planarAxiPotential= planarPotential.planarAxiPotential
planarPotential= planarPotential.planarPotential
linearPotential= linearPotential.linearPotential
MiyamotoNagaiPotential= MiyamotoNagaiPotential.MiyamotoNagaiPotential
DoubleExponentialDiskPotential= DoubleExponentialDiskPotential.DoubleExponentialDiskPotential
LogarithmicHaloPotential= LogarithmicHaloPotential.LogarithmicHaloPotential
KeplerPotential= PowerSphericalPotential.KeplerPotential
PowerSphericalPotential= PowerSphericalPotential.PowerSphericalPotential
NFWPotential= TwoPowerSphericalPotential.NFWPotential
JaffePotential= TwoPowerSphericalPotential.JaffePotential
HernquistPotential= TwoPowerSphericalPotential.HernquistPotential
TwoPowerSphericalPotential= TwoPowerSphericalPotential.TwoPowerSphericalPotential
KGPotential= KGPotential.KGPotential
interpRZPotential= interpRZPotential.interpRZPotential
DehnenBarPotential= DehnenBarPotential.DehnenBarPotential
SteadyLogSpiralPotential= SteadyLogSpiralPotential.SteadyLogSpiralPotential
TransientLogSpiralPotential= TransientLogSpiralPotential.TransientLogSpiralPotential
MovingObjectPotential= MovingObjectPotential.MovingObjectPotential
#Softenings
PlummerSoftening= ForceSoftening.PlummerSoftening
| bsd-3-clause | Python |
6b75e9d7e1c84e5ecc8529618260fb4e6a72ce73 | Update package documentation. | eikonomega/flask-authorization-panda | flask_authorization_panda/__init__.py | flask_authorization_panda/__init__.py | """
**Flask-Authorization-Panda is a Flask extension that provides decorators
for various authentication methods for RESTful web services.
Currently, only HTTP Basic Authentication is supported. **
Usage
-----
>>> from flask.ext.flask_authorization_panda import basic_auth
During app initialization, store your required username/password in
the config attribute::
app = Flask(__name__)
app.config['basic_auth_credentials'] = dict(username='admin',
password='secret')
Finally, simple apply the @basic_auth decorator to methods which you
want to require HTTP Basic Auth::
>>> @app.route('/')
>>> @basic_auth
>>> def hello_world():
>>> return jsonify({"statusCode": 200, "message": "Ok"})
This will result in all calls against the decorated method to (1) check for
for credentials on the request.authorization object and (2) verify that
they match the contents of app.config['basic_auth_credentials]'
"""
__version__ = '0.6'
from basic_auth import basic_auth | """
**Flask-Authorization-Panda provides easy loading and access to the data elements of
JSON based configuration files.**
Usage
-----
Assuming that an environment variable 'SHARED_CONFIG_FILES' exists
and points to a directory containing multiple JSON files, including
the following::
ldap.json
{
"primary": {
"url": "ldaps://primaryldap.example.edu:111",
"login": "cn=LDAP Testing",
"password": "LDAP Password"
}
}
smtp.json
{
"TestAccount1": {
"url": "smtp.yourschool.edu",
"login": "testaccount1",
"password": "testaccount1password"
}
}
You would access the contents of those configuration files like this::
>>> from configuration_panda import ConfigurationPanda
>>> program_settings = ConfigurationPanda(['SHARED_CONFIG_FILES'])
>>> program_settings.ldap['primary']['url']
ldaps://primaryldap.example.edu:111
>>> program_settings.smtp['TestAccount1']['login']
testaccount1
Or, if you prefer dictionary-style syntax::
>>> from configuration_panda import ConfigurationPanda
>>> program_settings = ConfigurationPanda(['SHARED_CONFIG_FILES'])
>>> program_settings['ldap']['primary']['url']
ldaps://primaryldap.example.edu:111
>>> program_settings['smtp']['TestAccount1']['login']
testaccount1
"""
__version__ = '0.5'
from basic_auth import basic_auth | mit | Python |
5409845ebf4c5dae7825f8b3f41f6ba063f1360d | Update __init__.py | kulkarnimandar/SU2,KDra/SU2,chenbojian/SU2,KDra/SU2,Heathckliff/SU2,opfeifle/SU2,drewkett/SU2,cspode/SU2,pawhewitt/Dev,shivajimedida/SU2,pawhewitt/Dev,jlabroquere/SU2,bankur16/SU2,srange/SU2,Heathckliff/SU2,shivajimedida/SU2,shivajimedida/SU2,opfeifle/SU2,shivajimedida/SU2,jlabroquere/SU2,srange/SU2,huahbo/SU2,hlkline/SU2,cspode/SU2,bankur16/SU2,srange/SU2,cspode/SU2,hlkline/SU2,chenbojian/SU2,bankur16/SU2,hlkline/SU2,drewkett/SU2,chenbojian/SU2,opfeifle/SU2,jlabroquere/SU2,bankur16/SU2,hlkline/SU2,pawhewitt/Dev,bankur16/SU2,chenbojian/SU2,kulkarnimandar/SU2,huahbo/SU2,Heathckliff/SU2,cspode/SU2,kulkarnimandar/SU2,drewkett/SU2,Heathckliff/SU2,jlabroquere/SU2,shivajimedida/SU2,KDra/SU2,huahbo/SU2,Heathckliff/SU2,shivajimedida/SU2,pawhewitt/Dev,huahbo/SU2,KDra/SU2,opfeifle/SU2,jlabroquere/SU2,Heathckliff/SU2,opfeifle/SU2,huahbo/SU2,drewkett/SU2,cspode/SU2,kulkarnimandar/SU2,chenbojian/SU2,cspode/SU2,kulkarnimandar/SU2,drewkett/SU2,jlabroquere/SU2,hlkline/SU2,huahbo/SU2,srange/SU2,opfeifle/SU2,KDra/SU2,pawhewitt/Dev,pawhewitt/Dev,bankur16/SU2 | SU2_PY/SU2/util/__init__.py | SU2_PY/SU2/util/__init__.py | from switch import switch
from bunch import Bunch as bunch
from ordered_bunch import OrderedBunch as ordered_bunch
from plot import write_plot, tecplot, paraview
from lhc_unif import lhc_unif
from mp_eval import mp_eval
from which import which
|
from switch import switch
from bunch import Bunch as bunch
from ordered_bunch import OrderedBunch as ordered_bunch
from plot import write_plot, tecplot, paraview
from lhc_unif import lhc_unif
from mp_eval import mp_eval | lgpl-2.1 | Python |
cee62120cefa8c773795d2b4e24fb4df40b6532e | Remove declaration of the handlers | dapeng0802/django-blog-zinnia,petecummings/django-blog-zinnia,petecummings/django-blog-zinnia,bywbilly/django-blog-zinnia,Fantomas42/django-blog-zinnia,Maplecroft/django-blog-zinnia,aorzh/django-blog-zinnia,petecummings/django-blog-zinnia,Fantomas42/django-blog-zinnia,Maplecroft/django-blog-zinnia,extertioner/django-blog-zinnia,ZuluPro/django-blog-zinnia,marctc/django-blog-zinnia,Zopieux/django-blog-zinnia,dapeng0802/django-blog-zinnia,1844144/django-blog-zinnia,aorzh/django-blog-zinnia,ghachey/django-blog-zinnia,Maplecroft/django-blog-zinnia,1844144/django-blog-zinnia,extertioner/django-blog-zinnia,bywbilly/django-blog-zinnia,Zopieux/django-blog-zinnia,marctc/django-blog-zinnia,extertioner/django-blog-zinnia,ZuluPro/django-blog-zinnia,ghachey/django-blog-zinnia,Fantomas42/django-blog-zinnia,dapeng0802/django-blog-zinnia,aorzh/django-blog-zinnia,Zopieux/django-blog-zinnia,bywbilly/django-blog-zinnia,ZuluPro/django-blog-zinnia,marctc/django-blog-zinnia,1844144/django-blog-zinnia,ghachey/django-blog-zinnia | demo/urls.py | demo/urls.py | """Urls for the demo of Zinnia"""
from django.conf import settings
from django.contrib import admin
from django.conf.urls import url
from django.conf.urls import include
from django.conf.urls import patterns
from django.views.generic.base import RedirectView
from zinnia.sitemaps import TagSitemap
from zinnia.sitemaps import EntrySitemap
from zinnia.sitemaps import CategorySitemap
from zinnia.sitemaps import AuthorSitemap
admin.autodiscover()
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url='/blog/')),
url(r'^blog/', include('zinnia.urls')),
url(r'^comments/', include('django.contrib.comments.urls')),
url(r'^xmlrpc/$', 'django_xmlrpc.views.handle_xmlrpc'),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
sitemaps = {
'tags': TagSitemap,
'blog': EntrySitemap,
'authors': AuthorSitemap,
'categories': CategorySitemap
}
urlpatterns += patterns(
'django.contrib.sitemaps.views',
url(r'^sitemap.xml$', 'index',
{'sitemaps': sitemaps}),
url(r'^sitemap-(?P<section>.+)\.xml$', 'sitemap',
{'sitemaps': sitemaps}),
)
urlpatterns += patterns(
'',
url(r'^403/$', 'django.views.defaults.permission_denied'),
url(r'^404/$', 'django.views.defaults.page_not_found'),
url(r'^500/$', 'django.views.defaults.server_error'),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT})
)
| """Urls for the demo of Zinnia"""
from django.conf import settings
from django.contrib import admin
from django.conf.urls import url
from django.conf.urls import include
from django.conf.urls import patterns
from django.views.generic.base import RedirectView
from zinnia.sitemaps import TagSitemap
from zinnia.sitemaps import EntrySitemap
from zinnia.sitemaps import CategorySitemap
from zinnia.sitemaps import AuthorSitemap
admin.autodiscover()
handler500 = 'django.views.defaults.server_error'
handler404 = 'django.views.defaults.page_not_found'
handler403 = 'django.views.defaults.permission_denied'
urlpatterns = patterns(
'',
url(r'^$', RedirectView.as_view(url='/blog/')),
url(r'^blog/', include('zinnia.urls')),
url(r'^comments/', include('django.contrib.comments.urls')),
url(r'^xmlrpc/$', 'django_xmlrpc.views.handle_xmlrpc'),
url(r'^i18n/', include('django.conf.urls.i18n')),
url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
url(r'^admin/', include(admin.site.urls)),
)
sitemaps = {
'tags': TagSitemap,
'blog': EntrySitemap,
'authors': AuthorSitemap,
'categories': CategorySitemap
}
urlpatterns += patterns(
'django.contrib.sitemaps.views',
url(r'^sitemap.xml$', 'index',
{'sitemaps': sitemaps}),
url(r'^sitemap-(?P<section>.+)\.xml$', 'sitemap',
{'sitemaps': sitemaps}),
)
urlpatterns += patterns(
'',
url(r'^403/$', 'django.views.defaults.permission_denied'),
url(r'^404/$', 'django.views.defaults.page_not_found'),
url(r'^500/$', 'django.views.defaults.server_error'),
)
if settings.DEBUG:
urlpatterns += patterns(
'',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve',
{'document_root': settings.MEDIA_ROOT})
)
| bsd-3-clause | Python |
14278fdf0a4f741203b91ca2869f8b4b93fb9fd5 | Allow file URLs | skosukhin/spack,mfherbst/spack,tmerrick1/spack,iulian787/spack,lgarren/spack,LLNL/spack,EmreAtes/spack,lgarren/spack,tmerrick1/spack,TheTimmy/spack,skosukhin/spack,krafczyk/spack,lgarren/spack,skosukhin/spack,mfherbst/spack,EmreAtes/spack,iulian787/spack,EmreAtes/spack,iulian787/spack,TheTimmy/spack,tmerrick1/spack,lgarren/spack,mfherbst/spack,TheTimmy/spack,matthiasdiener/spack,matthiasdiener/spack,skosukhin/spack,tmerrick1/spack,krafczyk/spack,matthiasdiener/spack,LLNL/spack,LLNL/spack,TheTimmy/spack,krafczyk/spack,krafczyk/spack,LLNL/spack,EmreAtes/spack,skosukhin/spack,iulian787/spack,LLNL/spack,lgarren/spack,matthiasdiener/spack,mfherbst/spack,tmerrick1/spack,mfherbst/spack,EmreAtes/spack,TheTimmy/spack,krafczyk/spack,matthiasdiener/spack,iulian787/spack | lib/spack/spack/validate.py | lib/spack/spack/validate.py | ##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import tty
from urlparse import urlparse
from spack.util.compression import allowed_archive
ALLOWED_SCHEMES = ["http", "https", "ftp", "file"]
def url(url_string):
url = urlparse(url_string)
if url.scheme not in ALLOWED_SCHEMES:
tty.die("Invalid protocol in URL: '%s'" % url_string)
if not allowed_archive(url_string):
tty.die("Invalid file type in URL: '%s'" % url_string)
| ##############################################################################
# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://scalability-llnl.github.io/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License (as published by
# the Free Software Foundation) version 2.1 dated February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import tty
from urlparse import urlparse
from spack.util.compression import allowed_archive
ALLOWED_SCHEMES = ["http", "https", "ftp"]
def url(url_string):
url = urlparse(url_string)
if url.scheme not in ALLOWED_SCHEMES:
tty.die("Invalid protocol in URL: '%s'" % url_string)
if not allowed_archive(url_string):
tty.die("Invalid file type in URL: '%s'" % url_string)
| lgpl-2.1 | Python |
48df91d48b21fecfbdb87a0e8b011956594a74c2 | Add surface area class (#2183) | TheAlgorithms/Python | maths/area.py | maths/area.py | """
Find the area of various geometric shapes
"""
from math import pi
from typing import Union
def surface_area_cube(side_length: Union[int, float]) -> float:
"""
Calculate the Surface Area of a Cube.
>>> surface_area_cube(1)
6
>>> surface_area_cube(3)
54
"""
return 6 * pow(side_length, 2)
def surface_area_sphere(radius: float) -> float:
"""
Calculate the Surface Area of a Sphere.
Wikipedia reference: https://en.wikipedia.org/wiki/Sphere
:return 4 * pi * r^2
>>> surface_area_sphere(5)
314.1592653589793
>>> surface_area_sphere(1)
12.566370614359172
"""
return 4 * pi * pow(radius, 2)
def area_rectangle(base, height):
"""
Calculate the area of a rectangle
>> area_rectangle(10,20)
200
"""
return base * height
def area_square(side_length):
"""
Calculate the area of a square
>>> area_square(10)
100
"""
return side_length * side_length
def area_triangle(length, breadth):
"""
Calculate the area of a triangle
>>> area_triangle(10,10)
50.0
"""
return 1 / 2 * length * breadth
def area_parallelogram(base, height):
"""
Calculate the area of a parallelogram
>> area_parallelogram(10,20)
200
"""
return base * height
def area_trapezium(base1, base2, height):
"""
Calculate the area of a trapezium
>> area_trapezium(10,20,30)
450
"""
return 1 / 2 * (base1 + base2) * height
def area_circle(radius):
"""
Calculate the area of a circle
>> area_circle(20)
1256.6370614359173
"""
return pi * radius * radius
def main():
print("Areas of various geometric shapes: \n")
print(f"Rectangle: {area_rectangle(10, 20)=}")
print(f"Square: {area_square(10)=}")
print(f"Triangle: {area_triangle(10, 10)=}")
print(f"Parallelogram: {area_parallelogram(10, 20)=}")
print(f"Trapezium: {area_trapezium(10, 20, 30)=}")
print(f"Circle: {area_circle(20)=}")
print("Surface Areas of various geometric shapes: \n")
print(f"Cube: {surface_area_cube(20)=}")
print(f"Sphere: {surface_area_sphere(20)=}")
if __name__ == "__main__":
main()
| """
Find the area of various geometric shapes
"""
import math
def area_rectangle(base, height):
"""
Calculate the area of a rectangle
>> area_rectangle(10,20)
200
"""
return base * height
def area_square(side_length):
"""
Calculate the area of a square
>>> area_square(10)
100
"""
return side_length * side_length
def area_triangle(length, breadth):
"""
Calculate the area of a triangle
>>> area_triangle(10,10)
50.0
"""
return 1 / 2 * length * breadth
def area_parallelogram(base, height):
"""
Calculate the area of a parallelogram
>> area_parallelogram(10,20)
200
"""
return base * height
def area_trapezium(base1, base2, height):
"""
Calculate the area of a trapezium
>> area_trapezium(10,20,30)
450
"""
return 1 / 2 * (base1 + base2) * height
def area_circle(radius):
"""
Calculate the area of a circle
>> area_circle(20)
1256.6370614359173
"""
return math.pi * radius * radius
def main():
print("Areas of various geometric shapes: \n")
print(f"Rectangle: {area_rectangle(10, 20)=}")
print(f"Square: {area_square(10)=}")
print(f"Triangle: {area_triangle(10, 10)=}")
print(f"Parallelogram: {area_parallelogram(10, 20)=}")
print(f"Trapezium: {area_trapezium(10, 20, 30)=}")
print(f"Circle: {area_circle(20)=}")
if __name__ == "__main__":
main()
| mit | Python |
94b3aa469e3a37a5c0cffa565a139b6b9607e54e | use interim setting for slave location | sassoftware/mcp,sassoftware/mcp | mcp/config.py | mcp/config.py | #
# Copyright (c) 2005-2006 rPath, Inc.
#
# All rights reserved
#
import os
from conary import conarycfg
from conary.lib import cfgtypes
class MCPConfig(conarycfg.ConfigFile):
basePath = os.path.join(os.path.sep, 'srv', 'rbuilder', 'mcp')
logPath = os.path.join(basePath, 'log')
queueHost = '127.0.0.1'
queuePort = (cfgtypes.CfgInt, 61613)
namespace = 'mcp'
slaveTroveName = 'group-jobslave'
slaveTroveLabel = 'jobslave.rb.rpath.com@rpl:devel'
| #
# Copyright (c) 2005-2006 rPath, Inc.
#
# All rights reserved
#
import os
from conary import conarycfg
from conary.lib import cfgtypes
class MCPConfig(conarycfg.ConfigFile):
basePath = os.path.join(os.path.sep, 'srv', 'rbuilder', 'mcp')
logPath = os.path.join(basePath, 'log')
queueHost = '127.0.0.1'
queuePort = (cfgtypes.CfgInt, 61613)
namespace = 'mcp'
slaveTroveName = 'group-core'
slaveTroveLabel = 'conary.rpath.com@rpl:1'
| apache-2.0 | Python |
6d2f22d3a3ef7990dcf2f77b91b619b51d1221c9 | Add support for the new data format (0.2.0) | kerel-fs/ogn-rdb,kerel-fs/ogn-rdb,kerel-fs/ogn-rdb | mkstatistics.py | mkstatistics.py | #!/usr/bin/env python3
import json
from collections import defaultdict
from argparse import ArgumentParser
"""
Generate statistics for receivers.json
Equivalent:
cat receivers.json | jq ".receivers | group_by(.country) | map({(.[0].country): [.[].callsign]})"
"""
def print_stats(all_receivers):
receivers_by_country = defaultdict(list)
for receiver in all_receivers:
receivers_by_country[receiver["country"]].append(receiver)
for country, receivers in receivers_by_country.items():
print('Found {} receivers in {}'.format(len(receiver), country))
for receiver in receivers:
print(' - {}'.format(receiver["callsign"]))
print('Found {} receivers in {} countries.'.format(len(all_receivers), len(receivers_by_country)))
if __name__ == "__main__":
PARSER = ArgumentParser(description="""Generate statistics of a given receiver-wiki.json.""")
PARSER.add_argument("--in",
metavar="IN_FILE", dest="in_file",
default="receivers.json",
help="Input file. Default: 'receiver-wiki.json'")
ARGS = PARSER.parse_args()
with open(ARGS.in_file) as f:
receiverdb = json.load(f)
if receiverdb['version'] == "0.2.0":
receivers = receiverdb['receivers']
print_stats(receivers)
else:
print("Unsupported receiverlist version ({}).".format(receiverdb['version']))
| #!/usr/bin/env python3
import json
from collections import defaultdict
from argparse import ArgumentParser
"""
Generate statistics for receivers.json
"""
def print_stats(stations):
stat_by_country = defaultdict(dict)
for s in stations:
stat_by_country[stations[s]['country']][s] = stations[s]
for c in stat_by_country:
print("Found %i stations in %s" % (len(stat_by_country[c]), c))
for s in stat_by_country[c]:
print(" - %s" % s)
print("Parsed %i stations in %i countries." % (len(stations), len(stat_by_country)))
if __name__ == "__main__":
PARSER = ArgumentParser(description="""Generate statistics of a given receiver-wiki.json.""")
PARSER.add_argument("--in",
metavar="IN_FILE", dest="in_file",
default="receivers.json",
help="Input file. Default: 'receiver-wiki.json'")
ARGS = PARSER.parse_args()
with open(ARGS.in_file) as f:
receiverdb = json.load(f)
stations = receiverdb['receivers']
print_stats(stations)
| agpl-3.0 | Python |
22f305a7cb1daab3dc61b0d22b796916417de9d2 | Increase size of bottleneck layer. | lmjohns3/theanets,chrinide/theanets,devdoer/theanets | examples/mnist-deepautoencoder.py | examples/mnist-deepautoencoder.py | #!/usr/bin/env python
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 36, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.run(train, valid)
plot_layers(e.network.weights, tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
| #!/usr/bin/env python
import matplotlib.pyplot as plt
import theanets
from utils import load_mnist, plot_layers, plot_images
train, valid, _ = load_mnist()
e = theanets.Experiment(
theanets.Autoencoder,
layers=(784, 256, 64, 16, 64, 256, 784),
train_batches=100,
tied_weights=True,
)
e.run(train, valid)
plot_layers(e.network.weights, tied_weights=True)
plt.tight_layout()
plt.show()
valid = valid[:16*16]
plot_images(valid, 121, 'Sample data')
plot_images(e.network.predict(valid), 122, 'Reconstructed data')
plt.tight_layout()
plt.show()
| mit | Python |
d9efa6723acbc682593125b9d0603328e6fafb8d | fix a comment | alexshepard/aledison | delete_contact.py | delete_contact.py | #!/usr/bin/python
import yaml
config = yaml.safe_load(open("config.yml"))
from contacts import Contacts, Contact
c = Contacts()
import sys
if len(sys.argv) < 2:
print("usage: delete_contact.py <name>")
sys.exit()
script_name = sys.argv.pop(0)
name = sys.argv.pop(0)
contact = c.find_contact_by_name(name)
if contact:
c.delete_contact(contact)
else:
print("no contact named " + name)
sys.exit()
| #!/usr/bin/python
import yaml
config = yaml.safe_load(open("config.yml"))
from contacts import Contacts, Contact
c = Contacts()
# syntax: add_contact.py <name> <number>
import sys
if len(sys.argv) < 2:
print("usage: delete_contact.py <name>")
sys.exit()
script_name = sys.argv.pop(0)
name = sys.argv.pop(0)
contact = c.find_contact_by_name(name)
if contact:
c.delete_contact(contact)
else:
print("no contact named " + name)
sys.exit()
| mit | Python |
4dde490bfbf27d5aa92b9a310a45ef4bd7a561a7 | test MolCatalogs | rdkit/rdkit-orig,rdkit/rdkit-orig,rdkit/rdkit-orig,rdkit/rdkit-orig,rdkit/rdkit-orig,rdkit/rdkit-orig,rdkit/rdkit-orig,rdkit/rdkit-orig | Code/GraphMol/test_list.py | Code/GraphMol/test_list.py | import sys
tests=[
("testExecs/itertest.exe","",{}),
("testExecs/MolOpsTest.exe","",{}),
("testExecs/testCanon.exe","C1OCCC1 C1CCOC1",{}),
("testExecs/testPickler.exe","",{}),
("testExecs/test1.exe","",{}),
("python","test_list.py",{'dir':'Depictor'}),
("python","test_list.py",{'dir':'FileParsers'}),
("python","test_list.py",{'dir':'SmilesParse'}),
("python","test_list.py",{'dir':'Substruct'}),
("python","test_list.py",{'dir':'Subgraphs'}),
("python","test_list.py",{'dir':'FragCatalog'}),
("python","test_list.py",{'dir':'Fingerprints'}),
("python","test_list.py",{'dir':'MolTransforms'}),
("python","test_list.py",{'dir':'Wrap'}),
("python","test_list.py",{'dir':'Depictor/Wrap'}),
("python","test_list.py",{'dir':'FragCatalog/Wrap'}),
("python","test_list.py",{'dir':'PartialCharges/Wrap'}),
("python","test_list.py",{'dir':'ForceFieldHelpers'}),
("python","test_list.py",{'dir':'DistGeomHelpers'}),
("python","test_list.py",{'dir':'Descriptors'}),
("python","test_list.py",{'dir':'MolChemicalFeatures'}),
("python","test_list.py",{'dir':'MolAlign'}),
("python","test_list.py",{'dir':'ShapeHelpers'}),
("python","test_list.py",{'dir':'ChemTransforms'}),
("python","test_list.py",{'dir':'MolCatalog'}),
("python","test_list.py",{'dir':'MolCatalog/Wrap'}),
]
if sys.platform != 'win32':
tests.extend([
("testExecs/cptest.exe","",{}),
("testExecs/querytest.exe","",{}),
])
longTests=[
]
if __name__=='__main__':
import sys
import TestRunner
failed,tests = TestRunner.RunScript('test_list.py',0,1)
sys.exit(len(failed))
| import sys
tests=[
("testExecs/itertest.exe","",{}),
("testExecs/MolOpsTest.exe","",{}),
("testExecs/testCanon.exe","C1OCCC1 C1CCOC1",{}),
("testExecs/testPickler.exe","",{}),
("testExecs/test1.exe","",{}),
("python","test_list.py",{'dir':'Depictor'}),
("python","test_list.py",{'dir':'FileParsers'}),
("python","test_list.py",{'dir':'SmilesParse'}),
("python","test_list.py",{'dir':'Substruct'}),
("python","test_list.py",{'dir':'Subgraphs'}),
("python","test_list.py",{'dir':'FragCatalog'}),
("python","test_list.py",{'dir':'Fingerprints'}),
("python","test_list.py",{'dir':'MolTransforms'}),
("python","test_list.py",{'dir':'Wrap'}),
("python","test_list.py",{'dir':'Depictor/Wrap'}),
("python","test_list.py",{'dir':'FragCatalog/Wrap'}),
("python","test_list.py",{'dir':'PartialCharges/Wrap'}),
("python","test_list.py",{'dir':'ForceFieldHelpers'}),
("python","test_list.py",{'dir':'DistGeomHelpers'}),
("python","test_list.py",{'dir':'Descriptors'}),
("python","test_list.py",{'dir':'MolChemicalFeatures'}),
("python","test_list.py",{'dir':'MolAlign'}),
("python","test_list.py",{'dir':'ShapeHelpers'}),
("python","test_list.py",{'dir':'ChemTransforms'})
]
if sys.platform != 'win32':
tests.extend([
("testExecs/cptest.exe","",{}),
("testExecs/querytest.exe","",{}),
])
longTests=[
]
if __name__=='__main__':
import sys
import TestRunner
failed,tests = TestRunner.RunScript('test_list.py',0,1)
sys.exit(len(failed))
| bsd-3-clause | Python |
f70bbbdadc044a76f7b90b2cac0191353a6a5048 | Rework the import finding logic | ericdill/depfinder | depfinder.py | depfinder.py | import ast
import os
from collections import deque
import sys
from stdlib_list import stdlib_list
conf = {
'ignore_relative_imports': True,
'ignore_builtin_modules': True,
'pyver': None,
}
def get_imported_libs(code):
tree = ast.parse(code)
imports = deque()
for t in tree.body:
# ast.Import represents lines like 'import foo' and 'import foo, bar'
# the extra for name in t.names is needed, because names is a list that
# would be ['foo'] for the first and ['foo', 'bar'] for the second
if type(t) == ast.Import:
imports.extend([name.name.split('.')[0] for name in t.names])
# ast.ImportFrom represents lines like 'from foo import bar'
# t.level == 0 is to get rid of 'from .foo import bar' and higher levels
# of relative importing
if type(t) == ast.ImportFrom:
if t.level > 0:
if conf['ignore_relative_imports'] or not t.module:
continue
else:
imports.append(t.module.split('.')[0])
return list(imports)
def iterate_over_library(path_to_source_code):
libs = set()
for parent, folders, files in os.walk(path_to_source_code):
for file in files:
if file.endswith('.py'):
print('.', end='')
full_file_path = os.path.join(parent, file)
with open(full_file_path, 'r') as f:
code = f.read()
libs.update(set(get_imported_libs(code)))
if conf['ignore_builtin_modules']:
if not conf['pyver']:
pyver = '%s.%s' % (sys.version_info.major, sys.version_info.minor)
std_libs = stdlib_list("3.4")
# print(std_libs)
libs = [lib for lib in libs if lib not in std_libs]
return libs | import ast
def get_imported_libs(code):
tree = ast.parse(code)
# ast.Import represents lines like 'import foo' and 'import foo, bar'
# the extra for name in t.names is needed, because names is a list that
# would be ['foo'] for the first and ['foo', 'bar'] for the second
imports = [name.name.split('.')[0] for t in tree.body
if type(t) == ast.Import for name in t.names]
# ast.ImportFrom represents lines like 'from foo import bar'
import_froms = [t.module.split('.')[0] for t in tree.body if type(t) == ast.ImportFrom if t.module]
return imports + import_froms
| bsd-3-clause | Python |
9c8972523c3861cb88dd09f51fa1efbf46d4a1b3 | Fix bilibili seeker. | chienius/anicolle | anicolle/seeker/bilibili.py | anicolle/seeker/bilibili.py | import requests
from bs4 import BeautifulSoup
from json import loads
def seek(chk_key, cur_epi):
tepi = cur_epi+1
chk_key = str(chk_key)
try:
int(chk_key)
except ValueError:
query_url = "http://search.bilibili.com/bangumi?keyword=%s" % (chk_key, )
html_content = requests.get(query_url).text
bs = BeautifulSoup(html_content, "html.parser")
s_bgmlist = bs.find('div', class_="ajax-render")
try:
season_id = s_bgmlist.find('div', class_="s-btn-sub").get('data-id')
except AttributeError:
return {}
else:
season_id = chk_key
api_url = "http://app.bilibili.com/bangumi/seasoninfo/%s.ver?callback=episodeJsonCallback" % (season_id,)
apiRes = requests.get(api_url).text
apiRes = apiRes[20:]
apiRes = apiRes[:-2]
apiRes = loads(apiRes)
epi_list = apiRes['result']['episodes']
try:
for epi in epi_list:
if epi['index'] == str(tepi):
av_id = epi['av_id']
av_page= epi['page']
av_name = epi['index_title']
break
else:
raise IndexError;
except IndexError:
return 0
link = "http://www.bilibili.com/video/av%s/index_%s.html" % (av_id, av_page)
title = "%s - %d from Bilibili" % (av_name, tepi)
return {'link': link, 'title': title}
| import requests
from bs4 import BeautifulSoup
from json import loads
def seek(chk_key, cur_epi):
tepi = cur_epi+1
chk_key = str(chk_key)
try:
int(chk_key)
except ValueError:
query_url = "http://www.bilibili.com/search?keyword=%s&orderby=&type=series&tids=&tidsC=&arctype=all&page=1" % (chk_key, )
html_content = requests.get(query_url).text
bs = BeautifulSoup(html_content, "html.parser")
s_bgmlist = bs.find('div', class_="s_bgmlist")
try:
season_id = s_bgmlist.get('data-seasonid')
except AttributeError:
return {}
else:
season_id = chk_key
api_url = "http://app.bilibili.com/bangumi/seasoninfo/%s.ver?callback=episodeJsonCallback" % (season_id,)
apiRes = requests.get(api_url).text
apiRes = apiRes[20:]
apiRes = apiRes[:-1]
apiRes = loads(apiRes)
epi_list = apiRes['result']['episodes']
try:
for epi in epi_list:
if epi['index'] == str(tepi):
av_id = epi['av_id']
av_page= epi['page']
av_name = epi['index_title']
break
else:
raise IndexError;
except IndexError:
return 0
link = "http://www.bilibili.com/video/av%s/index_%s.html" % (av_id, av_page)
title = "%s - %d from Bilibili" % (av_name, tepi)
return {'link': link, 'title': title}
| mit | Python |
2a8c4790bd432fc4dc0fdda64c0cea4f76fac9ff | Fix add_page_if_missing context processor when no pages exist yet | matthiask/feincms2-content,hgrimelid/feincms,nickburlett/feincms,michaelkuty/feincms,michaelkuty/feincms,hgrimelid/feincms,feincms/feincms,pjdelport/feincms,joshuajonah/feincms,pjdelport/feincms,michaelkuty/feincms,joshuajonah/feincms,matthiask/feincms2-content,matthiask/django-content-editor,matthiask/django-content-editor,pjdelport/feincms,matthiask/django-content-editor,mjl/feincms,nickburlett/feincms,nickburlett/feincms,michaelkuty/feincms,matthiask/feincms2-content,mjl/feincms,joshuajonah/feincms,hgrimelid/feincms,nickburlett/feincms,joshuajonah/feincms,feincms/feincms,mjl/feincms,feincms/feincms,matthiask/django-content-editor | feincms/context_processors.py | feincms/context_processors.py | from feincms.module.page.models import Page
def add_page_if_missing(request):
# If this attribute exists, the a page object has been registered already
# by some other part of the code. We let it decide, which page object it
# wants to pass into the template
if hasattr(request, '_feincms_page'):
return {}
try:
return {
'feincms_page': Page.objects.best_match_for_request(request),
}
except Page.DoesNotExist:
return {}
| from feincms.module.page.models import Page
def add_page_if_missing(request):
# If this attribute exists, the a page object has been registered already
# by some other part of the code. We let it decide, which page object it
# wants to pass into the template
if hasattr(request, '_feincms_page'):
return {}
return {
'feincms_page': Page.objects.best_match_for_request(request),
}
| bsd-3-clause | Python |
efa36e1013e44fa75f6a77a74bd8bf21f3120976 | Allow for a longer image path | fivejjs/Django-facebook,troygrosfield/Django-facebook,danosaure/Django-facebook,rafaelgontijo/Django-facebook-fork,abendleiter/Django-facebook,pjdelport/Django-facebook,fivejjs/Django-facebook,abendleiter/Django-facebook,abendleiter/Django-facebook,andriisoldatenko/Django-facebook,ganescoo/Django-facebook,VishvajitP/Django-facebook,javipalanca/Django-facebook,fyndsi/Django-facebook,christer155/Django-facebook,cyrixhero/Django-facebook,Fiedzia/Django-facebook,tuxos/Django-facebook,troygrosfield/Django-facebook,QLGu/Django-facebook,PeterWangPo/Django-facebook,PeterWangPo/Django-facebook,troygrosfield/Django-facebook,pjdelport/Django-facebook,andriisoldatenko/Django-facebook,tuxos/Django-facebook,danosaure/Django-facebook,abhijo89/Django-facebook,cyrixhero/Django-facebook,andriisoldatenko/Django-facebook,cyrixhero/Django-facebook,abhijo89/Django-facebook,VishvajitP/Django-facebook,sitsbeyou/Django-facebook,ganescoo/Django-facebook,sitsbeyou/Django-facebook,takeshineshiro/Django-facebook,christer155/Django-facebook,javipalanca/Django-facebook,takeshineshiro/Django-facebook,fyndsi/Django-facebook,fyndsi/Django-facebook,QLGu/Django-facebook,jcpyun/Django-facebook,takeshineshiro/Django-facebook,selwin/Django-facebook,christer155/Django-facebook,javipalanca/Django-facebook,rafaelgontijo/Django-facebook-fork,PeterWangPo/Django-facebook,selwin/Django-facebook,jcpyun/Django-facebook,Shekharrajak/Django-facebook,Shekharrajak/Django-facebook,ganescoo/Django-facebook,VishvajitP/Django-facebook,rafaelgontijo/Django-facebook-fork,jcpyun/Django-facebook,danosaure/Django-facebook,abhijo89/Django-facebook,QLGu/Django-facebook,Fiedzia/Django-facebook,Shekharrajak/Django-facebook,tuxos/Django-facebook,andriisoldatenko/Django-facebook,Fiedzia/Django-facebook,selwin/Django-facebook,fivejjs/Django-facebook,pjdelport/Django-facebook,sitsbeyou/Django-facebook | django_facebook/models.py | django_facebook/models.py | 
from django.db import models
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
class FacebookProfileModel(models.Model):
'''
Abstract class to add to your profile model.
NOTE: If you don't use this this abstract class, make sure you copy/paste
the fields in.
'''
about_me = models.TextField(blank=True, null=True)
facebook_id = models.IntegerField(blank=True, null=True)
facebook_name = models.CharField(max_length=255, blank=True, null=True)
facebook_profile_url = models.TextField(blank=True, null=True)
website_url = models.TextField(blank=True, null=True)
blog_url = models.TextField(blank=True, null=True)
image = models.ImageField(blank=True, null=True, upload_to='profile_images', max_length=255)
date_of_birth = models.DateField(blank=True, null=True)
raw_data = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.user.__unicode__()
class Meta:
abstract = True
def post_facebook_registration(self):
'''
Behaviour after registering with facebook
'''
url = reverse('facebook_connect')
response = HttpResponseRedirect(url)
response.set_cookie('fresh_registration', self.user.id)
return response
| from django.db import models
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
class FacebookProfileModel(models.Model):
'''
Abstract class to add to your profile model.
NOTE: If you don't use this this abstract class, make sure you copy/paste
the fields in.
'''
about_me = models.TextField(blank=True, null=True)
facebook_id = models.IntegerField(blank=True, null=True)
facebook_name = models.CharField(max_length=255, blank=True, null=True)
facebook_profile_url = models.TextField(blank=True, null=True)
website_url = models.TextField(blank=True, null=True)
blog_url = models.TextField(blank=True, null=True)
image = models.ImageField(blank=True, null=True, upload_to='profile_images')
date_of_birth = models.DateField(blank=True, null=True)
raw_data = models.TextField(blank=True, null=True)
def __unicode__(self):
return self.user.__unicode__()
class Meta:
abstract = True
def post_facebook_registration(self):
'''
Behaviour after registering with facebook
'''
url = reverse('facebook_connect')
response = HttpResponseRedirect(url)
response.set_cookie('fresh_registration', self.user.id)
return response
| bsd-3-clause | Python |
571b632a6dceb6035c2dec3c0392cdac10215fa2 | Increase version | dreipol/djangocms-spa,dreipol/djangocms-spa | djangocms_spa/__init__.py | djangocms_spa/__init__.py | __version__ = '0.1.2'
| __version__ = '0.1.1'
| mit | Python |
65a069970435d012577a5e4f8971b1a91d9331a8 | Update to 0.3.34 | KerkhoffTechnologies/django-connectwise,KerkhoffTechnologies/django-connectwise | djconnectwise/__init__.py | djconnectwise/__init__.py | # -*- coding: utf-8 -*-
VERSION = (0, 3, 34, 'final')
# pragma: no cover
if VERSION[-1] != "final":
__version__ = '.'.join(map(str, VERSION))
else:
# pragma: no cover
__version__ = '.'.join(map(str, VERSION[:-1]))
default_app_config = 'djconnectwise.apps.DjangoConnectwiseConfig'
| # -*- coding: utf-8 -*-
VERSION = (0, 3, 33, 'final')
# pragma: no cover
if VERSION[-1] != "final":
__version__ = '.'.join(map(str, VERSION))
else:
# pragma: no cover
__version__ = '.'.join(map(str, VERSION[:-1]))
default_app_config = 'djconnectwise.apps.DjangoConnectwiseConfig'
| mit | Python |
e1b4a5bebe59ae502e46890f78e666cb50460774 | Update convert_b64.py | GoogleCloudPlatform/ml-on-gcp,GoogleCloudPlatform/ml-on-gcp,GoogleCloudPlatform/ml-on-gcp,GoogleCloudPlatform/ml-on-gcp | dlvm/tools/convert_b64.py | dlvm/tools/convert_b64.py | import base64
INPUT_FILE = 'image.jpg'
OUTPUT_FILE = '/tmp/image_b64.json'
"""Open image and convert it to Base64"""
with open(INPUT_FILE, 'rb') as input_file:
jpeg_bytes = base64.b64encode(input_file.read()).decode('utf-8')
predict_request = '{"image_bytes": {"b64": "%s"}}' % jpeg_bytes
# Write JSON to file
with open(OUTPUT_FILE, 'w') as output_file:
output_file.write(predict_request)
| import base64
INPUT_FILE = 'image.jpg'
OUTPUT_FILE = '/tmp/image_b64.json'
"""Open image and convert it to Base64"""
with open(INPUT_FILE, 'rb') as f:
jpeg_bytes = base64.b64encode(f.read()).decode('utf-8')
predict_request = '{"image_bytes": {"b64": "%s"}}' % jpeg_bytes
# Write JSON to file
with open(OUTPUT_FILE, 'w') as f:
f.write(predict_request)
| apache-2.0 | Python |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.